def main():
    """Entry point: reproject SETSM strip DEM data to a target EPSG code.

    Parses command-line arguments, gathers reprojection tasks (from a single
    SETSM metafile, a '.txt' task-bundle file, or a directory tree), filters
    out tasks whose output metafile already exists, then either submits the
    tasks to a cluster scheduler in batches or processes them serially in
    this process.

    Relies on module-level constants (ARGSTR_*, ARGGRP_*, JOB_*, RE_*, etc.)
    and helpers (argparser_init, get_task_dstdir, reproject_setsm,
    script_utils) defined elsewhere in this file.
    """

    # Invoke argparse argument parsing.
    arg_parser = argparser_init()
    try:
        # ArgumentPasser wraps the parsed args and can later rebuild the
        # command line for single-task job submission.
        args = script_utils.ArgumentPasser(PYTHON_EXE, SCRIPT_FILE, arg_parser,
                                           sys.argv)
    except ScriptArgumentError as e:
        arg_parser.error(e)

    ## Further parse/adjust argument values.

    src = args.get(ARGSTR_SRC)

    if args.get(ARGSTR_ADD_RES_SUFFIX):
        # Mutates the module-level DEM suffix so all downstream filename
        # matching includes the target resolution (e.g. '_2m').
        global STRIP_DEM_SUFFIX
        if args.get(ARGSTR_TARGET_RESOLUTION) is None:
            arg_parser.error(
                "{} argument must be provided to use {} option".format(
                    ARGSTR_TARGET_RESOLUTION, ARGSTR_ADD_RES_SUFFIX))
        dem_suffix_base, dem_suffix_ext = os.path.splitext(STRIP_DEM_SUFFIX)
        STRIP_DEM_SUFFIX = '{}_{}m{}'.format(
            dem_suffix_base, args.get(ARGSTR_TARGET_RESOLUTION),
            dem_suffix_ext)

    if args.get(ARGSTR_STRIPS_NO_BROWSE) or args.get(ARGSTR_STRIPS_BUILD_AUX):
        if args.get(ARGSTR_ADD_RES_SUFFIX):
            arg_parser.error("{}/{} and {} options are incompatible".format(
                ARGSTR_STRIPS_NO_BROWSE, ARGSTR_STRIPS_BUILD_AUX,
                ARGSTR_ADD_RES_SUFFIX))
        if args.get(ARGSTR_TARGET_RESOLUTION) not in (None, 2):
            arg_parser.error(
                "{}/{} options are only compatible with a target resolution of 2 meters"
                .format(ARGSTR_STRIPS_NO_BROWSE, ARGSTR_STRIPS_BUILD_AUX))

    if args.get(ARGSTR_SCHEDULER) is not None:
        if args.get(ARGSTR_JOBSCRIPT) is None:
            # Fall back to the bundled per-scheduler jobscript if one was
            # not supplied on the command line.
            jobscript_default = os.path.join(
                JOBSCRIPT_DIR, 'head_{}.sh'.format(args.get(ARGSTR_SCHEDULER)))
            if not os.path.isfile(jobscript_default):
                arg_parser.error("Default jobscript ({}) does not exist, ".
                                 format(jobscript_default) +
                                 "please specify one with {} argument".format(
                                     ARGSTR_JOBSCRIPT))
            else:
                args.set(ARGSTR_JOBSCRIPT, jobscript_default)
                print("argument {} set automatically to: {}".format(
                    ARGSTR_JOBSCRIPT, args.get(ARGSTR_JOBSCRIPT)))

    ## Validate argument values.

    if args.get(ARGSTR_TASKS_PER_JOB
                ) is not None and not args.get(ARGSTR_SCHEDULER):
        arg_parser.error("{} option requires {} option".format(
            ARGSTR_TASKS_PER_JOB, ARGSTR_SCHEDULER))

    # Gather list of tasks.
    # src may be a single metafile, a '.txt' bundle of tasks, or a directory
    # to walk for metafiles; src_tasks_all stays None if none of these match.
    src_tasks_all = None

    if os.path.isfile(src) and src.endswith(SETSM_META_SUFFIX):
        src_tasks_all = [src]

    elif os.path.isfile(src) and src.endswith('.txt'):
        bundle_file = src
        src_tasks_all = script_utils.read_task_bundle(bundle_file,
                                                      args_delim=' ')

    elif os.path.isdir(src):
        srcdir = src
        src_tasks_all = []
        for root, dnames, fnames in os.walk(srcdir):
            for fn in fnames:
                if fn.endswith(SETSM_META_SUFFIX):
                    src_tasks_all.append(os.path.join(root, fn))
        src_tasks_all.sort()

    if src_tasks_all is None:
        arg_parser.error(
            "argument {} must be a path to either a directory or a file ending with '{}', "
            "but was '{}'".format(ARGSTR_SRC, SETSM_META_SUFFIX, src))

    src_tasks_incomplete = []

    test_srs = osr.SpatialReference()
    for task in src_tasks_all:
        task_metafile_src = ''
        # Per-task dstdir/EPSG default to the command-line values and may be
        # overridden by extra columns in a task-bundle row.
        task_dstdir = args.get(ARGSTR_DSTDIR)
        task_target_epsg = args.get(ARGSTR_TARGET_EPSG)

        # Bundle rows are lists of 2-3 columns: src_metafile[, dstdir[, epsg]].
        if type(task) is list:
            if len(task) == 2:
                task_metafile_src, task_dstdir = task
            elif len(task) == 3:
                task_metafile_src, task_dstdir, task_target_epsg = task
                task_target_epsg = int(task_target_epsg)
                # 'psn'/'pss'/UTM projection shorthands are expanded to a
                # full dstdir name using the shelved-strip resolution dir
                # parsed from the source path — presumably projection-name
                # subdirectories; TODO confirm against RE_SHELVED_STRIP.
                if task_dstdir in ('psn', 'pss') or re.match(
                        RE_UTM_PROJNAME, task_dstdir) is not None:
                    shelved_strip_match = re.match(RE_SHELVED_STRIP,
                                                   task_metafile_src)
                    if shelved_strip_match is not None:
                        strips_res_dir = shelved_strip_match.groups()[0]
                        task_dstdir = "{}_{}".format(strips_res_dir,
                                                     task_dstdir)
                        task[1] = task_dstdir
            else:
                arg_parser.error(
                    "Source task list can only have up to three columns: "
                    "src_metafile, dstdir, target_epsg")
        else:
            task_metafile_src = task

        # Normalize every task to a 3-element list.
        task = [task_metafile_src, task_dstdir, task_target_epsg]

        if not os.path.isfile(task_metafile_src):
            arg_parser.error(
                "Source metafile does not exist: {}".format(task_metafile_src))
        if task_dstdir is None:
            arg_parser.error("Destination directory must be specified")
        if task_target_epsg is None:
            arg_parser.error("Target EPSG code must be specified")
        # osr.ImportFromEPSG returns 0 (OGRERR_NONE) on success.
        status = test_srs.ImportFromEPSG(task_target_epsg)
        if status != 0:
            arg_parser.error(
                "Target EPSG code is invalid: {}".format(task_target_epsg))

        task_dstdir = get_task_dstdir(task_metafile_src, task_dstdir)
        task_metafile_dst = os.path.join(task_dstdir,
                                         os.path.basename(task_metafile_src))
        # A task is incomplete if its output metafile is missing, or if
        # overwrite was requested.
        if not os.path.isfile(task_metafile_dst) or args.get(ARGSTR_OVERWRITE):
            src_tasks_incomplete.append(task)

    num_tasks = len(src_tasks_incomplete)

    print("Number of source SETSM metafiles found: {}".format(
        len(src_tasks_all)))
    print("Number of incomplete reprojection tasks: {}".format(num_tasks))

    if num_tasks == 0:
        sys.exit(0)
    elif args.get(ARGSTR_DRYRUN) and args.get(ARGSTR_SCHEDULER) is not None:
        # NOTE: the dryrun early-exit applies only in scheduler mode; a
        # serial dryrun falls through to the processing loop below.
        print("Exiting dryrun")
        sys.exit(0)

    # Pause for user review.
    print("-----")
    wait_seconds = 8
    print("Sleeping {} seconds before task submission".format(wait_seconds))
    sleep(wait_seconds)
    print("-----")

    ## Create output directories if they don't already exist.
    if not args.get(ARGSTR_DRYRUN):
        for dir_argstr, dir_path in list(
                zip(ARGGRP_OUTDIR, args.get_as_list(ARGGRP_OUTDIR))):
            if dir_path is not None and not os.path.isdir(dir_path):
                print("Creating argument {} directory: {}".format(
                    dir_argstr, dir_path))
                os.makedirs(dir_path, exist_ok=True)

    ## Process tasks.

    if args.get(ARGSTR_SCHEDULER) is not None:
        # Process tasks in batch.

        # When tasks-per-job is set, tasks are written out as bundle files
        # and each job receives one bundle instead of one task.
        tasks_per_job = args.get(ARGSTR_TASKS_PER_JOB)
        src_tasks = (src_tasks_incomplete if tasks_per_job is None else
                     script_utils.write_task_bundles(
                         src_tasks_incomplete,
                         tasks_per_job,
                         args.get(ARGSTR_SCRATCH),
                         '{}_{}'.format(JOB_ABBREV, ARGSTR_SRC),
                         task_delim=' '))

        jobnum_fmt = script_utils.get_jobnum_fmtstr(src_tasks)
        last_job_email = args.get(ARGSTR_EMAIL)

        # args_single is a deep copy with batch-only options removed; it is
        # re-pointed at each task to build the per-job command line.
        args_batch = args
        args_single = copy.deepcopy(args)
        args_single.unset(*ARGGRP_BATCH)

        job_num = 0
        num_jobs = len(src_tasks)
        for task in src_tasks:
            job_num += 1

            if type(task) is list:
                task_metafile_src, task_dstdir, task_target_epsg = task
                args_single.set(ARGSTR_SRC, task_metafile_src)
                args_single.set(
                    ARGSTR_DSTDIR, task_dstdir
                    if task_dstdir is not None else args.get(ARGSTR_DSTDIR))
                args_single.set(
                    ARGSTR_TARGET_EPSG, task_target_epsg if task_target_epsg
                    is not None else args.get(ARGSTR_TARGET_EPSG))
            else:
                # Bundled mode: the task is a path to a bundle file.
                task_bundlefile_src = task
                args_single.set(ARGSTR_SRC, task_bundlefile_src)

            # Only the last job sends the completion email.
            if last_job_email and job_num == num_jobs:
                args_single.set(ARGSTR_EMAIL, last_job_email)

            cmd_single = args_single.get_cmd()

            job_name = JOB_ABBREV + jobnum_fmt.format(job_num)
            cmd = args_single.get_jobsubmit_cmd(
                args_batch.get(ARGSTR_SCHEDULER),
                jobscript=args_batch.get(ARGSTR_JOBSCRIPT),
                jobname=job_name,
                time_hr=JOB_WALLTIME_HR,
                memory_gb=JOB_MEMORY_GB,
                email=args.get(ARGSTR_EMAIL),
                envvars=[
                    args_batch.get(ARGSTR_JOBSCRIPT), JOB_ABBREV, cmd_single,
                    PYTHON_VERSION_ACCEPTED_MIN
                ])

            print(cmd)
            if not args_batch.get(ARGSTR_DRYRUN):
                subprocess.call(cmd,
                                shell=True,
                                cwd=args_batch.get(ARGSTR_LOGDIR))

    else:
        error_trace = None
        try:
            # Process tasks in serial.

            for i, task in enumerate(src_tasks_incomplete):

                task_metafile_src, task_dstdir, task_target_epsg = task

                print("Reproject ({}/{}): {}".format(i + 1, num_tasks,
                                                     task_metafile_src))
                # NOTE(review): the dryrun guard below is commented out, so
                # reproject_setsm runs even under dryrun in serial mode —
                # confirm this is intended.
                # if not args.get(ARGSTR_DRYRUN):
                reproject_setsm(task_metafile_src,
                                task_dstdir,
                                task_target_epsg,
                                args=args)

        except KeyboardInterrupt:
            raise

        except Exception as e:
            # Capture the traceback text so it can be echoed and emailed.
            with script_utils.capture_stdout_stderr() as out:
                traceback.print_exc()
            caught_out, caught_err = out
            error_trace = caught_err
            print(error_trace)
            if e.__class__ is ImportError:
                print("\nFailed to import necessary module(s)")
                print(
                    "If running on a Linux system where the jobscripts/init.sh file has been properly"
                    " set up, try running the following command to activate a working environment"
                    " in your current shell session:\n{}".format(
                        "source {} {}".format(JOBSCRIPT_INIT, JOB_ABBREV)))
                print('')

        if type(args.get(ARGSTR_EMAIL)) is str:
            # Send email notification of script completion.
            email_body = SCRIPT_RUNCMD
            if error_trace is not None:
                email_status = "ERROR"
                email_body += "\n{}\n".format(error_trace)
            else:
                email_status = "COMPLETE"
            email_subj = "{} - {}".format(email_status, SCRIPT_FNAME)
            script_utils.send_email(args.get(ARGSTR_EMAIL), email_subj,
                                    email_body)

        if error_trace is not None:
            sys.exit(1)
# Beispiel #2
def main():
    """Entry point: apply SETSM bitmask filtering to source raster components.

    Parses command-line arguments, determines which source raster suffixes
    should be masked (and with what masking values), gathers the list of
    bitmask files whose corresponding masked outputs do not yet exist, then
    either submits masking jobs to a cluster scheduler in batches or runs
    the tasks serially in this process.

    Relies on module-level constants (ARGSTR_*, ARGGRP_*, JOB_*, SUFFIX_*,
    etc.) and helpers (argparser_init, getBitmaskSuffix, get_dstFile,
    get_mask_bitstring, mask_rasters, startswith_one_of_coll, script_utils)
    defined elsewhere in this file.
    """

    # Invoke argparse argument parsing.
    arg_parser = argparser_init()
    try:
        args = script_utils.ArgumentPasser(PYTHON_EXE, SCRIPT_FILE, arg_parser,
                                           sys.argv)
    except ScriptArgumentError as e:
        arg_parser.error(e)

    ## Further parse/adjust argument values.

    src = args.get(ARGSTR_SRC)

    # Normalize the mask suffix to start with a single underscore, and move
    # a recognized suffix to the front of the module-level priority list; an
    # unrecognized suffix is used verbatim as the only candidate.
    global SUFFIX_PRIORITY_BITMASK
    mask_suffix_raw = args.get(ARGSTR_MASK_SUFFIX)
    mask_suffix_fixed = '_' + mask_suffix_raw.lstrip('_')
    use_raw_suffixes = False
    if args.provided(ARGSTR_MASK_SUFFIX):
        if mask_suffix_fixed in SUFFIX_PRIORITY_BITMASK:
            SUFFIX_PRIORITY_BITMASK.remove(mask_suffix_fixed)
            SUFFIX_PRIORITY_BITMASK.insert(0, mask_suffix_fixed)
        else:
            use_raw_suffixes = True
            SUFFIX_PRIORITY_BITMASK = [args.get(ARGSTR_MASK_SUFFIX)]
    if not use_raw_suffixes and mask_suffix_fixed != mask_suffix_raw:
        args.set(ARGSTR_MASK_SUFFIX, mask_suffix_fixed)

    if args.get(ARGSTR_SRC_SUFFIX) is None or args.get(
            ARGSTR_SRC_SUFFIX).strip() == '':
        args.set(ARGSTR_SRC_SUFFIX, SRC_SUFFIX_CATCH_ALL)
        # BUGFIX: the format string has two placeholders, but a duplicate
        # ARGSTR_SRC_SUFFIX argument was previously passed (and silently
        # ignored by str.format); the extra argument has been removed.
        print(
            "argument {} set automatically from null value to catch-all default: '{}'"
            .format(ARGSTR_SRC_SUFFIX, args.get(ARGSTR_SRC_SUFFIX)))

    # NOTE: ARGSTR_SRC_SUFFIX can no longer be None here (it was defaulted
    # just above), so this branch is defensive; the else branch parses the
    # 'suffix[=maskval]/suffix[=maskval]/...' specification.
    if args.get(ARGSTR_SRC_SUFFIX) is None:
        suffix_maskval_dict = {SRC_SUFFIX_CATCH_ALL: None}
    else:
        src_suffixToptmaskval = [[
            ss.strip() for ss in s.strip().split('=')
        ] for s in args.get(ARGSTR_SRC_SUFFIX).split('/')]
        suffix_maskval_dict = {}
        for suffixToptmaskval in src_suffixToptmaskval:
            suffix = suffixToptmaskval[0]
            if not use_raw_suffixes and not suffix.startswith('*'):
                suffix = '_' + suffix.lstrip('_')
            # Per-suffix mask value falls back to the global mask value.
            maskval = suffixToptmaskval[1] if len(
                suffixToptmaskval) == 2 else args.get(ARGSTR_MASK_VALUE)
            if maskval is not None:
                # 'nodata'/'no-data' means "use the source NoData value".
                if startswith_one_of_coll(maskval, ['nodata', 'no-data'],
                                          case_sensitive=False):
                    maskval = None
                else:
                    try:
                        maskval_num = float(maskval)
                        maskval = maskval_num
                    except ValueError:
                        arg_parser.error(
                            "argument {} masking value '{}' is invalid".format(
                                ARGSTR_SRC_SUFFIX, maskval))
            suffix_maskval_dict[suffix] = maskval

    if args.get(ARGGRP_FILTER_COMP).count(True) > 0 and args.get(
            ARGSTR_FILTER_OFF):
        arg_parser.error(
            "argument {} is incompatible with filter options {}".format(
                ARGSTR_FILTER_OFF, ARGGRP_FILTER_COMP))

    # Derive the destination suffix from the selected filter components when
    # one was not supplied, then normalize it to a leading underscore.
    if args.get(ARGSTR_DST_SUFFIX) is None:
        if args.get(ARGGRP_FILTER_COMP).count(True) > 0:
            args.set(
                ARGSTR_DST_SUFFIX, MASKED_SUFFIX_DEFAULT +
                get_mask_bitstring(*args.get(ARGGRP_FILTER_COMP)))
        else:
            # String-multiply by bool: empty suffix when filtering is off.
            args.set(ARGSTR_DST_SUFFIX,
                     MASKED_SUFFIX_DEFAULT * (not args.get(ARGSTR_FILTER_OFF)))
        print("argument {} set automatically to: '{}'".format(
            ARGSTR_DST_SUFFIX, args.get(ARGSTR_DST_SUFFIX)))
    dst_suffix_raw = args.get(ARGSTR_DST_SUFFIX)
    dst_suffix_fixed = '_' + dst_suffix_raw.lstrip(
        '_') if dst_suffix_raw != '' else ''
    if dst_suffix_fixed != dst_suffix_raw:
        args.set(ARGSTR_DST_SUFFIX, dst_suffix_fixed)

    # No filter components selected and filtering not disabled: reset the
    # filter-component group to its defaults (all components).
    if args.get(ARGGRP_FILTER_COMP).count(
            True) == 0 and not args.get(ARGSTR_FILTER_OFF):
        args.set(ARGGRP_FILTER_COMP)

    if args.get(ARGSTR_SCHEDULER) is not None:
        if args.get(ARGSTR_JOBSCRIPT) is None:
            # Fall back to the bundled per-scheduler jobscript if one was
            # not supplied on the command line.
            jobscript_default = os.path.join(
                JOBSCRIPT_DIR, 'head_{}.sh'.format(args.get(ARGSTR_SCHEDULER)))
            if not os.path.isfile(jobscript_default):
                arg_parser.error("Default jobscript ({}) does not exist, ".
                                 format(jobscript_default) +
                                 "please specify one with {} argument".format(
                                     ARGSTR_JOBSCRIPT))
            else:
                args.set(ARGSTR_JOBSCRIPT, jobscript_default)
                print("argument {} set automatically to: {}".format(
                    ARGSTR_JOBSCRIPT, args.get(ARGSTR_JOBSCRIPT)))

    ## Validate argument values.

    if args.get(ARGSTR_TASKS_PER_JOB
                ) is not None and not args.get(ARGSTR_SCHEDULER):
        arg_parser.error("{} option requires {} option".format(
            ARGSTR_TASKS_PER_JOB, ARGSTR_SCHEDULER))

    # Gather list of masks to apply to a (range of) source raster suffix(es).
    # src may be a single bitmask file, a single raster file, a '.txt'
    # bundle, or a directory; src_bitmasks stays None if none of these match.
    src_bitmasks = None

    if os.path.isfile(src) and src.endswith(args.get(ARGSTR_MASK_SUFFIX)):
        src_bitmask = src
        src_bitmasks = [src_bitmask]
        # Defensive: suffix_maskval_dict is always set above; kept for
        # safety if that ever changes.
        if suffix_maskval_dict is None:
            # maskFile_base = src_bitmask.replace(args.get(ARGSTR_MASK_SUFFIX), '')
            # suffix_maskval_dict = {src_rasterFile.replace(maskFile_base, ''): None
            #                        for src_rasterFile in glob.glob(maskFile_base+'*.tif')
            #                        if not src_rasterFile.endswith(args.get(ARGSTR_MASK_SUFFIX))}
            suffix_maskval_dict = {SRC_SUFFIX_CATCH_ALL: None}

    elif os.path.isfile(src) and not src.endswith('.txt'):
        # src is a single source raster: find its sibling bitmask file by
        # progressively shortening the filename until base+mask-suffix
        # exists on disk.
        if args.provided(ARGSTR_SRC_SUFFIX):
            raise ScriptArgumentError(
                "{} option cannot be used when argument {} is a path to "
                "a source raster file".format(ARGSTR_SRC_SUFFIX, ARGSTR_SRC))
        args.set(ARGSTR_SRC_SUFFIX, None)
        suffix_maskval_dict = None
        src_raster = src
        src_suffix = None

        src_raster_dir = os.path.dirname(src_raster)
        src_raster_fname = os.path.basename(src_raster)
        beg, end = 0, len(src_raster_fname)
        # end = None
        while end >= 0:
            # end = max(src_raster_fname.rfind('_', beg, end), src_raster_fname.rfind('.', beg, end))
            # if os.path.isfile(os.path.join(src_raster_dir, src_raster_fname[beg:end].rstrip('_')+args.get(ARGSTR_MASK_SUFFIX))):
            if os.path.isfile(
                    os.path.join(
                        src_raster_dir, src_raster_fname[beg:end] +
                        args.get(ARGSTR_MASK_SUFFIX))):
                src_suffix = src_raster_fname[end:]
                break
            end -= 1
        if src_suffix is None:
            arg_parser.error(
                "Path of {} component for argument {} raster file "
                "could not be determined".format(args.get(ARGSTR_MASK_SUFFIX),
                                                 ARGSTR_SRC))

        src_bitmask = src_raster.replace(src_suffix,
                                         args.get(ARGSTR_MASK_SUFFIX))
        bitmaskSuffix = getBitmaskSuffix(src_bitmask)
        maskFile_base = src_bitmask.replace(bitmaskSuffix, '')
        src_suffix = src_raster.replace(maskFile_base, '')

        args.set(ARGSTR_MASK_SUFFIX, bitmaskSuffix)
        args.set(ARGSTR_SRC_SUFFIX, src_suffix)
        suffix_maskval_dict = {src_suffix: None}

        if not os.path.isfile(src_bitmask):
            arg_parser.error(
                "{} mask component for argument {} raster file does not exist: {}"
                .format(args.get(ARGSTR_MASK_SUFFIX), ARGSTR_SRC, src_bitmask))
        src_bitmasks = [src_bitmask]

    elif os.path.isfile(src) and src.endswith('.txt'):
        bundle_file = src
        src_bitmasks = script_utils.read_task_bundle(bundle_file)

    elif os.path.isdir(src):
        srcdir = src
        src_bitmasks = []
        for root, dnames, fnames in os.walk(srcdir):
            for fn in fnames:
                if fn.endswith(args.get(ARGSTR_MASK_SUFFIX)):
                    src_bitmasks.append(os.path.join(root, fn))
        src_bitmasks.sort()

    if src_bitmasks is None:
        arg_parser.error(
            "argument {} must be a path to either a directory or a file, "
            "but was '{}'".format(ARGSTR_SRC, src))

    print("-----")
    print("Mask file suffix: {}".format(args.get(ARGSTR_MASK_SUFFIX)))
    print("""Selected bitmask components to mask:
[{}] EDGE
[{}] WATER
[{}] CLOUD""".format(*[
        'X' if opt is True else ' '
        for opt in args.get(ARGSTR_EDGE, ARGSTR_WATER, ARGSTR_CLOUD)
    ]))
    print("Output file suffix: '{}[.EXT]'".format(args.get(ARGSTR_DST_SUFFIX)))

    print("-----")
    print("Any warnings would appear here:")

    ## Create processing list.
    if suffix_maskval_dict is None:
        masks_to_apply = src_bitmasks
        num_src_rasters = None
    else:
        # Build processing list by only adding bitmasks for which
        # an output masked raster image(s) with the specified mask settings
        # does not already exist in the destination directory.
        src_suffixes = list(suffix_maskval_dict.keys())
        num_src_rasters = 0

        masks_to_apply = []
        for maskFile in src_bitmasks:
            bitmaskSuffix = getBitmaskSuffix(maskFile)
            bitmask_is_strip_lsf = (bitmaskSuffix == SUFFIX_STRIP_BITMASK_LSF)
            rasterFiles_checklist = set()
            switched_to_glob = False

            for rasterSuffix in src_suffixes:
                bitmaskSuffix_temp = bitmaskSuffix
                # LSF bitmasks pair only with LSF raster suffixes; for
                # non-LSF suffixes, match against the plain strip bitmask
                # suffix instead.
                if rasterSuffix.startswith(STRIP_LSF_PREFIX):
                    if not bitmask_is_strip_lsf:
                        continue
                elif bitmask_is_strip_lsf:
                    bitmaskSuffix_temp = SUFFIX_STRIP_BITMASK
                src_rasterFile = maskFile.replace(bitmaskSuffix_temp,
                                                  rasterSuffix)

                if '*' in rasterSuffix:
                    # Wildcard suffix: glob for matching source rasters.
                    src_rasterffile_glob = set(glob.glob(src_rasterFile))
                    num_src_rasters += len(src_rasterffile_glob)
                    double_dipped = src_rasterffile_glob.intersection(
                        rasterFiles_checklist)
                    if double_dipped:
                        raise ScriptArgumentError(
                            "{} option '{}' matches source raster files that "
                            "are already caught by another provided suffix!".
                            format(ARGSTR_SRC_SUFFIX, rasterSuffix))
                    rasterFiles_checklist = rasterFiles_checklist.union(
                        src_rasterffile_glob)
                    mask_added = False
                    for src_rasterFile in src_rasterffile_glob:
                        if src_rasterFile.endswith(
                                args.get(ARGSTR_MASK_SUFFIX)):
                            continue
                        # Add the mask if overwriting, or if the masked
                        # output does not already exist.
                        if args.get(ARGSTR_OVERWRITE):
                            pass
                        else:
                            dst_rasterFile = get_dstFile(src_rasterFile, args)
                            if not os.path.isfile(dst_rasterFile):
                                pass
                            else:
                                continue
                        masks_to_apply.append(maskFile)
                        mask_added = True
                        break
                    if len(src_rasterffile_glob) == 0:
                        warnings.warn(
                            "{} option '{}' did not match any source raster files "
                            "corresponding to at least one mask file".format(
                                ARGSTR_SRC_SUFFIX, rasterSuffix))
                    if mask_added:
                        break

                elif os.path.isfile(src_rasterFile):
                    # Exact suffix match on disk.
                    num_src_rasters += 1
                    double_dipped = (src_rasterFile in rasterFiles_checklist)
                    if double_dipped:
                        raise ScriptArgumentError(
                            "{} option '{}' matches source raster files that "
                            "are already caught by another provided suffix!".
                            format(ARGSTR_SRC_SUFFIX, rasterSuffix))
                    rasterFiles_checklist.add(src_rasterFile)
                    if args.get(ARGSTR_OVERWRITE):
                        pass
                    else:
                        dst_rasterFile = get_dstFile(src_rasterFile, args)
                        if not os.path.isfile(dst_rasterFile):
                            pass
                        else:
                            continue
                    masks_to_apply.append(maskFile)
                    break

                else:
                    # No exact match: retry the suffix as an auto-glob
                    # pattern, and if that matches, replace the suffix key
                    # with its globbed form for subsequent mask files.
                    rasterSuffix_glob = '*' + rasterSuffix.lstrip('_')
                    src_rasterFile = maskFile.replace(bitmaskSuffix_temp,
                                                      rasterSuffix_glob)
                    src_rasterffile_glob = set(glob.glob(src_rasterFile))
                    num_src_rasters += len(src_rasterffile_glob)
                    double_dipped = src_rasterffile_glob.intersection(
                        rasterFiles_checklist)
                    if double_dipped:
                        raise ScriptArgumentError(
                            "{} option '{}' (auto-globbed) matches source raster files that "
                            "are already caught by another provided suffix!".
                            format(ARGSTR_SRC_SUFFIX, rasterSuffix_glob))
                    rasterFiles_checklist = rasterFiles_checklist.union(
                        src_rasterffile_glob)
                    mask_added = False
                    for src_rasterFile in src_rasterffile_glob:
                        if src_rasterFile.endswith(
                                args.get(ARGSTR_MASK_SUFFIX)):
                            continue
                        if args.get(ARGSTR_OVERWRITE):
                            pass
                        else:
                            dst_rasterFile = get_dstFile(src_rasterFile, args)
                            if not os.path.isfile(dst_rasterFile):
                                pass
                            else:
                                continue
                        masks_to_apply.append(maskFile)
                        mask_added = True
                        break
                    if len(src_rasterffile_glob) == 0:
                        warnings.warn(
                            "{} option '{}' (auto-globbed) did not match any source raster files "
                            "corresponding to at least one mask file".format(
                                ARGSTR_SRC_SUFFIX, rasterSuffix_glob))
                    else:
                        suffix_maskval_dict[
                            rasterSuffix_glob] = suffix_maskval_dict[
                                rasterSuffix]
                        del suffix_maskval_dict[rasterSuffix]
                        switched_to_glob = True
                    if mask_added:
                        break
            if switched_to_glob:
                # Keys changed while iterating a snapshot; refresh the
                # snapshot for the next mask file.
                src_suffixes = list(suffix_maskval_dict.keys())

    print("-----")

    if suffix_maskval_dict is None:
        print(
            "Masking all *_[RASTER-SUFFIX(.tif)] raster components corresponding to "
            "source *{} file(s), using source NoData values".format(
                args.get(ARGSTR_MASK_SUFFIX)))
    else:
        print("[Raster Suffix, Masking Value]")
        for suffix, maskval in suffix_maskval_dict.items():
            print("{}, {}".format(
                suffix,
                maskval if maskval is not None else '(source NoDataVal)'))
        print("-----")

    num_tasks = len(masks_to_apply)

    print("Number of source mask files found: {}".format(len(src_bitmasks)))
    if num_src_rasters is not None:
        print(
            "Number of source raster files found: {}".format(num_src_rasters))
    print("Number of incomplete masking tasks: {}".format(num_tasks))

    if num_tasks == 0:
        sys.exit(0)

    # Pause for user review.
    print("-----")
    wait_seconds = 10
    print("Sleeping {} seconds before task submission".format(wait_seconds))
    sleep(wait_seconds)
    print("-----")

    ## Create output directories if they don't already exist.
    if not args.get(ARGSTR_DRYRUN):
        for dir_argstr, dir_path in list(
                zip(ARGGRP_OUTDIR, args.get_as_list(ARGGRP_OUTDIR))):
            if dir_path is not None and not os.path.isdir(dir_path):
                print("Creating argument {} directory: {}".format(
                    dir_argstr, dir_path))
                # BUGFIX: exist_ok avoids a race if the directory appears
                # between the isdir check and creation, and matches the
                # equivalent loop in the reprojection driver.
                os.makedirs(dir_path, exist_ok=True)

    ## Process masks.

    if args.get(ARGSTR_SCHEDULER) is not None:
        # Process masks in batch.

        # When tasks-per-job is set, tasks are written out as bundle files
        # and each job receives one bundle instead of one mask file.
        tasks_per_job = args.get(ARGSTR_TASKS_PER_JOB)
        src_files = (masks_to_apply if tasks_per_job is None else
                     script_utils.write_task_bundles(
                         masks_to_apply, tasks_per_job,
                         args.get(ARGSTR_SCRATCH), '{}_{}'.format(
                             JOB_ABBREV, ARGSTR_SRC)))

        jobnum_fmt = script_utils.get_jobnum_fmtstr(src_files)
        last_job_email = args.get(ARGSTR_EMAIL)

        # args_single is a deep copy with batch-only options removed; it is
        # re-pointed at each task to build the per-job command line.
        args_batch = args
        args_single = copy.deepcopy(args)
        args_single.unset(*ARGGRP_BATCH)

        job_num = 0
        num_jobs = len(src_files)
        for srcfp in src_files:
            job_num += 1

            args_single.set(ARGSTR_SRC, srcfp)
            # Only the last job sends the completion email.
            if last_job_email and job_num == num_jobs:
                args_single.set(ARGSTR_EMAIL, last_job_email)
            cmd_single = args_single.get_cmd()

            job_name = JOB_ABBREV + jobnum_fmt.format(job_num)
            cmd = args_single.get_jobsubmit_cmd(
                args_batch.get(ARGSTR_SCHEDULER),
                jobscript=args_batch.get(ARGSTR_JOBSCRIPT),
                jobname=job_name,
                time_hr=JOB_WALLTIME_HR,
                memory_gb=JOB_MEMORY_GB,
                email=args.get(ARGSTR_EMAIL),
                envvars=[
                    args_batch.get(ARGSTR_JOBSCRIPT), JOB_ABBREV, cmd_single,
                    PYTHON_VERSION_ACCEPTED_MIN
                ])

            print(cmd)
            if not args_batch.get(ARGSTR_DRYRUN):
                subprocess.call(cmd,
                                shell=True,
                                cwd=args_batch.get(ARGSTR_LOGDIR))

    else:
        error_trace = None
        try:
            # Process masks in serial.

            for i, maskFile in enumerate(masks_to_apply):
                print("Mask ({}/{}): {}".format(i + 1, num_tasks, maskFile))
                if not args.get(ARGSTR_DRYRUN):
                    mask_rasters(maskFile, suffix_maskval_dict, args)

        except KeyboardInterrupt:
            raise

        except Exception as e:
            # Capture the traceback text so it can be echoed and emailed.
            with script_utils.capture_stdout_stderr() as out:
                traceback.print_exc()
            caught_out, caught_err = out
            error_trace = caught_err
            print(error_trace)
            if e.__class__ is ImportError:
                print("\nFailed to import necessary module(s)")
                print(
                    "If running on a Linux system where the jobscripts/init.sh file has been properly"
                    " set up, try running the following command to activate a working environment"
                    " in your current shell session:\n{}".format(
                        "source {} {}".format(JOBSCRIPT_INIT, JOB_ABBREV)))
                print('')

        if type(args.get(ARGSTR_EMAIL)) is str:
            # Send email notification of script completion.
            email_body = SCRIPT_RUNCMD
            if error_trace is not None:
                email_status = "ERROR"
                email_body += "\n{}\n".format(error_trace)
            else:
                email_status = "COMPLETE"
            email_subj = "{} - {}".format(email_status, SCRIPT_FNAME)
            script_utils.send_email(args.get(ARGSTR_EMAIL), email_subj,
                                    email_body)

        if error_trace is not None:
            sys.exit(1)