Example #1
    def __init__(self):
        self.epsg = 32629
        self.resolution = None
        self.rgb = False
        self.bgrn = False
        self.stretch = 'rf'
        self.spatial_ref = utils.SpatialRef(self.epsg)
    def test_spatial_ref(self):
        sref_np = utils.SpatialRef(self.epsg_npole)
        sref_sp = utils.SpatialRef(self.epsg_spole)
        with self.assertRaises(RuntimeError):
            utils.SpatialRef(self.epsg_bad_1)  # raises: EPSG code is not an integer
        with self.assertRaises(RuntimeError):
            utils.SpatialRef(self.epsg_bad_2)  # raises: invalid EPSG code

        self.assertIsInstance(sref_np.srs, osgeo.osr.SpatialReference)
        self.assertIsInstance(sref_sp.srs, osgeo.osr.SpatialReference)

        self.assertEqual(sref_np.proj4, '+proj=stere +lat_0=90 +lat_ts=70 +lon_0=-45 +k=1 +x_0=0 +y_0=0 +datum=WGS84 +units=m +no_defs ')
        self.assertEqual(sref_sp.proj4, '+proj=stere +lat_0=-90 +lat_ts=-71 +lon_0=0 +k=1 +x_0=0 +y_0=0 +datum=WGS84 +units=m +no_defs ')

        self.assertEqual(sref_np.epsg, 3413)
        self.assertEqual(sref_sp.epsg, 3031)
    def setUp(self):
        self.srcdir = os.path.join(test_dir, 'output')
        self.spatial_ref = utils.SpatialRef(3031)
        self.dem = os.path.join(test_dir, 'dem', 'ramp_lowres.tif')
        self.dem_none = None
        self.ortho_height = None
        self.wd = None
        self.skip_dem_overlap_check = True
        self.resolution = None
        self.rgb = False
        self.bgrn = False
        self.stretch = 'rf'
    def __init__(self, epsg, stretch):
        self.epsg = epsg
        self.resolution = None
        self.rgb = False
        self.bgrn = False
        self.skip_warp = False
        self.dem = None
        self.ortho_height = None
        self.resample = 'near'
        self.outtype = 'Byte'
        self.format = "GTiff"
        self.gtiff_compression = "lzw"
        self.stretch = stretch
        self.spatial_ref = utils.SpatialRef(self.epsg)
Example #5
def main():

    #### Set Up Arguments
    parent_parser, pos_arg_keys = ortho_functions.buildParentArgumentParser()
    parser = argparse.ArgumentParser(
        parents=[parent_parser],
        description="Run/submit batch image ortho and conversion tasks")

    parser.add_argument("--pbs",
                        action='store_true',
                        default=False,
                        help="submit tasks to PBS")
    parser.add_argument("--slurm",
                        action='store_true',
                        default=False,
                        help="submit tasks to SLURM")
    parser.add_argument(
        "--parallel-processes",
        type=int,
        default=1,
        help="number of parallel processes to spawn (default 1)")
    parser.add_argument(
        "--qsubscript",
        help=
        "submission script to use in PBS/SLURM submission (PBS default is qsub_ortho.sh, SLURM "
        "default is slurm_ortho.py, in script root folder)")
    parser.add_argument(
        "-l", help="PBS resources requested (mimicks qsub syntax, PBS only)")
    parser.add_argument("--dryrun",
                        action='store_true',
                        default=False,
                        help='print actions without executing')

    #### Parse Arguments
    args = parser.parse_args()
    scriptpath = os.path.abspath(sys.argv[0])
    src = os.path.abspath(args.src)
    dstdir = os.path.abspath(args.dst)

    #### Validate Required Arguments
    if os.path.isdir(src):
        srctype = 'dir'
    elif os.path.isfile(src) and os.path.splitext(src)[1].lower() == '.txt':
        srctype = 'textfile'
    elif os.path.isfile(src) and os.path.splitext(
            src)[1].lower() in ortho_functions.exts:
        srctype = 'image'
    elif os.path.isfile(src.replace('msi', 'blu')) and os.path.splitext(
            src)[1].lower() in ortho_functions.exts:
        srctype = 'image'
    else:
        parser.error(
            "Error arg1 is not a recognized file path or file type: {}".format(
                src))

    if not os.path.isdir(dstdir):
        parser.error("Error arg2 is not a valid file path: {}".format(dstdir))

    ## Verify qsubscript
    if args.pbs or args.slurm:
        if args.qsubscript is None:
            if args.pbs:
                qsubpath = os.path.join(os.path.dirname(scriptpath),
                                        'qsub_ortho.sh')
            if args.slurm:
                qsubpath = os.path.join(os.path.dirname(scriptpath),
                                        'slurm_ortho.sh')
        else:
            qsubpath = os.path.abspath(args.qsubscript)
        if not os.path.isfile(qsubpath):
            parser.error("qsub script path is not valid: {}".format(qsubpath))

    ## Verify processing options do not conflict
    if args.pbs and args.slurm:
        parser.error("Options --pbs and --slurm are mutually exclusive")
    if (args.pbs or args.slurm) and args.parallel_processes > 1:
        parser.error(
            "HPC Options (--pbs or --slurm) and --parallel-processes > 1 are mutually exclusive"
        )

    #### Verify EPSG
    try:
        spatial_ref = utils.SpatialRef(args.epsg)
    except RuntimeError as e:
        parser.error(e)

    #### Verify that dem and ortho_height are not both specified
    if args.dem is not None and args.ortho_height is not None:
        parser.error(
            "--dem and --ortho_height options are mutually exclusive.  Please choose only one."
        )

    #### Test if DEM exists
    if args.dem:
        if not os.path.isfile(args.dem):
            parser.error("DEM does not exist: {}".format(args.dem))

    #### Set up console logging handler
    lso = logging.StreamHandler()
    lso.setLevel(logging.INFO)
    formatter = logging.Formatter('%(asctime)s %(levelname)s- %(message)s',
                                  '%m-%d-%Y %H:%M:%S')
    lso.setFormatter(formatter)
    logger.addHandler(lso)

    #### Get args ready to pass to task handler
    arg_keys_to_remove = ('l', 'qsubscript', 'dryrun', 'pbs', 'slurm',
                          'parallel_processes')
    arg_str_base = taskhandler.convert_optional_args_to_string(
        args, pos_arg_keys, arg_keys_to_remove)

    ## Identify source images
    if srctype == 'dir':
        image_list1 = utils.find_images(src, False, ortho_functions.exts)
    elif srctype == 'textfile':
        image_list1 = utils.find_images(src, True, ortho_functions.exts)
    else:
        image_list1 = [src]

    ## Group Ikonos
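    ## Ikonos (IK01) multispectral bands are delivered as separate per-band files;
    ## map any band file to a single 'msi' name so the set() dedup below yields one task per scene.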
    image_list2 = []
    for srcfp in image_list1:
        srcdir, srcfn = os.path.split(srcfp)
        if "IK01" in srcfn and sum(
            [b in srcfn for b in ortho_functions.ikMsiBands]) > 0:
            for b in ortho_functions.ikMsiBands:
                if b in srcfn:
                    newname = os.path.join(srcdir, srcfn.replace(b, "msi"))
                    break
            image_list2.append(newname)

        else:
            image_list2.append(srcfp)

    image_list = list(set(image_list2))
    logger.info('Number of src images: %i', len(image_list))

    ## Build task queue
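    ## Output name pattern: <src basename>_<bit depth><stretch><epsg><format extension>;
    ## an existing output file marks that image as already complete.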
    i = 0
    task_queue = []
    for srcfp in image_list:
        srcdir, srcfn = os.path.split(srcfp)
        dstfp = os.path.join(
            dstdir, "{}_{}{}{}{}".format(
                os.path.splitext(srcfn)[0], utils.get_bit_depth(args.outtype),
                args.stretch, spatial_ref.epsg,
                ortho_functions.formats[args.format]))

        done = os.path.isfile(dstfp)
        if done is False:
            i += 1
            task = taskhandler.Task(
                srcfn, 'Or{:04g}'.format(i), 'python',
                '{} {} {} {}'.format(scriptpath, arg_str_base, srcfp, dstdir),
                ortho_functions.process_image, [srcfp, dstfp, args])
            task_queue.append(task)

    logger.info('Number of incomplete tasks: %i', i)

    ## Run tasks
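    ## Four execution paths: PBS submission, SLURM submission, local multiprocessing,
    ## or serial in-process execution with a per-image log file.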
    if len(task_queue) > 0:
        logger.info("Submitting Tasks")
        if args.pbs:
            l = "-l {}".format(args.l) if args.l else ""
            try:
                task_handler = taskhandler.PBSTaskHandler(qsubpath, l)
            except RuntimeError as e:
                logger.error(e)
            else:
                if not args.dryrun:
                    task_handler.run_tasks(task_queue)

        elif args.slurm:
            try:
                task_handler = taskhandler.SLURMTaskHandler(qsubpath)
            except RuntimeError as e:
                logger.error(e)
            else:
                if not args.dryrun:
                    task_handler.run_tasks(task_queue)

        elif args.parallel_processes > 1:
            try:
                task_handler = taskhandler.ParallelTaskHandler(
                    args.parallel_processes)
            except RuntimeError as e:
                logger.error(e)
            else:
                logger.info("Number of child processes to spawn: %i",
                            task_handler.num_processes)
                if not args.dryrun:
                    task_handler.run_tasks(task_queue)

        else:

            results = {}
            for task in task_queue:

                src, dstfp, task_arg_obj = task.method_arg_list

                #### Set up processing log handler
                logfile = os.path.splitext(dstfp)[0] + ".log"
                lfh = logging.FileHandler(logfile)
                lfh.setLevel(logging.DEBUG)
                formatter = logging.Formatter(
                    '%(asctime)s %(levelname)s- %(message)s',
                    '%m-%d-%Y %H:%M:%S')
                lfh.setFormatter(formatter)
                logger.addHandler(lfh)

                if not args.dryrun:
                    results[task.name] = task.method(src, dstfp, task_arg_obj)

                #### remove existing file handler
                logger.removeHandler(lfh)

            #### Print Images with Errors
            for k, v in results.items():
                if v != 0:
                    logger.warning("Failed Image: %s", k)

        logger.info("Done")

    else:
        logger.info("No images found to process")
Example #6
    def setUp(self):
        self.dem = os.path.join(test_dir, 'dem', 'ramp_lowres.tif')
        self.srs = utils.SpatialRef(4326)
Example #7
def main():

    #### Set Up Arguments
    parent_parser, pos_arg_keys = ortho_functions.buildParentArgumentParser()
    parser = argparse.ArgumentParser(
        parents=[parent_parser],
        description="Run/submit batch image ortho and conversion tasks")

    parser.add_argument("--pbs",
                        action='store_true',
                        default=False,
                        help="submit tasks to PBS")
    parser.add_argument("--slurm",
                        action='store_true',
                        default=False,
                        help="submit tasks to SLURM")
    parser.add_argument(
        "--parallel-processes",
        type=int,
        default=1,
        help="number of parallel processes to spawn (default 1)")
    parser.add_argument(
        "--qsubscript",
        help=
        "submission script to use in PBS/SLURM submission (PBS default is qsub_ortho.sh, SLURM default is slurm_ortho.py, in script root folder)"
    )
    parser.add_argument(
        "-l", help="PBS resources requested (mimicks qsub syntax, PBS only)")
    parser.add_argument("--dryrun",
                        action='store_true',
                        default=False,
                        help='print actions without executing')

    #### Parse Arguments
    args = parser.parse_args()
    scriptpath = os.path.abspath(sys.argv[0])
    src = os.path.abspath(args.src)
    dstdir = os.path.abspath(args.dst)

    #### Validate Required Arguments
    if os.path.isdir(src):
        srctype = 'dir'
    elif os.path.isfile(src) and os.path.splitext(src)[1].lower() == '.txt':
        srctype = 'textfile'
    elif os.path.isfile(src) and os.path.splitext(
            src)[1].lower() in ortho_functions.exts:
        srctype = 'image'
    elif os.path.isfile(src.replace('msi', 'blu')) and os.path.splitext(
            src)[1].lower() in ortho_functions.exts:
        srctype = 'image'
    else:
        parser.error(
            "Error arg1 is not a recognized file path or file type: %s" %
            (src))

    if not os.path.isdir(dstdir):
        parser.error("Error arg2 is not a valid file path: %s" % (dstdir))

    ## Verify qsubscript
    if args.pbs or args.slurm:
        if args.qsubscript is None:
            if args.pbs:
                qsubpath = os.path.join(os.path.dirname(scriptpath),
                                        'qsub_ortho.sh')
            if args.slurm:
                qsubpath = os.path.join(os.path.dirname(scriptpath),
                                        'slurm_ortho.sh')
        else:
            qsubpath = os.path.abspath(args.qsubscript)
        if not os.path.isfile(qsubpath):
            parser.error("qsub script path is not valid: %s" % qsubpath)

    ## Verify processing options do not conflict
    if args.pbs and args.slurm:
        parser.error("Options --pbs and --slurm are mutually exclusive")
    if (args.pbs or args.slurm) and args.parallel_processes > 1:
        parser.error(
            "HPC Options (--pbs or --slurm) and --parallel-processes > 1 are mutually exclusive"
        )

    #### Verify EPSG
    try:
        spatial_ref = utils.SpatialRef(args.epsg)
    except RuntimeError as e:
        parser.error(e)
Example #8
def main():

    #### Set Up Arguments
    parser = argparse.ArgumentParser(
        description="build setsm DEM index"
        )

    #### Positional Arguments
    parser.add_argument('src', help="source directory or image")
    parser.add_argument('dst', help="destination index dataset (use PG:<config.ini section name>:<layer name> for a PostgreSQL DB)")

    #### Optional Arguments
    parser.add_argument('--mode', choices=MODES.keys(), default='scene',
                        help="type of items to index {} default=scene".format(MODES.keys()))
    parser.add_argument('--config', default=os.path.join(os.path.dirname(sys.argv[0]),'config.ini'),
                        help="config file (default is config.ini in script dir")
    parser.add_argument('--epsg', type=int, default=4326,
                        help="egsg code for output index projection (default wgs85 geographic epsg:4326)")
    parser.add_argument('--dsp-original-res', action='store_true', default=False,
                        help='write index of downsampled product (dsp) scenes as original resolution (mode=scene only)')
    parser.add_argument('--status', help='custom value for status field')
    parser.add_argument('--include-registration', action='store_true', default=False,
                        help='include registration info if present (mode=strip and tile only)')
    parser.add_argument('--search-masked', action='store_true', default=False,
                        help='search for masked and unmasked DEMs (mode=strip only)')
    parser.add_argument('--read-json', action='store_true', default=False,
                        help='search for json files instead of images to populate the index')
    parser.add_argument('--write-json', action='store_true', default=False,
                        help='write results to json files in dst folder')
    parser.add_argument('--log', help="directory for log output")
    parser.add_argument('--overwrite', action='store_true', default=False,
                        help="overwrite existing index")
    parser.add_argument('--append', action='store_true', default=False,
                        help="append records to existing index")
    parser.add_argument('--check', action='store_true', default=False,
                        help='verify new records exist in target index (not compatible with --write-json)')
    parser.add_argument('--skip-region-lookup', action='store_true', default=False,
                        help="skip region lookup on danco (used for testing)")
    parser.add_argument("--write-pickle", help="store region lookup in a pickle file. skipped if --write-json is used")
    parser.add_argument("--read-pickle", help='read region lookup from a pickle file. skipped if --write-json is used')
    parser.add_argument("--bp-paths", action='store_true', default=False, help='Use BlackPearl path schema')
    parser.add_argument("--tnva-paths", action='store_true', default=False, help='Use Terranova path schema')
    parser.add_argument('--project', choices=PROJECTS.keys(), help='project name (required when writing tiles)')
    parser.add_argument('--dryrun', action='store_true', default=False, help='run script without inserting records')


    #### Parse Arguments
    args = parser.parse_args()

    #### Verify Arguments
    if not os.path.isdir(args.src) and not os.path.isfile(args.src):
        parser.error("Source directory or file does not exist: %s" %args.src)

    src = args.src
    dst = args.dst

    if args.overwrite and args.append:
        parser.error('--append and --overwrite are mutually exclusive')

    if args.write_json and args.append:
        parser.error('--append cannot be used with the --write-json option')

    if (args.write_json or args.read_json) and args.search_masked:
        parser.error('--search-masked cannot be used with the --write-json or --read-json options')

    if args.mode != 'strip' and args.search_masked:
        parser.error('--search-masked applies only to mode=strip')

    if args.write_json and args.check:
        parser.error('--check cannot be used with the --write-json option')

    ## Check project
    if args.mode == 'tile' and not args.project:
        parser.error("--project option is required if when mode=tile")

    if args.write_pickle:
        if not os.path.isdir(os.path.dirname(args.write_pickle)):
            parser.error("Pickle file must be in an existing directory")
    if args.read_pickle:
        if not os.path.isfile(args.read_pickle):
            parser.error("Pickle file must be an existing file")

    if args.status and args.bp_paths:
        parser.error("--bp-paths sets status field to 'tape' and cannot be used with --status")

    if args.tnva_paths and args.bp_paths:
        parser.error("--bp-paths and --tnva-paths are incompatible options")

    if args.mode != 'scene' and args.dsp_original_res:
        parser.error("--dsp-original-res is applicable only when mode = scene")

    if args.bp_paths:
        db_path_prefix = BP_PATH_PREFIX
    elif args.tnva_paths:
        db_path_prefix = TNVA_PATH_PREFIX
    else:
        db_path_prefix = None

    #### Set up loggers
    lsh = logging.StreamHandler()
    lsh.setLevel(logging.INFO)
    formatter = logging.Formatter('%(asctime)s %(levelname)s- %(message)s','%m-%d-%Y %H:%M:%S')
    lsh.setFormatter(formatter)
    logger.addHandler(lsh)

    if args.log:
        if os.path.isdir(args.log):
            tm = datetime.datetime.now()
            logfile = os.path.join(args.log,"index_setsm_{}.log".format(tm.strftime("%Y%m%d%H%M%S")))
        else:
            parser.error('log folder does not exist: {}'.format(args.log))

        lfh = logging.FileHandler(logfile)
        lfh.setLevel(logging.INFO)
        formatter = logging.Formatter('%(asctime)s %(levelname)s- %(message)s','%m-%d-%Y %H:%M:%S')
        lfh.setFormatter(formatter)
        logger.addHandler(lfh)

    ## Check path if --write-json is invoked
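    ## JSON output bypasses OGR entirely, so no driver is resolved and --epsg has no effect.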
    if args.write_json:
        if not os.path.isdir(dst):
            parser.error("Destination must be an existing directory with --write-json option")
        ogr_driver_str = None

        if args.epsg:
            logger.warning('--epsg and --dsp-original-res will be ignored with the --write-json option')

    ## If not writing to JSON, get OGR driver, ds name, and layer name
    else:
        try:
            ogr_driver_str, dst_dsp, dst_lyr = utils.get_source_names2(dst)
        except RuntimeError as e:
            parser.error(e)

        ogrDriver = ogr.GetDriverByName(ogr_driver_str)
        if ogrDriver is None:
            parser.error("Driver is not available: {}".format(ogr_driver_str))

        #### Get Config file contents
        try:
            config = ConfigParser.SafeConfigParser()
        except AttributeError:
            # SafeConfigParser was deprecated in Python 3.2 and removed in 3.12;
            # ConfigParser is the replacement, and the missing attribute raises AttributeError.
            config = ConfigParser.ConfigParser()
        config.read(args.config)

        #### Get output DB connection if specified
        if ogr_driver_str in ("PostgreSQL"):

            section = dst_dsp
            if section in config.sections():
                conn_info = {
                    'host':config.get(section,'host'),
                    'port':config.getint(section,'port'),
                    'name':config.get(section,'name'),
                    'schema':config.get(section,'schema'),
                    'user':config.get(section,'user'),
                    'pw':config.get(section,'pw'),
                }
                dst_ds = "PG:host={host} port={port} dbname={name} user={user} password={pw} active_schema={schema}".format(**conn_info)

            else:
                logger.error('Config.ini file must contain credentials to connect to {}'.format(section))
                sys.exit(-1)

        #### Set dataset path if SHP or GDB
        elif ogr_driver_str in ("ESRI Shapefile","FileGDB"):
            dst_ds = dst_dsp

        else:
            logger.error("Format {} is not supported".format(ogr_driver_str))
            sys.exit(-1)

        ## Get pairname-region dict
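        ## 'pairs' maps DEM pairname -> region, read from a pickle cache if supplied,
        ## otherwise queried from the Danco database; tile mode skips the lookup.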
        if args.skip_region_lookup or args.mode == 'tile':
            pairs = {}
        else:
            if args.read_pickle:
                logger.info("Fetching region lookup from pickle file")
                pairs = pickle.load(open(args.read_pickle, "rb"))

            else:
                #### Get Danco connection if available
                section = 'danco'
                if section in config.sections():
                    danco_conn_info = {
                        'host':config.get(section,'host'),
                        'port':config.getint(section,'port'),
                        'name':config.get(section,'name'),
                        'schema':config.get(section,'schema'),
                        'user':config.get(section,'user'),
                        'pw':config.get(section,'pw'),
                    }

                    logger.info("Fetching region lookup from Danco")
                    pairs = get_pair_region_dict(danco_conn_info)
                else:
                    logger.warning('Config file does not contain credentials to connect to Danco. Region cannot be determined.')
                    pairs = {}

            if len(pairs) == 0:
                logger.warning("Cannot get region-pair lookup")

                if args.tnva_paths:
                    logger.error("Region-pair lookup required for --tnva-paths option")
                    sys.exit()

        ## Save pickle if selected
        if args.write_pickle:
            logger.info("Pickling region lookup")
            pickle.dump(pairs, open(args.write_pickle, "wb"))

        #### Test epsg
        try:
            spatial_ref = utils.SpatialRef(args.epsg)
        except RuntimeError as e:
            parser.error(e)

        #### Test if dst table exists
        if ogr_driver_str == 'ESRI Shapefile' and os.path.isfile(dst_ds):
            if args.overwrite:
                logger.info("Removing old index... %s" %os.path.basename(dst_ds))
                if not args.dryrun:
                    ogrDriver.DeleteDataSource(dst_ds)
            elif not args.append:
                logger.error("Dst shapefile exists.  Use the --overwrite or --append options.")
                sys.exit(-1)

        if ogr_driver_str == 'FileGDB' and os.path.isdir(dst_ds):
            ds = ogrDriver.Open(dst_ds,1)
            if ds:
                for i in range(ds.GetLayerCount()):
                    lyr = ds.GetLayer(i)
                    if lyr.GetName() == dst_lyr:
                        if args.overwrite:
                            logger.info("Removing old index layer: {}".format(dst_lyr))
                            del lyr
                            ds.DeleteLayer(i)
                            break
                        elif not args.append:
                            logger.error("Dst GDB layer exists.  Use the --overwrite or --append options.")
                            sys.exit(-1)
                ds = None

        ## Postgres check - remove the existing layer only if --overwrite is given
        if ogr_driver_str == 'PostgreSQL':
            ds = ogrDriver.Open(dst_ds,1)
            if ds:
                for i in range(ds.GetLayerCount()):
                    lyr = ds.GetLayer(i)
                    if lyr.GetName() == dst_lyr:
                        if args.overwrite:
                            logger.info("Removing old index layer: {}".format(dst_lyr))
                            del lyr
                            ds.DeleteLayer(i)
                            break
                        elif not args.append:
                            logger.error("Dst DB layer exists.  Use the --overwrite or --append options.")
                            sys.exit(-1)
                ds = None


    #### ID records
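    ## MODES[mode] supplies the record class, the filename suffix to search for,
    ## the grouping field, and the base/registration field definitions.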
    dem_class, suffix, groupid_fld, fld_defs_base, reg_fld_defs = MODES[args.mode]
    if args.mode == 'strip' and args.search_masked:
        suffix = mask_strip_suffixes + (suffix,)
    fld_defs = fld_defs_base + reg_fld_defs if args.include_registration else fld_defs_base
    src_fps = []
    records = []
    logger.info('Identifying DEMs')
    if os.path.isfile(src):
        logger.info(src)
        src_fps.append(src)
    else:
        for root,dirs,files in os.walk(src):
            for f in files:
                if (f.endswith('.json') and args.read_json) or (f.endswith(suffix)):
                    logger.debug(os.path.join(root,f))
                    src_fps.append(os.path.join(root,f))

    for src_fp in src_fps:
        if args.read_json:
            temp_records = read_json(src_fp, args.mode)
            records.extend(temp_records)
        else:
            try:
                record = dem_class(src_fp)
                record.get_dem_info()
            except RuntimeError as e:
                logger.error(e)
            else:
                if args.mode == 'scene' and not os.path.isfile(record.dspinfo) and args.dsp_original_res:
                    logger.error("Record {} has no Dsp downsample info file: {}, skipping".format(record.id,record.dspinfo))
                else:
                    records.append(record)

    total = len(records)

    if total == 0:
        logger.info("No valid records found")
    else:
        logger.info("{} records found".format(total))
        ## Group into strips or tiles for json writing
        groups = {}
        for record in records:
            groupid = getattr(record,groupid_fld)
            if groupid in groups:
                groups[groupid].append(record)
            else:
                groups[groupid] = [record]

        #### Write index
        if args.write_json:
            write_to_json(dst, groups, total, args)
        else:
            write_result = write_to_ogr_dataset(ogr_driver_str, ogrDriver, dst_ds, dst_lyr, groups,
                                                pairs, total, db_path_prefix, fld_defs, args)
            if write_result:
                sys.exit(0)
            else:
                sys.exit(-1)
Example #9
def main():
    #### Set Up Arguments
    parent_parser, pos_arg_keys = ortho_functions.buildParentArgumentParser()
    parser = argparse.ArgumentParser(
        parents=[parent_parser],
        description="Run/submit batch image ortho and conversion tasks")

    parser.add_argument("--pbs",
                        action='store_true',
                        default=False,
                        help="submit tasks to PBS")
    parser.add_argument("--slurm",
                        action='store_true',
                        default=False,
                        help="submit tasks to SLURM")
    parser.add_argument(
        "--tasks-per-job",
        type=int,
        help=
        "Number of tasks to bundle into a single job. (requires --pbs or --slurm option) (Warning:"
        " a higher number of tasks per job may require modification of default wallclock limit.)"
    )
    parser.add_argument(
        '--scratch',
        default=ARGDEF_SCRATCH,
        help="Scratch space to build task bundle text files. (default={})".
        format(ARGDEF_SCRATCH))
    parser.add_argument(
        "--parallel-processes",
        type=int,
        default=1,
        help="number of parallel processes to spawn (default 1)")
    parser.add_argument(
        "--qsubscript",
        help=
        "submission script to use in PBS/SLURM submission (PBS default is qsub_ortho.sh, SLURM "
        "default is slurm_ortho.py, in script root folder)")
    parser.add_argument(
        "-l", help="PBS resources requested (mimicks qsub syntax, PBS only)")
    parser.add_argument("--dryrun",
                        action='store_true',
                        default=False,
                        help='print actions without executing')

    #### Parse Arguments
    args = parser.parse_args()
    scriptpath = os.path.abspath(sys.argv[0])
    src = os.path.abspath(args.src)
    dstdir = os.path.abspath(args.dst)
    scratch = os.path.abspath(args.scratch)
    bittype = utils.get_bit_depth(args.outtype)

    #### Validate Required Arguments
    if os.path.isdir(src):
        srctype = 'dir'
    elif os.path.isfile(src) and os.path.splitext(src)[1].lower() == '.txt':
        srctype = 'textfile'
    elif os.path.isfile(src) and os.path.splitext(
            src)[1].lower() in ortho_functions.exts:
        srctype = 'image'
    elif os.path.isfile(src.replace('msi', 'blu')) and os.path.splitext(
            src)[1].lower() in ortho_functions.exts:
        srctype = 'image'
    else:
        parser.error(
            "Error arg1 is not a recognized file path or file type: {}".format(
                src))

    if not os.path.isdir(dstdir):
        parser.error("Error arg2 is not a valid file path: {}".format(dstdir))

    ## Verify qsubscript
    if args.pbs or args.slurm:
        if args.qsubscript is None:
            if args.pbs:
                qsubpath = os.path.join(os.path.dirname(scriptpath),
                                        'qsub_ortho.sh')
            if args.slurm:
                qsubpath = os.path.join(os.path.dirname(scriptpath),
                                        'slurm_ortho.sh')
        else:
            qsubpath = os.path.abspath(args.qsubscript)
        if not os.path.isfile(qsubpath):
            parser.error("qsub script path is not valid: {}".format(qsubpath))

    ## Verify processing options do not conflict
    requested_threads = ortho_functions.ARGDEF_CPUS_AVAIL if args.threads == "ALL_CPUS" else args.threads
    if args.pbs and args.slurm:
        parser.error("Options --pbs and --slurm are mutually exclusive")
    if (args.pbs or args.slurm) and args.parallel_processes > 1:
        parser.error(
            "HPC Options (--pbs or --slurm) and --parallel-processes > 1 are mutually exclusive"
        )
    if (args.pbs or args.slurm) and requested_threads > 1:
        parser.error(
            "HPC Options (--pbs or --slurm) and --threads > 1 are mutually exclusive"
        )
    if requested_threads < 1:
        parser.error(
            "--threads count must be positive, nonzero integer or ALL_CPUS")
    if args.parallel_processes > 1:
        total_proc_count = requested_threads * args.parallel_processes
        if total_proc_count > ortho_functions.ARGDEF_CPUS_AVAIL:
            parser.error(
                "the (threads * number of processes requested) ({0}) exceeds number of available threads "
                "({1}); reduce --threads and/or --parallel-processes count".
                format(total_proc_count, ortho_functions.ARGDEF_CPUS_AVAIL))

    if args.tasks_per_job:
        if not (args.pbs or args.slurm):
            parser.error(
                "--tasks-per-job option requires the (--pbs or --slurm) option"
            )
        if not os.path.isdir(args.scratch):
            print("Creating --scratch directory: {}".format(args.scratch))
            os.makedirs(args.scratch)

    #### Verify EPSG
    try:
        spatial_ref = utils.SpatialRef(args.epsg)
    except RuntimeError as e:
        parser.error(e)

    #### Verify that dem and ortho_height are not both specified
    if args.dem is not None and args.ortho_height is not None:
        parser.error(
            "--dem and --ortho_height options are mutually exclusive.  Please choose only one."
        )

    #### Test if DEM exists
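    ## When no -l resource string is given, derive the PBS memory request from the
    ## DEM size on disk (summing VRT component rasters): ceil(GB) + 2, with a 4 GB floor.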
    if args.dem:
        if not os.path.isfile(args.dem):
            parser.error("DEM does not exist: {}".format(args.dem))
        if args.l is None:
            if args.dem.endswith('.vrt'):
                total_dem_filesz_gb = 0.0
                tree = ET.parse(args.dem)
                root = tree.getroot()
                for sourceFilename in root.iter('SourceFilename'):
                    dem_filename = sourceFilename.text
                    if not os.path.isfile(dem_filename):
                        parser.error(
                            "VRT DEM component raster does not exist: {}".
                            format(dem_filename))
                    dem_filesz_gb = os.path.getsize(
                        dem_filename) / 1024.0 / 1024 / 1024
                    total_dem_filesz_gb += dem_filesz_gb
                dem_filesz_gb = total_dem_filesz_gb
            else:
                dem_filesz_gb = os.path.getsize(
                    args.dem) / 1024.0 / 1024 / 1024

            pbs_req_mem_gb = int(max(math.ceil(dem_filesz_gb) + 2, 4))
            args.l = 'mem={}gb'.format(pbs_req_mem_gb)

    #### Set up console logging handler
    lso = logging.StreamHandler()
    lso.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(asctime)s %(levelname)s- %(message)s',
                                  '%m-%d-%Y %H:%M:%S')
    lso.setFormatter(formatter)
    logger.addHandler(lso)

    #### Handle thread count that exceeds system limits
    if requested_threads > ortho_functions.ARGDEF_CPUS_AVAIL:
        logger.info(
            "threads requested ({0}) exceeds number available on system ({1}), setting thread count to "
            "'ALL_CPUS'".format(requested_threads,
                                ortho_functions.ARGDEF_CPUS_AVAIL))
        args.threads = 'ALL_CPUS'

    #### Get args ready to pass to task handler
    arg_keys_to_remove = ('l', 'qsubscript', 'dryrun', 'pbs', 'slurm',
                          'parallel_processes', 'tasks_per_job')
    arg_str_base = taskhandler.convert_optional_args_to_string(
        args, pos_arg_keys, arg_keys_to_remove)

    ## Identify source images
    if srctype == 'dir':
        image_list1 = utils.find_images(src, False, ortho_functions.exts)
    elif srctype == 'textfile':
        image_list1 = utils.find_images(src, True, ortho_functions.exts)
    else:
        image_list1 = [src]

    ## Group Ikonos
    image_list2 = []
    for srcfp in image_list1:
        srcdir, srcfn = os.path.split(srcfp)
        if "IK01" in srcfn and sum(
            [b in srcfn for b in ortho_functions.ikMsiBands]) > 0:
            for b in ortho_functions.ikMsiBands:
                if b in srcfn:
                    newname = os.path.join(srcdir, srcfn.replace(b, "msi"))
                    break
            image_list2.append(newname)

        else:
            image_list2.append(srcfp)

    image_list = list(set(image_list2))
    logger.info('Number of src images: %i', len(image_list))

    ## Build task queue
    i = 0
    images_to_process = []
    for srcfp in image_list:
        srcdir, srcfn = os.path.split(srcfp)
        dstfp = os.path.join(
            dstdir, "{}_{}{}{}{}".format(
                os.path.splitext(srcfn)[0], bittype, args.stretch,
                spatial_ref.epsg, ortho_functions.formats[args.format]))

        done = os.path.isfile(dstfp)
        if done is False:
            i += 1
            images_to_process.append(srcfp)

    logger.info('Number of incomplete tasks: %i', i)

    if len(images_to_process) == 0:
        logger.info("No images found to process")
        sys.exit(0)

    task_queue = []
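    ## With --tasks-per-job, bundle source paths into text files under --scratch so each
    ## HPC job processes several images; otherwise submit one task per image.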

    if args.tasks_per_job and args.tasks_per_job > 1:
        task_srcfp_list = utils.write_task_bundles(images_to_process,
                                                   args.tasks_per_job, scratch,
                                                   'Or_src')
    else:
        task_srcfp_list = images_to_process

    for job_count, srcfp in enumerate(task_srcfp_list, 1):

        srcdir, srcfn = os.path.split(srcfp)

        if task_srcfp_list is images_to_process:
            dstfp = os.path.join(
                dstdir, "{}_{}{}{}{}".format(
                    os.path.splitext(srcfn)[0], bittype, args.stretch,
                    spatial_ref.epsg, ortho_functions.formats[args.format]))
        else:
            dstfp = None

        task = taskhandler.Task(
            srcfn, 'Or{:04g}'.format(job_count), 'python',
            '{} {} {} {}'.format(argval2str(scriptpath), arg_str_base,
                                 argval2str(srcfp), argval2str(dstdir)),
            ortho_functions.process_image, [srcfp, dstfp, args])
        task_queue.append(task)

    ## Run tasks
    if len(task_queue) > 0:
        logger.info("Submitting Tasks")
        if args.pbs:
            l = "-l {}".format(args.l) if args.l else ""
            try:
                task_handler = taskhandler.PBSTaskHandler(qsubpath, l)
            except RuntimeError as e:
                logger.error(e)
            else:
                if not args.dryrun:
                    task_handler.run_tasks(task_queue)

        elif args.slurm:
            try:
                task_handler = taskhandler.SLURMTaskHandler(qsubpath)
            except RuntimeError as e:
                logger.error(e)
            else:
                if not args.dryrun:
                    task_handler.run_tasks(task_queue)

        elif args.parallel_processes > 1:
            try:
                task_handler = taskhandler.ParallelTaskHandler(
                    args.parallel_processes)
            except RuntimeError as e:
                logger.error(e)
            else:
                logger.info("Number of child processes to spawn: %i",
                            task_handler.num_processes)
                if not args.dryrun:
                    task_handler.run_tasks(task_queue)

        else:

            results = {}
            for task in task_queue:

                src, dstfp, task_arg_obj = task.method_arg_list

                #### Set up processing log handler
                logfile = os.path.splitext(dstfp)[0] + ".log"
                lfh = logging.FileHandler(logfile)
                lfh.setLevel(logging.DEBUG)
                formatter = logging.Formatter(
                    '%(asctime)s %(levelname)s- %(message)s',
                    '%m-%d-%Y %H:%M:%S')
                lfh.setFormatter(formatter)
                logger.addHandler(lfh)

                if not args.dryrun:
                    results[task.name] = task.method(src, dstfp, task_arg_obj)

                #### remove existing file handler
                logger.removeHandler(lfh)

            #### Print Images with Errors
            for k, v in results.items():
                if v != 0:
                    logger.warning("Failed Image: %s", k)

        logger.info("Done")

    else:
        logger.info("No images found to process")
Example #10
def main():

    #### Set Up Arguments
    parent_parser, pos_arg_keys = ortho_functions.buildParentArgumentParser()
    parser = argparse.ArgumentParser(
        parents=[parent_parser],
        description="Run/Submit batch pansharpening in parallel")

    parser.add_argument("--pbs",
                        action='store_true',
                        default=False,
                        help="submit tasks to PBS")
    parser.add_argument("--slurm",
                        action='store_true',
                        default=False,
                        help="submit tasks to SLURM")
    parser.add_argument(
        "--parallel-processes",
        type=int,
        default=1,
        help="number of parallel processes to spawn (default 1)")
    parser.add_argument(
        "--qsubscript",
        help=
        "submission script to use in PBS/SLURM submission (PBS default is qsub_pansharpen.sh, "
        "SLURM default is slurm_pansharpen.py, in script root folder)")
    parser.add_argument(
        "-l", help="PBS resources requested (mimicks qsub syntax, PBS only)")
    parser.add_argument("--dryrun",
                        action="store_true",
                        default=False,
                        help="print actions without executing")

    #### Parse Arguments
    args = parser.parse_args()
    scriptpath = os.path.abspath(sys.argv[0])
    src = os.path.abspath(args.src)
    dstdir = os.path.abspath(args.dst)

    #### Validate Required Arguments
    if os.path.isdir(src):
        srctype = 'dir'
    elif os.path.isfile(src) and os.path.splitext(src)[1].lower() == '.txt':
        srctype = 'textfile'
    elif os.path.isfile(src) and os.path.splitext(
            src)[1].lower() in ortho_functions.exts:
        srctype = 'image'
    elif os.path.isfile(src.replace('msi', 'blu')) and os.path.splitext(
            src)[1].lower() in ortho_functions.exts:
        srctype = 'image'
    else:
        parser.error(
            "Error arg1 is not a recognized file path or file type: {}".format(
                src))

    if not os.path.isdir(dstdir):
        parser.error("Error arg2 is not a valid file path: {}".format(dstdir))

    # Verify qsubscript
    if args.pbs or args.slurm:
        if args.qsubscript is None:
            if args.pbs:
                qsubpath = os.path.join(os.path.dirname(scriptpath),
                                        'qsub_pansharpen.sh')
            if args.slurm:
                qsubpath = os.path.join(os.path.dirname(scriptpath),
                                        'slurm_pansharpen.sh')
        else:
            qsubpath = os.path.abspath(args.qsubscript)
        if not os.path.isfile(qsubpath):
            parser.error("qsub script path is not valid: {}".format(qsubpath))

    ### Verify processing options do not conflict
    if args.pbs and args.slurm:
        parser.error("Options --pbs and --slurm are mutually exclusive")
    if (args.pbs or args.slurm) and args.parallel_processes > 1:
        parser.error(
            "HPC Options (--pbs or --slurm) and --parallel-processes > 1 are mutually exclusive"
        )

    #### Verify EPSG
    try:
        spatial_ref = utils.SpatialRef(args.epsg)
    except RuntimeError as e:
        parser.error(e)

    ## Check GDAL version (2.1.0 minimum)
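    ## gdal.VersionInfo() returns a packed number as a string (e.g. '2010000' for 2.1.0),
    ## so an integer comparison against 2010000 enforces the minimum version.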
    gdal_version = gdal.VersionInfo()
    try:
        if int(gdal_version) < 2010000:
            parser.error(
                "gdal_pansharpen requires GDAL version 2.1.0 or higher")
    except ValueError:
        parser.error("Cannot parse GDAL version: {}".format(gdal_version))

    #### Set up console logging handler
    lso = logging.StreamHandler()
    lso.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(asctime)s %(levelname)s- %(message)s',
                                  '%m-%d-%Y %H:%M:%S')
    lso.setFormatter(formatter)
    logger.addHandler(lso)

    #### Get args ready to pass to task handler
    arg_keys_to_remove = ('l', 'qsubscript', 'dryrun', 'pbs', 'slurm',
                          'parallel_processes')
    arg_str_base = taskhandler.convert_optional_args_to_string(
        args, pos_arg_keys, arg_keys_to_remove)

    ## Identify source images
    if srctype == 'dir':
        image_list1 = utils.find_images(src, False, ortho_functions.exts)
    elif srctype == 'textfile':
        image_list1 = utils.find_images(src, True, ortho_functions.exts)
    else:
        image_list1 = [src]

    pair_list = []
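    ## Pair each multispectral source with its panchromatic mate; ImagePair raises
    ## RuntimeError (caught and logged below) when a pair cannot be formed.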
    for srcfp in image_list1:
        try:
            image_pair = ImagePair(srcfp, spatial_ref)
        except RuntimeError as e:
            logger.error(e)
        else:
            logger.info("Image: %s, Sensor: %s", image_pair.mul_srcfn,
                        image_pair.sensor)
            pair_list.append(image_pair)

    logger.info('Number of src image pairs: %i', len(pair_list))

    ## Build task queue
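    ## Pansharpened output: <mul basename>_<bit depth><stretch><epsg>_pansh.tif;
    ## pairs whose output already exists are skipped.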
    i = 0
    task_queue = []
    for image_pair in pair_list:

        bittype = utils.get_bit_depth(args.outtype)
        pansh_dstfp = os.path.join(
            dstdir, "{}_{}{}{}_pansh.tif".format(
                os.path.splitext(image_pair.mul_srcfn)[0], bittype,
                args.stretch, args.epsg))

        if not os.path.isfile(pansh_dstfp):
            i += 1
            task = taskhandler.Task(
                image_pair.mul_srcfn, 'Psh{:04g}'.format(i), 'python',
                '{} {} {} {}'.format(scriptpath, arg_str_base,
                                     image_pair.mul_srcfp, dstdir),
                exec_pansharpen, [image_pair, pansh_dstfp, args])
            task_queue.append(task)

    logger.info('Number of incomplete tasks: %i', i)

    ## Run tasks
    if len(task_queue) > 0:
        logger.info("Submitting Tasks")
        if args.pbs:
            l = "-l {}".format(args.l) if args.l else ""
            try:
                task_handler = taskhandler.PBSTaskHandler(qsubpath, l)
            except RuntimeError as e:
                logger.error(e)
            else:
                if not args.dryrun:
                    task_handler.run_tasks(task_queue)

        elif args.slurm:
            try:
                task_handler = taskhandler.SLURMTaskHandler(qsubpath)
            except RuntimeError as e:
                logger.error(e)
            else:
                if not args.dryrun:
                    task_handler.run_tasks(task_queue)

        elif args.parallel_processes > 1:
            try:
                task_handler = taskhandler.ParallelTaskHandler(
                    args.parallel_processes)
            except RuntimeError as e:
                logger.error(e)
            else:
                logger.info("Number of child processes to spawn: %i",
                            task_handler.num_processes)
                if not args.dryrun:
                    task_handler.run_tasks(task_queue)

        else:
            results = {}
            lfh = None
            for task in task_queue:

                src, dstfp, task_arg_obj = task.method_arg_list

                #### Set up processing log handler
                logfile = os.path.splitext(dstfp)[0] + ".log"
                lfh = logging.FileHandler(logfile)
                lfh.setLevel(logging.DEBUG)
                formatter = logging.Formatter(
                    '%(asctime)s %(levelname)s- %(message)s',
                    '%m-%d-%Y %H:%M:%S')
                lfh.setFormatter(formatter)
                logger.addHandler(lfh)

                if not args.dryrun:
                    results[task.name] = task.method(src, dstfp, task_arg_obj)

                #### remove existing file handler
                logger.removeHandler(lfh)

            #### Print Images with Errors
            for k, v in results.items():
                if v != 0:
                    logger.warning("Failed Image: %s", k)

        logger.info("Done")

    else:
        logger.info("No images found to process")
Example #11
def main():

    #### Set Up Arguments
    parser = argparse.ArgumentParser(description="build setsm DEM index")

    #### Positional Arguments
    parser.add_argument('src', help="source directory or image")
    parser.add_argument(
        'dst',
        help=
        "destination index dataset (use PG:<config.ini section name>:<layer name> for a postgresql DB"
    )

    #### Optional Arguments
    parser.add_argument('--mode',
                        choices=MODES.keys(),
                        default='scene',
                        help="type of items to index {} default=scene".format(
                            MODES.keys()))
    parser.add_argument(
        '--config',
        default=os.path.join(os.path.dirname(sys.argv[0]), 'config.ini'),
        help="config file (default is config.ini in script dir")
    parser.add_argument(
        '--epsg',
        type=int,
        default=4326,
        help=
        "egsg code for output index projection (default wgs85 geographic epsg:4326)"
    )
    parser.add_argument(
        '--read-json',
        action='store_true',
        default=False,
        help='search for json files instead of images to populate the index')
    parser.add_argument('--write-json',
                        action='store_true',
                        default=False,
                        help='write results to json files in dst folder')
    parser.add_argument('--log', help="directory for log output")
    parser.add_argument('--overwrite',
                        action='store_true',
                        default=False,
                        help="overwrite existing index")
    parser.add_argument('--append',
                        action='store_true',
                        default=False,
                        help="append records to existing index")
    parser.add_argument('--skip-region-lookup',
                        action='store_true',
                        default=False,
                        help="skip region lookup on danco (used for testing)")
    parser.add_argument(
        "--write-pickle",
        help=
        "store region lookup in a pickle file. skipped if --write-json is used"
    )
    parser.add_argument(
        "--read-pickle",
        help=
        'read region lookup from a pickle file. skipped if --write-json is used'
    )
    parser.add_argument("--bp-paths",
                        action='store_true',
                        default=False,
                        help='Use BlackPearl path schema')
    parser.add_argument('--project',
                        choices=PROJECTS,
                        help='project name (required when writing tiles)')
    parser.add_argument('--dryrun',
                        action='store_true',
                        default=False,
                        help='run script without inserting records')

    #### Parse Arguments
    args = parser.parse_args()

    #### Verify Arguments
    if not os.path.isdir(args.src) and not os.path.isfile(args.src):
        parser.error("Source directory or file does not exist: %s" % args.src)

    src = os.path.abspath(args.src)
    dst = args.dst

    if args.overwrite and args.append:
        parser.error('--append and --overwrite are mutually exclusive')

    if args.write_json and args.append:
        parser.error('--append cannot be used with the --write-json option')

    ## Check project
    if args.mode == 'tile' and not args.project:
        parser.error("--project option is required if when mode=tile")

    if args.write_pickle:
        if not os.path.isdir(os.path.dirname(args.write_pickle)):
            parser.error("Pickle file must be in an existing directory")
    if args.read_pickle:
        if not os.path.isfile(args.read_pickle):
            parser.error("Pickle file must be an existing file")

    if args.bp_paths:
        db_path_prefix = BP_PATH_PREFIX
    else:
        db_path_prefix = None

    #### Set up loggers
    lsh = logging.StreamHandler()
    lsh.setLevel(logging.INFO)
    formatter = logging.Formatter('%(asctime)s %(levelname)s- %(message)s',
                                  '%m-%d-%Y %H:%M:%S')
    lsh.setFormatter(formatter)
    logger.addHandler(lsh)

    if args.log:
        if os.path.isdir(args.log):
            tm = datetime.datetime.now()
            logfile = os.path.join(
                args.log,
                "index_setsm_{}.log".format(tm.strftime("%Y%m%d%H%M%S")))
        else:
            parser.error('log folder does not exist: {}'.format(args.log))

        lfh = logging.FileHandler(logfile)
        lfh.setLevel(logging.INFO)
        formatter = logging.Formatter('%(asctime)s %(levelname)s- %(message)s',
                                      '%m-%d-%Y %H:%M:%S')
        lfh.setFormatter(formatter)
        logger.addHandler(lfh)

    ## Check path if --write-json is invoked
    if args.write_json:
        if not os.path.isdir(dst):
            parser.error(
                "Destination must be an existing directory with --write-json option"
            )
        ogr_driver_str = None

        if args.epsg:
            logger.warning(
                '--epsg will be ignored with the --write-json option')

    ## If not writing to JSON, get OGR driver, ds name, and layer name
    else:
        try:
            ogr_driver_str, dst_dsp, dst_lyr = utils.get_source_names2(dst)
        except RuntimeError as e:
            parser.error(e)

        ogrDriver = ogr.GetDriverByName(ogr_driver_str)
        if ogrDriver is None:
            parser.error("Driver is not available: {}".format(ogr_driver_str))

        #### Get Config file contents
        try:
            config = ConfigParser.SafeConfigParser()
        except AttributeError:
            # SafeConfigParser was removed in Python 3.12; ConfigParser replaces it,
            # and accessing the missing attribute raises AttributeError, not NameError.
            config = ConfigParser.ConfigParser()
        config.read(args.config)

        #### Get output DB connection if specified
        if ogr_driver_str in ("PostgreSQL"):

            section = dst_dsp
            if section in config.sections():
                conn_info = {
                    'host': config.get(section, 'host'),
                    'port': config.getint(section, 'port'),
                    'name': config.get(section, 'name'),
                    'schema': config.get(section, 'schema'),
                    'user': config.get(section, 'user'),
                    'pw': config.get(section, 'pw'),
                }
                dst_ds = "PG:host={host} port={port} dbname={name} user={user} password={pw} active_schema={schema}".format(
                    **conn_info)

            else:
                logger.error(
                    'Config.ini file must contain credentials to connect to {}'
                    .format(section))
                sys.exit(-1)

        #### Set dataset path if SHP or GDB
        elif ogr_driver_str in ("ESRI Shapefile", "FileGDB"):
            dst_ds = dst_dsp

        else:
            logger.error("Format {} is not supported".format(ogr_driver_str))
            sys.exit(-1)

        ## Get pairname-region dict
        if args.skip_region_lookup or args.mode == 'tile':
            pairs = {}
        else:
            if args.read_pickle:
                logger.info("Fetching region lookup from pickle file")
                pairs = pickle.load(open(args.read_pickle, "rb"))

            else:
                #### Get Danco connection if available
                section = 'danco'
                if section in config.sections():
                    danco_conn_info = {
                        'host': config.get(section, 'host'),
                        'port': config.getint(section, 'port'),
                        'name': config.get(section, 'name'),
                        'schema': config.get(section, 'schema'),
                        'user': config.get(section, 'user'),
                        'pw': config.get(section, 'pw'),
                    }

                    logger.info("Fetching region lookup from Danco")
                    pairs = get_pair_region_dict(danco_conn_info)
                else:
                    logger.warning(
                        'Config file does not contain credentials to connect to Danco. Region cannot be determined.'
                    )
                    pairs = {}

            if len(pairs) == 0:
                logger.warning("Cannot get region-pair lookup")

        ## Save pickle if selected
        if args.write_pickle:
            logger.info("Pickling region lookup")
            pickle.dump(pairs, open(args.write_pickle, "wb"))

        #### Test epsg
        try:
            spatial_ref = utils.SpatialRef(args.epsg)
        except RuntimeError as e:
            parser.error(e)

        #### Test if dst table exists
        if ogr_driver_str == 'ESRI Shapefile' and os.path.isfile(dst_ds):
            if args.overwrite:
                logger.info("Removing old index... %s" %
                            os.path.basename(dst_ds))
                if not args.dryrun:
                    ogrDriver.DeleteDataSource(dst_ds)
            elif not args.append:
                logger.error(
                    "Dst shapefile exists.  Use the --overwrite or --append options."
                )
                sys.exit(-1)

        if ogr_driver_str == 'FileGDB' and os.path.isdir(dst_ds):
            ds = ogrDriver.Open(dst_ds, 1)
            if ds:
                for i in range(ds.GetLayerCount()):
                    lyr = ds.GetLayer(i)
                    if lyr.GetName() == dst_lyr:
                        if args.overwrite:
                            logger.info(
                                "Removing old index layer: {}".format(dst_lyr))
                            del lyr
                            ds.DeleteLayer(i)
                            break
                        elif not args.append:
                            logger.error(
                                "Dst GDB layer exists.  Use the --overwrite or --append options."
                            )
                            sys.exit(-1)
                ds = None

        ## Postgres check - remove the existing layer only if --overwrite is given
        if ogr_driver_str == 'PostgreSQL':
            ds = ogrDriver.Open(dst_ds, 1)
            if ds:
                for i in range(ds.GetLayerCount()):
                    lyr = ds.GetLayer(i)
                    if lyr.GetName() == dst_lyr:
                        if args.overwrite:
                            logger.info(
                                "Removing old index layer: {}".format(dst_lyr))
                            del lyr
                            ds.DeleteLayer(i)
                            break
                        elif not args.append:
                            logger.error(
                                "Dst DB layer exists.  Use the --overwrite or --append options."
                            )
                            sys.exit(-1)
                ds = None