Example #1
    misc_group.add_argument('--override_defaults', action='store_true',
                            help="""Use this flag to not use any default attribute selection 
                            parameters: prod_code, sensors, spec_type, max_cc, max_off_nadir""")
    misc_group.add_argument('--dryrun', action='store_true',
                            help='Print information about selection, but do not write.')
    misc_group.add_argument('-v', '--verbose', action='store_true',
                            help='Set logging level to DEBUG.')

    args = parser.parse_args()

    # Logging
    if args.verbose:
        handler_level = 'DEBUG'
    else:
        handler_level = 'INFO'
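    # create_logger's 'sh' argument presumably attaches a stream (console)
    # handler at the given level.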
    logger = create_logger(__name__, 'sh', handler_level)

    # Parse args variables
    out_path = args.out_path
    selector = args.selector
    input_ids = args.ids
    id_field = args.id_field
    selector_field = args.selector_field
    join_field = args.join_field
    secondary_selector = args.secondary_selector
    sjoin = args.sjoin
    prod_code = args.prod_code
    sensors = args.sensors
    spec_type = args.spec_type
    min_date = args.min_date
    max_date = args.max_date
Example #2
    max_date = args.max_date
    multispec = args.multispectral
    out_filepaths = args.out_filepaths
    out_dem_fp = args.out_dem_footprint
    copy_to = args.copy_to
    dems_only = args.dems_only
    skip_ortho = args.skip_ortho
    flat = args.flat
    dryrun = args.dryrun
    verbose = args.verbose

    if verbose:
        log_lvl = 'DEBUG'
    else:
        log_lvl = 'INFO'
    logger = create_logger(__name__, 'sh', log_lvl)

    # Param
    dem_path = 'dem_path'
    if v4_only:
        strip_types = ['strips_v4']
    else:
        strip_types = ['strips', 'strips_v4']
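    # 'strips' and 'strips_v4' are assumed to be footprint layer names for the
    # two SETSM strip-DEM releases; v4_only (parsed from the elided arguments
    # above) limits the search to version-4 strips.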

    # Locate DEMs that match
    logger.info('Locating matching DEMs...')
    dems = dems_from_stereo(aoi_path=aoi_path,
                            coords=coords,
                            strip_types=strip_types,
                            months=months,
                            min_date=min_date,
Example #3
from tqdm import tqdm

from dem_utils.dem_selector import dem_selector
# from dem_selector import dem_selector
from dem_utils.dem_utils import (dems2aoi_ovlp, dems2dems_ovlp,
                                 get_matchtag_path, combined_density,
                                 get_dem_path, get_filepath_field,
                                 nunatak2windows)
# from dem_utils import (dems2aoi_ovlp, dems2dems_ovlp,
#                        get_matchtag_path, combined_density,
#                        get_dem_path, get_filepath_field)
from misc_utils.raster_clip import clip_rasters
from misc_utils.logging_utils import create_logger
from misc_utils.gpd_utils import remove_unused_geometries, write_gdf

logger = create_logger(__name__, 'sh', 'DEBUG')
sub_logger = create_logger('dem_utils', 'sh', 'INFO')
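# The script logger stays at DEBUG while messages from the imported dem_utils
# package are capped at INFO, keeping console output readable.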

# Params
id_col = 'pair'
lsuffix = 'd1'
rsuffix = 'd2'
dem_name = 'dem_name'
dem_path = 'dem_filepath'
combo_dens = 'combo_dens'
date_diff = 'date_diff'
doy_diff = 'DOY_diff'
inters_geom = 'inters_geom'
ovlp_perc = 'aoi_ovlp_perc'
rank = 'rank'
mtp = 'matchtag_filepath'
Example #4
# -*- coding: utf-8 -*-
"""
Created on Tue May 12 15:09:00 2020

@author: disbr007
"""

from dem_utils.dem_utils import combined_density
from misc_utils.logging_utils import create_logger

sublog = create_logger('dem_utils.dem_utils', 'sh', 'DEBUG')

mt1 = r'V:\pgc\data\scratch\jeff\ms\2020apr30\dems\raw\WV02_20120729_103001001A29A200_103001001B348300\WV02_20120729_103001001A29A200_103001001B348300_seg1_2m_matchtag.tif'
mt2 = r'V:\pgc\data\scratch\jeff\ms\2020apr30\dems\raw\W2W2_20100720_103001000677DF00_1030010006ACB500\W2W2_20100720_103001000677DF00_1030010006ACB500_seg2_2m_matchtag.tif'
aoi = r'V:\pgc\data\scratch\jeff\ms\2020apr30\aois\aoi1.shp'

cd = combined_density(mt1, mt2, aoi, clip=True)
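
# combined_density is assumed to return a single value: the matchtag density
# of the two rasters combined, computed over (and clipped to) the AOI.
print('Combined density: {}'.format(cd))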
Example #5
import time
import os

from tqdm import tqdm
from pprint import pprint

from otb_grm import otb_grm, create_outname
from misc_utils.logging_utils import create_logger

logger = create_logger(__name__, 'sh', 'INFO')
create_logger('misc_utils.gdal_tools', 'sh', 'WARNING')
create_logger('otb_grm', 'sh', 'WARNING')

thresholds = [750]
iterations = [0]
specs = [0.5, 0.7]
spats = [100, 300]

od = r'E:\disbr007\umn\2020sep27_eureka\otb_grm_testing'
fmt = 'vector'
img = r'E:\disbr007\umn' \
      r'\2020sep27_eureka\img\ortho_WV02_20140703_test_aoi' \
      r'\WV02_20140703013631_1030010032B54F00_14JUL03013631-' \
      r'M1BS-500287602150_01_P009_u16mr3413_pansh_test_aoi.tif'
criterion = 'bs'
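# 'bs' selects the Baatz & Schäpe homogeneity criterion in OTB's
# GenericRegionMerging application ('ed' and 'fls' are the other options).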

logger.info('starting...')
for s in tqdm(specs, desc='spectral'):
    for t in tqdm(thresholds, desc='thresholds'):
        for i in tqdm(iterations, desc='iterations'):
            for p in tqdm(spats, desc='spatial'):
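                # Hypothetical sketch of the elided loop body: name the output
                # from the parameter combination, then run the segmentation.
                # create_outname/otb_grm signatures are assumed, not shown here.
                # out = create_outname(od, img, criterion, t, s, p, i, fmt)
                # otb_grm(img=img, out=out, out_format=fmt, criterion=criterion,
                #         threshold=t, spectral=s, spatial=p, iterations=i)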
Example #6
# -*- coding: utf-8 -*-
"""
Created on Tue Jan 21 15:35:32 2020

@author: disbr007
"""

import argparse
import os

from archive_analysis_utils import grid_aoi
from misc_utils.logging_utils import create_logger

logger = create_logger(__file__, 'sh')
sub_logger = create_logger('archive_analysis_utils', 'sh', 'DEBUG')

if __name__ == '__main__':

    parser = argparse.ArgumentParser()

    parser.add_argument('aoi',
                        type=os.path.abspath,
                        help='Path to AOI to create grid of.')
    parser.add_argument('out_path',
                        type=os.path.abspath,
                        help='Path to write grid to.')
    parser.add_argument('--n_pts_x',
                        type=int,
                        help='Number of rows to create.')
    parser.add_argument('--n_pts_y',
                        type=int,
Example #7
"""
Created on Wed Jul  3 14:50:14 2019

@author: disbr007
"""
import argparse
import os

import pandas as pd
import geopandas as gpd
from shapely.geometry import Point

from coord_converter import remove_symbols
from misc_utils.logging_utils import create_logger

logger = create_logger(__name__, 'sh')


def process_tasking(xlsx, out_name):
    '''
    Takes an Excel workbook with a sheet named 'Targets' and converts the
    latitude and longitude points into a shapefile. Also writes out a renamed
    Excel file.
    xlsx: path to the Excel file
    out_name: name following the convention [first_initial][last_name]_[award_number]_[year], e.g.: bsmith_123456_2019-20
    '''
    # Read excel as pandas dataframe, store original column names for writing out
    logger.info("Reading excel sheet...")
    request = pd.read_excel(xlsx, sheet_name='Targets', dtype=str)
    cols = list(request)

    # Remove any degrees symbols
Example #8
def main(selection, destination, source_loc, high_res, med_res, tm,
         list_drives, list_missing_paths, write_copied, write_footprint,
         exclude_list, dryrun, verbose):

    #### Logging setup
    if verbose:
        log_level = 'DEBUG'
    else:
        log_level = 'INFO'
    logger = create_logger('ahap_copier.py', 'sh', handler_level=log_level)

    ## Params
    if platform.system() == 'Windows':
        SERVER_LOC = os.path.normpath(r'V:\pgc\data\aerial\usgs\ahap\photos')
        FOOTPRINT_LOC = r'E:\disbr007\general\aerial\AHAP\AHAP_Photo_Extents\AHAP_Photo_Extents.shp'
    elif platform.system() == 'Linux':
        SERVER_LOC = os.path.normpath(r'/mnt/pgc/data/aerial/usgs/ahap/photos')
        FOOTPRINT_LOC = r'/mnt/pgc/data/aerial/usgs/ahap/index/AHAP_Photo_Extents.shp'
    PHOTO_EXTENTS = 'Photo_Extents'
    FLIGHTLINES = 'Flightlines'
    CAMPAIGN = 'AHAP'
    JOIN_SEL = 'PHOTO_ID'
    JOIN_FP = 'unique_id'
    FP = 'usgs_index_aerial_image_archive'
    DB = 'imagery'
    FILEPATH = 'filepath'
    FILENAME = 'filename'
    SRC_DRIVE = 'src_drive'
    SERIES_FIELD = 'series'
    SERIES_BOTH = 'both'
    SERIES_HIGH = 'high_res'
    SERIES_MED = 'medium_res'

    DRIVE_PATH = 'drive_path'
    MNT_PATH = 'mounted_path'

    RELATIVE_PATH = 'relative_path'
    SERVER_PATH = 'server_path'
    FROM_DRIVE = 'drives'
    FROM_SERVER = 'server'
    if source_loc == FROM_DRIVE:
        NOT_MOUNTED = 'NOT_MOUNTED'
    elif source_loc == FROM_SERVER:
        NOT_MOUNTED = 'NOT_ON_SERVER'
    DST_PATH = 'dst'
    DST_EXISTS = 'dst_exists'
    SRC_PATH = 'src'

    # transfer methods
    tm_copy = 'copy'
    tm_link = 'link'

    def get_active_drives():
        """
        List all active drives.
        """
        drive_list = []
        for drive in range(ord('A'), ord('Z') + 1):  # inclusive of 'Z'
            if os.path.exists(chr(drive) + ':'):
                drive_list.append(chr(drive) + ':')
        return drive_list
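    # Drive-letter probing is Windows-specific; when copying FROM_SERVER the
    # server paths built below are used instead.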

    ## Build server paths
    def create_relative_paths(row, FILENAME):
        """
        Create relative path for each row
        """
        # Determine series subdir (high or med)
        if row['series'] == 'high_res':
            resolution = 'high'
        elif row['series'] == 'medium_res':
            resolution = 'med'
        else:
            raise ValueError('Unexpected series: {}'.format(row['series']))

        roll_dir = 'AB{}ROLL'.format(row[FILENAME][:-8])

        relative_path = os.path.join(resolution, roll_dir, row[FILENAME])

        return relative_path

    def find_drive_location(row, active_drives, DRIVE_PATH):
        """
        Check if each row (file) is on any of the active drives.
        If so return that drive letter.
        """
        # TODO: Figure out how to handle missing files -> subset aia before copying
        possible_filepaths = [
            os.path.join(letter, row[DRIVE_PATH]) for letter in active_drives
        ]
        actual_filepath = [
            fp for fp in possible_filepaths if os.path.exists(fp)
        ]
        if len(actual_filepath) == 0:
            filepath = NOT_MOUNTED
        elif len(actual_filepath) == 1:
            filepath = actual_filepath[0]
        else:
            filepath = 'TWO LOCATIONS?'

        return filepath

    def find_server_location(row):
        if os.path.exists(row[SERVER_PATH]):
            filepath = row[SERVER_PATH]
        else:
            filepath = NOT_MOUNTED

        return filepath

    def load_selection(input_path, JOIN_SEL, exclude_list=exclude_list):
        """
        Loads the selection and returns a list of ids and count.
        """
        logger.info('Loading selection...')
        ## Load selection footprint
        if input_path.endswith('shp'):
            selection = gpd.read_file(input_path)
            # selection_count = len(selection)
            if exclude_list:
                with open(exclude_list, 'r') as el:
                    exclude_ids = el.readlines()
                    exclude_ids = [ei.strip('\n') for ei in exclude_ids]
                    logger.debug('Excluding IDs:\n{}'.format(
                        '\n'.join(exclude_ids)))
                selection = selection[~selection[JOIN_SEL].isin(exclude_ids)]
            selection_unique_ids = list(selection[JOIN_SEL].unique())
        elif input_path.endswith('txt'):
            selection_unique_ids = read_ids(input_path)
        else:
            raise ValueError(
                'Unsupported selection file type: {}'.format(input_path))

        selection_count = len(set(selection_unique_ids))
        logger.info('Scenes in selection: {:,}'.format(selection_count))
        selection_unique_ids_str = str(selection_unique_ids).replace(
            '[', '').replace(']', '')

        return selection_unique_ids_str, selection_count

    def load_table(FP, JOIN_FP, selection_unique_ids_str, SERIES, SERIES_BOTH,
                   SERIES_FIELD):
        logger.debug('Loading danco AHAP table...')
        ## Load aerial source table
        # Build where clause, including selecting only ids in selection
        # where = "(sde.{}.{} IN ({}))".format(FP, JOIN_FP, selection_unique_ids_str)
        where = "({}.{} IN ({}))".format(FP, JOIN_FP, selection_unique_ids_str)
        # Add series if only medium or high is desired, else add nothing and load both
        if SERIES != SERIES_BOTH:
            # where += " AND (sde.{}.{} = '{}')".format(FP, SERIES_FIELD, SERIES)
            where += " AND ({}.{} = '{}')".format(FP, SERIES_FIELD, SERIES)
        aia = query_footprint(FP, db=DB, table=True, where=where)
        aia_ct = len(aia)
        # Remove duplicates - there are identical records, but on different src_drives
        # Mainly seen on src_drives: USGS_s31 and USGS_s71
        # If this actually removes anything, a debug message will be logged.
        # TODO: Add option to keep all locations, only useful for copying from drives
        #       as there should be one of each file on the server
        aia = aia.drop_duplicates(subset=JOIN_FP)
        aia_dd = len(aia)
        if aia_dd != aia_ct:
            logger.debug(
                'Duplicates dropped, identical records on multiples drives.')

        logger.info('Records loaded in AHAP table: {:,}'.format(len(aia)))

        return aia

    ## TODO: Add support for FLIGHTLINES selection inputs
    ## Determine type of selection input
    #selection_fields = list(selection)
    #if "PHOTO_ID" in selection_fields:
    #    selection_type = PHOTO_EXTENTS
    #else:
    #    selection_type = FLIGHTLINES

    ## Check arguments
    if source_loc != FROM_DRIVE and source_loc != FROM_SERVER:
        logger.error('''Invalid "source_loc" argument. 
                            Must be one of {} or {}'''.format(
            FROM_DRIVE, FROM_SERVER))
        raise ValueError()
    if not os.path.exists(selection):
        logger.error(
            '''Selection path does not exist.\n{}'''.format(selection))
        raise ValueError()

    if not os.path.isdir(destination):
        logger.error('''Destination path does not exist or is not a directory,
                         please provide an existing directory.\n{}'''.format(
            destination))
        raise ValueError()
    if high_res and med_res:
        SERIES = SERIES_BOTH
    elif high_res:
        SERIES = SERIES_HIGH
    elif med_res:
        SERIES = SERIES_MED
    else:
        logger.error('Please specify at least one of high_res or med_res.')
        raise ValueError()

    ### Get drive paths
    ## Load input table and join to danco table to create filepaths
    selection_unique_ids_str, selection_count = load_selection(
        selection, JOIN_SEL=JOIN_SEL)
    aia = load_table(FP, JOIN_FP, selection_unique_ids_str, SERIES,
                     SERIES_BOTH, SERIES_FIELD)

    #### Create source paths: if they existed on the drive and server
    # Convert unix path to os style -- only necessary/does anything for Windows
    aia[DRIVE_PATH] = aia[FILEPATH].apply(os.path.normpath)
    # Create a relative path for the destination directory, eg: 'high/ABxxxxROLL/xxxx.tif.gz
    aia[RELATIVE_PATH] = aia.apply(
        lambda x: create_relative_paths(x, FILENAME), axis=1)
    # Create the location the file would be at if it existed on the server
    aia[SERVER_PATH] = aia.apply(
        lambda x: os.path.join(SERVER_LOC, x[RELATIVE_PATH]), axis=1)

    #### Create destination path
    # Create full destination path
    aia[DST_PATH] = aia.apply(
        lambda x: os.path.join(destination, x[RELATIVE_PATH]), axis=1)
    # Check if destination exists
    aia[DST_EXISTS] = aia[DST_PATH].apply(lambda x: os.path.exists(x))

    if source_loc == FROM_DRIVE:
        ## Get all active drives to use in check for files mounted on drives
        active_drives = get_active_drives()
        active_drives = [d for d in active_drives if os.path.ismount(d)]
        #        ## Get letters of mounted aerial imagery drives
        #        # first level subdirectories on drives are './hsm' or './AHAP Tif files'
        #        aerial_imagery_subdirs = ['hsm{}'.format(x) for x in range(0,10)]
        #        aerial_imagery_subdirs.append('hsm')
        #        aerial_imagery_subdirs.append('AHAP Tif files')
        #        # Check if either of the subdir patterns exist on all mounted drives to
        #        # decide if the mounted drive is a drive containing AHAP, thus skipping
        #        # C and other drives (..unless the above are at prefixes are at the root)
        #        ahap_drives = [d for d in active_drives
        #                         if True in [os.path.exists(os.path.join(d, sd))
        #                                     for sd in aerial_imagery_subdirs]]
        aia[MNT_PATH] = aia.apply(
            lambda x: find_drive_location(x, active_drives, DRIVE_PATH),
            axis=1)
        SRC_PATH = MNT_PATH
        ahap_drives = set(list(aia[SRC_DRIVE]))
        logger.debug('Drives containing AHAP imagery:\n{}'.format(
            '\n'.join(ahap_drives)))
    elif source_loc == FROM_SERVER:
        aia[MNT_PATH] = aia.apply(lambda x: find_server_location(x), axis=1)
        SRC_PATH = SERVER_PATH

    #### Selected files for copying:
    #### only files that have a valid source drive mounted
    #### (n/a for FROM_SERVER -- all should be 'mounted', but this will skip missing)
    aia_mounted = copy.deepcopy(aia[aia[MNT_PATH] != NOT_MOUNTED])
    # aia_mounted = aia_mounted[aia_mounted[DST_EXISTS] == True]

    high_status = 'Located {:,}/{:,} {} from selection on {}...'.format(
        len(aia_mounted[aia_mounted[SERIES_FIELD.lower()] == SERIES_HIGH]),
        selection_count, SERIES_HIGH, source_loc)
    med_status = 'Located {:,}/{:,} {} from selection on {}...'.format(
        len(aia_mounted[aia_mounted[SERIES_FIELD.lower()] == SERIES_MED]),
        selection_count, SERIES_MED, source_loc)

    # Print status messages
    if SERIES == SERIES_BOTH:
        logger.info(high_status)
        logger.info(med_status)
    elif SERIES == SERIES_HIGH:
        logger.info(high_status)
    elif SERIES == SERIES_MED:
        logger.info(med_status)

    ## If list drives, get src drive names, like 'USGS_s74'.
    ## These are labeled on the drives.
    ## Then exit
    if list_drives:
        src_drives = list(aia[SRC_DRIVE].unique())
        logger.info('Drives required for copying selection to\n{}:\n{}'.format(
            destination, '\n'.join(src_drives)))
        if list_missing_paths:
            for sd in src_drives:
                # Limit the report to files expected on this drive
                missing_paths = list(
                    aia[(aia[SRC_DRIVE] == sd)
                        & (aia[DST_EXISTS] == False)][FILEPATH])
                logger.info('Drive: {}'.format(sd))
                logger.info('Missing paths:\n{}'.format(
                    '\n'.join(missing_paths)))
        sys.exit()

    ###  Do copying
    # Create file of already copied to be use for excluding in subsequent copying
    if write_copied:
        if os.path.exists(write_copied):
            wc_open_mode = 'a'
        else:
            wc_open_mode = 'w'
        wc = open(write_copied, wc_open_mode)

    ## Copy loop
    # progress bar setup
    # manager = enlighten.get_manager()
    # pbar = manager.counter(total=len(aia_mounted[SRC_PATH]), desc='Copying:', unit='files')
    logger.info('Copying files from {} to {}...'.format(
        source_loc, destination))
    for src, dst in zip(aia_mounted[SRC_PATH], aia_mounted[DST_PATH]):
        # Make directory tree if necessary
        dst_dir = os.path.dirname(dst)
        if not os.path.exists(dst_dir):
            logger.debug('Making directories: \n{}'.format(dst_dir))
            os.makedirs(dst_dir)
        if not os.path.exists(dst):
            logger.debug('Copying \n{} -> \n{}\n'.format(src, dst))
            if not dryrun:
                if tm == tm_copy:
                    shutil.copyfile(src, dst)
                elif tm == tm_link:
                    os.link(src, dst)
                if write_copied:
                    wc.write(os.path.basename(src).split('.')[0])
                    wc.write('\n')
        else:
            logger.debug(
                'Destination file already exists, skipping: \n{}\n{}'.format(
                    src, dst))
        # pbar.update()
    if write_copied:
        wc.close()

    if write_footprint:
        logger.info('Writing footprints...')
        aia_footprints = gpd.read_file(FOOTPRINT_LOC)
        aia_mounted = aia_mounted[[
            'unique_id',
            'campaign',
            'series',
            'filename',
            'flightline',
            'file_sz_mb',
            'photo_id',
            'relative_path',
        ]]
        aia_footprints = aia_footprints.merge(aia_mounted,
                                              left_on='PHOTO_ID',
                                              right_on='unique_id',
                                              how='inner')
        aia_footprints.to_file(write_footprint)

    logger.debug('Done')
Example #9
@author: disbr007
Creates a shapefile that is a refresh of the selected region.
"""
import argparse
import datetime
import os

import geopandas as gpd

from selection_utils.query_danco import query_footprint, mono_noh, stereo_noh, generate_rough_aoi_where
from misc_utils.id_parse_utils import date_words, remove_onhand, onhand_ids
from misc_utils.logging_utils import create_logger
from misc_utils.gpd_utils import select_in_aoi

logger = create_logger(__name__, 'sh', 'DEBUG')

# Params
loc_name_fld = 'project'


def refresh_region_lut(refresh_region='polar_hma_above'):
    '''
    Uses a refresh-region shortname to return the relevant region names in the AOI shapefile.
    refresh_region: string, supported types ['polar_hma_above', 'nonpolar', 'global', 'polar']
    '''
    logger.debug('Refresh region: {}'.format(refresh_region))
    supported_refreshes = ['polar_hma_above', 'nonpolar', 'global', 'polar']
    # TODO: Check refresh_region = nonpolar to make sure it covers everything (HMA etc.)
    if refresh_region not in supported_refreshes:
        logger.warning(
Example #10
    args = parser.parse_args()

    image_source = args.image_source
    out = args.out
    out_dir = args.out_dir
    mode = args.mode
    spatialr = args.spatial_radius
    ranger = args.range_radius
    minsize = args.minsize
    tilesize_x = args.tilesize_x
    tilesize_y = args.tilesize_y
    overwrite = args.overwrite

    # Set up console logger
    handler_level = 'INFO'
    logger = create_logger(__name__, 'sh', handler_level=handler_level)

    # Build out path and determine mode
    if out:
        mode_lut = {'.shp': 'vector', '.tif': 'raster'}
        ext = os.path.splitext(out)[1]
        if not mode:
            mode = mode_lut[ext]
        else:
            if mode != mode_lut[ext]:
                logger.error(
                    """Selected mode does not match out file extension:
                                mode: {} != {}""".format(mode, ext))
                sys.exit()

    if out is None:
Example #11
Finds imagery in a directory based on an input list of
scene_ids or catalog_ids and moves the matching imagery files to a
destination directory.
***SKIPPING BROWSE FILES CURRENTLY***
TODO: Incorporate BROWSE files into parse_filename function
"""

import argparse
import os
import shutil
import tqdm

from misc_utils.id_parse_utils import parse_filename, read_ids, write_ids
from misc_utils.logging_utils import create_logger

logger = create_logger(os.path.basename(__file__), 'sh', handler_level='INFO')


def match_and_move(ids,
                   src_dir,
                   dst_dir,
                   match_field='catalog_id',
                   dryrun=False):

    logger.info('{} IDs found.'.format(len(set(ids))))
    logger.debug(ids)
    matches = []
    catids_found = []
    logger.info('Locating matching files...')
    for root, dirs, files in os.walk(src_dir):
        for f in files:
Example #12
import obia_utils.otb_grm as otb_grm
import obia_utils.otb_edge_extraction as otb_ee
# from obia_utils.otb_edge_extraction import otb_edge_extraction
from obia_utils.cleanup_objects import cleanup_objects
from obia_utils.calc_zonal_stats import calc_zonal_stats
from obia_utils.ImageObjects import ImageObjects

from classify_rts import classify_rts, grow_rts_candidates, grow_rts_simple

# TODO:
#  Standardize naming - make functions:
#   -seg_name() (exists)
#   -clean_name()
#   -zonal_stats_name()
# %%
logger = create_logger(__name__, 'sh', 'INFO')

# External py scripts
PANSH_PY = r'C:\code\imagery_utils\pgc_pansharpen.py'
NDVI_PY = r'C:\code\imagery_utils\pgc_ndvi.py'

# Config keys
seg = 'seg'
alg = 'algorithm'
params = 'params'
cleanup = 'cleanup'
out_objects = 'out_objects'
out_dir = 'out_dir'
out_seg = 'out_seg'
mask_on = 'mask_on'
zonal_stats = 'zonal_stats'
Example #13
import argparse
import os
from pathlib import Path

from gdal_tools import clip_minbb
from misc_utils.logging_utils import create_logger

logger = create_logger(__name__, 'sh', 'INFO')
sublogger = create_logger('gdal_tools', 'sh', 'INFO')

if __name__ == '__main__':

    parser = argparse.ArgumentParser()

    parser.add_argument('-r', '--rasters', nargs='+', type=os.path.abspath,
                        help='Paths to rasters to clip. Can be multiple '
                             'arguments, a text file of rasters, a '
                             'directory of rasters, in which case "--ext" '
                             'will be used to identify rasters, or a '
                             'combination.')
    parser.add_argument('-o', '--out_dir', type=os.path.abspath,
                        help='Directory to write clipped rasters to.')
    parser.add_argument('-s', '--suffix', type=str,
                        help='Suffix to add to raster names when writing.')
    parser.add_argument('--ext', type=str,
                        help='The extension to identify rasters when '
                             'providing a directory.')

    args = parser.parse_args()

    args.rasters
Example #14
# -*- coding: utf-8 -*-
"""
Created on Wed Dec 11 12:11:46 2019

@author: disbr007
"""

import argparse
import os
import sys

from misc_utils.id_parse_utils import read_ids, write_ids, parse_filename
from misc_utils.logging_utils import create_logger

logger = create_logger('id_verify.py', 'sh')


def imagery_directory_IDs(img_dir, id_of_int):
    """
    Parses the filenames of imagery in a given directory and returns a list of
    the specified ID type.
    
    Parameters:
    img_dir   (str) : path to directory of imagery
    id_of_int (str) : the type of ID to return, one of CATALOG_ID or SCENE_ID
    
    Returns:
    (set) : set of IDs of the specified type
    """
    # PARSE IDS FROM IMG_DIR
    dir_ids = []
Example #15
Created on Fri Mar  6 14:22:54 2020

@author: disbr007
"""
import argparse
import datetime
import os
from pathlib import Path
import platform
import subprocess
from subprocess import PIPE

from misc_utils.logging_utils import create_logger, create_logfile_path
from misc_utils.gdal_tools import gdal_polygonize

logger = create_logger(__name__, 'sh', 'DEBUG')

# Constants
# Init OTB env
if platform.system() == 'Windows':
    otb_init = r"C:\OTB-7.1.0-Win64\OTB-7.1.0-Win64\otbenv.bat"
elif platform.system() == 'Linux':
    otb_init = r"module load OTB"

GRADIENT = 'gradient'
SOBEL = 'sobel'
TOUZI = 'touzi'


# Function definition
def run_subprocess(command):
Example #16
import argparse
import os
from pathlib import Path, PurePath
import subprocess
from subprocess import PIPE

from misc_utils.logging_utils import create_logger


logger = create_logger(__name__, 'sh', 'INFO')

# Params
wbt = 'whitebox_tools.exe'
med = 'MaxElevationDeviation'
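# 'MaxElevationDeviation' is the WhiteboxTools tool name; it is presumably
# passed to whitebox_tools.exe via its --run flag in wbt_med below.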


def run_subprocess(command):
    proc = subprocess.Popen(command, stdout=PIPE, stderr=PIPE, shell=True)
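    # shell=True hands the command string to the shell for parsing;
    # communicate() blocks until the process exits and returns the captured
    # stdout/stderr bytes.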
    # proc.wait()
    output, error = proc.communicate()
    logger.debug('Output: {}'.format(output.decode()))
    logger.debug('Err: {}'.format(error.decode()))


def wbt_med(dem, out_dir=None, out_mag=None, out_scale=None,
            min_scale=1, max_scale=50, step=5, vw=False,
            dryrun=False):
    logger.info('Setting up whitebox_tool.exe MaxElevationDeviation')
    if not isinstance(dem, PurePath):
        dem = Path(dem)
    if not isinstance(out_dir, PurePath):
Example #17
# -*- coding: utf-8 -*-
"""
Created on Wed Nov 20 10:45:25 2019

@author: disbr007
"""
import os

import arcpy

#from misc_utils.id_parse_utils import pgc_index_path
from misc_utils.logging_utils import create_logger

logger = create_logger(__name__, 'sh', 'DEBUG')

arcpy.env.overwriteOutput = True


def pgc_index_path(ids=False):
    '''
    Returns the path to the most recent pgc index from a manually updated
    text file containing the path.
    '''
    with open(r'C:\code\pgc-code-all\config\pgc_index_path.txt', 'r') as src:
        content = src.readlines()
    if not ids:
        index_path = content[0].strip('\n')
    else:
        index_path = content[1].strip('\n')
    logger.debug('PGC index path loaded: {}'.format(index_path))
Example #18
@author: disbr007
"""

import os
import logging
import numpy as np

from osgeo import gdal, ogr, osr

from misc_utils.raster_clip import clip_rasters
from misc_utils.gdal_tools import auto_detect_ogr_driver, remove_shp
from misc_utils.logging_utils import create_logger


#### Logging setup
logger = create_logger('valid_data', 'sh', 'DEBUG')


def valid_data(gdal_ds, band_number=1, valid_value=None, write_valid=False, out_path=None):
    """
    Takes a gdal datasource and determines the number of
    valid pixels in it. Optionally, writing out the valid
    data as a binary raster.
    gdal_ds      (osgeo.gdal.Dataset):    osgeo.gdal.Dataset
    write_valid  (boolean)           :    True to write binary raster, 
                                          must supply out_path
    out_path     (str)               :    Path to write binary raster

    Writes     (Optional) Valid data mask as raster

    Returns
Example #19
import argparse
import os
from pathlib import Path

import geopandas as gpd
import pandas as pd
from shapely.geometry import Point, box

from misc_utils.logging_utils import create_logger

logger = create_logger(__name__, 'sh', 'INFO')

if __name__ == '__main__':
    parser = argparse.ArgumentParser()

    geom_type_group = parser.add_mutually_exclusive_group(required=True)
    src_file_args = parser.add_argument_group('Source file args.')
    parser.add_argument('-o',
                        '--out_lyr',
                        type=os.path.abspath,
                        required=True,
                        help='Path to write layer out to.')
    geom_type_group.add_argument('-p',
                                 '--point',
                                 nargs=2,
                                 action='append',
                                 type=float,
                                 help='Point to add: x y')
    geom_type_group.add_argument(
        '-bb',
        '--bounding_box',
Example #20
import argparse
import os

from id_parse_utils import combine_ids
from misc_utils.logging_utils import create_logger

logger = create_logger(__name__, 'sh', 'INFO')
sublogger = create_logger('id_parse_utils', 'sh', 'DEBUG')


def main(args):
    id_lists = args.id_lists
    fields = args.fields
    out_ids = args.out_ids

    combine_ids(id_lists, fields=fields, write_path=out_ids)
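    # combine_ids (signature assumed from this call) reads IDs from each input
    # file, using the matching entry in fields where given, and writes the
    # combined list to write_path.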


if __name__ == '__main__':
    parser = argparse.ArgumentParser()

    parser.add_argument('--id_lists', nargs='+', type=os.path.abspath,
                        help='Paths to files with IDs to combine.')
    parser.add_argument('--fields', nargs='+',
                        help='Ordered list of fields used to locate IDs in the '
                             'files; "None" if a file is plain text.')
    parser.add_argument('--out_ids', type=os.path.abspath,
                        help='Path to write the combined list of IDs to.')

    args = parser.parse_args()
    print(args)
    main(args)