Example #1
import logging.config
import os
import platform
import subprocess

import geopandas as gpd

from misc_utils.logging_utils import LOGGING_CONFIG


def main(aoi_path, out_mosaic, dryrun=False, verbose=False):
    """Mosaic the TanDEM-X 90m tiles that intersect an AOI into a single VRT."""
    # Logging setup
    if verbose:
        handler_level = 'DEBUG'
    else:
        handler_level = 'INFO'

    logging.config.dictConfig(LOGGING_CONFIG(handler_level))
    logger = logging.getLogger(__name__)
    
    def run_subprocess(command):
        proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
        output, error = proc.communicate()
        logger.info('Output: {}'.format(output))
        logger.info('Err: {}'.format(error))

    # Parameters
    if platform.system() == 'Windows':
        tandemx_dir = r'V:\pgc\data\elev\dem\tandem-x\90m'
    elif platform.system() == 'Linux':
        tandemx_dir = r'/mnt/pgc/data/elev/dem/tandem-x/90m'

    tiles_dir = os.path.join(tandemx_dir, '1deg_cells')
    tiles_idx = os.path.join(tandemx_dir, 'index', 'tandem-x_90m.shp')

    logger.info('Loading tiles index...')
    ti = gpd.read_file(tiles_idx)
    logger.info('Loading AOI...')
    aoi = gpd.read_file(aoi_path)
    if aoi.crs != ti.crs:
        aoi = aoi.to_crs(ti.crs)
    logger.info('Locating intersecting tiles...')
    selected_tiles = gpd.overlay(aoi, ti)
    logger.info('Number of tiles located: {}'.format(len(selected_tiles)))

    # ti['fullpath'] = ti['location'].apply(lambda x: os.path.join(tiles_dir, x))
    tile_paths = ' '.join([os.path.join(tiles_dir, x) for x in list(selected_tiles['location'])])
    logger.info('Mosaicking TanDEM-X tiles...')
    # TODO: FIX no data values
    command = 'gdalbuildvrt {} {} -vrtnodata -32767'.format(out_mosaic, tile_paths)
    logger.debug('Command:\n{}'.format(command))

    if not dryrun:
        run_subprocess(command)
        logger.info('Mosaic complete.')
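
The same mosaic step can also be done through GDAL's Python bindings instead of shelling out to gdalbuildvrt. The sketch below is only an alternative illustration, not part of the original script, and it assumes tile_paths is kept as a Python list of tile paths rather than the space-joined string built above:

from osgeo import gdal

def build_vrt_mosaic(out_mosaic, tile_paths, nodata=-32767):
    """Sketch: build the VRT with gdal.BuildVRT, mirroring 'gdalbuildvrt ... -vrtnodata -32767'."""
    vrt_options = gdal.BuildVRTOptions(VRTNodata=nodata)  # VRT-level nodata value
    vrt = gdal.BuildVRT(out_mosaic, tile_paths, options=vrt_options)
    vrt = None  # dereference to flush the .vrt to disk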
Example #2
a mock script to be tuned according to your needs.
Zoran Čučković
"""

import argparse
import logging.config
import numpy as np
import os

from osgeo import gdal
from tqdm import tqdm

from misc_utils.logging_utils import create_logger, LOGGING_CONFIG

handler_level = 'INFO'
logging.config.dictConfig(LOGGING_CONFIG(handler_level))
logger = logging.getLogger(__name__)


def calc_TPI(win_size, elevation_model, output_model=None, count_model=None):
    """
    TODO:
    Write docstring.
    """
    if output_model is None:
        output_model = os.path.join(
            os.path.split(elevation_model)[0],
            '{}_TPI{}.tif'.format(os.path.basename(elevation_model), win_size))
        logger.info(
            'No output model path provided, using: {}'.format(output_model))
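
The snippet is cut off after the output-path handling. The core of a TPI calculation is the difference between each cell and the mean of its surrounding window; the sketch below shows only that step, assumes the DEM fits in memory, and uses scipy.ndimage, which the original imports do not include:

import numpy as np
from scipy import ndimage  # assumption: not imported by the original script

def tpi_from_array(dem, win_size, nodata=None):
    """TPI = elevation minus the focal mean of a win_size x win_size window."""
    dem = dem.astype('float64')
    if nodata is not None:
        dem[dem == nodata] = np.nan
    # NaN-aware focal mean: the window means of the values and of the valid mask
    # share the same denominator, so their ratio is the mean over valid cells only.
    valid = np.isfinite(dem)
    filled = np.where(valid, dem, 0.0)
    mean_vals = ndimage.uniform_filter(filled, size=win_size)
    mean_cnt = ndimage.uniform_filter(valid.astype('float64'), size=win_size)
    focal_mean = np.divide(mean_vals, mean_cnt,
                           out=np.full_like(dem, np.nan), where=mean_cnt > 0)
    return dem - focal_mean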
Example #3
statistics thresholds to classify.
"""

import logging.config
import os
import random

import matplotlib.pyplot as plt
import pandas as pd
import geopandas as gpd
from shapely.geometry import Point

from misc_utils.logging_utils import LOGGING_CONFIG
# from archive_analysis.archive_analysis_utils import grid_aoi

logging.config.dictConfig(LOGGING_CONFIG('DEBUG'))
logger = logging.getLogger(__name__)


def random_points_within(poly, num_pts):
    """
    Generates random points within a polygon

    Parameters
    ----------
    poly : shapely.geometry.Polygon
        Polygon to create features within.
    num_pts : INT
        Number of points to create.

    Returns
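
The body of random_points_within is not included in the snippet. A common way to implement the behaviour the docstring describes is rejection sampling: draw candidates uniformly in the polygon's bounding box and keep those that fall inside. A minimal sketch (not necessarily the original implementation):

import random
from shapely.geometry import Point

def random_points_within_sketch(poly, num_pts):
    """Draw candidates in poly's bounding box; keep the ones inside the polygon."""
    minx, miny, maxx, maxy = poly.bounds
    points = []
    while len(points) < num_pts:
        candidate = Point(random.uniform(minx, maxx), random.uniform(miny, maxy))
        if poly.contains(candidate):
            points.append(candidate)
    return points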
Example #4
import logging.config
import sys

import pandas as pd
import geopandas as gpd
from shapely.geometry import Point

from misc_utils.logging_utils import LOGGING_CONFIG
# NOTE: query_footprint and write_stereopair_ids come from the project's own
# footprint/selection utilities; their module path is not shown in this snippet.


def dem_selector(AOI_PATH,
                 COORDS=None,
                 MONTHS=None,
                 MIN_DATE=None,
                 MAX_DATE=None,
                 MULTISPEC=False,
                 OUT_STEREO_FP=None,
                 OUT_ID_LIST=None,
                 CLOUDCOVER=None):
    """
    Select stereopairs over an AOI (or around a point given by COORDS) from
    the danco database.

    Parameters
    ----------
    AOI_PATH : os.path.abspath
        Path to AOI shapefile.
    COORDS : LIST
        xy coordinates in WGS84 to use for selection.
    MONTHS : LIST, optional
        List of month integers to include. The default is None.
    MIN_DATE : STR, optional
        Minimum DEM date to include. E.g '2015-01-30'. The default is None.
    MAX_DATE : STR, optional
        Maximum DEM date to include. The default is None.
    MULTISPEC : BOOL, optional
        True to only select stereo from multispectral sources. The default is False.
    CLOUDCOVER : INT, optional
        Only include pairs with cloudcover below this threshold. The default is None.
    OUT_STEREO_FP : os.path.abspath, optional
        Path to write DEM footprints shapefile to. The default is None.
    OUT_ID_LIST : os.path.abspath, optional
        Path to write the catalogids of the selected stereopairs to. The default is None.

    Returns
    -------
    geopandas.GeoDataFrame : Dataframe of footprints matching selection.

    """
    #### PARAMETERS ####
    STEREO_FP = 'dg_imagery_index_stereo'  # stereo footprint tablename
    CATALOGID = 'catalogid'  # field name in danco footprint for catalogids
    DATE_COL = 'acqdate'  # name of date field in stereo footprint
    SENSOR_COL = 'platform'  # name of sensor field in stereo footprint
    PAIRNAME_COL = 'pairname'  # name of field with unique pairnames
    CLOUDCOVER_COL = 'cloudcover'  # name of field with cloudcover
    STEREOPAIR_ID = 'stereopair'  # name of field with stereopair catalogid

    MONTH_COL = 'month'  # name of field to create in footprint if months are requested

    #### SETUP ####
    def check_where(where):
        """Checks if the input string exists already,
           if so formats correctly for adding to SQL"""
        if where:
            where += ' AND '
        return where
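
    # For illustration: check_where("x1 > 0") returns "x1 > 0 AND ", ready for the
    # next condition to be appended; check_where("") returns "" so the first
    # condition is added without a dangling AND.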

    # Create logger
    logging.config.dictConfig(LOGGING_CONFIG('DEBUG'))
    logger = logging.getLogger(__name__)

    #### LOAD INPUTS ####
    # Load AOI
    logger.info('Reading AOI...')
    if AOI_PATH:
        aoi = gpd.read_file(AOI_PATH)
    elif COORDS:
        lon = float(COORDS[0])
        lat = float(COORDS[1])
        loc = Point(lon, lat)
        aoi = gpd.GeoDataFrame(geometry=[loc], crs="EPSG:4326")

    # Load stereopairs footprint
    # Get bounds of aoi to reduce query size, with padding
    minx, miny, maxx, maxy = aoi.total_bounds
    pad = 10
    # Get DEM footprint crs - this loads no records, but it
    # will allow getting the crs of the footprints
    stereo = query_footprint(STEREO_FP, where="1=2")
    # Load stereo
    # Build SQL clause to select stereo in the area of the AOI, helps with load times
    stereo_where = """x1 > {} AND x1 < {} AND 
                      y1 > {} AND y1 < {}""".format(minx - pad, maxx + pad,
                                                    miny - pad, maxy + pad)
    # Add date constraints to SQL
    if MIN_DATE:
        stereo_where = check_where(stereo_where)
        stereo_where += """{} > '{}'""".format(DATE_COL, MIN_DATE)
    if MAX_DATE:
        stereo_where = check_where(stereo_where)
        stereo_where += """{} < '{}'""".format(DATE_COL, MAX_DATE)
    # Add to SQL clause to just select multispectral sensors
    if MULTISPEC:
        stereo_where = check_where(stereo_where)
        stereo_where += """{} IN ('WV02', 'WV03')""".format(SENSOR_COL)
    if CLOUDCOVER is not None:  # explicit check so a threshold of 0 is not ignored
        stereo_where = check_where(stereo_where)
        stereo_where += """{} <= {}""".format(CLOUDCOVER_COL, CLOUDCOVER)

    # Load DEM footprints with SQL
    stereo = query_footprint(STEREO_FP, where=stereo_where)

    # If only certain months requested, reduce to those
    if MONTHS:
        stereo['temp_date'] = pd.to_datetime(stereo[DATE_COL])
        stereo[MONTH_COL] = stereo['temp_date'].dt.month
        stereo.drop(columns=['temp_date'], inplace=True)
        stereo = stereo[stereo[MONTH_COL].isin(MONTHS)]

    logger.info(
        'Stereopairs matching criteria (before AOI selection): {}'.format(
            len(stereo)))

    # Check coordinate system match and if not reproject AOI
    if aoi.crs != stereo.crs:
        aoi = aoi.to_crs(stereo.crs)

    #### SELECT stereo OVER ALL AOIS ####
    logger.info('Selecting stereopairs over AOI...')
    # Select by location
    # stereo = gpd.overlay(stereo, aoi, how='intersection')
    stereo = gpd.sjoin(stereo, aoi, how='inner')
    # Remove duplicate footprints from the join (a pair matches once per intersecting AOI feature)
    stereo = stereo.drop_duplicates(subset=(PAIRNAME_COL))
    logger.info('Stereopairs found over AOI: {}'.format(len(stereo)))
    if len(stereo) == 0:
        logger.error('No stereopairs found over AOI, exiting...')
        sys.exit()

    #### WRITE FOOTPRINT AND TXT OF MATCHES ####
    # Write footprint out
    if OUT_STEREO_FP:
        logger.info(
            'Writing stereopair footprint to file: {}'.format(OUT_STEREO_FP))
        stereo.to_file(OUT_STEREO_FP)
    # Write list of IDs out
    if OUT_ID_LIST:
        logger.info(
            'Writing list of catalogids to file: {}'.format(OUT_ID_LIST))
        write_stereopair_ids(list(stereo[CATALOGID]),
                             list(stereo[STEREOPAIR_ID]),
                             header='catalogid, stereopair',
                             out_path=OUT_ID_LIST)

    #### Summary Statistics ####
    count = len(stereo)
    min_date = stereo[DATE_COL].min()
    max_date = stereo[DATE_COL].max()

    logger.info("SUMMARY of STEREOPAIR SELECTION:")
    logger.info("Number of STEREOPAIRS: {}".format(count))
    logger.info("Earliest date: {}".format(min_date))
    logger.info("Latest date: {}".format(max_date))

    return stereo
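
A hypothetical call, for illustration only (every path and threshold value below is made up, not taken from the original project):

selection = dem_selector(AOI_PATH='my_aoi.shp',
                         MIN_DATE='2015-01-30',
                         MAX_DATE='2019-12-31',
                         MULTISPEC=True,       # WV02/WV03 only
                         CLOUDCOVER=20,
                         OUT_STEREO_FP='stereo_selection.shp',
                         OUT_ID_LIST='stereo_ids.txt')
print('Selected stereopairs: {}'.format(len(selection)))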