# ---- Example (示例) #1 ----
# 0
def test_misc_14():
    """Verify gdal.ConfigurePythonLogging routes CPL messages to a Python logger.

    Exercises both modes: with ``enable_debug=True`` (CPL_DEBUG forced to "ON"
    and debug messages forwarded) and without (CPL_DEBUG left untouched, so
    ``gdal.Debug`` output is suppressed while explicit CE_Debug errors still
    get through).
    """
    import collections
    import logging

    class MockLoggingHandler(logging.Handler):
        """Collects emitted log messages, grouped by level name."""

        def __init__(self, *args, **kwargs):
            # Python 3 style super() instead of super(MockLoggingHandler, self)
            super().__init__(*args, **kwargs)
            # level name (e.g. 'DEBUG') -> list of formatted messages
            self.messages = collections.defaultdict(list)

        def emit(self, record):
            self.messages[record.levelname].append(record.getMessage())


    logger = logging.getLogger('gdal_logging_test')
    logger.setLevel(logging.DEBUG)
    logger.propagate = False  # keep test output away from the root logger
    handler = MockLoggingHandler(level=logging.DEBUG)
    logger.addHandler(handler)

    # Remember current value so it can be restored afterwards.
    prev_debug = gdal.GetConfigOption("CPL_DEBUG")
    try:
        gdal.ConfigurePythonLogging(logger_name='gdal_logging_test', enable_debug=True)

        assert gdal.GetConfigOption("CPL_DEBUG") == "ON", "should have enabled debug"

        # Emit one message per severity; each should reach the Python logger.
        gdal.Debug("test1", "debug1")
        gdal.Error(gdal.CE_Debug, gdal.CPLE_FileIO, "debug2")
        gdal.Error(gdal.CE_None, gdal.CPLE_AppDefined, "info1")
        gdal.Error(gdal.CE_Warning, gdal.CPLE_AssertionFailed, "warning1")
        gdal.Error(gdal.CE_Failure, 99999, "error1")

        expected = {
            'DEBUG': ["test1: debug1", "FileIO: debug2"],
            'INFO': ["AppDefined: info1"],
            'WARNING': ["AssertionFailed: warning1"],
            'ERROR': ["99999: error1"],
        }

        assert handler.messages == expected, "missing log messages"

        # Reset and reconfigure WITHOUT enable_debug: CPL_DEBUG must stay OFF.
        gdal.SetErrorHandler('CPLDefaultErrorHandler')
        handler.messages.clear()
        gdal.SetConfigOption('CPL_DEBUG', "OFF")

        gdal.ConfigurePythonLogging(logger_name='gdal_logging_test')

        assert gdal.GetConfigOption("CPL_DEBUG") == "OFF", \
            "shouldn't have enabled debug"

        # these get suppressed by CPL_DEBUG
        gdal.Debug("test1", "debug3")
        # these don't
        gdal.Error(gdal.CE_Debug, gdal.CPLE_None, "debug4")

        assert handler.messages['DEBUG'] == ['debug4'], "unexpected log messages"

    finally:
        # Restore process-wide GDAL state so later tests are unaffected.
        gdal.SetErrorHandler('CPLDefaultErrorHandler')
        gdal.SetConfigOption('CPL_DEBUG', prev_debug)
        logger.removeHandler(handler)
# ---- Example (示例) #2 ----
# 0
def vector_translate(
        input_path: Union[Path, str],
        output_path: Path,
        translate_description: Optional[str] = None,
        input_layers: Union[List[str], str, None] = None,
        output_layer: Optional[str] = None,
        spatial_filter: Optional[Tuple[float, float, float, float]] = None,
        clip_bounds: Optional[Tuple[float, float, float, float]] = None,
        sql_stmt: Optional[str] = None,
        sql_dialect: Optional[str] = None,
        transaction_size: int = 65536,
        append: bool = False,
        update: bool = False,
        create_spatial_index: Optional[bool] = None,
        explodecollections: bool = False,
        force_output_geometrytype: Optional[GeometryType] = None,
        sqlite_journal_mode: Optional[str] = None,
        verbose: bool = False) -> bool:
    """Translate a vector file to another file using gdal.VectorTranslate.

    Args:
        input_path: path to the input file.
        output_path: path the result is written to.
        translate_description: currently unused by this implementation.
        input_layers: layer(s) to read; cleared for shapefiles and when
            sql_stmt is given.
        output_layer: name of the output layer (-nln).
        spatial_filter: (minx, miny, maxx, maxy) spatial filter (-spat).
        clip_bounds: (minx, miny, maxx, maxy) clip bounds (-clipsrc).
        sql_stmt: sql statement to execute on the input.
        sql_dialect: sql dialect for sql_stmt.
        transaction_size: group size for transactions (-gt).
        append: append to the output file if it already exists.
        update: open the output file in update mode if it already exists.
        create_spatial_index: create a spatial index on a new output file.
        explodecollections: split geometry collections into single geometries.
        force_output_geometrytype: force this output geometry type (-nlt).
        sqlite_journal_mode: journal mode to set via OGR_SQLITE_PRAGMA.
        verbose: currently unused by this implementation.

    Returns:
        True on success.

    Raises:
        Exception: if the translation fails.
    """
    # Remark: when executing a select statement, GDAL sometimes complains that
    # there are two columns named "geom", because it doesn't recognise the
    # "geom" column in the select as a geometry column. Probably a version
    # issue. Maybe try again later.
    args = []

    # Clean up the input_layers variable.
    if isinstance(input_path, Path) and input_path.suffix.lower() == '.shp':
        # For shapefiles, having input_layers not None gives issues
        input_layers = None
    elif sql_stmt is not None:
        # If a sql statement is passed, the input layers are not relevant,
        # and ogr2ogr will give a warning, so clear it.
        input_layers = None

    # Sql'ing, filtering, clipping
    if spatial_filter is not None:
        # NOTE(review): the filter is also passed as spatFilter= to
        # VectorTranslateOptions below — confirm passing it twice is intended.
        args.extend(['-spat', *(str(coord) for coord in spatial_filter)])
    if clip_bounds is not None:
        args.extend(['-clipsrc', *(str(coord) for coord in clip_bounds)])

    # Output file options: append/update only make sense on an existing file.
    if output_path.exists():
        if append:
            args.append('-append')
        if update:
            args.append('-update')

    # Output layer options
    if explodecollections:
        args.append('-explodecollections')
    if output_layer is not None:
        args.extend(['-nln', output_layer])
    if force_output_geometrytype is not None:
        args.extend(['-nlt', force_output_geometrytype.name])
    # PROMOTE_TO_MULTI is always added; ogr2ogr combines it with a preceding
    # -nlt <type> when one was specified.
    args.extend(['-nlt', 'PROMOTE_TO_MULTI'])
    if transaction_size is not None:
        args.extend(['-gt', str(transaction_size)])

    # Output layer creation options
    layerCreationOptions = []
    # TODO: should check if the layer exists instead of the file
    if not output_path.exists() and create_spatial_index is not None:
        layerCreationOptions.append(
                'SPATIAL_INDEX=YES' if create_spatial_index else 'SPATIAL_INDEX=NO')

    # Get output format from the filename
    output_filetype = GeofileType(output_path)

    # Sqlite specific options
    datasetCreationOptions = []
    if output_filetype == GeofileType.SQLite:
        # Use the spatialite flavour of sqlite
        datasetCreationOptions.append('SPATIALITE=YES')

    # NOTE(review): the config options below are set process-wide and never
    # reset, so they leak into later gdal calls — confirm this is intended.
    if sqlite_journal_mode is not None:
        gdal.SetConfigOption('OGR_SQLITE_PRAGMA', f"journal_mode={sqlite_journal_mode}")
    gdal.SetConfigOption('OGR_SQLITE_CACHE', '128')

    options = gdal.VectorTranslateOptions(
            options=args,
            format=output_filetype.ogrdriver,
            accessMode=None,
            srcSRS=None,
            dstSRS=None,
            reproject=False,
            SQLStatement=sql_stmt,
            SQLDialect=sql_dialect,
            where=None, #"geom IS NOT NULL",
            selectFields=None,
            addFields=False,
            forceNullable=False,
            spatFilter=spatial_filter,
            spatSRS=None,
            datasetCreationOptions=datasetCreationOptions,
            layerCreationOptions=layerCreationOptions,
            layers=input_layers,
            layerName=output_layer,
            geometryType=None,
            dim=None,
            segmentizeMaxDist=None,
            zField=None,
            skipFailures=False,
            limit=None,
            callback=None,
            callback_data=None)

    input_ds = None
    try:
        # In some cases gdal only raises the last exception instead of the
        # stack in VectorTranslate, so you lose necessary details!
        # -> switch to gdal.DontUseExceptions() when debugging!
        gdal.UseExceptions()
        gdal.ConfigurePythonLogging(logger_name='gdal', enable_debug=False)

        logger.debug(f"Execute {sql_stmt} on {input_path}")
        input_ds = gdal.OpenEx(str(input_path))

        # TODO: memory output support might be interesting to support
        result_ds = gdal.VectorTranslate(
                destNameOrDestDS=str(output_path),
                srcDS=input_ds,
                options=options)

        if result_ds is None:
            # Was a placeholder message ("BOEM"); give a meaningful error.
            raise Exception(f"gdal.VectorTranslate returned None for {input_path}")
        if result_ds.GetLayerCount() == 0:
            # Nothing was written: clean up the empty output file again.
            del result_ds
            if output_path.exists():
                gfo.remove(output_path)
    except Exception as ex:
        message = f"Error executing {sql_stmt}"
        logger.exception(message)
        raise Exception(message) from ex
    finally:
        # Dereference the dataset so gdal closes/flushes it.
        if input_ds is not None:
            del input_ds

    return True
# ---- Example (示例) #3 ----
# 0
from io import StringIO
import logging
import os
from pathlib import Path
import pprint
import re
import subprocess
import tempfile
from threading import Lock
import time
from typing import List, Optional, Tuple, Union

import geopandas as gpd
from osgeo import gdal
# Configure gdal at import time: raise Python exceptions on errors and route
# gdal's internal logging to the 'gdal' Python logger.
# NOTE(review): these are import-time side effects that change process-wide
# gdal state for every user of this module — confirm this is intended.
gdal.UseExceptions() 
gdal.ConfigurePythonLogging(logger_name='gdal', enable_debug=False)

import geofileops as gfo
from geofileops.util.geofiletype import GeofileType
from geofileops.util.geometry_util import GeometryType

#-------------------------------------------------------------
# First define/init some general variables/constants
#-------------------------------------------------------------

# Make sure only one instance per process is running
lock = Lock()

# Get a logger...
logger = logging.getLogger(__name__)