Code example #1
    def transform_direct(self, srcSRS, dstSRS, inpath=None, outpath=None, outlayername=None,
                         outformat=None, layerCreationOptions=None, helmert_para=None):
        in_srs = osr.SpatialReference()
        in_srs.ImportFromEPSG(srcSRS)
        out_srs = osr.SpatialReference()
        out_srs.ImportFromEPSG(dstSRS)

        if inpath is None: inpath = self.in_path
        if outpath is None: outpath = self.out_path
        if outlayername is None: outlayername = self.out_layername
        if outformat is None: outformat = self.out_format
        if layerCreationOptions is None: layerCreationOptions = self.lco

        out_format = DataType_dict[outformat]

        if outformat == DataType.geojson:
            translateOptions = gdal.VectorTranslateOptions(format=out_format, srcSRS=in_srs, dstSRS=out_srs,
                                                           coordinateOperation=helmert_para,
                                                           layerName=outlayername)
        else:
            translateOptions = gdal.VectorTranslateOptions(format=out_format, srcSRS=in_srs, dstSRS=out_srs,
                                                           coordinateOperation=helmert_para,
                                                           accessMode="overwrite", layerName=outlayername,
                                                           layerCreationOptions=layerCreationOptions)

        if gdal.VectorTranslate(outpath, inpath, options=translateOptions):
            return [outpath, outlayername]
        else:
            return [None, None]
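A minimal usage sketch; the converter instance, file paths, EPSG codes, and Helmert parameters below are illustrative assumptions, not part of the original code.

# hypothetical instance of the class that defines transform_direct
converter = VectorConverter()  # assumed constructor
helmert = ("+proj=helmert +convention=position_vector "
           "+x=100.0 +y=50.0 +s=1.0 +theta=0.0")  # placeholder parameters
out_path, out_layer = converter.transform_direct(
    srcSRS=2435, dstSRS=4547,
    inpath="roads_bj54.shp", outpath="roads_2000.shp",
    outlayername="roads", outformat=DataType.shapefile,  # assumed enum member
    layerCreationOptions=["ENCODING=GBK"],
    helmert_para=helmert)
if out_path is None:
    print("translation failed")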
Code example #2
def ogr2ogr(src, dst, options):
    """
    a simple wrapper for gdal.VectorTranslate aka `ogr2ogr <https://www.gdal.org/ogr2ogr.html>`_

    Parameters
    ----------
    src: str or :osgeo:class:`ogr.DataSource`
        the input data set
    dst: str
        the output data set
    options: dict
        additional parameters passed to gdal.VectorTranslate;
        see `gdal.VectorTranslateOptions <http://gdal.org/python/osgeo.gdal-module.html#VectorTranslateOptions>`_

    Returns
    -------

    """
    out = gdal.VectorTranslate(dst, src, options=gdal.VectorTranslateOptions(**options))
    out = None  # dereference to flush and close the output data source
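A usage sketch for the wrapper (file names are placeholders); format and dstSRS are keyword arguments of gdal.VectorTranslateOptions.

# reproject a shapefile to WGS84 GeoJSON through the wrapper above
ogr2ogr(src="input.shp",
        dst="output.geojson",
        options={"format": "GeoJSON", "dstSRS": "EPSG:4326"})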
Code example #3
def ogr2ogr(src, dst, options):
    """
    a simple wrapper for :osgeo:func:`gdal.VectorTranslate` aka `ogr2ogr <https://www.gdal.org/ogr2ogr.html>`_

    Parameters
    ----------
    src: str or :osgeo:class:`ogr.DataSource`
        the input data set
    dst: str
        the output data set
    options: dict
        additional parameters passed to :osgeo:func:`gdal.VectorTranslate`;
        see :osgeo:func:`gdal.VectorTranslateOptions`

    Returns
    -------

    """
    out = gdal.VectorTranslate(dst,
                               src,
                               options=gdal.VectorTranslateOptions(**options))
    out = None
Code example #4
def export(request):
    request.resource_permission(PERM_READ)

    srs = int(request.GET.get("srs", request.context.srs.id))
    srs = SRS.filter_by(id=srs).one()

    fid = request.GET.get("fid")
    fid = fid if fid != "" else None

    format = request.GET.get("format")
    encoding = request.GET.get("encoding")
    zipped = request.GET.get("zipped", "true")
    zipped = zipped.lower() == "true"

    display_name = request.GET.get("display_name", "false")
    display_name = display_name.lower() == "true"

    if format is None:
        raise ValidationError(_("Output format is not provided."))

    if format not in EXPORT_FORMAT_OGR:
        raise ValidationError(_("Format '%s' is not supported.") % (format, ))

    driver = EXPORT_FORMAT_OGR[format]

    # dataset creation options (configurable by user)
    dsco = list()
    if driver.dsco_configurable is not None:
        for option in driver.dsco_configurable:
            option = option.split(":")[0]
            if option in request.GET:
                dsco.append("%s=%s" % (option, request.GET.get(option)))

    # layer creation options
    lco = list(driver.options or [])

    if encoding is not None:
        lco.append("ENCODING=%s" % encoding)

    query = request.context.feature_query()
    query.geom()

    ogr_ds = _ogr_memory_ds()
    ogr_layer = _ogr_layer_from_features(  # NOQA: F841
        request.context, query(), ds=ogr_ds, fid=fid)

    with tempfile.TemporaryDirectory() as tmp_dir:
        filename = "%d.%s" % (
            request.context.id,
            driver.extension,
        )

        vtopts = ([
            "-f",
            driver.name,
            "-t_srs",
            srs.wkt,
        ] + list(itertools.chain(*[("-lco", o) for o in lco])) +
                  list(itertools.chain(*[("-dsco", o) for o in dsco])))

        if display_name:
            # CPLES_SQLI == 7: escape double quotes in SQL identifiers
            flds = [
                '"{}" as "{}"'.format(
                    fld.keyname.replace('"', r'\"'),
                    fld.display_name.replace('"', r'\"'),
                ) for fld in request.context.fields
            ]
            if fid is not None:
                flds += ['FID as "{}"'.format(fid.replace('"', r'\"'))]
            vtopts += [
                "-sql", 'select {} from ""'.format(
                    ", ".join(flds if len(flds) > 0 else '*'))
            ]

        if driver.fid_support and fid is None:
            vtopts.append('-preserve_fid')

        gdal.VectorTranslate(
            os.path.join(tmp_dir, filename),
            ogr_ds,
            options=gdal.VectorTranslateOptions(options=vtopts))

        if zipped or not driver.single_file:
            content_type = "application/zip"
            content_disposition = "attachment; filename=%s" % ("%s.zip" %
                                                               (filename, ))
            with tempfile.NamedTemporaryFile(suffix=".zip") as tmp_file:
                with zipfile.ZipFile(tmp_file, "w",
                                     zipfile.ZIP_DEFLATED) as zipf:
                    for root, dirs, files in os.walk(tmp_dir):
                        for file in files:
                            path = os.path.join(root, file)
                            zipf.write(path, os.path.basename(path))
                response = FileResponse(tmp_file.name,
                                        content_type=content_type,
                                        request=request)
                response.content_disposition = content_disposition
                return response
        else:
            content_type = driver.mime or "application/octet-stream"
            content_disposition = "attachment; filename=%s" % filename
            response = FileResponse(os.path.join(tmp_dir, filename),
                                    content_type=content_type,
                                    request=request)
            response.content_disposition = content_disposition
            return response
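The handler above assembles plain ogr2ogr-style flags into a list. A condensed sketch of the same pattern outside the web framework, with illustrative driver, SRS, and option values:

from osgeo import gdal

lco = ["ENCODING=UTF-8"]   # layer creation options (placeholder values)
dsco = []                  # dataset creation options
vtopts = ["-f", "ESRI Shapefile", "-t_srs", "EPSG:4326"]
for o in lco:
    vtopts += ["-lco", o]
for o in dsco:
    vtopts += ["-dsco", o]
gdal.VectorTranslate("out.shp", "in.gpkg",
                     options=gdal.VectorTranslateOptions(options=vtopts))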
Code example #5
def _replace_data(schema_name, layer, fields, agol_meta_map, dry_run):
    """the insert logic for writing to the destination
    """
    cloud_db = config.format_ogr_connection(config.DBO_CONNECTION)
    internal_sgid = config.get_source_connection()

    internal_name = f'{schema_name}.{layer}'

    sql = f'SELECT objectid FROM "{schema_name}.{layer}"'

    if len(fields) > 0:
        #: escape reserved words?
        fields = [f'"{field}"' for field in fields]
        sql = f'SELECT {",".join(fields)} FROM "{schema_name}.{layer}"'

    options = [
        '-f',
        'PostgreSQL',
        '-dialect',
        'OGRSQL',
        '-sql',
        sql,
        '-lco',
        'FID=xid',
        '-lco',
        f'SCHEMA={schema_name}',
        '-lco',
        'OVERWRITE=YES',
        '-lco',
        'GEOMETRY_NAME=shape',
        '-lco',
        'PRECISION=YES',
        '-a_srs',
        config.UTM,
    ]

    if schema_name in agol_meta_map and layer in agol_meta_map[schema_name]:
        new_name, geometry_type = agol_meta_map[schema_name][layer].values()

        if new_name:
            layer = new_name

        if geometry_type == 'POLYGON':
            options.append('-nlt')
            options.append('MULTIPOLYGON')
        elif geometry_type == 'POLYLINE':
            options.append('-nlt')
            options.append('MULTILINESTRING')
        elif geometry_type == 'STAND ALONE':
            options.append('-nlt')
            options.append('NONE')
        else:
            options.append('-nlt')
            options.append(geometry_type)
    else:
        LOG.info(
            f'- skipping {Fore.MAGENTA}{layer}{Fore.RESET} since it is no longer in the meta table'
        )

        return

    options.append('-nln')
    options.append(f'{layer}')

    pg_options = None
    try:
        pg_options = gdal.VectorTranslateOptions(options=options)
    except Exception:
        LOG.fatal(
            f'- {Fore.RED}invalid options{Fore.RESET} for {Fore.BLUE}{layer}{Fore.RESET}'
        )
        return

    LOG.info((f'- inserting {Fore.MAGENTA}{layer}{Fore.RESET} '
              f'into {Fore.BLUE}{schema_name}{Fore.RESET} '
              f'as {Fore.CYAN}{geometry_type}{Fore.RESET}'))
    LOG.debug(f'with {Fore.CYAN}{sql}{Fore.RESET}')

    if not dry_run:
        start_seconds = perf_counter()
        result = gdal.VectorTranslate(cloud_db,
                                      internal_sgid,
                                      options=pg_options)
        LOG.debug((
            f'- {Fore.GREEN}completed{Fore.RESET} '
            f'in {Fore.CYAN}{utils.format_time(perf_counter() - start_seconds)}{Fore.RESET}'
        ))

        del result

        LOG.debug(f'- {Fore.CYAN}make valid{Fore.RESET}')

        qualified_layer = f'{schema_name}.{layer}'

        make_valid(qualified_layer)
        schema.update_schema_for(internal_name, qualified_layer)
        create_index(qualified_layer)
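A hypothetical invocation of this loader; the schema, layer, field list, and meta map entries are assumptions for illustration:

agol_meta_map = {'sgid': {'roads': {'name': 'roads', 'geometry_type': 'POLYLINE'}}}
_replace_data('sgid', 'roads', ['objectid', 'name'], agol_meta_map, dry_run=True)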
Code example #6
File: auxil.py  Project: IanMadlenya/pyroSAR
def ogr2ogr(src, dst, options):
    out = gdal.VectorTranslate(dst,
                               src,
                               options=gdal.VectorTranslateOptions(**options))
    out = None
Code example #7
def export(request):
    request.resource_permission(PERM_READ)

    srs = int(request.GET.get("srs", request.context.srs.id))
    srs = SRS.filter_by(id=srs).one()
    fid = request.GET.get("fid")
    format = request.GET.get("format")
    encoding = request.GET.get("encoding")
    zipped = request.GET.get("zipped", "true")
    zipped = zipped.lower() == "true"

    if format is None:
        raise ValidationError(_("Output format is not provided."))
    else:
        format = format.upper()

    if format not in EXPORT_FORMAT_OGR:
        raise ValidationError(_("Format '%s' is not supported.") % (format, ))

    driver = EXPORT_FORMAT_OGR[format]

    # layer creation options
    lco = list(driver.options or [])

    if encoding is not None:
        lco.append("ENCODING=%s" % encoding)

    query = request.context.feature_query()
    query.geom()

    ogr_ds = _ogr_memory_ds()
    ogr_layer = _ogr_layer_from_features(request.context,
                                         query(),
                                         ds=ogr_ds,
                                         fid=fid)

    buf = BytesIO()

    with backports.tempfile.TemporaryDirectory() as temp_dir:
        filename = "%d.%s" % (
            request.context.id,
            driver.extension,
        )

        vtopts = [
            '-f',
            driver.name,
            '-t_srs',
            srs.wkt,
        ] + list(itertools.chain(*[('-lco', o) for o in lco]))

        if driver.fid_support and fid is None:
            vtopts.append('-preserve_fid')

        gdal.VectorTranslate(
            os.path.join(temp_dir, filename),
            ogr_ds,
            options=gdal.VectorTranslateOptions(options=vtopts))

        if zipped or not driver.single_file:
            with zipfile.ZipFile(buf, "w", zipfile.ZIP_DEFLATED) as zipf:
                for root, dirs, files in os.walk(temp_dir):
                    for file in files:
                        path = os.path.join(root, file)
                        zipf.write(path, os.path.basename(path))

            content_type = "application/zip"
            filename = "%s.zip" % (filename, )

        else:
            content_type = (driver.mime or "application/octet-stream")
            with open(os.path.join(temp_dir, filename), "rb") as f:
                buf.write(f.read())

    content_disposition = (b"attachment; filename=%s" % filename)

    return Response(
        buf.getvalue(),
        content_type=b"%s" % str(content_type),
        content_disposition=content_disposition,
    )
Code example #8
def shapefile2geojson(infile, outfile):

    options = gdal.VectorTranslateOptions(format="GeoJSON", dstSRS="EPSG:4326")
    gdal.VectorTranslate(outfile, infile, options=options)
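Usage is a single call; the file names are placeholders:

shapefile2geojson("parcels.shp", "parcels.geojson")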
Code example #9
print(inDs.GetLayerCount())

layer = inDs.GetLayer()
spatialRef = layer.GetSpatialRef()
print(spatialRef)

srs = osr.SpatialReference(spatialRef.ExportToWkt())
print(srs)

tmp_output = r"D:\Data\深圳坐标\test\配准中心线_bj54.geojson"
# geojson_driver = ogr.GetDriverByName("ESRI Shapefile")
# outDs = driver.CreateDataSource(output)

translateOptions = gdal.VectorTranslateOptions(
    format="GeoJSON",
    coordinateOperation="+proj=helmert +convention=position_vector "
                        "+x=2472704.709219 +y=391088.722412 "
                        "+s=1.000014426327 +theta=3518.94103818")
# 'input' is the source dataset path, presumably defined earlier in the script
gdal.VectorTranslate(tmp_output, input, options=translateOptions)

in_srs = osr.SpatialReference()
in_srs.ImportFromEPSG(2435)
out_srs = osr.SpatialReference()
out_srs.ImportFromEPSG(4547)

output = r"D:\Data\深圳坐标\test\配准中心线_2000.shp"
translateOptions = gdal.VectorTranslateOptions(
    format="ESRI Shapefile",
    srcSRS=in_srs,
    dstSRS=out_srs,
    layerCreationOptions=["ENCODING=GBK"])
# presumably the second pass: reproject the intermediate GeoJSON to the final shapefile
gdal.VectorTranslate(output, tmp_output, options=translateOptions)
Code example #10
File: ogr_util.py  Project: theroggy/geofileops
def vector_translate(
        input_path: Union[Path, str], 
        output_path: Path,
        translate_description: Optional[str] = None,
        input_layers: Union[List[str], str, None] = None,
        output_layer: Optional[str] = None,
        spatial_filter: Optional[Tuple[float, float, float, float]] = None,
        clip_bounds: Optional[Tuple[float, float, float, float]] = None, 
        sql_stmt: Optional[str] = None,
        sql_dialect: Optional[str] = None,
        transaction_size: int = 65536,
        append: bool = False,
        update: bool = False,
        create_spatial_index: Optional[bool] = None,
        explodecollections: bool = False,
        force_output_geometrytype: Optional[GeometryType] = None,
        sqlite_journal_mode: Optional[str] = None,
        verbose: bool = False) -> bool:

    # Remark: when executing a select statement, I keep getting an error that
    # there are two columns named "geom", because the "geom" column in the
    # select isn't recognized as a geometry column. Probably a version issue;
    # maybe try again later.
    args = []

    # Cleanup the input_layers variable.
    if isinstance(input_path, Path) and input_path.suffix.lower() == '.shp':
        # For shapefiles, having input_layers not None gives issues
        input_layers = None
    elif sql_stmt is not None:
        # If a sql statement is passed, the input layers are not relevant,
        # and ogr2ogr will give a warning, so clear it.
        input_layers = None
     
    # Sql'ing, Filtering, clipping  
    if spatial_filter is not None:
        args.extend(['-spat', str(spatial_filter[0]), str(spatial_filter[1]), 
                    str(spatial_filter[2]), str(spatial_filter[3])])
    if clip_bounds is not None:
        args.extend(['-clipsrc', str(clip_bounds[0]), str(clip_bounds[1]), 
                    str(clip_bounds[2]), str(clip_bounds[3])])
    '''
    if sqlite_stmt is not None:
        args.extend(['-sql', sqlite_stmt, '-dialect', 'sqlite'])
    '''

    # Output file options
    if output_path.exists() is True:
        if append is True:
            args.append('-append')
        if update is True:
            args.append('-update')

    # Files
    #args.append(output_path)
    #args.append(input_path)

    # Output layer options
    if explodecollections is True:
        args.append('-explodecollections')
    if output_layer is not None:
        args.extend(['-nln', output_layer])
    if force_output_geometrytype is not None:
        args.extend(['-nlt', force_output_geometrytype.name])
    args.extend(['-nlt', 'PROMOTE_TO_MULTI'])
    if transaction_size is not None:
        args.extend(['-gt', str(transaction_size)])

    # Output layer creation options
    layerCreationOptions = []
    # TODO: should check if the layer exists instead of the file
    if not output_path.exists():
        if create_spatial_index is not None:
            if create_spatial_index is True:
                layerCreationOptions.extend(['SPATIAL_INDEX=YES'])
            else:
                layerCreationOptions.extend(['SPATIAL_INDEX=NO'])
    
    # Get output format from the filename
    output_filetype = GeofileType(output_path)

    # Sqlite specific options
    datasetCreationOptions = []
    if output_filetype == GeofileType.SQLite:
        # Use the spatialite type of sqlite
        #datasetCreationOptions.extend(['-dsco', 'SPATIALITE=YES'])
        datasetCreationOptions.append('SPATIALITE=YES')
      
    '''
    # Try if the busy_timeout isn't giving problems rather than solving them...
    if sqlite_journal_mode is not None:
        datasetCreationOptions.extend(['--config', 'OGR_SQLITE_PRAGMA', f"journal_mode={sqlite_journal_mode},busy_timeout=5000"])  
    else:
        datasetCreationOptions.extend(['--config OGR_SQLITE_PRAGMA busy_timeout=5000'])  
    '''
    if sqlite_journal_mode is not None:
        gdal.SetConfigOption('OGR_SQLITE_PRAGMA', f"journal_mode={sqlite_journal_mode}")

    #if append is False:
    #    args.extend(['--config', 'OGR_SQLITE_PRAGMA', '"journal_mode=WAL"'])
    #    args.extend(['-dsco', 'ADD_GPKG_OGR_CONTENTS=NO'])
    #else:
    #    args.extend(['--config', 'OGR_SQLITE_PRAGMA', 'busy_timeout=-1'])  
    #args.extend(['--config', 'OGR_SQLITE_SYNCHRONOUS', 'OFF'])  
    gdal.SetConfigOption('OGR_SQLITE_CACHE', '128')

    options = gdal.VectorTranslateOptions(
            options=args, 
            format=output_filetype.ogrdriver, 
            accessMode=None, 
            srcSRS=None, 
            dstSRS=None, 
            reproject=False, 
            SQLStatement=sql_stmt,
            SQLDialect=sql_dialect,
            where=None, #"geom IS NOT NULL", 
            selectFields=None, 
            addFields=False, 
            forceNullable=False, 
            spatFilter=spatial_filter, 
            spatSRS=None,
            datasetCreationOptions=datasetCreationOptions, 
            layerCreationOptions=layerCreationOptions, 
            layers=input_layers,
            layerName=output_layer,
            geometryType=None, 
            dim=None, 
            segmentizeMaxDist=None, 
            zField=None, 
            skipFailures=False, 
            limit=None, 
            callback=None, 
            callback_data=None)

    input_ds = None
    try: 
        # In some cases gdal only raises the last exception instead of the stack in VectorTranslate, 
        # so you lose necessary details! -> uncomment gdal.DontUseExceptions() when debugging!
        
        #gdal.DontUseExceptions()
        gdal.UseExceptions() 
        gdal.ConfigurePythonLogging(logger_name='gdal', enable_debug=False)

        logger.debug(f"Execute {sql_stmt} on {input_path}")
        input_ds = gdal.OpenEx(str(input_path))
        
        # TODO: memory output support might be interesting to support
        result_ds = gdal.VectorTranslate(
                destNameOrDestDS=str(output_path),
                srcDS=input_ds,
                #SQLStatement=sql_stmt,
                #SQLDialect=sql_dialect,
                #layerName=output_layer
                options=options)

        if result_ds is None:
            raise Exception("BOEM")
        else:
            if result_ds.GetLayerCount() == 0:
                del result_ds
                if output_path.exists():
                    gfo.remove(output_path)
    except Exception as ex:
        message = f"Error executing {sql_stmt}"
        logger.exception(message)
        raise Exception(message) from ex
    finally:
        if input_ds is not None:
            del input_ds
        
    return True
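A usage sketch under assumed paths, layer name, and SQL:

from pathlib import Path

ok = vector_translate(
    input_path=Path("input.gpkg"),
    output_path=Path("output.gpkg"),
    output_layer="parcels",
    sql_stmt='SELECT * FROM "parcels"',
    sql_dialect="SQLITE")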
Code example #11
def combine_files(config):
    # read the first xml file
    name, extension = os.path.splitext(os.path.basename(config['outputfile']))
    print("name=%s ;;;;;;;;; extension=%s" % (name, extension))
    #when WGS84, switch the axis order
    if (config['projection'] == 'EPSG:4326'):
        first_filename = os.path.join(
            config['tmpdir'], '%(name)s_%(west)s_%(south)s%(extension)s' % {
                'name': name,
                'west': config['bbox']['west'],
                'south': config['bbox']['south'],
                'extension': extension
            })
    else:
        first_filename = os.path.join(
            config['tmpdir'], '%(name)s_%(west)s_%(south)s%(extension)s' % {
                'name': name,
                'west': config['bbox']['south'],
                'south': config['bbox']['west'],
                'extension': extension
            })

    print("first filename = %s" % first_filename)

    first_xml = etree.parse(first_filename)
    first_root = first_xml.getroot()
    nsmap = first_root.nsmap

    try:
        number_matched = int(first_root.get('numberMatched'))
    except (ValueError, TypeError):
        number_matched = False
    print("number_matched=%i" % number_matched)

    try:
        number_returned = int(first_root.get('numberReturned'))
    except (ValueError, TypeError):
        number_returned = False
    print("number_returned=%i" % number_returned)

    #for wfs 1.1.0 or 1.0.0
    try:
        number_offeatures = int(first_root.get('numberOfFeatures'))
    except (ValueError, TypeError):
        number_offeatures = False
    print("number_offeatures=%i" % number_offeatures)

    #write with ogr: write to memory, merge, and finally export to file
    gdaloutputfile = config['outputfile'].replace(extension, ".gpkg")
    print("Exporting: %s" % gdaloutputfile)
    gdal.UseExceptions()
    gdalDriverName = 'GPKG'

    #test removing duplicates
    #geopkg = gdal.OpenEx(gdaloutputfile, gdal.OF_UPDATE)
    #geopkg.ExecuteSQL("pragma wal_checkpoint(TRUNCATE)")
    #quit()
    #layer = geopkg.GetLayer(0)
    #nome = geopkg.GetLayer(0).GetName()
    #layer.ResetReading();
    #print("delete from %s where rowid not in (select min(rowid) from %s group by %s);" % (nome, nome, config['uniqueid_field']))
    #geopkg.ExecuteSQL("delete from %s where rowid not in (select min(rowid) from %s group by %s);" % (nome, nome, config['uniqueid_field']))
    #del geopkg
    #quit()

    srcDS = gdal.OpenEx(first_filename)
    srcLayer = srcDS.GetLayer(0)
    #spatialRef = srcDS.GetSpatialRef().exportToWkt()
    #options to create and append data - skipfailures is important to continue even when a duplicate fails to be added because the unique index prevents it
    ogrOptions = gdal.VectorTranslateOptions(options=[
        '-f', gdalDriverName, '-t_srs', 'EPSG:4326', '-update', '-append',
        '-skipfailures', '-lco', 'SPATIAL_INDEX=NO'
    ])
    driver = ogr.GetDriverByName(gdalDriverName)
    if os.path.exists(gdaloutputfile):
        print("Deleting existing file: %s" % gdaloutputfile)
        deleted = driver.DeleteDataSource(gdaloutputfile)
        if (deleted != 0):
            print("impossible to delete... quiting.")
            quit()

    #ds = gdal.VectorTranslate(gdaloutputfile, srcDS=first_filename, options=ogrOptions)
    #gdalName=ds.GetLayer(0).GetName()
    outmemfile = os.path.join('/vsimem', os.path.basename(gdaloutputfile))
    print('outmemfile=%s' % outmemfile)
    ds = gdal.VectorTranslate(outmemfile,
                              srcDS=first_filename,
                              options=ogrOptions)

    #try to optimize geopackage performance
    #memLayer = ds.GetLayer(0)
    #memLayerName = memLayer.GetName()
    #ds.ExecuteSQL('CREATE UNIQUE INDEX IF NOT EXISTS gmlid_idx ON "%s" (%s);' % (outmemfile, config['uniqueid_field']))

    #add a unique index to avoid duplicates if it is configured
    if (config['uniqueid_field'] != 'None'):
        #ds.ExecuteSQL('CREATE UNIQUE INDEX IF NOT EXISTS gmlid_idx ON "%s" (%s);' % (memLayerName, config['uniqueid_field']))
        pass
    #Dereference and close dataset, then reopen.
    del ds

    for filename in os.listdir(config['tmpdir']):
        print("filename=%s" % filename)
        if filename.startswith(name):
            abs_filename = os.path.join(config['tmpdir'], filename)
            if abs_filename != first_filename:
                print('merging', abs_filename)

                xml = etree.parse(abs_filename)
                root = xml.getroot()

                if number_matched is not False:
                    number_matched += int(root.get('numberMatched'))
                    print("elements in file=%s" % root.get('numberMatched'))
                    print("elements total=%i" % number_matched)

                if number_returned is not False:
                    number_returned += int(root.get('numberReturned'))
                    print("elements in file=%s" % root.get('numberMatched'))
                    print("elements total=%i" % number_returned)

                if number_offeatures is not False:
                    number_offeatures += int(root.get('numberOfFeatures'))
                    print("elements in file=%s" % root.get('numberOfFeatures'))
                    print("elements total=%i" % number_offeatures)

                #avoid errors in merging if 0 elements
                if (number_matched == 0 and number_returned == 0
                        and number_offeatures == 0):
                    print("Empty file... skipping.")
                    continue

                #for node in xml.xpath('.//wfs:member', namespaces=nsmap):
                #    first_root.append(node)
                #ds = gdal.VectorTranslate(gdaloutputfile, srcDS=abs_filename, options=ogrOptions)
                #Dereference and close dataset, then reopen.
                #del ds
                ds = gdal.VectorTranslate(outmemfile,
                                          srcDS=abs_filename,
                                          options=ogrOptions)
                #remove duplicates
                #if (config['uniqueid_field'] != 'None'):
                #  ds.ExecuteSQL('CREATE UNIQUE INDEX IF NOT EXISTS gmlid_idx ON "%s" (%s);' % (memLayerName, config['uniqueid_field']))

                #Dereference and close dataset, then reopen.
                del ds

                #memOptions = [
                #    '-f', 'memData',
                #    '-t_srs', 'EPSG:4326',
                #    '-update',
                #    '-append',
                #    '-skipfailures'
                #]
                #ds = gdal.VectorTranslate(layer_mem, srcDS=abs_filename, options=memOptions)

    # manipulate numberMatched numberReturned
    if number_matched is not False:
        first_root.set('numberMatched', str(number_matched))

    if number_returned is not False:
        first_root.set('numberReturned', str(number_returned))

    if number_offeatures is not False:
        first_root.set('numberOfFeatures', str(number_offeatures))

    #manipulate the extent / bounding box
    #avoid errors in merging if 0 elements
    if (number_matched > 0 or number_returned > 0 or number_offeatures > 0):
        pass
        #in my case these attributes don't exist
        #first_root.xpath('.//wfs:boundedBy/gml:Envelope/gml:lowerCorner', namespaces=nsmap)[0].text = '%s %s' % (config['bbox']['west'], config['bbox']['east'])
        #first_root.xpath('.//wfs:boundedBy/gml:Envelope/gml:upperCorner', namespaces=nsmap)[0].text = '%s %s' % (config['bbox']['south'], config['bbox']['north'])
    else:
        print("No results - merged file not written.")

    #print("a escrever etree para %s" % config['outputfile'])
    #print("etree first xml=%s" % etree.tostring(first_xml))
    #with open(config['outputfile'], 'wb') as f:
    #    f.write(etree.tostring(first_xml))
    #    f.close()

    #remove duplicates before exporting to disk
    memgeopkg = gdal.OpenEx(outmemfile, gdal.OF_UPDATE)
    #geopkg.ExecuteSQL("pragma wal_checkpoint(TRUNCATE)")
    #quit()
    if (config['uniqueid_field'] is not None):
        layer = memgeopkg.GetLayer(0)
        nome = memgeopkg.GetLayer(0).GetName()
        print("Deleting duplicates based on unique fields: %s" %
              config['uniqueid_field'])
        memgeopkg.ExecuteSQL(
            'delete from "%s" where rowid not in (select min(rowid) from "%s" group by "%s");'
            % (nome, nome, config['uniqueid_field']))
        del memgeopkg

    #write from memory to disk
    #print("Exporting from memory to disk: %s." % gdaloutputfile)
    #we can maybe optimize write performance using PRAGMA directives from SQLite
    #geopkg = driver.CreateDataSource(gdaloutputfile)
    #geopkg.ExecuteSQL("PRAGMA main.page_size = 4096;");
    #geopkg.ExecuteSQL("PRAGMA main.cache_size=10000;");
    #geopkg.ExecuteSQL("PRAGMA main.locking_mode=EXCLUSIVE;");
    #geopkg.ExecuteSQL("PRAGMA main.synchronous=NORMAL;");
    #geopkg.ExecuteSQL("PRAGMA main.journal_mode=WAL;");
    #geopkg.ExecuteSQL("PRAGMA main.cache_size=5000;");

    print("Configuring gdal to optimize GeoPackage write performance...")
    gdal.SetConfigOption(
        'OGR_SQLITE_PRAGMA',
        'main.page_size=4096,main.cache_size=10000')
    print("Writing to disk... %s" % gdaloutputfile)
    ds = gdal.VectorTranslate(gdaloutputfile,
                              srcDS=outmemfile,
                              options=ogrOptions)
    print("Creating spatial index...")
    layer = ds.GetLayer(0)
    layerName = layer.GetName()
    print("layerName: %s" % layerName)
    geomFieldName = layer.GetLayerDefn().GetGeomFieldDefn(0).GetName()
    print("geom name: %s" % geomFieldName)
    ds.ExecuteSQL("SELECT CreateSpatialIndex('%s','%s')" %
                  (layerName, geomFieldName))
    del ds
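A sketch of the config dict this function expects, inferred from the keys it reads; all values are illustrative:

config = {
    "outputfile": "/data/out/buildings.gml",
    "tmpdir": "/data/tmp",
    "projection": "EPSG:4326",
    "bbox": {"west": -9.5, "south": 36.9, "east": -6.2, "north": 42.2},
    "uniqueid_field": "gml_id",
}
combine_files(config)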