Example #1
def plmosaic_21():

    if gdaltest.plmosaic_drv is None:
        return 'skip'

    gdal.SetConfigOption('PL_URL', '/vsimem/root')
    ds = gdal.OpenEx('PLMosaic:',
                     gdal.OF_RASTER,
                     open_options=[
                         'API_KEY=foo', 'MOSAIC=my_mosaic', 'CACHE_PATH=',
                         'USE_TILES=YES'
                     ])
    gdal.SetConfigOption('PL_URL', None)

    gdal.ErrorReset()
    gdal.PushErrorHandler()
    ds.ReadRaster(256, 512, 1, 1)
    gdal.PopErrorHandler()
    if gdal.GetLastErrorMsg() == '':
        gdaltest.post_reason('fail')
        return 'fail'

    gdal.ErrorReset()
    gdal.PushErrorHandler()
    ds.GetRasterBand(1).ReadRaster(256, 512, 1, 1)
    gdal.PopErrorHandler()
    if gdal.GetLastErrorMsg() == '':
        gdaltest.post_reason('fail')
        return 'fail'

    gdal.ErrorReset()
    gdal.PushErrorHandler()
    ds.GetRasterBand(1).ReadBlock(1, 2)
    gdal.PopErrorHandler()
    if gdal.GetLastErrorMsg() == '':
        gdaltest.post_reason('fail')
        return 'fail'

    gdal.FileFromMemBuffer(
        '/vsimem/root/?name__is=mosaic_uint16', """{
"mosaics": [{
    "id": "mosaic_uint16",
    "name": "mosaic_uint16",
    "coordinate_system": "EPSG:3857",
    "datatype": "uint16",
    "grid": {
        "quad_size": 4096,
        "resolution": 4.77731426716
    },
    "first_acquired": "first_date",
    "last_acquired": "last_date",
    "_links" : {
        "tiles" : "/vsimem/root/mosaic_uint16/tiles{0-3}/{z}/{x}/{y}.png"
    },
    "quad_download": true
}]
}""")

    # Should emit a warning
    gdal.ErrorReset()
    gdal.PushErrorHandler()
    gdal.SetConfigOption('PL_URL', '/vsimem/root')
    ds = gdal.OpenEx('PLMosaic:',
                     gdal.OF_RASTER,
                     open_options=[
                         'API_KEY=foo', 'MOSAIC=mosaic_uint16', 'CACHE_PATH=',
                         'USE_TILES=YES'
                     ])
    gdal.SetConfigOption('PL_URL', None)
    gdal.PopErrorHandler()
    if gdal.GetLastErrorMsg().find(
            'Cannot use tile API for full resolution data on non Byte mosaic'
    ) < 0:
        gdaltest.post_reason('fail')
        print(gdal.GetLastErrorMsg())
        return 'fail'

    gdal.FileFromMemBuffer(
        '/vsimem/root/?name__is=mosaic_without_tiles', """{
"mosaics": [{
    "id": "mosaic_without_tiles",
    "name": "mosaic_without_tiles",
    "coordinate_system": "EPSG:3857",
    "datatype": "byte",
    "grid": {
        "quad_size": 4096,
        "resolution": 4.77731426716
    },
    "first_acquired": "first_date",
    "last_acquired": "last_date",
    "quad_download": true
}]
}""")

    # Should emit a warning
    gdal.ErrorReset()
    gdal.PushErrorHandler()
    gdal.SetConfigOption('PL_URL', '/vsimem/root')
    ds = gdal.OpenEx('PLMosaic:',
                     gdal.OF_RASTER,
                     open_options=[
                         'API_KEY=foo', 'MOSAIC=mosaic_without_tiles',
                         'CACHE_PATH=', 'USE_TILES=YES'
                     ])
    gdal.SetConfigOption('PL_URL', None)
    gdal.PopErrorHandler()
    if gdal.GetLastErrorMsg().find(
            'Cannot find tile definition, so use_tiles will be ignored') < 0:
        gdaltest.post_reason('fail')
        print(gdal.GetLastErrorMsg())
        return 'fail'

    return 'success'
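
The test above exercises the PLMosaic driver against a Planet API mocked through GDAL's in-memory /vsimem filesystem. A minimal standalone sketch of that mocking pattern; the path and payload here are illustrative, not from the test suite:

from osgeo import gdal

# Serve a canned JSON response from GDAL's in-memory filesystem.
gdal.FileFromMemBuffer('/vsimem/root/endpoint', '{"mosaics": []}')

# Any GDAL code reading that path now sees the canned bytes.
f = gdal.VSIFOpenL('/vsimem/root/endpoint', 'rb')
data = gdal.VSIFReadL(1, 1024, f)
gdal.VSIFCloseL(f)
print(data)  # b'{"mosaics": []}'

# Remove the fake file once the test is done.
gdal.Unlink('/vsimem/root/endpoint')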
Example #2
    def test_check_geometries(self):
        """DelineateIt: Check that we can reasonably repair geometries."""
        from natcap.invest import delineateit
        srs = osr.SpatialReference()
        srs.ImportFromEPSG(32731)  # WGS84/UTM zone 31s

        dem_matrix = numpy.array(
            [[0, 1, 0, 0, 0, 0],
             [0, 1, 0, 0, 0, 0],
             [0, 1, 0, 0, 0, 0],
             [0, 1, 0, 0, 0, 0],
             [0, 1, 1, 1, 1, 1],
             [0, 1, 0, 0, 0, 0],
             [0, 1, 0, 0, 0, 0]], dtype=numpy.int8)
        dem_raster_path = os.path.join(self.workspace_dir, 'dem.tif')
        pygeoprocessing.testing.create_raster_on_disk(
            [dem_matrix],
            origin=(2, -2),
            pixel_size=(2, -2),
            projection_wkt=srs.ExportToWkt(),
            nodata=255,  # byte datatype
            filename=dem_raster_path)

        # empty geometry
        invalid_geometry = ogr.CreateGeometryFromWkt('POLYGON EMPTY')
        self.assertTrue(invalid_geometry.IsEmpty())

        # point outside of the DEM bbox
        invalid_point = ogr.CreateGeometryFromWkt('POINT (-100 -100)')

        # line intersects the DEM but is not contained by it
        valid_line = ogr.CreateGeometryFromWkt(
            'LINESTRING (-100 100, 100 -100)')

        # invalid polygon could be fixed by buffering by 0
        invalid_bowtie_polygon = ogr.CreateGeometryFromWkt(
            'POLYGON ((2 -2, 6 -2, 2 -6, 6 -6, 2 -2))')
        self.assertFalse(invalid_bowtie_polygon.IsValid())

        # Bowtie polygon with vertex in the middle, could be fixed
        # by buffering by 0
        invalid_alt_bowtie_polygon = ogr.CreateGeometryFromWkt(
            'POLYGON ((2 -2, 6 -2, 4 -4, 6 -6, 2 -6, 4 -4, 2 -2))')
        self.assertFalse(invalid_alt_bowtie_polygon.IsValid())

        # invalid polygon could be fixed by closing rings
        invalid_open_ring_polygon = ogr.CreateGeometryFromWkt(
            'POLYGON ((2 -2, 6 -2, 6 -6, 2 -6))')
        self.assertFalse(invalid_open_ring_polygon.IsValid())

        gpkg_driver = gdal.GetDriverByName('GPKG')
        outflow_vector_path = os.path.join(self.workspace_dir, 'vector.gpkg')
        outflow_vector = gpkg_driver.Create(
            outflow_vector_path, 0, 0, 0, gdal.GDT_Unknown)
        outflow_layer = outflow_vector.CreateLayer(
            'outflow_layer', srs, ogr.wkbUnknown)
        outflow_layer.CreateField(ogr.FieldDefn('geom_id', ogr.OFTInteger))

        outflow_layer.StartTransaction()
        for index, geometry in enumerate((invalid_geometry,
                                          invalid_point,
                                          valid_line,
                                          invalid_bowtie_polygon,
                                          invalid_alt_bowtie_polygon,
                                          invalid_open_ring_polygon)):
            if geometry is None:
                self.fail('Geometry could not be created')

            outflow_feature = ogr.Feature(outflow_layer.GetLayerDefn())
            outflow_feature.SetField('geom_id', index)
            outflow_feature.SetGeometry(geometry)
            outflow_layer.CreateFeature(outflow_feature)
        outflow_layer.CommitTransaction()

        self.assertEqual(outflow_layer.GetFeatureCount(), 6)
        outflow_layer = None
        outflow_vector = None

        target_vector_path = os.path.join(self.workspace_dir, 'checked_geometries.gpkg')
        with self.assertRaises(ValueError) as cm:
            delineateit.check_geometries(
                outflow_vector_path, dem_raster_path, target_vector_path,
                skip_invalid_geometry=False
            )
        self.assertTrue('is invalid' in str(cm.exception))

        delineateit.check_geometries(
            outflow_vector_path, dem_raster_path, target_vector_path,
            skip_invalid_geometry=True
        )

        # I only expect to see 1 feature in the output layer, as there's only 1
        # valid geometry.
        expected_geom_areas = {
            2: 0,
        }

        target_vector = gdal.OpenEx(target_vector_path, gdal.OF_VECTOR)
        target_layer = target_vector.GetLayer()
        self.assertEqual(target_layer.GetFeatureCount(), len(expected_geom_areas))

        for feature in target_layer:
            geom = feature.GetGeometryRef()
            self.assertAlmostEqual(
                geom.Area(), expected_geom_areas[feature.GetField('geom_id')])

        target_layer = None
        target_vector = None
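
The comments in this test note that bowtie polygons can often be repaired by buffering by 0. A minimal sketch of that repair, using only OGR calls that also appear in the test:

from osgeo import ogr

# Self-intersecting "bowtie" ring, invalid as written.
bowtie = ogr.CreateGeometryFromWkt(
    'POLYGON ((2 -2, 6 -2, 2 -6, 6 -6, 2 -2))')
assert not bowtie.IsValid()

# Buffering by 0 re-nodes the ring into a valid geometry; depending on
# the GEOS version the repaired shape may keep only part of the area.
repaired = bowtie.Buffer(0)
assert repaired.IsValid()
print(repaired.ExportToWkt())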
Example #3
def process(argv, progress=None, progress_arg=None):

    if len(argv) == 0:
        return Usage()

    dst_filename = None
    output_format = None
    src_datasets = []
    overwrite_ds = False
    overwrite_layer = False
    update = False
    append = False
    single_layer = False
    layer_name_template = None
    skip_failures = False
    src_geom_types = []
    field_strategy = None
    src_layer_field_name = None
    src_layer_field_content = None
    a_srs = None
    s_srs = None
    t_srs = None
    dsco = []
    lco = []

    i = 0
    while i < len(argv):
        arg = argv[i]
        if (arg == '-f' or arg == '-of') and i + 1 < len(argv):
            i = i + 1
            output_format = argv[i]
        elif arg == '-o' and i + 1 < len(argv):
            i = i + 1
            dst_filename = argv[i]
        elif arg == '-progress':
            progress = ogr.TermProgress_nocb
            progress_arg = None
        elif arg == '-q' or arg == '-quiet':
            pass
        elif arg[0:5] == '-skip':
            skip_failures = True
        elif arg == '-update':
            update = True
        elif arg == '-overwrite_ds':
            overwrite_ds = True
        elif arg == '-overwrite_layer':
            overwrite_layer = True
            update = True
        elif arg == '-append':
            append = True
            update = True
        elif arg == '-single':
            single_layer = True
        elif arg == '-a_srs' and i + 1 < len(argv):
            i = i + 1
            a_srs = argv[i]
        elif arg == '-s_srs' and i + 1 < len(argv):
            i = i + 1
            s_srs = argv[i]
        elif arg == '-t_srs' and i + 1 < len(argv):
            i = i + 1
            t_srs = argv[i]
        elif arg == '-nln' and i + 1 < len(argv):
            i = i + 1
            layer_name_template = argv[i]
        elif arg == '-field_strategy' and i + 1 < len(argv):
            i = i + 1
            field_strategy = argv[i]
        elif arg == '-src_layer_field_name' and i + 1 < len(argv):
            i = i + 1
            src_layer_field_name = argv[i]
        elif arg == '-src_layer_field_content' and i + 1 < len(argv):
            i = i + 1
            src_layer_field_content = argv[i]
        elif arg == '-dsco' and i + 1 < len(argv):
            i = i + 1
            dsco.append(argv[i])
        elif arg == '-lco' and i + 1 < len(argv):
            i = i + 1
            lco.append(argv[i])
        elif arg == '-src_geom_type' and i + 1 < len(argv):
            i = i + 1
            src_geom_type_names = argv[i].split(',')
            for src_geom_type_name in src_geom_type_names:
                src_geom_type = _GetGeomType(src_geom_type_name)
                if src_geom_type is None:
                    print('ERROR: Unrecognized geometry type: %s' %
                          src_geom_type_name)
                    return 1
                src_geom_types.append(src_geom_type)
        elif arg[0] == '-':
            print('ERROR: Unrecognized argument : %s' % arg)
            return Usage()
        else:
            src_datasets.append(arg)
        i = i + 1

    if dst_filename is None:
        print('Missing -o')
        return 1

    if update:
        if output_format is not None:
            print('ERROR: -f incompatible with -update')
            return 1
        if len(dsco) != 0:
            print('ERROR: -dsco incompatible with -update')
            return 1
        output_format = ''
    else:
        if output_format is None:
            output_format = 'ESRI Shapefile'

    if src_layer_field_content is None:
        src_layer_field_content = '{AUTO_NAME}'
    elif src_layer_field_name is None:
        src_layer_field_name = 'source_ds_lyr'

    if not single_layer and output_format == 'ESRI Shapefile' and \
       dst_filename.lower().endswith('.shp'):
        print('ERROR: Non-single layer mode incompatible with non-directory '
              'shapefile output')
        return 1

    if len(src_datasets) == 0:
        print('ERROR: No source datasets')
        return 1

    if layer_name_template is None:
        if single_layer:
            layer_name_template = 'merged'
        else:
            layer_name_template = '{AUTO_NAME}'

    vrt_filename = None
    if not EQUAL(output_format, 'VRT'):
        dst_ds = gdal.OpenEx(dst_filename, gdal.OF_VECTOR | gdal.OF_UPDATE)
        if dst_ds is not None:
            if not update and not overwrite_ds:
                print('ERROR: Destination dataset already exists, ' +
                      'but neither -update nor -overwrite_ds was specified')
                return 1
            if overwrite_ds:
                drv = dst_ds.GetDriver()
                dst_ds = None
                if drv.GetDescription() == 'OGR_VRT':
                    # We don't want to destroy the sources of the VRT
                    gdal.Unlink(dst_filename)
                else:
                    drv.Delete(dst_filename)
        elif update:
            print('ERROR: Destination dataset does not exist')
            return 1
        if dst_ds is None:
            drv = gdal.GetDriverByName(output_format)
            if drv is None:
                print('ERROR: Invalid driver: %s' % output_format)
                return 1
            dst_ds = drv.Create(dst_filename, 0, 0, 0, gdal.GDT_Unknown, dsco)
            if dst_ds is None:
                return 1

        vrt_filename = '/vsimem/_ogrmerge_.vrt'
    else:
        vrt_filename = dst_filename

    f = gdal.VSIFOpenL(vrt_filename, 'wb')
    if f is None:
        print('ERROR: Cannot create %s' % vrt_filename)
        return 1

    writer = XMLWriter(f)
    writer.open_element('OGRVRTDataSource')

    if single_layer:

        ogr_vrt_union_layer_written = False

        for src_ds_idx, src_dsname in enumerate(src_datasets):
            src_ds = ogr.Open(src_dsname)
            if src_ds is None:
                print('ERROR: Cannot open %s' % src_dsname)
                if skip_failures:
                    continue
                gdal.VSIFCloseL(f)
                gdal.Unlink(vrt_filename)
                return 1
            for src_lyr_idx, src_lyr in enumerate(src_ds):
                if len(src_geom_types) != 0:
                    gt = ogr.GT_Flatten(src_lyr.GetGeomType())
                    if gt not in src_geom_types:
                        continue

                if not ogr_vrt_union_layer_written:
                    ogr_vrt_union_layer_written = True
                    writer.open_element('OGRVRTUnionLayer',
                                        attrs={'name': layer_name_template})

                    if src_layer_field_name is not None:
                        writer.write_element_value('SourceLayerFieldName',
                                                   src_layer_field_name)

                    if field_strategy is not None:
                        writer.write_element_value('FieldStrategy',
                                                   field_strategy)

                layer_name = src_layer_field_content

                basename = None
                if os.path.exists(src_dsname):
                    basename = os.path.basename(src_dsname)
                    if basename.find('.') >= 0:
                        basename = '.'.join(basename.split(".")[0:-1])

                if basename == src_lyr.GetName():
                    layer_name = layer_name.replace('{AUTO_NAME}', basename)
                elif basename is None:
                    layer_name = layer_name.replace(
                        '{AUTO_NAME}',
                        'Dataset%d_%s' % (src_ds_idx, src_lyr.GetName()))
                else:
                    layer_name = layer_name.replace(
                        '{AUTO_NAME}', basename + '_' + src_lyr.GetName())

                if basename is not None:
                    layer_name = layer_name.replace('{DS_BASENAME}', basename)
                else:
                    layer_name = layer_name.replace('{DS_BASENAME}',
                                                    src_dsname)
                layer_name = layer_name.replace('{DS_NAME}', '%s' % src_dsname)
                layer_name = layer_name.replace('{DS_INDEX}',
                                                '%d' % src_ds_idx)
                layer_name = layer_name.replace('{LAYER_NAME}',
                                                src_lyr.GetName())
                layer_name = layer_name.replace('{LAYER_INDEX}',
                                                '%d' % src_lyr_idx)

                if t_srs is not None:
                    writer.open_element('OGRVRTWarpedLayer')

                writer.open_element('OGRVRTLayer', attrs={'name': layer_name})
                attrs = None
                if EQUAL(output_format, 'VRT') and \
                   os.path.exists(src_dsname) and \
                   not os.path.isabs(src_dsname) and \
                   vrt_filename.find('/') < 0 and \
                   vrt_filename.find('\\') < 0:
                    attrs = {'relativeToVRT': '1'}
                writer.write_element_value('SrcDataSource',
                                           src_dsname,
                                           attrs=attrs)
                writer.write_element_value('SrcLayer', src_lyr.GetName())

                if a_srs is not None:
                    writer.write_element_value('LayerSRS', a_srs)

                writer.close_element('OGRVRTLayer')

                if t_srs is not None:
                    if s_srs is not None:
                        writer.write_element_value('SrcSRS', s_srs)

                    writer.write_element_value('TargetSRS', t_srs)

                    writer.close_element('OGRVRTWarpedLayer')

        if ogr_vrt_union_layer_written:
            writer.close_element('OGRVRTUnionLayer')

    else:

        for src_ds_idx, src_dsname in enumerate(src_datasets):
            src_ds = ogr.Open(src_dsname)
            if src_ds is None:
                print('ERROR: Cannot open %s' % src_dsname)
                if skip_failures:
                    continue
                gdal.VSIFCloseL(f)
                gdal.Unlink(vrt_filename)
                return 1
            for src_lyr_idx, src_lyr in enumerate(src_ds):
                if len(src_geom_types) != 0:
                    gt = ogr.GT_Flatten(src_lyr.GetGeomType())
                    if gt not in src_geom_types:
                        continue

                layer_name = layer_name_template
                basename = None
                if os.path.exists(src_dsname):
                    basename = os.path.basename(src_dsname)
                    if basename.find('.') >= 0:
                        basename = '.'.join(basename.split(".")[0:-1])

                if basename == src_lyr.GetName():
                    layer_name = layer_name.replace('{AUTO_NAME}', basename)
                elif basename is None:
                    layer_name = layer_name.replace(
                        '{AUTO_NAME}',
                        'Dataset%d_%s' % (src_ds_idx, src_lyr.GetName()))
                else:
                    layer_name = layer_name.replace(
                        '{AUTO_NAME}', basename + '_' + src_lyr.GetName())

                if basename is not None:
                    layer_name = layer_name.replace('{DS_BASENAME}', basename)
                elif layer_name.find('{DS_BASENAME}') >= 0:
                    if skip_failures:
                        if layer_name.find('{DS_INDEX}') < 0:
                            layer_name = layer_name.replace(
                                '{DS_BASENAME}', 'Dataset%d' % src_ds_idx)
                    else:
                        print('ERROR: Layer name template %s '
                              'includes {DS_BASENAME} '
                              'but %s is not a file' %
                              (layer_name_template, src_dsname))

                        gdal.VSIFCloseL(f)
                        gdal.Unlink(vrt_filename)
                        return 1
                layer_name = layer_name.replace('{DS_NAME}', '%s' % src_dsname)
                layer_name = layer_name.replace('{DS_INDEX}',
                                                '%d' % src_ds_idx)
                layer_name = layer_name.replace('{LAYER_NAME}',
                                                src_lyr.GetName())
                layer_name = layer_name.replace('{LAYER_INDEX}',
                                                '%d' % src_lyr_idx)

                if t_srs is not None:
                    writer.open_element('OGRVRTWarpedLayer')

                writer.open_element('OGRVRTLayer', attrs={'name': layer_name})
                attrs = None
                if EQUAL(output_format, 'VRT') and \
                   os.path.exists(src_dsname) and \
                   not os.path.isabs(src_dsname) and \
                   vrt_filename.find('/') < 0 and \
                   vrt_filename.find('\\') < 0:
                    attrs = {'relativeToVRT': '1'}
                writer.write_element_value('SrcDataSource',
                                           src_dsname,
                                           attrs=attrs)
                writer.write_element_value('SrcLayer', src_lyr.GetName())

                if a_srs is not None:
                    writer.write_element_value('LayerSRS', a_srs)

                writer.close_element('OGRVRTLayer')

                if t_srs is not None:
                    if s_srs is not None:
                        writer.write_element_value('SrcSRS', s_srs)

                    writer.write_element_value('TargetSRS', t_srs)

                    writer.close_element('OGRVRTWarpedLayer')

    writer.close_element('OGRVRTDataSource')

    gdal.VSIFCloseL(f)

    ret = 0
    if not EQUAL(output_format, 'VRT'):
        accessMode = None
        if append:
            accessMode = 'append'
        elif overwrite_layer:
            accessMode = 'overwrite'
        ret = gdal.VectorTranslate(dst_ds,
                                   vrt_filename,
                                   accessMode=accessMode,
                                   layerCreationOptions=lco,
                                   skipFailures=skip_failures,
                                   callback=progress,
                                   callback_data=progress_arg)
        if ret == 1:
            ret = 0
        else:
            ret = 1
        gdal.Unlink(vrt_filename)

    return ret
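
Assuming the process() function above is importable (the code appears to come from GDAL's ogrmerge utility), a typical invocation might merge two shapefiles into a single GeoPackage layer. The file names below are illustrative:

# Merge two shapefiles into one GeoPackage layer named "merged".
# In multi-layer mode, -nln also accepts templates such as
# '{DS_BASENAME}_{LAYER_NAME}', per the substitutions in the code above.
ret = process(['-f', 'GPKG', '-o', 'merged.gpkg', '-single',
               '-nln', 'merged', 'roads.shp', 'rivers.shp'])
assert ret == 0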
Example #4
def execute(args):
    """Habitat Quality.

    Open files necessary for the portion of the habitat_quality
    model.

    Parameters:
        workspace_dir (string): a path to the directory into which output
            and other temporary files will be written (required)
        lulc_cur_path (string): a path to an input land use/land cover raster
            (required)
        lulc_fut_path (string): a path to an input land use/land cover raster
            (optional)
        lulc_bas_path (string): a path to an input land use/land cover raster
            (optional, but required for rarity calculations)
        threat_raster_folder (string): a path to the directory that will
            contain all threat rasters (required)
        threats_table_path (string): a path to an input CSV containing data
            of all the considered threats. Each row is a degradation source
            and each column a different attribute of the source with the
            following names: 'THREAT', 'MAX_DIST', 'WEIGHT', 'DECAY' (required).
        access_vector_path (string): a path to an input polygon shapefile
            containing data on the relative protection against threats (optional)
        sensitivity_table_path (string): a path to an input CSV file of LULC
            types, whether they are considered habitat, and their sensitivity
            to each threat (required)
        half_saturation_constant (float): a python float that determines
            the spread and central tendency of habitat quality scores
            (required)
        results_suffix (string): a python string that will be inserted into all
            raster paths just before the file extension (optional).

    Example Args Dictionary::

        {
            'workspace_dir': 'path/to/workspace_dir',
            'lulc_cur_path': 'path/to/lulc_cur_raster',
            'lulc_fut_path': 'path/to/lulc_fut_raster',
            'lulc_bas_path': 'path/to/lulc_bas_raster',
            'threat_raster_folder': 'path/to/threat_rasters/',
            'threats_table_path': 'path/to/threats_csv',
            'access_vector_path': 'path/to/access_shapefile',
            'sensitivity_table_path': 'path/to/sensitivity_csv',
            'half_saturation_constant': 0.5,
            'results_suffix': '_results',
        }

    Returns:
        None
    """
    workspace = args['workspace_dir']

    # Append a _ to the suffix if it's not empty and doesn't already have one
    suffix = utils.make_suffix_string(args, 'results_suffix')

    # Check to see if each of the workspace folders exists.  If not, create the
    # folder in the filesystem.
    inter_dir = os.path.join(workspace, 'intermediate')
    out_dir = os.path.join(workspace, 'output')
    kernel_dir = os.path.join(inter_dir, 'kernels')
    utils.make_directories([inter_dir, out_dir, kernel_dir])

    # get a handle on the folder with the threat rasters
    threat_raster_dir = args['threat_raster_folder']

    # Ensure the key is a string.
    threat_dict = {
        str(key): value for key, value in utils.build_lookup_from_csv(
            args['threats_table_path'], 'THREAT', to_lower=False).items()}
    sensitivity_dict = utils.build_lookup_from_csv(
        args['sensitivity_table_path'], 'LULC', to_lower=False)

    # check that the required headers exist in the sensitivity table.
    # Raise exception if they don't.
    sens_header_list = list(sensitivity_dict.values())[0]
    required_sens_header_list = ['LULC', 'NAME', 'HABITAT']
    missing_sens_header_list = [
        h for h in required_sens_header_list if h not in sens_header_list]
    if missing_sens_header_list:
        raise ValueError(
            'Column(s) %s are missing in the sensitivity table' %
            (', '.join(missing_sens_header_list)))

    # check that the threat names in the threats table match with the threats
    # columns in the sensitivity table. Raise exception if they don't.
    for threat in threat_dict:
        if 'L_' + threat not in sens_header_list:
            missing_threat_header_list = (
                set(sens_header_list) - set(required_sens_header_list))
            raise ValueError(
                'Threat "%s" does not match any column in the sensitivity '
                'table. Possible columns: %s' %
                (threat, missing_threat_header_list))

    # get the half saturation constant
    try:
        half_saturation = float(args['half_saturation_constant'])
    except ValueError:
        raise ValueError('Half-saturation constant is not a number. '
                         'It is: %s' % args['half_saturation_constant'])

    # declare dictionaries to store the land cover and the threat rasters
    # pertaining to the different threats
    lulc_path_dict = {}
    threat_path_dict = {}
    # also store land cover and threat rasters in a list
    lulc_and_threat_raster_list = []
    aligned_raster_list = []
    # declare a set to store unique codes from lulc rasters
    raster_unique_lucodes = set()

    # compile all the threat rasters associated with the land cover
    for lulc_key, lulc_args in (('_c', 'lulc_cur_path'),
                                ('_f', 'lulc_fut_path'),
                                ('_b', 'lulc_bas_path')):
        if lulc_args in args:
            lulc_path = args[lulc_args]
            lulc_path_dict[lulc_key] = lulc_path
            # save land cover paths in a list for alignment and resize
            lulc_and_threat_raster_list.append(lulc_path)
            aligned_raster_list.append(
                os.path.join(
                    inter_dir, os.path.basename(lulc_path).replace(
                        '.tif', '_aligned.tif')))

            # save unique codes to check if it's missing in sensitivity table
            for _, lulc_block in pygeoprocessing.iterblocks((lulc_path, 1)):
                raster_unique_lucodes.update(numpy.unique(lulc_block))

            # Remove the nodata value from the set of land cover codes.
            nodata = pygeoprocessing.get_raster_info(lulc_path)['nodata'][0]
            try:
                raster_unique_lucodes.remove(nodata)
            except KeyError:
                # KeyError when the nodata value was not encountered in the
                # raster's pixel values.  Same result when nodata value is
                # None.
                pass

            # add a key to the threat dictionary that associates all threat
            # rasters with this land cover
            threat_path_dict['threat' + lulc_key] = {}

            # for each threat given in the CSV file try opening the associated
            # raster which should be found in threat_raster_folder
            for threat in threat_dict:
                # it's okay to have no threat raster for baseline scenario
                threat_path_dict['threat' + lulc_key][threat] = (
                    resolve_ambiguous_raster_path(
                        os.path.join(threat_raster_dir, threat + lulc_key),
                        raise_error=(lulc_key != '_b')))

                # save threat paths in a list for alignment and resize
                threat_path = threat_path_dict['threat' + lulc_key][threat]
                if threat_path:
                    lulc_and_threat_raster_list.append(threat_path)
                    aligned_raster_list.append(
                        os.path.join(
                            inter_dir, os.path.basename(lulc_path).replace(
                                '.tif', '_aligned.tif')))
    # check if there's any lucode from the LULC rasters missing in the
    # sensitivity table
    table_unique_lucodes = set(sensitivity_dict.keys())
    missing_lucodes = raster_unique_lucodes.difference(table_unique_lucodes)
    if missing_lucodes:
        raise ValueError(
            'The following land cover codes were found in your landcover rasters '
            'but not in your sensitivity table. Check your sensitivity table '
            'to see if they are missing: %s. \n\n' %
            ', '.join([str(x) for x in sorted(missing_lucodes)]))

    # Align and resize all the land cover and threat rasters,
    # and store them in the intermediate folder
    LOGGER.info('Starting aligning and resizing land cover and threat rasters')

    lulc_raster_info = pygeoprocessing.get_raster_info(args['lulc_cur_path'])
    lulc_pixel_size = lulc_raster_info['pixel_size']
    lulc_bbox = lulc_raster_info['bounding_box']

    aligned_raster_list = [
        os.path.join(inter_dir, os.path.basename(path).replace(
            '.tif', '_aligned.tif')) for path in lulc_and_threat_raster_list]

    pygeoprocessing.align_and_resize_raster_stack(
        lulc_and_threat_raster_list, aligned_raster_list,
        ['near']*len(lulc_and_threat_raster_list), lulc_pixel_size,
        lulc_bbox)

    LOGGER.info('Finished aligning and resizing land cover and threat rasters')

    # Modify paths in lulc_path_dict and threat_path_dict to be aligned rasters
    for lulc_key, lulc_path in lulc_path_dict.items():
        lulc_path_dict[lulc_key] = os.path.join(
            inter_dir, os.path.basename(lulc_path).replace(
                '.tif', '_aligned.tif'))
        for threat in threat_dict:
            threat_path = threat_path_dict['threat' + lulc_key][threat]
            if threat_path in lulc_and_threat_raster_list:
                aligned_threat_path = os.path.join(
                    inter_dir, os.path.basename(threat_path).replace(
                        '.tif', '_aligned.tif'))
                threat_path_dict['threat' + lulc_key][threat] = (
                    aligned_threat_path)

                # Iterate though the threat raster and update pixel values
                # as needed so that:
                #  * Nodata values are replaced with 0
                #  * Anything other than 0 or nodata is replaced with 1
                LOGGER.info('Preprocessing threat values for %s',
                            aligned_threat_path)
                threat_nodata = pygeoprocessing.get_raster_info(
                    aligned_threat_path)['nodata'][0]
                threat_raster = gdal.OpenEx(aligned_threat_path,
                                            gdal.OF_RASTER | gdal.OF_UPDATE)
                threat_band = threat_raster.GetRasterBand(1)
                for block_offset in pygeoprocessing.iterblocks(
                        (aligned_threat_path, 1), offset_only=True):
                    block = threat_band.ReadAsArray(**block_offset)

                    # First check if we actually need to set anything.
                    # No need to perform unnecessary writes!
                    if set(numpy.unique(block)) == set([0, 1]):
                        continue

                    zero_threat = numpy.isclose(block, threat_nodata)
                    block[zero_threat] = 0
                    block[~numpy.isclose(block, 0)] = 1

                    threat_band.WriteArray(
                        block, yoff=block_offset['yoff'],
                        xoff=block_offset['xoff'])
                threat_band = None
                threat_raster = None

    LOGGER.info('Starting habitat_quality biophysical calculations')

    # Rasterize access vector, if value is null set to 1 (fully accessible),
    # else set to the value according to the ACCESS attribute
    cur_lulc_path = lulc_path_dict['_c']
    fill_value = 1.0
    try:
        LOGGER.info('Handling Access Shape')
        access_raster_path = os.path.join(
            inter_dir, 'access_layer%s.tif' % suffix)
        # create a new raster based on the raster info of current land cover
        pygeoprocessing.new_raster_from_base(
            cur_lulc_path, access_raster_path, gdal.GDT_Float32,
            [_OUT_NODATA], fill_value_list=[fill_value])
        pygeoprocessing.rasterize(
            args['access_vector_path'], access_raster_path, burn_values=None,
            option_list=['ATTRIBUTE=ACCESS'])

    except KeyError:
        LOGGER.info(
            'No Access Shape Provided, access raster filled with 1s.')

    # calculate the weight sum which is the sum of all the threats' weights
    weight_sum = 0.0
    for threat_data in threat_dict.values():
        # Sum weight of threats
        weight_sum = weight_sum + threat_data['WEIGHT']

    LOGGER.debug('lulc_path_dict : %s', lulc_path_dict)

    # for each land cover raster provided compute habitat quality
    for lulc_key, lulc_path in lulc_path_dict.items():
        LOGGER.info('Calculating habitat quality for landuse: %s', lulc_path)

        # Create raster of habitat based on habitat field
        habitat_raster_path = os.path.join(
            inter_dir, 'habitat%s%s.tif' % (lulc_key, suffix))
        map_raster_to_dict_values(
            lulc_path, habitat_raster_path, sensitivity_dict, 'HABITAT',
            _OUT_NODATA, values_required=False)

        # initialize a list that will store all the threat/threat rasters
        # after they have been adjusted for distance, weight, and access
        deg_raster_list = []

        # a list to keep track of the normalized weight for each threat
        weight_list = numpy.array([])

        # variable to indicate whether we should break out of calculations
        # for a land cover because a threat raster was not found
        exit_landcover = False

        # adjust each threat/threat raster for distance, weight, and access
        for threat, threat_data in threat_dict.items():
            LOGGER.info('Calculating threat: %s.\nThreat data: %s' %
                        (threat, threat_data))

            # get the threat raster for the specific threat
            threat_raster_path = threat_path_dict['threat' + lulc_key][threat]
            LOGGER.info('threat_raster_path %s', threat_raster_path)
            if threat_raster_path is None:
                LOGGER.info(
                    'The threat raster for %s could not be found for the land '
                    'cover %s. Skipping Habitat Quality calculation for this '
                    'land cover.' % (threat, lulc_key))
                exit_landcover = True
                break

            # need the pixel size for the threat raster so we can create
            # an appropriate kernel for convolution
            threat_pixel_size = pygeoprocessing.get_raster_info(
                threat_raster_path)['pixel_size']
            # pixel size tuple could have negative value
            mean_threat_pixel_size = (
                abs(threat_pixel_size[0]) + abs(threat_pixel_size[1]))/2.0

            # convert max distance (given in KM) to meters
            max_dist_m = threat_data['MAX_DIST'] * 1000.0

            # convert max distance from meters to the number of pixels that
            # represents on the raster
            max_dist_pixel = max_dist_m / mean_threat_pixel_size
            LOGGER.debug('Max distance in pixels: %f', max_dist_pixel)

            # blur the threat raster based on the effect of the threat over
            # distance
            decay_type = threat_data['DECAY']
            kernel_path = os.path.join(
                kernel_dir, 'kernel_%s%s%s.tif' % (threat, lulc_key, suffix))
            if decay_type == 'linear':
                make_linear_decay_kernel_path(max_dist_pixel, kernel_path)
            elif decay_type == 'exponential':
                utils.exponential_decay_kernel_raster(
                    max_dist_pixel, kernel_path)
            else:
                raise ValueError(
                    "Unknown type of decay in biophysical table, should be "
                    "either 'linear' or 'exponential'. Input was %s for threat"
                    " %s." % (decay_type, threat))

            filtered_threat_raster_path = os.path.join(
                inter_dir, 'filtered_%s%s%s.tif' % (threat, lulc_key, suffix))
            pygeoprocessing.convolve_2d(
                (threat_raster_path, 1), (kernel_path, 1),
                filtered_threat_raster_path,
                ignore_nodata=True)

            # create sensitivity raster based on threat
            sens_raster_path = os.path.join(
                inter_dir, 'sens_%s%s%s.tif' % (threat, lulc_key, suffix))
            map_raster_to_dict_values(
                lulc_path, sens_raster_path, sensitivity_dict, 'L_' + threat,
                _OUT_NODATA, values_required=True)

            # get the normalized weight for each threat
            weight_avg = threat_data['WEIGHT'] / weight_sum

            # add the threat raster adjusted by distance and the raster
            # representing sensitivity to the list to be passed to
            # raster_calculator below
            deg_raster_list.append(filtered_threat_raster_path)
            deg_raster_list.append(sens_raster_path)

            # store the normalized weight for each threat in a list that
            # will be used below in total_degradation
            weight_list = numpy.append(weight_list, weight_avg)

        # check to see if we got here because a threat raster was missing
        # and if so then we want to skip to the next landcover
        if exit_landcover:
            continue

        def total_degradation(*raster):
            """A vectorized function that computes the degradation value for
                each pixel based on each threat and then sums them together

                *raster - numpy arrays depicting the adjusted threat
                    value per pixel based on distance and sensitivity.
                    The arrays come in pairs so that the values for each threat
                    can be tracked:
                    [filtered_val_threat1, sens_val_threat1,
                     filtered_val_threat2, sens_val_threat2, ...]
                    There is an optional last value in the list which is the
                    access_raster value, but it is only present if
                    access_raster is not None.

                returns - the total degradation score for the pixel"""

            # we can not be certain how many threats the user will enter,
            # so we handle each filtered threat and sensitivity raster
            # in pairs
            sum_degradation = numpy.zeros(raster[0].shape)
            for index in range(len(raster) // 2):
                step = index * 2
                sum_degradation += (
                    raster[step] * raster[step + 1] * weight_list[index])

            nodata_mask = numpy.empty(raster[0].shape, dtype=numpy.int8)
            nodata_mask[:] = 0
            for array in raster:
                nodata_mask = nodata_mask | (array == _OUT_NODATA)

            # the last element in raster is access
            return numpy.where(
                    nodata_mask, _OUT_NODATA, sum_degradation * raster[-1])

        # add the access_raster onto the end of the collected raster list. The
        # access_raster will be values from the shapefile if provided or a
        # raster filled with all 1's if not
        deg_raster_list.append(access_raster_path)

        deg_sum_raster_path = os.path.join(
            out_dir, 'deg_sum' + lulc_key + suffix + '.tif')

        LOGGER.info('Starting raster calculation on total_degradation')

        deg_raster_band_list = [(path, 1) for path in deg_raster_list]
        pygeoprocessing.raster_calculator(
            deg_raster_band_list, total_degradation,
            deg_sum_raster_path, gdal.GDT_Float32, _OUT_NODATA)

        LOGGER.info('Finished raster calculation on total_degradation')

        # Compute habitat quality
        # ksq: a term used below to compute habitat quality
        ksq = half_saturation**_SCALING_PARAM

        def quality_op(degradation, habitat):
            """Vectorized function that computes habitat quality given
                a degradation and habitat value.

                degradation - a float from the created degradation
                    raster above.
                habitat - a float indicating habitat suitability from
                    from the habitat raster created above.

                returns - a float representing the habitat quality
                    score for a pixel
            """
            degredataion_clamped = numpy.where(degradation < 0, 0, degradation)

            return numpy.where(
                    (degradation == _OUT_NODATA) | (habitat == _OUT_NODATA),
                    _OUT_NODATA,
                    (habitat * (1.0 - ((degredataion_clamped**_SCALING_PARAM) /
                     (degredataion_clamped**_SCALING_PARAM + ksq)))))

        quality_path = os.path.join(
            out_dir, 'quality' + lulc_key + suffix + '.tif')

        LOGGER.info('Starting raster calculation on quality_op')

        deg_hab_raster_list = [deg_sum_raster_path, habitat_raster_path]

        deg_hab_raster_band_list = [
            (path, 1) for path in deg_hab_raster_list]
        pygeoprocessing.raster_calculator(
            deg_hab_raster_band_list, quality_op, quality_path,
            gdal.GDT_Float32, _OUT_NODATA)

        LOGGER.info('Finished raster calculation on quality_op')

    # Compute Rarity if user supplied baseline raster
    if '_b' not in lulc_path_dict:
        LOGGER.info('Baseline not provided to compute Rarity')
    else:
        lulc_base_path = lulc_path_dict['_b']

        # get the area of a base pixel to use for computing rarity where the
        # pixel sizes are different between base and cur/fut rasters
        base_pixel_size = pygeoprocessing.get_raster_info(
            lulc_base_path)['pixel_size']
        base_area = float(abs(base_pixel_size[0]) * abs(base_pixel_size[1]))
        base_nodata = pygeoprocessing.get_raster_info(
            lulc_base_path)['nodata'][0]

        lulc_code_count_b = raster_pixel_count(lulc_base_path)

        # compute rarity for current landscape and future (if provided)
        for lulc_key in ['_c', '_f']:
            if lulc_key not in lulc_path_dict:
                continue
            lulc_path = lulc_path_dict[lulc_key]
            lulc_time = 'current' if lulc_key == '_c' else 'future'

            # get the area of a cur/fut pixel
            lulc_pixel_size = pygeoprocessing.get_raster_info(
                lulc_path)['pixel_size']
            lulc_area = float(abs(lulc_pixel_size[0]) * abs(lulc_pixel_size[1]))
            lulc_nodata = pygeoprocessing.get_raster_info(
                lulc_path)['nodata'][0]

            def trim_op(base, cover_x):
                """Trim cover_x to the mask of base.

                Parameters:
                    base (numpy.ndarray): base raster from 'lulc_base'
                    cover_x (numpy.ndarray): either future or current land
                        cover raster from 'lulc_path' above

                Returns:
                    _OUT_NODATA where either array has nodata, otherwise
                    cover_x.
                """
                return numpy.where(
                    (base == base_nodata) | (cover_x == lulc_nodata),
                    base_nodata, cover_x)

            LOGGER.info('Create new cover for %s', lulc_path)

            new_cover_path = os.path.join(
                inter_dir, 'new_cover' + lulc_key + suffix + '.tif')

            LOGGER.info('Starting masking %s land cover to base land cover.'
                        % lulc_time)

            pygeoprocessing.raster_calculator(
                [(lulc_base_path, 1), (lulc_path, 1)], trim_op, new_cover_path,
                gdal.GDT_Float32, _OUT_NODATA)

            LOGGER.info('Finished masking %s land cover to base land cover.'
                        % lulc_time)

            LOGGER.info('Starting rarity computation on %s land cover.'
                        % lulc_time)

            lulc_code_count_x = raster_pixel_count(new_cover_path)

            # a dictionary to map LULC types to a number that depicts how
            # rare they are considered
            code_index = {}

            # compute rarity index for each lulc code
            # define 0.0 if an lulc code is found in the cur/fut landcover
            # but not the baseline
            for code in lulc_code_count_x:
                if code in lulc_code_count_b:
                    numerator = lulc_code_count_x[code] * lulc_area
                    denominator = lulc_code_count_b[code] * base_area
                    ratio = 1.0 - (numerator / denominator)
                    code_index[code] = ratio
                else:
                    code_index[code] = 0.0

            rarity_path = os.path.join(
                out_dir, 'rarity' + lulc_key + suffix + '.tif')

            pygeoprocessing.reclassify_raster(
                (new_cover_path, 1), code_index, rarity_path, gdal.GDT_Float32,
                _RARITY_NODATA)

            LOGGER.info('Finished rarity computation on %s land cover.'
                        % lulc_time)
    LOGGER.info('Finished habitat_quality biophysical calculations')
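
The quality_op closure above implements the habitat quality equation Q = H * (1 - D^z / (D^z + k^z)), with total degradation D, habitat suitability H, half-saturation constant k, and scaling parameter z (_SCALING_PARAM). A self-contained numpy sketch; the value z = 2.5 is an assumption, since _SCALING_PARAM is defined elsewhere in the module:

import numpy

half_saturation = 0.5  # k, taken from args['half_saturation_constant']
z = 2.5                # assumed value of _SCALING_PARAM
ksq = half_saturation**z

degradation = numpy.array([0.0, 0.1, 0.5, 1.0])
habitat = numpy.array([1.0, 1.0, 1.0, 1.0])

# Q = H * (1 - D^z / (D^z + k^z)): quality falls as degradation rises.
quality = habitat * (1.0 - degradation**z / (degradation**z + ksq))
print(quality)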
Example #5
def isSingleTableGpkg(layer):
    ds = gdal.OpenEx(layer)
    return ds.GetLayerCount() == 1
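
isSingleTableGpkg() will raise an AttributeError when gdal.OpenEx fails and returns None. A slightly defensive variant (a sketch, not from the original source):

from osgeo import gdal

def is_single_table_gpkg(path):
    # Return False instead of raising when the file cannot be opened.
    ds = gdal.OpenEx(path)
    if ds is None:
        return False
    return ds.GetLayerCount() == 1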
Example #6
def createAnomalyMap(anomalyMethod='linearrate',
                     anomalyThreshold=2.5,
                     filterThreshold=3.703703,
                     NHDDBPath=None,
                     NHDLayerName=None,
                     odir=None):
    global comidlist
    global Qfclist
    global Hfclist
    global fccount
    global h  # reuse h; reset first
    # create comid hash for forecast output
    h = None
    h = dict.fromkeys(comidlist)
    for i in range(0, fccount):
        h[comidlist[i]] = i

    # open NHDPlus MR to scan each flowline only once
    ds = gdal.OpenEx(NHDDBPath, gdal.OF_VECTOR | gdal.OF_READONLY)
    if ds is None:
        print "createAnomalyMap(): ERROR Open failed: " + str(NHDDBPath) + "\n"
        sys.exit(1)
    lyr = ds.GetLayerByName(NHDLayerName)
    if lyr is None:
        print "createAnomalyMap(): ERROR fetch layer: " + str(
            NHDLayerName) + "\n"
        sys.exit(1)
    lyr.ResetReading()
    num_records = lyr.GetFeatureCount()
    lyr_defn = lyr.GetLayerDefn()
    srs = lyr.GetSpatialRef()
    geomType = lyr.GetGeomType()
    # get index of attributes to be extracted
    fi_comid = lyr_defn.GetFieldIndex('COMID')
    fdef_comid = lyr_defn.GetFieldDefn(fi_comid)
    fi_huc = lyr_defn.GetFieldIndex('REACHCODE')
    fdef_huc = lyr_defn.GetFieldDefn(fi_huc)
    fi_meanflow = lyr_defn.GetFieldIndex('Q0001E')
    fdef_meanflow = lyr_defn.GetFieldDefn(fi_meanflow)

    # create output shp
    driverName = "ESRI Shapefile"
    ofilename = 'anomalymap-at-' + init_timestr + '-for-' + timestr
    of = odir + '/' + ofilename + '.shp'
    drv = gdal.GetDriverByName(driverName)
    if drv is None:
        print "createAnomalyMap(): ERROR %s driver not available.\n" % driverName
        sys.exit(1)
    ods = drv.Create(of, 0, 0, 0, gdal.GDT_Unknown)
    if ods is None:
        print "createAnomalyMap(): ERROR Creation of output file failed: " + of + "\n"
        sys.exit(1)
    olyr = ods.CreateLayer('anomalymap', srs, geomType)
    if olyr is None:
        print "createAnomalyMap(): ERROR Layer creation failed: anomalymap " + "\n"
        sys.exit(1)
    # create fields
    ofdef_comid = ogr.FieldDefn("COMID", ogr.OFTInteger)
    ofdef_H = ogr.FieldDefn("H", ogr.OFTReal)
    ofdef_Q = ogr.FieldDefn("Q", ogr.OFTReal)
    ofdef_rating = ogr.FieldDefn("RATING", ogr.OFTReal)
    if olyr.CreateField(ofdef_comid) != 0 or olyr.CreateField(
            fdef_huc) != 0 or olyr.CreateField(
                ofdef_Q) != 0 or olyr.CreateField(
                    fdef_meanflow) != 0 or olyr.CreateField(
                        ofdef_rating) != 0 or olyr.CreateField(ofdef_H) != 0:
        print "createAnomalyMap(): ERROR Creating fields in output .\n"
        sys.exit(1)
    # get integer index to speed up the loops
    olyr_defn = olyr.GetLayerDefn()
    ofi_comid = olyr_defn.GetFieldIndex('COMID')
    ofi_huc = olyr_defn.GetFieldIndex('REACHCODE')
    ofi_Q = olyr_defn.GetFieldIndex('Q')
    ofi_meanflow = olyr_defn.GetFieldIndex('Q0001E')
    ofi_rating = olyr_defn.GetFieldIndex('RATING')
    ofi_H = olyr_defn.GetFieldIndex('H')

    count = 0
    for f in lyr:  # for each of the ~2.67 million flowlines in NHDPlus MR
        comid = f.GetFieldAsInteger(fi_comid)
        if comid not in h:  # comid has no forecast record
            continue
        i = h[comid]  # index of this comid in Qfclist and Hfclist
        Qfc = Qfclist[i]
        meanflow = f.GetFieldAsDouble(fi_meanflow)
        rate = calcAnomalyRate(Qfc, meanflow, anomalyMethod, anomalyThreshold,
                               filterThreshold)
        if rate < 0.00000001:  # filter by rate diff
            continue
        # it is an anomaly, get it
        Hfc = Hfclist[i]
        huc = f.GetFieldAsString(fi_huc)
        # create feature and write to output
        fc = ogr.Feature(olyr_defn)
        fc.SetField(ofi_comid, comid)
        fc.SetField(ofi_huc, huc)
        fc.SetField(ofi_Q, Qfc)
        fc.SetField(ofi_meanflow, meanflow)
        fc.SetField(ofi_rating, rate)
        fc.SetField(ofi_H, Hfc)
        # create geom field
        geom = f.GetGeometryRef()
        fc.SetGeometry(geom)  # this method makes a copy of geom
        if olyr.CreateFeature(fc) != 0:
            print "createAnomalyMap(): ERROR Creating new feature in output for COMID=" + str(
                comid) + " .\n"
            sys.exit(1)
        fc.Destroy()
        count += 1
    ds = None
    ods = None

    print(datetime.now().strftime(
        "%Y-%m-%d %H:%M:%S : createAnomalyMap ") + " generated " + str(
            count) + " anomalies from " + str(fccount) + " forecast reaches")
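
The copy loop above follows the standard OGR feature-copy pattern: look up field indexes once, build an ogr.Feature against the output layer definition, set fields and geometry, then call CreateFeature. A self-contained sketch of the same pattern using the in-memory OGR driver:

from osgeo import ogr

# Build a tiny in-memory source layer with one attributed point feature.
drv = ogr.GetDriverByName('Memory')
src_ds = drv.CreateDataSource('src')
src_lyr = src_ds.CreateLayer('flowlines')
src_lyr.CreateField(ogr.FieldDefn('COMID', ogr.OFTInteger))
feat = ogr.Feature(src_lyr.GetLayerDefn())
feat.SetField('COMID', 42)
feat.SetGeometry(ogr.CreateGeometryFromWkt('POINT (0 0)'))
src_lyr.CreateFeature(feat)

# Copy features to a second layer, resolving field indexes once up front.
dst_ds = drv.CreateDataSource('dst')
dst_lyr = dst_ds.CreateLayer('copy')
dst_lyr.CreateField(ogr.FieldDefn('COMID', ogr.OFTInteger))
fi_comid = src_lyr.GetLayerDefn().GetFieldIndex('COMID')
for f in src_lyr:
    out = ogr.Feature(dst_lyr.GetLayerDefn())
    out.SetField('COMID', f.GetFieldAsInteger(fi_comid))
    out.SetGeometry(f.GetGeometryRef())  # SetGeometry copies the geometry
    dst_lyr.CreateFeature(out)
print(dst_lyr.GetFeatureCount())  # 1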
Example #7
    def archive_table(self, config):
        # Progress callback asserting monotonically increasing percentages;
        # it can be passed as the callback argument of GDAL utilities.
        tab = [0]
        def my_cbk(pct, _, arg):
            assert pct >= tab[0]
            tab[0] = pct
            return 1

        schema_name = config.get('schema_name', '')
        version_name = config.get('version_name', '')
        path = config.get('path', '')
        layerCreationOptions = config.get('layerCreationOptions',
                                            ['OVERWRITE=YES'])
        dstSRS = config.get('dstSRS', 'EPSG:4326')
        srcSRS = config.get('srcSRS', 'EPSG:4326')
        geometryType = config.get('geometryType', 'NONE')
        SQLStatement = config.get('SQLStatement', None)
        srcOpenOptions = config.get('srcOpenOptions',
                                ['AUTODETECT_TYPE=NO',
                                'EMPTY_STRING_AS_NULL=YES'])
        newFieldNames = config.get('newFieldNames', [])

        # initiate destination
        dstDS = gdal.OpenEx(self.engine, gdal.OF_VECTOR)

        # initiate source
        path = path.replace('ftp:/', self.ftp_prefix)
        path = path.replace('FTP_PREFIX', self.ftp_prefix) # for backward compatibility

        srcDS = Archiver.load_srcDS(path, srcOpenOptions, 
                                    newFieldNames, 
                                    self.s3_endpoint, 
                                    self.s3_secret_access_key, 
                                    self.s3_access_key_id)

        originalLayerName = srcDS.GetLayer().GetName()
        
        # check on schema
        dstDS.ExecuteSQL(f'CREATE SCHEMA IF NOT EXISTS {schema_name};')
        
        version = datetime.today().strftime("%Y/%m/%d") if version_name == '' else version_name
        layerName = f'{schema_name}.{version}'

        print(f'\nArchiving {layerName} to recipes...')
        gdal.VectorTranslate(
            dstDS,
            srcDS,
            SQLStatement=SQLStatement.replace(schema_name, originalLayerName) if SQLStatement else None,
            format='PostgreSQL',
            layerCreationOptions=layerCreationOptions,
            dstSRS=dstSRS,
            srcSRS=srcSRS,
            geometryType=geometryType,
            layerName=layerName,
            accessMode='overwrite',
            callback=gdal.TermProgress)

        # tag version as latest
        print(f'\nTagging {layerName} as {schema_name}.latest ...')

        try: 
            dstDS.ExecuteSQL(f'''
            DROP VIEW IF EXISTS {schema_name}.latest;
            ''')
        except: 
            pass

        try:
            dstDS.ExecuteSQL(f'''
            DROP TABLE IF EXISTS {schema_name}.latest;
            ''')
        except: 
            pass

        try:
            dstDS.ExecuteSQL(f'''
            UPDATE {schema_name}."{version}"
            SET wkb_geometry = st_makevalid(wkb_geometry);
            ''')
        except: 
            pass

        dstDS.ExecuteSQL(f'''
        CREATE VIEW {schema_name}.latest as (SELECT \'{version}\' as v, * from {schema_name}."{version}");
        ''')
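
archive_table() relies on gdal.OpenEx accepting an OGR PostgreSQL connection string (self.engine) as a vector data source, and on ExecuteSQL running raw SQL over that connection. A minimal sketch; the connection parameters are illustrative and require a reachable database:

from osgeo import gdal

# self.engine above is expected to be an OGR PostgreSQL connection string.
conn = 'PG:host=localhost dbname=recipes user=me password=secret'
dstDS = gdal.OpenEx(conn, gdal.OF_VECTOR)
if dstDS is not None:
    # ExecuteSQL sends raw SQL through the live connection.
    dstDS.ExecuteSQL('CREATE SCHEMA IF NOT EXISTS demo;')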
Example #8
def test_overviewds_1():
    ds = gdal.OpenEx('data/byte.tif', open_options=['OVERVIEW_LEVEL=-1'])
    assert ds is None
    ds = gdal.OpenEx('data/byte.tif', open_options=['OVERVIEW_LEVEL=0'])
    assert ds is None
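
The OVERVIEW_LEVEL open option only succeeds when the requested overview actually exists; data/byte.tif ships without overviews, so both opens return None. A sketch that builds an overview first and then opens it (file name illustrative):

from osgeo import gdal

# Create a small raster and give it one (2x decimated) overview level.
drv = gdal.GetDriverByName('GTiff')
ds = drv.Create('/vsimem/ovr_demo.tif', 64, 64, 1, gdal.GDT_Byte)
ds.BuildOverviews('NEAREST', [2])
ds = None

# OVERVIEW_LEVEL=0 now resolves to the first overview (32x32).
ovr = gdal.OpenEx('/vsimem/ovr_demo.tif', open_options=['OVERVIEW_LEVEL=0'])
print(ovr.RasterXSize, ovr.RasterYSize)  # 32 32
ovr = None
gdal.Unlink('/vsimem/ovr_demo.tif')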
Example #9
def gdal_edit(argv):

    argv = gdal.GeneralCmdLineProcessor(argv)
    if argv is None:
        return -1

    datasetname = None
    srs = None
    ulx = None
    uly = None
    lrx = None
    lry = None
    nodata = None
    unsetnodata = False
    xres = None
    yres = None
    unsetgt = False
    unsetstats = False
    stats = False
    setstats = False
    approx_stats = False
    unsetmd = False
    ro = False
    molist = []
    gcp_list = []
    open_options = []
    offset = []
    scale = []
    colorinterp = {}
    unsetrpc = False

    i = 1
    argc = len(argv)
    while i < argc:
        if argv[i] == '-ro':
            ro = True
        elif argv[i] == '-a_srs' and i < len(argv) - 1:
            srs = argv[i + 1]
            i = i + 1
        elif argv[i] == '-a_ullr' and i < len(argv) - 4:
            ulx = float(argv[i + 1])
            i = i + 1
            uly = float(argv[i + 1])
            i = i + 1
            lrx = float(argv[i + 1])
            i = i + 1
            lry = float(argv[i + 1])
            i = i + 1
        elif argv[i] == '-tr' and i < len(argv) - 2:
            xres = float(argv[i + 1])
            i = i + 1
            yres = float(argv[i + 1])
            i = i + 1
        elif argv[i] == '-a_nodata' and i < len(argv) - 1:
            nodata = float(argv[i + 1])
            i = i + 1
        elif argv[i] == '-scale' and i < len(argv) - 1:
            scale.append(float(argv[i + 1]))
            i = i + 1
            while i < len(argv) - 1 and ArgIsNumeric(argv[i + 1]):
                scale.append(float(argv[i + 1]))
                i = i + 1
        elif argv[i] == '-offset' and i < len(argv) - 1:
            offset.append(float(argv[i + 1]))
            i = i + 1
            while i < len(argv) - 1 and ArgIsNumeric(argv[i + 1]):
                offset.append(float(argv[i + 1]))
                i = i + 1
        elif argv[i] == '-mo' and i < len(argv) - 1:
            molist.append(argv[i + 1])
            i = i + 1
        elif argv[i] == '-gcp' and i + 4 < len(argv):
            pixel = float(argv[i + 1])
            i = i + 1
            line = float(argv[i + 1])
            i = i + 1
            x = float(argv[i + 1])
            i = i + 1
            y = float(argv[i + 1])
            i = i + 1
            if i + 1 < len(argv) and ArgIsNumeric(argv[i + 1]):
                z = float(argv[i + 1])
                i = i + 1
            else:
                z = 0
            gcp = gdal.GCP(x, y, z, pixel, line)
            gcp_list.append(gcp)
        elif argv[i] == '-unsetgt':
            unsetgt = True
        elif argv[i] == '-unsetrpc':
            unsetrpc = True
        elif argv[i] == '-unsetstats':
            unsetstats = True
        elif argv[i] == '-approx_stats':
            stats = True
            approx_stats = True
        elif argv[i] == '-stats':
            stats = True
        elif argv[i] == '-setstats' and i < len(argv) - 4:
            stats = True
            setstats = True
            if argv[i + 1] != 'None':
                statsmin = float(argv[i + 1])
            else:
                statsmin = None
            i = i + 1
            if argv[i + 1] != 'None':
                statsmax = float(argv[i + 1])
            else:
                statsmax = None
            i = i + 1
            if argv[i + 1] != 'None':
                statsmean = float(argv[i + 1])
            else:
                statsmean = None
            i = i + 1
            if argv[i + 1] != 'None':
                statsdev = float(argv[i + 1])
            else:
                statsdev = None
            i = i + 1
        elif argv[i] == '-unsetmd':
            unsetmd = True
        elif argv[i] == '-unsetnodata':
            unsetnodata = True
        elif argv[i] == '-oo' and i < len(argv) - 1:
            open_options.append(argv[i + 1])
            i = i + 1
        elif argv[i].startswith('-colorinterp_') and i < len(argv) - 1:
            band = int(argv[i][len('-colorinterp_'):])
            val = argv[i + 1]
            if val.lower() == 'red':
                val = gdal.GCI_RedBand
            elif val.lower() == 'green':
                val = gdal.GCI_GreenBand
            elif val.lower() == 'blue':
                val = gdal.GCI_BlueBand
            elif val.lower() == 'alpha':
                val = gdal.GCI_AlphaBand
            elif val.lower() == 'gray' or val.lower() == 'grey':
                val = gdal.GCI_GrayIndex
            elif val.lower() == 'undefined':
                val = gdal.GCI_Undefined
            else:
                sys.stderr.write(
                    'Unsupported color interpretation %s.\n' % val +
                    'Only red, green, blue, alpha, gray, undefined are supported.\n'
                )
                return Usage()
            colorinterp[band] = val
            i = i + 1
        elif argv[i][0] == '-':
            sys.stderr.write('Unrecognized option : %s\n' % argv[i])
            return Usage()
        elif datasetname is None:
            datasetname = argv[i]
        else:
            sys.stderr.write('Unexpected option : %s\n' % argv[i])
            return Usage()

        i = i + 1

    if datasetname is None:
        return Usage()

    if (srs is None and lry is None and yres is None and not unsetgt
            and not unsetstats and not stats and not setstats
            and nodata is None and not molist and not unsetmd and not gcp_list
            and not unsetnodata and not colorinterp and not scale
            and not offset and not unsetrpc):
        print('No option specified')
        print('')
        return Usage()

    exclusive_option = 0
    if lry is not None:
        exclusive_option = exclusive_option + 1
    if yres is not None:
        exclusive_option = exclusive_option + 1
    if unsetgt:
        exclusive_option = exclusive_option + 1
    if exclusive_option > 1:
        print('-a_ullr, -tr and -unsetgt options are exclusive.')
        print('')
        return Usage()

    if unsetstats and stats:
        print(
            '-unsetstats and either -stats or -approx_stats options are exclusive.'
        )
        print('')
        return Usage()

    if unsetnodata and nodata is not None:
        print('-unsetnodata and -a_nodata options are exclusive.')
        print('')
        return Usage()

    if open_options:
        if ro:
            ds = gdal.OpenEx(datasetname,
                             gdal.OF_RASTER,
                             open_options=open_options)
        else:
            ds = gdal.OpenEx(datasetname,
                             gdal.OF_RASTER | gdal.OF_UPDATE,
                             open_options=open_options)
    # GDAL 1.X compat
    elif ro:
        ds = gdal.Open(datasetname)
    else:
        ds = gdal.Open(datasetname, gdal.GA_Update)
    if ds is None:
        return -1

    if scale:
        if len(scale) == 1:
            scale = scale * ds.RasterCount
        elif len(scale) != ds.RasterCount:
            print(
                'If more than one scale value is provided, their number must match the number of bands.'
            )
            print('')
            return Usage()

    if offset:
        if len(offset) == 1:
            offset = offset * ds.RasterCount
        elif len(offset) != ds.RasterCount:
            print(
                'If more than one offset value is provided, their number must match the number of bands.'
            )
            print('')
            return Usage()

    wkt = None
    if srs == '' or srs == 'None':
        ds.SetProjection('')
    elif srs is not None:
        sr = osr.SpatialReference()
        if sr.SetFromUserInput(srs) != 0:
            print('Failed to process SRS definition: %s' % srs)
            return -1
        wkt = sr.ExportToWkt()
        if not gcp_list:
            ds.SetProjection(wkt)

    if lry is not None:
        gt = [
            ulx, (lrx - ulx) / ds.RasterXSize, 0, uly, 0,
            (lry - uly) / ds.RasterYSize
        ]
        ds.SetGeoTransform(gt)

    if yres is not None:
        gt = ds.GetGeoTransform()
        # Doh ! why is gt a tuple and not an array...
        gt = [gt[j] for j in range(6)]
        gt[1] = xres
        gt[5] = yres
        ds.SetGeoTransform(gt)

    if unsetgt:
        # For now only the GTiff driver understands an all-zero geotransform
        # as a hint to unset the geotransform
        if ds.GetDriver().ShortName == 'GTiff':
            ds.SetGeoTransform([0, 0, 0, 0, 0, 0])
        else:
            ds.SetGeoTransform([0, 1, 0, 0, 0, 1])

    if gcp_list:
        if wkt is None:
            wkt = ds.GetGCPProjection()
        if wkt is None:
            wkt = ''
        ds.SetGCPs(gcp_list, wkt)

    if nodata is not None:
        for i in range(ds.RasterCount):
            ds.GetRasterBand(i + 1).SetNoDataValue(nodata)
    elif unsetnodata:
        for i in range(ds.RasterCount):
            ds.GetRasterBand(i + 1).DeleteNoDataValue()

    if scale:
        for i in range(ds.RasterCount):
            ds.GetRasterBand(i + 1).SetScale(scale[i])

    if offset:
        for i in range(ds.RasterCount):
            ds.GetRasterBand(i + 1).SetOffset(offset[i])

    if unsetstats:
        for i in range(ds.RasterCount):
            band = ds.GetRasterBand(i + 1)
            for key in band.GetMetadata().keys():
                if key.startswith('STATISTICS_'):
                    band.SetMetadataItem(key, None)

    if stats:
        for i in range(ds.RasterCount):
            ds.GetRasterBand(i + 1).ComputeStatistics(approx_stats)

    if setstats:
        for i in range(ds.RasterCount):
            if statsmin is None or statsmax is None or statsmean is None or statsdev is None:
                ds.GetRasterBand(i + 1).ComputeStatistics(approx_stats)
                bmin, bmax, bmean, bstdev = ds.GetRasterBand(
                    i + 1).GetStatistics(approx_stats, True)
                if statsmin is None:
                    statsmin = bmin
                if statsmax is None:
                    statsmax = bmax
                if statsmean is None:
                    statsmean = bmean
                if statsdev is None:
                    statsdev = bstdev
            ds.GetRasterBand(i + 1).SetStatistics(statsmin, statsmax,
                                                  statsmean, statsdev)

    if molist:
        if unsetmd:
            md = {}
        else:
            md = ds.GetMetadata()
        for moitem in molist:
            equal_pos = moitem.find('=')
            if equal_pos > 0:
                md[moitem[0:equal_pos]] = moitem[equal_pos + 1:]
        ds.SetMetadata(md)
    elif unsetmd:
        ds.SetMetadata({})

    for band in colorinterp:
        ds.GetRasterBand(band).SetColorInterpretation(colorinterp[band])

    if unsetrpc:
        ds.SetMetadata(None, 'RPC')

    ds = band = None

    return 0
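
A usage sketch for the entry point above; the file name is a placeholder, and argv mirrors a command line (argv[0] is skipped by the parsing loop):

# Assign a CRS and a nodata value in place; returns 0 on success.
ret = gdal_edit(['gdal_edit.py', '-a_srs', 'EPSG:4326',
                 '-a_nodata', '0', 'input.tif'])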
Example #10
def ogr_mongodb_1():
    if ogrtest.mongodb_drv is None:
        return 'skip'

    # The below options must be used the very first time MongoDB is
    # initialized, otherwise they will get ignored
    open_options = []
    open_options += ['SSL_PEM_KEY_FILE=bla']
    open_options += ['SSL_PEM_KEY_PASSWORD=bla']
    open_options += ['SSL_CA_FILE=bla']
    open_options += ['SSL_CRL_FILE=bla']
    open_options += ['SSL_ALLOW_INVALID_CERTIFICATES=YES']
    open_options += ['SSL_ALLOW_INVALID_HOSTNAMES=YES']
    open_options += ['FIPS_MODE=YES']
    gdal.PushErrorHandler()
    ds = gdal.OpenEx('mongodb:', open_options=open_options)
    gdal.PopErrorHandler()

    # Might work or not depending on how the db is set up
    gdal.PushErrorHandler()
    ds = ogr.Open("mongodb:")
    gdal.PopErrorHandler()

    # Wrong URI
    gdal.PushErrorHandler()
    ds = ogr.Open("mongodb://")
    gdal.PopErrorHandler()
    if ds is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    # URI to non-existing host
    gdal.PushErrorHandler()
    ds = ogr.Open("mongodb://non_existing")
    gdal.PopErrorHandler()
    if ds is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    # Connect to non-existing host
    gdal.PushErrorHandler()
    ds = gdal.OpenEx('mongodb:', open_options=['HOST=non_existing'])
    gdal.PopErrorHandler()
    if ds is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    # All arguments split up
    open_options = []
    open_options += ['HOST=' + ogrtest.mongodb_test_host]
    open_options += ['PORT=' + str(ogrtest.mongodb_test_port)]
    open_options += ['DBNAME=' + ogrtest.mongodb_test_dbname]
    if ogrtest.mongodb_test_user is not None:
        open_options += ['USER=' + ogrtest.mongodb_test_user]
        open_options += ['PASSWORD=' + ogrtest.mongodb_test_password]
    ds = gdal.OpenEx('mongodb:', open_options=open_options)
    if ds is None:
        gdaltest.post_reason('fail')
        return 'fail'

    # Without DBNAME
    open_options = []
    open_options += ['HOST=' + ogrtest.mongodb_test_host]
    open_options += ['PORT=' + str(ogrtest.mongodb_test_port)]
    if ogrtest.mongodb_test_user is not None:
        open_options += ['AUTH_DBNAME=' + ogrtest.mongodb_test_dbname]
        open_options += ['USER=' + ogrtest.mongodb_test_user]
        open_options += ['PASSWORD=' + ogrtest.mongodb_test_password]
    gdal.PushErrorHandler()
    ds = gdal.OpenEx('mongodb:', open_options=open_options)
    gdal.PopErrorHandler()

    # A few error cases with authentication
    if ogrtest.mongodb_test_user is not None:
        open_options = []
        open_options += ['HOST=' + ogrtest.mongodb_test_host]
        open_options += ['PORT=' + str(ogrtest.mongodb_test_port)]
        open_options += ['DBNAME=' + ogrtest.mongodb_test_dbname]
        # Missing user and password
        gdal.PushErrorHandler()
        ds = gdal.OpenEx('mongodb:', open_options=open_options)
        gdal.PopErrorHandler()
        if ds is not None:
            gdaltest.post_reason('fail')
            return 'fail'

        open_options = []
        open_options += ['HOST=' + ogrtest.mongodb_test_host]
        open_options += ['PORT=' + str(ogrtest.mongodb_test_port)]
        open_options += ['DBNAME=' + ogrtest.mongodb_test_dbname]
        open_options += ['USER=' + ogrtest.mongodb_test_user]
        # Missing password
        gdal.PushErrorHandler()
        ds = gdal.OpenEx('mongodb:', open_options=open_options)
        gdal.PopErrorHandler()
        if ds is not None:
            gdaltest.post_reason('fail')
            return 'fail'

        open_options = []
        open_options += ['HOST=' + ogrtest.mongodb_test_host]
        open_options += ['PORT=' + str(ogrtest.mongodb_test_port)]
        open_options += ['USER=' + ogrtest.mongodb_test_user]
        open_options += ['PASSWORD=' + ogrtest.mongodb_test_password]
        # Missing DBNAME
        gdal.PushErrorHandler()
        ds = gdal.OpenEx('mongodb:', open_options=open_options)
        gdal.PopErrorHandler()
        if ds is not None:
            gdaltest.post_reason('fail')
            return 'fail'

        open_options = []
        open_options += ['HOST=' + ogrtest.mongodb_test_host]
        open_options += ['PORT=' + str(ogrtest.mongodb_test_port)]
        open_options += ['DBNAME=' + ogrtest.mongodb_test_dbname]
        open_options += ['USER=' + ogrtest.mongodb_test_user]
        open_options += [
            'PASSWORD=' + ogrtest.mongodb_test_password + '_wrong'
        ]
        # Wrong password
        gdal.PushErrorHandler()
        ds = gdal.OpenEx('mongodb:', open_options=open_options)
        gdal.PopErrorHandler()
        if ds is not None:
            gdaltest.post_reason('fail')
            return 'fail'

    # Test AUTH_JSON: invalid JSon
    gdal.PushErrorHandler()
    open_options = []
    open_options += ['HOST=' + ogrtest.mongodb_test_host]
    open_options += ['PORT=' + str(ogrtest.mongodb_test_port)]
    open_options += ['DBNAME=' + ogrtest.mongodb_test_dbname]
    open_options += ['AUTH_JSON={']
    ds = gdal.OpenEx('mongodb:', open_options=open_options)
    gdal.PopErrorHandler()
    if ds is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    # Test AUTH_JSON: missing mechanism
    gdal.PushErrorHandler()
    open_options = []
    open_options += ['HOST=' + ogrtest.mongodb_test_host]
    open_options += ['PORT=' + str(ogrtest.mongodb_test_port)]
    open_options += ['DBNAME=' + ogrtest.mongodb_test_dbname]
    open_options += ['AUTH_JSON={}']
    ds = gdal.OpenEx('mongodb:', open_options=open_options)
    gdal.PopErrorHandler()
    if ds is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    # Successful AUTH_JSON use
    if ogrtest.mongodb_test_user is not None:
        open_options = []
        open_options += ['HOST=' + ogrtest.mongodb_test_host]
        open_options += ['PORT=' + str(ogrtest.mongodb_test_port)]
        open_options += ['DBNAME=' + ogrtest.mongodb_test_dbname]
        open_options += ['AUTH_JSON={ "mechanism" : "MONGODB-CR", "db": "%s", "user": "%s", "pwd": "%s" }' % \
            (ogrtest.mongodb_test_dbname, ogrtest.mongodb_test_user, ogrtest.mongodb_test_password)]
        ds = gdal.OpenEx('mongodb:', open_options=open_options)
        if ds is None:
            gdaltest.post_reason('fail')
            return 'fail'

    return 'success'
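
The test assembles its connection options piece by piece; the same pattern condenses into a small helper. A sketch, with placeholder host, port and database values:

def mongodb_open_options(host, port, dbname, user=None, password=None):
    # Build the KEY=VALUE open options understood by the MongoDB driver.
    opts = ['HOST=' + host, 'PORT=' + str(port), 'DBNAME=' + dbname]
    if user is not None:
        opts += ['USER=' + user, 'PASSWORD=' + password]
    return opts

ds = gdal.OpenEx('mongodb:',
                 open_options=mongodb_open_options('localhost', 27017, 'test'))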
Example #11
def ogr_mongodb_2():
    if ogrtest.mongodb_drv is None:
        return 'skip'

    ogrtest.mongodb_ds = ogr.Open(ogrtest.mongodb_test_uri, update=1)
    if ogrtest.mongodb_ds.GetLayerByName('not_existing') is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    if ogrtest.mongodb_ds.TestCapability(ogr.ODsCCreateLayer) != 1:
        gdaltest.post_reason('fail')
        return 'fail'

    if ogrtest.mongodb_ds.TestCapability(ogr.ODsCDeleteLayer) != 1:
        gdaltest.post_reason('fail')
        return 'fail'

    if ogrtest.mongodb_ds.TestCapability(
            ogr.ODsCCreateGeomFieldAfterCreateLayer) != 1:
        gdaltest.post_reason('fail')
        return 'fail'

    # Create layer
    a_uuid = str(uuid.uuid1()).replace('-', '_')
    ogrtest.mongodb_layer_name = 'test_' + a_uuid
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(4258)  # ETRS 89 will reproject identically to EPSG:4326
    lyr = ogrtest.mongodb_ds.CreateLayer(
        ogrtest.mongodb_layer_name,
        geom_type=ogr.wkbPolygon,
        srs=srs,
        options=['GEOMETRY_NAME=location.mygeom', 'FID='])

    gdal.PushErrorHandler()
    ret = lyr.CreateGeomField(
        ogr.GeomFieldDefn('location.mygeom', ogr.wkbPoint))
    gdal.PopErrorHandler()
    if ret == 0:
        gdaltest.post_reason('fail')
        return 'fail'

    ret = lyr.CreateField(ogr.FieldDefn('str', ogr.OFTString))
    if ret != 0:
        gdaltest.post_reason('fail')
        return 'fail'

    gdal.PushErrorHandler()
    ret = lyr.CreateField(ogr.FieldDefn('str', ogr.OFTString))
    gdal.PopErrorHandler()
    if ret == 0:
        gdaltest.post_reason('fail')
        return 'fail'

    lyr.CreateField(ogr.FieldDefn('location.name', ogr.OFTString))
    bool_field = ogr.FieldDefn('bool', ogr.OFTInteger)
    bool_field.SetSubType(ogr.OFSTBoolean)
    lyr.CreateField(bool_field)
    lyr.CreateField(ogr.FieldDefn('int', ogr.OFTInteger))
    lyr.CreateField(ogr.FieldDefn('int64', ogr.OFTInteger64))
    lyr.CreateField(ogr.FieldDefn('real', ogr.OFTReal))
    lyr.CreateField(ogr.FieldDefn('dt', ogr.OFTDateTime))
    lyr.CreateField(ogr.FieldDefn('embed.str', ogr.OFTString))
    lyr.CreateField(ogr.FieldDefn('binary', ogr.OFTBinary))
    lyr.CreateField(ogr.FieldDefn('strlist', ogr.OFTStringList))
    lyr.CreateField(ogr.FieldDefn('intlist', ogr.OFTIntegerList))
    lyr.CreateField(ogr.FieldDefn('int64list', ogr.OFTInteger64List))
    lyr.CreateField(ogr.FieldDefn('realist', ogr.OFTRealList))
    lyr.CreateField(ogr.FieldDefn('embed.embed2.int', ogr.OFTInteger))
    lyr.CreateField(ogr.FieldDefn('embed.embed2.real', ogr.OFTReal))

    # Test CreateFeature()
    f = ogr.Feature(lyr.GetLayerDefn())
    f['str'] = 'str'
    f['location.name'] = 'Paris'
    f['bool'] = 1
    f['int'] = 1
    f['int64'] = 1234567890123456  # put a number larger than 1 << 40 so that fromjson() doesn't pick double
    f['real'] = 1.23
    f['dt'] = '1234/12/31 23:59:59.123+00'
    f.SetFieldBinaryFromHexString('binary', '00FF')
    f['strlist'] = ['a', 'b']
    f['intlist'] = [1, 2]
    f['int64list'] = [1234567890123456, 1234567890123456]
    f['realist'] = [1.23, 4.56]
    f['embed.str'] = 'foo'
    f['embed.embed2.int'] = 3
    f['embed.embed2.real'] = 3.45
    f.SetGeometryDirectly(
        ogr.CreateGeometryFromWkt('POLYGON((2 49,2 50,3 50,3 49,2 49))'))
    if lyr.CreateFeature(f) != 0:
        gdaltest.post_reason('fail')
        return 'fail'
    if f['_id'] is None:
        gdaltest.post_reason('fail')
        return 'fail'
    f_ref = f.Clone()

    # Test GetFeatureCount()
    if lyr.GetFeatureCount() != 1:
        gdaltest.post_reason('fail')
        return 'fail'

    # Test GetNextFeature()
    lyr.ResetReading()
    f = lyr.GetNextFeature()
    if not f.Equal(f_ref):
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'
    f = lyr.GetNextFeature()
    if f is not None:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    # Test GetFeature()
    f = lyr.GetFeature(1)
    if not f.Equal(f_ref):
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    # Test SetFeature()
    f['bool'] = 0
    if lyr.SetFeature(f) != 0:
        gdaltest.post_reason('fail')
        return 'fail'
    f_ref = f.Clone()
    f = lyr.GetFeature(1)
    if f['bool'] != 0:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    # Test (not working) DeleteFeature()
    gdal.PushErrorHandler()
    ret = lyr.DeleteFeature(1)
    gdal.PopErrorHandler()
    if ret == 0:
        gdaltest.post_reason('fail')
        return 'fail'

    # Test Mongo filter
    lyr.SetAttributeFilter('{ "int": 1 }')
    lyr.ResetReading()
    f = lyr.GetNextFeature()
    if not f.Equal(f_ref):
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    lyr.SetAttributeFilter('{ "int": 2 }')
    lyr.ResetReading()
    f = lyr.GetNextFeature()
    if f is not None:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    # Test OGR filter
    lyr.SetAttributeFilter('int = 1')
    lyr.ResetReading()
    f = lyr.GetNextFeature()
    if not f.Equal(f_ref):
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    lyr.SetAttributeFilter('int = 2')
    lyr.ResetReading()
    f = lyr.GetNextFeature()
    if f is not None:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    # Test geometry filter
    lyr.SetAttributeFilter(None)
    lyr.SetSpatialFilterRect(2.1, 49.1, 2.9, 49.9)
    lyr.ResetReading()
    f = lyr.GetNextFeature()
    if not f.Equal(f_ref):
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    lyr.SetSpatialFilterRect(1.1, 49.1, 1.9, 49.9)
    lyr.ResetReading()
    f = lyr.GetNextFeature()
    if f is not None:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    f = f_ref.Clone()
    f.SetFID(-1)
    f.SetGeometryDirectly(None)
    if lyr.CreateFeature(f) != 0:
        gdaltest.post_reason('fail')
        return 'fail'

    # Duplicate key
    gdal.PushErrorHandler()
    ret = lyr.SyncToDisk()
    gdal.PopErrorHandler()
    if ret == 0:
        gdaltest.post_reason('fail')
        return 'fail'

    f['_id'] = None
    lyr.CreateFeature(f)
    ret = lyr.SyncToDisk()
    if ret != 0:
        gdaltest.post_reason('fail')
        return 'fail'

    # Missing _id
    f.UnsetField('_id')
    gdal.PushErrorHandler()
    ret = lyr.SetFeature(f)
    gdal.PopErrorHandler()
    if ret == 0:
        gdaltest.post_reason('fail')
        return 'fail'

    # MongoDB dialect of ExecuteSQL() with invalid JSON
    gdal.PushErrorHandler()
    sql_lyr = ogrtest.mongodb_ds.ExecuteSQL('{', dialect='MongoDB')
    gdal.PopErrorHandler()

    # MongoDB dialect of ExecuteSQL() with non-existing command
    sql_lyr = ogrtest.mongodb_ds.ExecuteSQL('{ "foo": 1 }', dialect='MongoDB')
    if sql_lyr is None:
        gdaltest.post_reason('fail')
        return 'fail'
    ogrtest.mongodb_ds.ReleaseResultSet(sql_lyr)

    # MongoDB dialect of ExecuteSQL() with existing command
    sql_lyr = ogrtest.mongodb_ds.ExecuteSQL('{ "listCommands" : 1 }',
                                            dialect='MongoDB')
    if sql_lyr is None:
        gdaltest.post_reason('fail')
        return 'fail'
    f = sql_lyr.GetNextFeature()
    if f is None:
        gdaltest.post_reason('fail')
        return 'fail'
    f = sql_lyr.GetNextFeature()
    if f is not None:
        gdaltest.post_reason('fail')
        return 'fail'
    sql_lyr.GetLayerDefn()
    sql_lyr.ResetReading()
    sql_lyr.TestCapability('')
    ogrtest.mongodb_ds.ReleaseResultSet(sql_lyr)

    # Regular ExecuteSQL()
    sql_lyr = ogrtest.mongodb_ds.ExecuteSQL('SELECT * FROM ' +
                                            ogrtest.mongodb_layer_name)
    if sql_lyr is None:
        gdaltest.post_reason('fail')
        return 'fail'
    ogrtest.mongodb_ds.ReleaseResultSet(sql_lyr)

    # Test CreateLayer again with same name
    gdal.PushErrorHandler()
    lyr = ogrtest.mongodb_ds.CreateLayer(ogrtest.mongodb_layer_name)
    gdal.PopErrorHandler()
    if lyr is not None:
        return 'fail'

    ogrtest.mongodb_ds = gdal.OpenEx(
        ogrtest.mongodb_test_uri,
        gdal.OF_UPDATE,
        open_options=[
            'FEATURE_COUNT_TO_ESTABLISH_FEATURE_DEFN=-1', 'BULK_INSERT=NO',
            'JSON_FIELD=TRUE'
        ])

    # Check after reopening
    lyr = ogrtest.mongodb_ds.GetLayerByName(ogrtest.mongodb_layer_name)
    if lyr.TestCapability(ogr.OLCFastSpatialFilter) == 0:
        gdaltest.post_reason('fail')
        return 'fail'
    f = lyr.GetNextFeature()
    json_field = f['_json']
    # We cannot use feature.Equal() as the C++ layer defn has changed
    for i in range(f_ref.GetDefnRef().GetFieldCount()):
        if f.GetField(i) != f_ref.GetField(i) or \
           f.GetFieldDefnRef(i).GetType() != f_ref.GetFieldDefnRef(i).GetType() or \
           f.GetFieldDefnRef(i).GetSubType() != f_ref.GetFieldDefnRef(i).GetSubType():
            gdaltest.post_reason('fail')
            f.DumpReadable()
            f_ref.DumpReadable()
            return 'fail'
    for i in range(f_ref.GetDefnRef().GetGeomFieldCount()):
        if not f.GetGeomFieldRef(i).Equals(f_ref.GetGeomFieldRef(i)) or \
               f.GetGeomFieldDefnRef(i).GetName() != f_ref.GetGeomFieldDefnRef(i).GetName() or \
               f.GetGeomFieldDefnRef(i).GetType() != f_ref.GetGeomFieldDefnRef(i).GetType():
            gdaltest.post_reason('fail')
            f.DumpReadable()
            f_ref.DumpReadable()
            return 'fail'

    lyr.SetSpatialFilterRect(2.1, 49.1, 2.9, 49.9)
    lyr.ResetReading()
    f = lyr.GetNextFeature()
    if f is None:
        gdaltest.post_reason('fail')
        return 'fail'

    # Create a feature only from its _json content and do not store any ogr metadata related to the layer
    ogrtest.mongodb_layer_name_no_ogr_metadata = ogrtest.mongodb_layer_name + "_no_ogr_metadata"
    lyr = ogrtest.mongodb_ds.CreateLayer(
        ogrtest.mongodb_layer_name_no_ogr_metadata,
        options=[
            'GEOMETRY_NAME=location.mygeom', 'FID=', 'WRITE_OGR_METADATA=NO'
        ])
    f = ogr.Feature(lyr.GetLayerDefn())
    f['_json'] = json_field
    if lyr.CreateFeature(f) != 0:
        gdaltest.post_reason('fail')
        return 'fail'

    ogrtest.mongodb_layer_name_guess_types = ogrtest.mongodb_layer_name + "_guess_types"
    lyr = ogrtest.mongodb_ds.CreateLayer(
        ogrtest.mongodb_layer_name_guess_types,
        geom_type=ogr.wkbNone,
        options=['FID=', 'WRITE_OGR_METADATA=NO'])
    f = ogr.Feature(lyr.GetLayerDefn())
    f['_json'] = '{'
    f['_json'] += '"int": 2, '
    f['_json'] += '"int64": { "$numberLong" : "1234567890123456" }, '
    f['_json'] += '"real": 2.34, '
    f['_json'] += '"intlist" : [2], '
    f['_json'] += '"reallist" : [2.34], '
    f['_json'] += '"int64list" : [{ "$numberLong" : "1234567890123456" }], '
    f['_json'] += '"int_str" : 2, '
    f['_json'] += '"str_int" : "2", '
    f['_json'] += '"int64_str" : { "$numberLong" : "1234567890123456" }, '
    f['_json'] += '"str_int64" : "2", '
    f['_json'] += '"int_int64": 2, '
    f['_json'] += '"int64_int": { "$numberLong" : "1234567890123456" }, '
    f['_json'] += '"int_real": 2, '
    f['_json'] += '"real_int": 3.45, '
    f['_json'] += '"int64_real": { "$numberLong" : "1234567890123456" }, '
    f['_json'] += '"real_int64": 3.45, '
    f['_json'] += '"real_str": 3.45, '
    f['_json'] += '"str_real": "3.45", '
    f['_json'] += '"int_bool" : 2, '
    f['_json'] += '"bool_int" : true, '
    f['_json'] += '"intlist_strlist" : [2], '
    f['_json'] += '"strlist_intlist" : ["2"], '
    f['_json'] += '"intlist_int64list": [2], '
    f['_json'] += '"int64list_intlist": [{ "$numberLong" : "1234567890123456" }], '
    f['_json'] += '"intlist_reallist": [2], '
    f['_json'] += '"reallist_intlist": [3.45], '
    f['_json'] += '"int64list_reallist": [{ "$numberLong" : "1234567890123456" }], '
    f['_json'] += '"reallist_int64list": [3.45], '
    f['_json'] += '"intlist_boollist" : [2], '
    f['_json'] += '"boollist_intlist" : [true], '
    f['_json'] += '"mixedlist": [true,1,{ "$numberLong" : "1234567890123456" },3.45],'
    f['_json'] += '"mixedlist2": [true,1,{ "$numberLong" : "1234567890123456" },3.45,"str"]'
    f['_json'] += '}'
    if lyr.CreateFeature(f) != 0:
        gdaltest.post_reason('fail')
        return 'fail'

    f = ogr.Feature(lyr.GetLayerDefn())
    f['_json'] = '{'
    f['_json'] += '"int_str" : "3", '
    f['_json'] += '"str_int" : 3, '
    f['_json'] += '"int64_str" : "2", '
    f['_json'] += '"str_int64" : { "$numberLong" : "1234567890123456" }, '
    f['_json'] += '"int_int64": { "$numberLong" : "1234567890123456" }, '
    f['_json'] += '"int64_int": 2, '
    f['_json'] += '"int_real" : 3.45, '
    f['_json'] += '"real_int": 2, '
    f['_json'] += '"int64_real": 3.45, '
    f['_json'] += '"real_int64": { "$numberLong" : "1234567890123456" }, '
    f['_json'] += '"real_str": "3.45", '
    f['_json'] += '"str_real": 3.45, '
    f['_json'] += '"int_bool" : true, '
    f['_json'] += '"bool_int" : 2, '
    f['_json'] += '"intlist_strlist" : ["3"], '
    f['_json'] += '"strlist_intlist" : [3], '
    f['_json'] += '"intlist_int64list": [{ "$numberLong" : "1234567890123456" }], '
    f['_json'] += '"int64list_intlist": [2], '
    f['_json'] += '"intlist_reallist": [3.45], '
    f['_json'] += '"reallist_intlist": [2], '
    f['_json'] += '"int64list_reallist": [3.45], '
    f['_json'] += '"reallist_int64list": [{ "$numberLong" : "1234567890123456" }], '
    f['_json'] += '"intlist_boollist" : [true], '
    f['_json'] += '"boollist_intlist" : [2]'
    f['_json'] += '}'
    if lyr.CreateFeature(f) != 0:
        gdaltest.post_reason('fail')
        return 'fail'

    # These new features will not be taken into account by the
    # FEATURE_COUNT_TO_ESTABLISH_FEATURE_DEFN=2 used below
    f = ogr.Feature(lyr.GetLayerDefn())
    f['_json'] = '{'
    f['_json'] += '"int": { "$minKey": 1 }, '
    f['_json'] += '"int64": { "$minKey": 1 }, '
    f['_json'] += '"real": { "$minKey": 1 }, '
    f['_json'] += '"intlist" : [1, "1", { "$minKey": 1 },{ "$maxKey": 1 },{ "$numberLong" : "-1234567890123456" }, { "$numberLong" : "1234567890123456" }, -1234567890123456.1, 1234567890123456.1, { "$numberLong" : "1" }, 1.23 ], '
    f['_json'] += '"int64list" : [1, { "$numberLong" : "1234567890123456" }, "1", { "$minKey": 1 },{ "$maxKey": 1 }, -1e300, 1e300, 1.23 ], '
    f['_json'] += '"reallist" : [1, { "$numberLong" : "1234567890123456" }, 1.0, "1", { "$minKey": 1 },{ "$maxKey": 1 }, { "$numberLong" : "1234567890123456" } ] '
    f['_json'] += '}'
    if lyr.CreateFeature(f) != 0:
        gdaltest.post_reason('fail')
        return 'fail'

    f = ogr.Feature(lyr.GetLayerDefn())
    f['_json'] = '{'
    f['_json'] += '"int": { "$maxKey": 1 }, '
    f['_json'] += '"int64": { "$maxKey": 1 }, '
    f['_json'] += '"real": { "$maxKey": 1 } '
    f['_json'] += '}'
    if lyr.CreateFeature(f) != 0:
        gdaltest.post_reason('fail')
        return 'fail'

    ogrtest.mongodb_layer_name_with_2d_index = ogrtest.mongodb_layer_name + "_with_2d_index"
    gdal.SetConfigOption('OGR_MONGODB_SPAT_INDEX_TYPE', '2d')
    lyr = ogrtest.mongodb_ds.CreateLayer(
        ogrtest.mongodb_layer_name_with_2d_index,
        geom_type=ogr.wkbPoint,
        options=['FID=', 'WRITE_OGR_METADATA=NO'])
    gdal.SetConfigOption('OGR_MONGODB_SPAT_INDEX_TYPE', None)
    f = ogr.Feature(lyr.GetLayerDefn())
    f.SetGeometryDirectly(ogr.CreateGeometryFromWkt('POINT(2 49)'))
    if lyr.CreateFeature(f) != 0:
        gdaltest.post_reason('fail')
        return 'fail'

    ogrtest.mongodb_layer_name_no_spatial_index = ogrtest.mongodb_layer_name + "_no_spatial_index"
    for i in range(2):
        lyr = ogrtest.mongodb_ds.CreateLayer(
            ogrtest.mongodb_layer_name_no_spatial_index,
            options=['SPATIAL_INDEX=NO', 'OVERWRITE=YES'])
        f = ogr.Feature(lyr.GetLayerDefn())
        f.SetGeometryDirectly(ogr.CreateGeometryFromWkt('POINT(2 49)'))
        if lyr.CreateFeature(f) != 0:
            gdaltest.post_reason('fail')
            return 'fail'
        ogrtest.mongodb_ds.ExecuteSQL(
            'WRITE_OGR_METADATA ' +
            ogrtest.mongodb_layer_name_no_spatial_index)

    # Open "ghost" layer
    lyr = ogrtest.mongodb_ds.GetLayerByName('_ogr_metadata')
    if lyr is None:
        gdaltest.post_reason('fail')
        return 'fail'
    lyr.SetAttributeFilter("layer LIKE '%s%%'" % ogrtest.mongodb_layer_name)
    if lyr.GetFeatureCount() != 2:
        print(lyr.GetFeatureCount())
        gdaltest.post_reason('fail')
        return 'fail'

    if ogrtest.mongodb_ds.DeleteLayer(-1) == 0:
        gdaltest.post_reason('fail')
        return 'fail'

    lyr = ogrtest.mongodb_ds.GetLayerByName(ogrtest.mongodb_test_dbname + '.' +
                                            '_ogr_metadata')
    if lyr is None:
        gdaltest.post_reason('fail')
        return 'fail'

    ogrtest.mongodb_ds = None

    # Reopen in read-only
    ogrtest.mongodb_ds = gdal.OpenEx(
        ogrtest.mongodb_test_uri,
        0,
        open_options=[
            'FEATURE_COUNT_TO_ESTABLISH_FEATURE_DEFN=2', 'JSON_FIELD=TRUE'
        ])

    lyr = ogrtest.mongodb_ds.GetLayerByName(
        ogrtest.mongodb_layer_name_no_ogr_metadata)
    if lyr.TestCapability(ogr.OLCFastSpatialFilter) != 0:
        gdaltest.post_reason('fail')
        return 'fail'
    f = lyr.GetNextFeature()
    for i in range(f_ref.GetDefnRef().GetFieldCount()):
        # Order might be a bit different...
        j = f.GetDefnRef().GetFieldIndex(f_ref.GetFieldDefnRef(i).GetNameRef())
        if f.GetField(j) != f_ref.GetField(i) or \
           f.GetFieldDefnRef(j).GetType() != f_ref.GetFieldDefnRef(i).GetType() or \
           f.GetFieldDefnRef(j).GetSubType() != f_ref.GetFieldDefnRef(i).GetSubType():
            gdaltest.post_reason('fail')
            f.DumpReadable()
            f_ref.DumpReadable()
            return 'fail'
    for i in range(f_ref.GetDefnRef().GetGeomFieldCount()):
        # Order might be a bit different...
        j = f.GetDefnRef().GetGeomFieldIndex(
            f_ref.GetGeomFieldDefnRef(i).GetNameRef())
        if not f.GetGeomFieldRef(j).Equals(f_ref.GetGeomFieldRef(i)) or \
            f.GetGeomFieldDefnRef(j).GetName() != f_ref.GetGeomFieldDefnRef(i).GetName() or \
            f.GetGeomFieldDefnRef(j).GetType() != f_ref.GetGeomFieldDefnRef(i).GetType():
            gdaltest.post_reason('fail')
            f.DumpReadable()
            f_ref.DumpReadable()
            print(f.GetGeomFieldDefnRef(j).GetType())
            print(f_ref.GetGeomFieldDefnRef(i).GetType())
            return 'fail'

    lyr.SetSpatialFilterRect(2.1, 49.1, 2.9, 49.9)
    lyr.ResetReading()
    f = lyr.GetNextFeature()
    if f is None:
        gdaltest.post_reason('fail')
        return 'fail'

    lyr = ogrtest.mongodb_ds.GetLayerByName(
        ogrtest.mongodb_layer_name_guess_types)

    expected_fields = [
        ("int", ogr.OFTInteger), ("int64", ogr.OFTInteger64),
        ("real", ogr.OFTReal), ("intlist", ogr.OFTIntegerList),
        ("reallist", ogr.OFTRealList), ("int64list", ogr.OFTInteger64List),
        ("int_str", ogr.OFTString), ("str_int", ogr.OFTString),
        ("int64_str", ogr.OFTString), ("str_int64", ogr.OFTString),
        ("int_int64", ogr.OFTInteger64), ("int64_int", ogr.OFTInteger64),
        ("int_real", ogr.OFTReal), ("real_int", ogr.OFTReal),
        ("int64_real", ogr.OFTReal), ("real_int64", ogr.OFTReal),
        ("real_str", ogr.OFTString), ("str_real", ogr.OFTString),
        ("int_bool", ogr.OFTInteger), ("bool_int", ogr.OFTInteger),
        ("intlist_strlist", ogr.OFTStringList),
        ("strlist_intlist", ogr.OFTStringList),
        ("intlist_int64list", ogr.OFTInteger64List),
        ("int64list_intlist", ogr.OFTInteger64List),
        ("intlist_reallist", ogr.OFTRealList),
        ("reallist_intlist", ogr.OFTRealList),
        ("int64list_reallist", ogr.OFTRealList),
        ("reallist_int64list", ogr.OFTRealList),
        ("intlist_boollist", ogr.OFTIntegerList),
        ("boollist_intlist", ogr.OFTIntegerList),
        ("mixedlist", ogr.OFTRealList), ("mixedlist2", ogr.OFTStringList)
    ]
    for (fieldname, fieldtype) in expected_fields:
        fld_defn = lyr.GetLayerDefn().GetFieldDefn(
            lyr.GetLayerDefn().GetFieldIndex(fieldname))
        if fld_defn.GetType() != fieldtype:
            gdaltest.post_reason('fail')
            print(fieldname)
            print(fld_defn.GetType())
            return 'fail'
        if fld_defn.GetSubType() != ogr.OFSTNone:
            gdaltest.post_reason('fail')
            return 'fail'

    f = lyr.GetNextFeature()
    f = lyr.GetNextFeature()
    f = lyr.GetNextFeature()
    if f['intlist'] != [1,1,-2147483648,2147483647,-2147483648,2147483647,-2147483648,2147483647,1,1] or \
       f['int64list'] != [1,1234567890123456,1,-9223372036854775808,9223372036854775807,-9223372036854775808,9223372036854775807,1] or \
       f['int'] != -2147483648 or f['int64'] != -9223372036854775808 or f['real'] - 1 != f['real']:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'
    f = lyr.GetNextFeature()
    if f['int'] != 2147483647 or f[
            'int64'] != 9223372036854775807 or f['real'] + 1 != f['real']:
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    lyr = ogrtest.mongodb_ds.GetLayerByName(
        ogrtest.mongodb_layer_name_with_2d_index)
    if lyr.TestCapability(ogr.OLCFastSpatialFilter) == 0:
        gdaltest.post_reason('fail')
        return 'fail'
    lyr.SetSpatialFilterRect(1.9, 48.9, 2.1, 49.1)
    lyr.ResetReading()
    f = lyr.GetNextFeature()
    if f is None:
        gdaltest.post_reason('fail')
        return 'fail'
    lyr.SetSpatialFilterRect(1.9, 48.9, 1.95, 48.95)
    lyr.ResetReading()
    f = lyr.GetNextFeature()
    if f is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    lyr = ogrtest.mongodb_ds.GetLayerByName(
        ogrtest.mongodb_layer_name_no_spatial_index)
    if lyr.TestCapability(ogr.OLCFastSpatialFilter) != 0:
        gdaltest.post_reason('fail')
        print(lyr.TestCapability(ogr.OLCFastSpatialFilter))
        return 'fail'
    lyr.SetSpatialFilterRect(1.9, 48.9, 2.1, 49.1)
    lyr.ResetReading()
    f = lyr.GetNextFeature()
    if f is None:
        gdaltest.post_reason('fail')
        return 'fail'

    gdal.PushErrorHandler()
    lyr = ogrtest.mongodb_ds.CreateLayer('foo')
    gdal.PopErrorHandler()
    if lyr is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    gdal.ErrorReset()
    gdal.PushErrorHandler()
    ogrtest.mongodb_ds.ExecuteSQL('WRITE_OGR_METADATA ' +
                                  ogrtest.mongodb_layer_name)
    gdal.PopErrorHandler()
    if gdal.GetLastErrorMsg() == '':
        gdaltest.post_reason('fail')
        return 'fail'

    lyr_count_before = ogrtest.mongodb_ds.GetLayerCount()
    gdal.PushErrorHandler()
    ogrtest.mongodb_ds.ExecuteSQL('DELLAYER:' + ogrtest.mongodb_layer_name)
    gdal.PopErrorHandler()
    if ogrtest.mongodb_ds.GetLayerCount() != lyr_count_before:
        gdaltest.post_reason('fail')
        return 'fail'

    lyr = ogrtest.mongodb_ds.GetLayerByName(ogrtest.mongodb_layer_name)

    gdal.PushErrorHandler()
    ret = lyr.CreateField(ogr.FieldDefn('foo', ogr.OFTString))
    gdal.PopErrorHandler()
    if ret == 0:
        gdaltest.post_reason('fail')
        return 'fail'

    gdal.PushErrorHandler()
    ret = lyr.CreateGeomField(ogr.GeomFieldDefn('foo', ogr.wkbPoint))
    gdal.PopErrorHandler()
    if ret == 0:
        gdaltest.post_reason('fail')
        return 'fail'

    f = ogr.Feature(lyr.GetLayerDefn())
    gdal.PushErrorHandler()
    ret = lyr.CreateFeature(f)
    gdal.PopErrorHandler()
    if ret == 0:
        gdaltest.post_reason('fail')
        return 'fail'

    gdal.PushErrorHandler()
    ret = lyr.SetFeature(f)
    gdal.PopErrorHandler()
    if ret == 0:
        gdaltest.post_reason('fail')
        return 'fail'

    gdal.PushErrorHandler()
    ret = lyr.DeleteFeature(1)
    gdal.PopErrorHandler()
    if ret == 0:
        gdaltest.post_reason('fail')
        return 'fail'

    return 'success'
Example #12
def _find_raster_pour_points(flow_dir_raster_path_band):
    """
    Memory-safe pour point calculation from a flow direction raster.

    Args:
        flow_dir_raster_path_band (tuple): tuple of (raster path, band index)
            indicating the flow direction raster to use.

    Returns:
        set of (x, y) coordinate tuples of pour points, in the same coordinate
        system as the input raster.
    """
    flow_dir_raster_path, band_index = flow_dir_raster_path_band
    raster_info = pygeoprocessing.get_raster_info(flow_dir_raster_path)
    # Open the flow direction raster band
    raster = gdal.OpenEx(flow_dir_raster_path, gdal.OF_RASTER)
    band = raster.GetRasterBand(band_index)
    width, height = raster_info['raster_size']

    pour_points = set()
    # Read in flow direction data and find pour points one block at a time
    for offsets in pygeoprocessing.iterblocks(
            (flow_dir_raster_path, band_index), offset_only=True):
        # Expand each block by a one-pixel-wide margin, if possible.
        # This way the blocks will overlap so the watershed
        # calculation will be continuous.
        if offsets['xoff'] > 0:
            offsets['xoff'] -= 1
            offsets['win_xsize'] += 1
        if offsets['yoff'] > 0:
            offsets['yoff'] -= 1
            offsets['win_ysize'] += 1
        if offsets['xoff'] + offsets['win_xsize'] < width:
            offsets['win_xsize'] += 1
        if offsets['yoff'] + offsets['win_ysize'] < height:
            offsets['win_ysize'] += 1

        # Keep track of which block edges are raster edges
        edges = numpy.empty(4, dtype=numpy.intc)
        # edges order: top, left, bottom, right
        edges[0] = (offsets['yoff'] == 0)
        edges[1] = (offsets['xoff'] == 0)
        edges[2] = (offsets['yoff'] + offsets['win_ysize'] == height)
        edges[3] = (offsets['xoff'] + offsets['win_xsize'] == width)

        flow_dir_block = band.ReadAsArray(**offsets)
        pour_points = pour_points.union(
            delineateit_core.calculate_pour_point_array(
                # numpy.intc is equivalent to an int in C (normally int32 or
                # int64). This way it can be passed directly into a memoryview
                # (int[:, :]) in the Cython function.
                flow_dir_block.astype(numpy.intc),
                edges,
                nodata=raster_info['nodata'][band_index - 1],
                offset=(offsets['xoff'], offsets['yoff']),
                origin=(raster_info['geotransform'][0],
                        raster_info['geotransform'][3]),
                pixel_size=raster_info['pixel_size']))

    # Release the band before the dataset, per GDAL teardown convention.
    band = None
    raster = None

    return pour_points
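
A usage sketch for the helper above; the flow direction path is a placeholder, and the band index is 1-based as everywhere in GDAL:

# Hypothetical D8 flow direction raster.
pour_points = _find_raster_pour_points(('flow_dir_d8.tif', 1))
for x, y in sorted(pour_points):
    print('pour point at (%f, %f)' % (x, y))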
Example #13
def snap_points_to_nearest_stream(points_vector_path, stream_raster_path,
                                  flow_accum_raster_path, snap_distance,
                                  snapped_points_vector_path):
    """Adjust the location of points to the nearest stream pixel.

    The new point layer will have all fields and field values copied over from
    the source vector.  Any points that are outside of the stream raster will
    not be included in the output vector.

    Args:
        points_vector_path (string): A path to a vector on disk containing
            point geometries.  Must be in the same projection as the stream
            raster.
        stream_raster_path (string): A path to a stream raster, where
            pixel values are ``1`` (indicating a stream pixel) or ``0``
            (indicating a non-stream pixel).
        flow_accum_raster_path (string): A path to a flow accumulation raster
            that is aligned with the stream raster. Used to break ties between
            equally-near stream pixels.
        snap_distance (number): The maximum distance (in pixels) to search
            for stream pixels for each point.  This must be a positive, nonzero
            value.
        snapped_points_vector_path (string): A path to where the output
            points will be written.

    Returns:
        ``None``

    Raises:
        ``ValueError`` when snap_distance is less than or equal to 0.

    """
    if snap_distance <= 0:
        raise ValueError('snap_distance must be > 0, not %s' % snap_distance)

    points_vector = gdal.OpenEx(points_vector_path, gdal.OF_VECTOR)
    points_layer = points_vector.GetLayer()

    stream_raster_info = pygeoprocessing.get_raster_info(stream_raster_path)
    geotransform = stream_raster_info['geotransform']
    n_cols, n_rows = stream_raster_info['raster_size']
    stream_raster = gdal.OpenEx(stream_raster_path, gdal.OF_RASTER)
    stream_band = stream_raster.GetRasterBand(1)

    flow_accum_raster = gdal.OpenEx(flow_accum_raster_path, gdal.OF_RASTER)
    flow_accum_band = flow_accum_raster.GetRasterBand(1)

    driver = gdal.GetDriverByName('GPKG')
    snapped_vector = driver.Create(snapped_points_vector_path, 0, 0, 0,
                                   gdal.GDT_Unknown)
    layer_name = os.path.splitext(
        os.path.basename(snapped_points_vector_path))[0]
    snapped_layer = snapped_vector.CreateLayer(
        layer_name, points_layer.GetSpatialRef(), points_layer.GetGeomType())
    snapped_layer.CreateFields(points_layer.schema)
    snapped_layer_defn = snapped_layer.GetLayerDefn()

    snapped_layer.StartTransaction()
    n_features = points_layer.GetFeatureCount()
    last_time = time.time()
    for index, point_feature in enumerate(points_layer, 1):
        if time.time() - last_time > 5.0:
            LOGGER.info('Snapped %s of %s points', index, n_features)
            last_time = time.time()

        source_geometry = point_feature.GetGeometryRef()
        geom_name = source_geometry.GetGeometryName()
        geom_count = source_geometry.GetGeometryCount()

        if source_geometry.IsEmpty():
            LOGGER.warning(
                f"FID {point_feature.GetFID()} is missing a defined geometry. "
                "Skipping.")
            continue

        # If the geometry is not a primitive point, just create the new feature
        # as it is now in the new vector. MULTIPOINT geometries with a single
        # component point count as primitive points.
        # OGR's wkbMultiPoint, wkbMultiPointM, wkbMultiPointZM and
        # wkbMultiPoint25D all use the MULTIPOINT geometry name.
        if ((geom_name not in ('POINT', 'MULTIPOINT')) or
                (geom_name == 'MULTIPOINT' and geom_count > 1)):
            LOGGER.warning(
                f"FID {point_feature.GetFID()} ({geom_name}, n={geom_count}) "
                "geometry cannot be snapped; copying it to the output as-is.")
            new_feature = ogr.Feature(snapped_layer.GetLayerDefn())
            new_feature.SetGeometry(source_geometry)
            for field_name, field_value in point_feature.items().items():
                new_feature.SetField(field_name, field_value)
            snapped_layer.CreateFeature(new_feature)
            continue

        point = shapely.wkb.loads(bytes(source_geometry.ExportToWkb()))
        if geom_name == 'MULTIPOINT':
            # We already checked (above) that there's only one component point
            point = point.geoms[0]

        x_index = (point.x - geotransform[0]) // geotransform[1]
        y_index = (point.y - geotransform[3]) // geotransform[5]
        if (x_index < 0 or x_index > n_cols or
                y_index < 0 or y_index > n_rows):
            LOGGER.warning(
                'Encountered a point that was outside the bounds of the '
                f'stream raster.  FID:{point_feature.GetFID()} at {point}')
            continue

        x_left = max(x_index - snap_distance, 0)
        y_top = max(y_index - snap_distance, 0)
        x_right = min(x_index + snap_distance, n_cols)
        y_bottom = min(y_index + snap_distance, n_rows)

        # snap to the nearest stream pixel out to the snap distance
        stream_window = stream_band.ReadAsArray(
            int(x_left), int(y_top), int(x_right - x_left),
            int(y_bottom - y_top))
        row_indexes, col_indexes = numpy.nonzero(
            stream_window == 1)

        # Find the closest stream pixel that meets the distance
        # requirement. If there is a tie, snap to the stream pixel with
        # a higher flow accumulation value.
        if row_indexes.size > 0:  # there are streams within the snap distance
            # Calculate euclidean distance from the point to each stream pixel
            distance_array = numpy.hypot(
                # distance along y axis from the point to each stream pixel
                y_index - y_top - row_indexes,
                # distance along x axis from the point to each stream pixel
                x_index - x_left - col_indexes,
                dtype=numpy.float32)

            is_nearest = distance_array == distance_array.min()
            # if > 1 stream pixel is nearest, break tie with flow accumulation
            if is_nearest.sum() > 1:
                flow_accum_array = flow_accum_band.ReadAsArray(
                    int(x_left), int(y_top), int(x_right - x_left),
                    int(y_bottom - y_top))
                # weight by flow accum
                is_nearest = is_nearest * flow_accum_array[row_indexes, col_indexes]

            # 1d index of max value in flattened array
            nearest_stream_index_1d = numpy.argmax(is_nearest)

            # convert 1d index back to coordinates relative to window
            nearest_stream_row = row_indexes[nearest_stream_index_1d]
            nearest_stream_col = col_indexes[nearest_stream_index_1d]

            offset_row = nearest_stream_row - (y_index - y_top)
            offset_col = nearest_stream_col - (x_index - x_left)

            y_index += offset_row
            x_index += offset_col

        point_geometry = ogr.Geometry(ogr.wkbPoint)
        point_geometry.AddPoint(
            geotransform[0] + (x_index + 0.5) * geotransform[1],
            geotransform[3] + (y_index + 0.5) * geotransform[5])

        # Get the output Layer's Feature Definition
        snapped_point_feature = ogr.Feature(snapped_layer_defn)
        for field_name, field_value in point_feature.items().items():
            snapped_point_feature.SetField(field_name, field_value)
        snapped_point_feature.SetGeometry(point_geometry)

        snapped_layer.CreateFeature(snapped_point_feature)
    snapped_layer.CommitTransaction()
    snapped_layer = None
    snapped_vector = None

    points_layer = None
    points_vector = None
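
A sketch of calling the snapping routine above; all paths are placeholders, and the vector and rasters must share one projection:

snap_points_to_nearest_stream(
    'outlets.gpkg',           # point vector to snap
    'streams.tif',            # raster of 1 (stream) / 0 (non-stream) pixels
    'flow_accumulation.tif',  # tie-breaker between equally-near stream pixels
    snap_distance=10,         # search radius, in pixels
    snapped_points_vector_path='snapped_outlets.gpkg')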
Example #14
def check_geometries(outlet_vector_path, dem_path, target_vector_path,
                     skip_invalid_geometry=False):
    """Perform reasonable checks and repairs on the incoming vector.

    This function will iterate through the vector at ``outlet_vector_path``
    and validate geometries, putting the geometries into a new geopackage
    at ``target_vector_path``.

    The vector at ``target_vector_path`` will include features that:

        * Have valid geometries
        * Are simplified to 1/2 the DEM pixel size
        * Intersect the bounding box of the DEM

    Any geometries that are empty or do not intersect the DEM will not be
    included in ``target_vector_path``.

    Args:
        outlet_vector_path (string): The path to an outflow vector.  The first
            layer of the vector only will be inspected.
        dem_path (string): The path to a DEM on disk.
        target_vector_path (string): The target path to where the output
            geopackage should be written.
        skip_invalid_geometry (bool): Whether to skip features with invalid
            geometry.  If ``False``, an exception will be raised when the
            first invalid geometry is found.  If ``True``, invalid geometries
            will not be included in the output vector, but all other valid
            geometries will.

    Returns:
        ``None``

    """
    if os.path.exists(target_vector_path):
        LOGGER.debug('Target vector path %s exists on disk; removing.',
                     target_vector_path)
        os.remove(target_vector_path)

    dem_info = pygeoprocessing.get_raster_info(dem_path)
    dem_bbox = shapely.prepared.prep(
        shapely.geometry.box(*dem_info['bounding_box']))
    nyquist_limit = numpy.mean(numpy.abs(dem_info['pixel_size'])) / 2.

    dem_srs = osr.SpatialReference()
    dem_srs.ImportFromWkt(dem_info['projection_wkt'])

    gpkg_driver = gdal.GetDriverByName('GPKG')
    target_vector = gpkg_driver.Create(target_vector_path, 0, 0, 0,
                                       gdal.GDT_Unknown)
    layer_name = os.path.splitext(os.path.basename(target_vector_path))[0]
    target_layer = target_vector.CreateLayer(
        layer_name, dem_srs, ogr.wkbUnknown)  # Use source layer type?

    outflow_vector = gdal.OpenEx(outlet_vector_path, gdal.OF_VECTOR)
    outflow_layer = outflow_vector.GetLayer()
    LOGGER.info('Checking %s geometries from source vector',
                outflow_layer.GetFeatureCount())
    target_layer.CreateFields(outflow_layer.schema)

    target_layer.StartTransaction()
    for feature in outflow_layer:
        original_geometry = feature.GetGeometryRef()

        try:
            shapely_geom = shapely.wkb.loads(
                bytes(original_geometry.ExportToWkb()))

            # The classic bowtie polygons will load but require a separate
            # check for validity.
            if not shapely_geom.is_valid:
                raise ValueError('Shapely geom is invalid.')
        except (shapely.errors.ReadingError, ValueError):
            # shapely.errors.ReadingError is the parent class for shapely
            # GEOS loading errors; the ValueError is raised above when the
            # geometry is invalid.
            if not skip_invalid_geometry:
                outflow_layer = None
                outflow_vector = None
                target_layer = None
                target_vector = None
                raise ValueError(
                    "The geometry at feature %s is invalid.  Check the logs "
                    "for details and try re-running with repaired geometry."
                    % feature.GetFID())
            else:
                LOGGER.warning(
                    "The geometry at feature %s is invalid and will not be "
                    "included in the set of features to be delineated.",
                    feature.GetFID())
                continue

        if shapely_geom.is_empty:
            LOGGER.warning(
                'Feature %s has no geometry. Skipping', feature.GetFID())
            continue

        shapely_bbox = shapely.geometry.box(*shapely_geom.bounds)
        if not dem_bbox.intersects(shapely_bbox):
            LOGGER.warning('Feature %s does not intersect the DEM. Skipping.',
                           feature.GetFID())
            continue

        simplified_geometry = shapely_geom.simplify(nyquist_limit)

        new_feature = ogr.Feature(target_layer.GetLayerDefn())
        new_feature.SetGeometry(ogr.CreateGeometryFromWkb(
            simplified_geometry.wkb))
        for field_name, field_value in feature.items().items():
            new_feature.SetField(field_name, field_value)
        target_layer.CreateFeature(new_feature)

    target_layer.CommitTransaction()

    LOGGER.info('%s features copied to %s from the original %s features',
                target_layer.GetFeatureCount(),
                os.path.basename(target_vector_path),
                outflow_layer.GetFeatureCount())
    outflow_layer = None
    outflow_vector = None
    target_layer = None
    target_vector = None
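
A sketch of invoking the check above, with placeholder paths:

check_geometries(
    'outlets.shp',            # source outflow features
    'dem.tif',                # DEM supplying the bounding box and pixel size
    'checked_outlets.gpkg',   # repaired output geopackage
    skip_invalid_geometry=True)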
Example #15
def ogr_s57_9():

    try:
        os.unlink('tmp/ogr_s57_9.000')
    except OSError:
        pass

    gdal.SetConfigOption(
        'OGR_S57_OPTIONS',
        'RETURN_PRIMITIVES=ON,RETURN_LINKAGES=ON,LNAM_REFS=ON')
    ds = ogr.GetDriverByName('S57').CreateDataSource('tmp/ogr_s57_9.000')
    src_ds = ogr.Open('data/1B5X02NE.000')
    gdal.SetConfigOption('OGR_S57_OPTIONS', None)
    for src_lyr in src_ds:
        if src_lyr.GetName() == 'DSID':
            continue
        lyr = ds.GetLayerByName(src_lyr.GetName())
        for src_feat in src_lyr:
            feat = ogr.Feature(lyr.GetLayerDefn())
            feat.SetFrom(src_feat)
            lyr.CreateFeature(feat)
    src_ds = None
    ds = None

    ds = ogr.Open('tmp/ogr_s57_9.000')
    if ds is None:
        return 'fail'

    gdaltest.s57_ds = ds
    if ogr_s57_2() != 'success':
        return 'fail'
    if ogr_s57_3() != 'success':
        return 'fail'
    if ogr_s57_4() != 'success':
        return 'fail'
    if ogr_s57_5() != 'success':
        return 'fail'

    gdaltest.s57_ds = None

    try:
        os.unlink('tmp/ogr_s57_9.000')
    except OSError:
        pass

    gdal.SetConfigOption(
        'OGR_S57_OPTIONS',
        'RETURN_PRIMITIVES=ON,RETURN_LINKAGES=ON,LNAM_REFS=ON')
    gdal.VectorTranslate(
        'tmp/ogr_s57_9.000',
        'data/1B5X02NE.000',
        options="-f S57 IsolatedNode ConnectedNode Edge Face M_QUAL")
    gdal.SetConfigOption('OGR_S57_OPTIONS', None)

    ds = gdal.OpenEx('tmp/ogr_s57_9.000',
                     open_options=['RETURN_PRIMITIVES=ON'])
    if ds is None:
        gdaltest.post_reason('fail')
        return 'fail'

    if ds.GetLayerByName('IsolatedNode') is None:
        gdaltest.post_reason('fail')
        return 'fail'

    gdaltest.s57_ds = ds
    if ogr_s57_4() != 'success':
        return 'fail'

    gdaltest.s57_ds = None

    try:
        os.unlink('tmp/ogr_s57_9.000')
    except OSError:
        pass

    return 'success'
Example No. 16
0
def _validate_raster_input(base_raster_path_band_const_list, raster_info_list,
                           target_raster_path):
    """Check for valid raster/arg inputs and output.

    Args:
        base_raster_path_band_const_list (list/tuple): the same object passed
            to .raster_calculator indicating the datastack to process.
        raster_info_list (list): a list of raster info dicts (as returned by
            ``get_raster_info``), one per raster input in
            ``base_raster_path_band_const_list``, used to verify that all
            raster inputs have identical dimensions.
        target_raster_path (str): desired target raster path from
            raster_calculator, used to ensure it is not also an input
            parameter.

    Returns:
        None

    Raises:
        ValueError if any input parameter would cause an error when passing to
            .raster_calculator
    """
    if not base_raster_path_band_const_list:
        raise ValueError("`base_raster_path_band_const_list` is empty and "
                         "should have at least one value.")

    # It's a common error to not pass in path/band tuples, so check for that
    # and report error if so
    bad_raster_path_list = False
    if not isinstance(base_raster_path_band_const_list, (list, tuple)):
        bad_raster_path_list = True
    else:
        for value in base_raster_path_band_const_list:
            if (not _is_raster_path_band_formatted(value)
                    and not isinstance(value, numpy.ndarray)
                    and not (isinstance(value, tuple) and len(value) == 2
                             and value[1] == 'raw')):
                bad_raster_path_list = True
                break
    if bad_raster_path_list:
        raise ValueError("Expected a sequence of path / integer band tuples, "
                         "ndarrays, or (value, 'raw') pairs for "
                         "`base_raster_path_band_const_list`, instead got: "
                         "%s" %
                         pprint.pformat(base_raster_path_band_const_list))

    # check that any rasters exist on disk and have enough bands
    not_found_paths = []
    gdal.PushErrorHandler('CPLQuietErrorHandler')
    base_raster_path_band_list = [
        path_band for path_band in base_raster_path_band_const_list
        if _is_raster_path_band_formatted(path_band)
    ]
    for value in base_raster_path_band_list:
        if gdal.OpenEx(value[0], gdal.OF_RASTER) is None:
            not_found_paths.append(value[0])
    gdal.PopErrorHandler()
    if not_found_paths:
        raise ValueError(
            "The following files were expected but do not exist on the "
            "filesystem: " + str(not_found_paths))

    # check that band index exists in raster
    invalid_band_index_list = []
    for value in base_raster_path_band_list:
        raster = gdal.OpenEx(value[0], gdal.OF_RASTER)
        if not (1 <= value[1] <= raster.RasterCount):
            invalid_band_index_list.append(value)
        raster = None
    if invalid_band_index_list:
        raise ValueError("The following rasters do not contain requested band "
                         "indexes: %s" % invalid_band_index_list)

    # check that the target raster is not also an input raster
    if target_raster_path in [x[0] for x in base_raster_path_band_list]:
        raise ValueError(
            "%s is used as a target path, but it is also in the base input "
            "path list %s" %
            (target_raster_path, str(base_raster_path_band_const_list)))

    # check that raster inputs are all the same dimensions
    geospatial_info_set = set()
    for raster_info in raster_info_list:
        geospatial_info_set.add(raster_info['raster_size'])
    if len(geospatial_info_set) > 1:
        raise ValueError("Input Rasters are not the same dimensions. The "
                         "following raster are not identical %s" %
                         str(geospatial_info_set))
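A hedged usage sketch for the validator above; the file names are hypothetical (and would need to exist on disk for the existence check to pass), and ``get_raster_info`` is the same helper used by ``raster_calculator``:

path_band_list = [('dem.tif', 1), (2.0, 'raw')]  # hypothetical inputs
raster_infos = [
    get_raster_info(p[0]) for p in path_band_list
    if _is_raster_path_band_formatted(p)]
_validate_raster_input(path_band_list, raster_infos, 'out.tif')
# A bare path like ['dem.tif'] (no band index) raises ValueError.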
Example No. 17
0
def test_pcidsk_15():
    if gdaltest.pcidsk_new == 0:
        pytest.skip()

    # One raster band and vector layer
    ds = gdal.GetDriverByName('PCIDSK').Create('/vsimem/pcidsk_15.pix', 1, 1)
    ds.CreateLayer('foo')
    ds = None

    ds = gdal.Open('/vsimem/pcidsk_15.pix')
    assert ds.RasterCount == 1
    assert ds.GetLayerCount() == 1

    ds2 = gdal.GetDriverByName('PCIDSK').CreateCopy('/vsimem/pcidsk_15_2.pix',
                                                    ds)
    ds2 = None
    ds = None

    ds = gdal.Open('/vsimem/pcidsk_15_2.pix')
    assert ds.RasterCount == 1
    assert ds.GetLayerCount() == 1
    ds = None

    # One vector layer only
    ds = gdal.GetDriverByName('PCIDSK').Create('/vsimem/pcidsk_15.pix', 0, 0,
                                               0)
    ds.CreateLayer('foo')
    ds = None

    ds = gdal.OpenEx('/vsimem/pcidsk_15.pix')
    assert ds.RasterCount == 0
    assert ds.GetLayerCount() == 1

    ds2 = gdal.GetDriverByName('PCIDSK').CreateCopy('/vsimem/pcidsk_15_2.pix',
                                                    ds)
    ds2 = None
    ds = None

    ds = gdal.OpenEx('/vsimem/pcidsk_15_2.pix')
    assert ds.RasterCount == 0
    assert ds.GetLayerCount() == 1
    ds = None

    # Zero raster band and vector layer
    ds = gdal.GetDriverByName('PCIDSK').Create('/vsimem/pcidsk_15.pix', 0, 0,
                                               0)
    ds = None

    ds = gdal.OpenEx('/vsimem/pcidsk_15.pix')
    assert ds.RasterCount == 0
    assert ds.GetLayerCount() == 0

    ds2 = gdal.GetDriverByName('PCIDSK').CreateCopy('/vsimem/pcidsk_15_2.pix',
                                                    ds)
    del ds2
    ds = None

    ds = gdal.OpenEx('/vsimem/pcidsk_15_2.pix')
    assert ds.RasterCount == 0
    assert ds.GetLayerCount() == 0
    ds = None

    gdal.GetDriverByName('PCIDSK').Delete('/vsimem/pcidsk_15.pix')
    gdal.GetDriverByName('PCIDSK').Delete('/vsimem/pcidsk_15_2.pix')
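An aside on the /vsimem/ paths used throughout these tests: GDAL provides an in-memory filesystem, so any driver can create and remove scratch files without touching disk. A minimal sketch:

from osgeo import gdal

ds = gdal.GetDriverByName('GTiff').Create('/vsimem/scratch.tif', 1, 1)
ds = None  # closing the dataset flushes the in-memory file
gdal.Unlink('/vsimem/scratch.tif')  # remove it when done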
Example No. 18
0
def raster_calculator(
        base_raster_path_band_const_list,
        local_op,
        target_raster_path,
        datatype_target,
        nodata_target,
        n_workers=max(1, multiprocessing.cpu_count()),
        calc_raster_stats=True,
        largest_block=_LARGEST_ITERBLOCK,
        raster_driver_creation_tuple=DEFAULT_GTIFF_CREATION_TUPLE_OPTIONS):
    """Apply local a raster operation on a stack of rasters.

    This function applies a user defined function across the pixel stack of
    a set of rasters. The rasters in ``base_raster_path_band_const_list``
    must be spatially aligned and have the same cell sizes.

    Args:
        base_raster_path_band_const_list (sequence): a sequence containing
            either (str, int) tuples, ``numpy.ndarray`` s of up to two
            dimensions, or an (object, 'raw') tuple.  A ``(str, int)``
            tuple refers to a raster path band index pair to use as an input.
            The ``numpy.ndarray`` s must be broadcastable to each other AND the
            size of the raster inputs. Values passed by  ``(object, 'raw')``
            tuples pass ``object`` directly into the ``local_op``. All rasters
            must have the same raster size. If only arrays are input, numpy
            arrays must be broadcastable to each other and the final raster
            size will be the final broadcast array shape. A value error is
            raised if only "raw" inputs are passed.
        local_op (function): a function that must take in as many parameters as
            there are elements in ``base_raster_path_band_const_list``. The
            parameters in ``local_op`` will map 1-to-1 in order with the values
            in ``base_raster_path_band_const_list``. ``raster_calculator`` will
            call ``local_op`` to generate the pixel values in ``target_raster``
            along memory block aligned processing windows. Note any
            particular call to ``local_op`` will have the arguments from
            ``raster_path_band_const_list`` sliced to overlap that window.
            If an argument from ``raster_path_band_const_list`` is a
            raster/path band tuple, it will be passed to ``local_op`` as a 2D
            numpy array of pixel values that align with the processing window
            that ``local_op`` is targeting. A 2D or 1D array will be sliced to
            match the processing window and in the case of a 1D array tiled in
            whatever dimension is flat. If an argument is a scalar it is
            passed as a scalar.
            The return value must be a 2D array of the same size as any of the
            input parameter 2D arrays and contain the desired pixel values
            for the target raster.
        target_raster_path (string): the path of the output raster.  The
            projection, size, and cell size will be the same as the rasters
            in ``base_raster_path_band_const_list`` or the final broadcast
            size of the constant/ndarray values in the list.
        datatype_target (gdal datatype; int): the desired GDAL output type of
            the target raster.
        nodata_target (numerical value): the desired nodata value of the
            target raster.
        n_workers (int): number of Processes to launch for parallel
            processing, defaults to ``max(1, multiprocessing.cpu_count())``.
        calc_raster_stats (boolean): If True, calculates and sets raster
            statistics (min, max, mean, and stdev) for target raster.
        largest_block (int): Attempts to internally iterate over raster blocks
            with this many elements.  Useful in cases where the blocksize is
            relatively small, memory is available, and the function call
            overhead dominates the iteration.  Defaults to 2**20.  A value of
            anything less than the original blocksize of the raster will
            result in blocksizes equal to the original size.
        raster_driver_creation_tuple (tuple): a tuple containing a GDAL driver
            name string as the first element and a GDAL creation options
            tuple/list as the second. Defaults to
            geoprocessing.DEFAULT_GTIFF_CREATION_TUPLE_OPTIONS.

    Returns:
        None

    Raises:
        ValueError: invalid input provided

    """
    raster_info_list = [
        get_raster_info(path_band[0])
        for path_band in base_raster_path_band_const_list
        if _is_raster_path_band_formatted(path_band)
    ]

    _validate_raster_input(base_raster_path_band_const_list, raster_info_list,
                           target_raster_path)

    n_cols, n_rows = _calculate_target_raster_size(
        raster_info_list, base_raster_path_band_const_list)

    # create a "canonical" argument list that contains only
    # (file paths, band id) tuples, 2d numpy arrays, or raw values
    base_canonical_arg_list = []
    for value in base_raster_path_band_const_list:
        # the input has been tested and value is either a raster/path band
        # tuple, 1d ndarray, 2d ndarray, or (value, 'raw') tuple.
        if _is_raster_path_band_formatted(value):
            # it's a raster/path band, keep track of open raster and band
            # for later so we can __swig_destroy__ them.
            base_canonical_arg_list.append(RasterPathBand(value[0], value[1]))
        elif isinstance(value, numpy.ndarray):
            if value.ndim == 1:
                # easier to process as a 2d array for writing to band
                base_canonical_arg_list.append(
                    value.reshape((1, value.shape[0])))
            else:  # dimensions are two because we checked earlier.
                base_canonical_arg_list.append(value)
        else:
            # it's a regular tuple
            base_canonical_arg_list.append(value)

    if datatype_target not in _VALID_GDAL_TYPES:
        raise ValueError(
            'Invalid target type, should be a gdal.GDT_* type, received '
            '"%s"' % datatype_target)

    # create target raster
    raster_driver = gdal.GetDriverByName(raster_driver_creation_tuple[0])
    try:
        os.makedirs(os.path.dirname(target_raster_path))
    except OSError as exception:
        # it's fine if the directory already exists, otherwise there's a big
        # error!
        if exception.errno != errno.EEXIST:
            raise

    target_raster = raster_driver.Create(
        target_raster_path,
        n_cols,
        n_rows,
        1,
        datatype_target,
        options=raster_driver_creation_tuple[1])

    target_band = target_raster.GetRasterBand(1)
    if nodata_target is not None:
        target_band.SetNoDataValue(nodata_target)
    if raster_info_list:
        # use the first raster in the list for the projection and geotransform
        target_raster.SetProjection(raster_info_list[0]['projection_wkt'])
        target_raster.SetGeoTransform(raster_info_list[0]['geotransform'])
    target_band = None
    target_raster = None

    manager = multiprocessing.Manager()
    stats_worker_queue = None
    if calc_raster_stats:
        # this queue is used to send computed valid blocks of
        # the raster to an incremental statistics calculator worker
        stats_worker_queue = manager.Queue()

    # iterate over each block and calculate local_op
    block_offset_list = list(
        iterblocks((target_raster_path, 1),
                   offset_only=True,
                   largest_block=largest_block))

    if calc_raster_stats:
        LOGGER.debug('start stats worker')
        stats_worker = multiprocessing.Process(
            target=geoprocessing_core.stats_worker,
            args=(stats_worker_queue, len(block_offset_list)))
        stats_worker.start()

    LOGGER.debug('start workers')
    processing_state = manager.dict()
    processing_state['blocks_complete'] = 0
    processing_state['total_blocks'] = len(block_offset_list)
    processing_state['last_time'] = time.time()
    block_size_bytes = (numpy.dtype(numpy.float64).itemsize *
                        block_offset_list[0]['win_xsize'] *
                        block_offset_list[0]['win_ysize'])
    target_write_lock = manager.Lock()
    block_offset_queue = multiprocessing.Queue(n_workers)
    process_list = []
    for _ in range(n_workers):
        shared_memory = None
        if calc_raster_stats:
            if sys.version_info >= (3, 8):
                shared_memory = multiprocessing.shared_memory.SharedMemory(
                    create=True, size=block_size_bytes)
        worker = multiprocessing.Process(
            target=_raster_calculator_worker,
            args=(block_offset_queue, base_canonical_arg_list, local_op,
                  stats_worker_queue, nodata_target, target_raster_path,
                  target_write_lock, processing_state, shared_memory))
        worker.start()
        process_list.append((worker, shared_memory))

    # Fill the work queue
    for block_offset in block_offset_list:
        block_offset_queue.put(block_offset)

    for _ in range(n_workers):
        block_offset_queue.put(None)

    if calc_raster_stats:
        LOGGER.info('wait for stats worker to complete')
        stats_worker.join(_MAX_TIMEOUT)
        if stats_worker.is_alive():
            LOGGER.error(f'stats worker {stats_worker.pid} '
                         'didn\'t terminate, sending kill signal.')
            try:
                os.kill(stats_worker.pid, signal.SIGTERM)
            except Exception:
                LOGGER.exception(f'unable to kill {stats_worker.pid}')

    # wait for the workers to join
    LOGGER.info('all work sent, waiting for workers to finish')
    for worker, shared_memory in process_list:
        worker.join(_MAX_TIMEOUT)
        if worker.is_alive():
            LOGGER.error(
                f'worker {worker.pid} didn\'t terminate, sending kill signal.')
            try:
                os.kill(worker.pid, signal.SIGTERM)
            except Exception:
                LOGGER.exception(f'unable to kill {worker.pid}')
        if shared_memory is not None:
            LOGGER.debug(f'unlink {shared_memory.name}')
            shared_memory.unlink()

    if calc_raster_stats:
        payload = stats_worker_queue.get(True, _MAX_TIMEOUT)
        if payload is not None:
            target_min, target_max, target_mean, target_stddev = payload
            target_raster = gdal.OpenEx(target_raster_path,
                                        gdal.OF_RASTER | gdal.GA_Update)
            target_band = target_raster.GetRasterBand(1)
            target_band.SetStatistics(float(target_min), float(target_max),
                                      float(target_mean), float(target_stddev))
            target_band = None
            target_raster = None
    LOGGER.info('raster_calculator 100.0% complete')
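A minimal, hedged usage sketch for ``raster_calculator``: sum two aligned single-band rasters and propagate a nodata value. The paths and the nodata value are assumptions; the inputs would need to exist and be spatially aligned.

import numpy
from osgeo import gdal

NODATA = -1.0

def local_sum(a, b):
    # a and b arrive as 2D blocks aligned to the same processing window
    valid = (a != NODATA) & (b != NODATA)
    return numpy.where(valid, a + b, NODATA)

raster_calculator(
    [('a.tif', 1), ('b.tif', 1)],  # hypothetical aligned inputs
    local_sum, 'sum.tif', gdal.GDT_Float32, NODATA)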
Example No. 19
0
File: ogr_csw.py Project: ahhz/gdal
def ogr_csw_vsimem_csw_output_schema_csw():

    if gdaltest.csw_drv is None:
        return 'skip'

    ds = gdal.OpenEx('CSW:/vsimem/csw_endpoint',
                     open_options=['OUTPUT_SCHEMA=CSW'])
    lyr = ds.GetLayer(0)

    gdal.FileFromMemBuffer(
        """/vsimem/csw_endpoint&POSTFIELDS=<?xml version="1.0" encoding="UTF-8"?><csw:GetRecords resultType="results" service="CSW" version="2.0.2" outputSchema="http://www.opengis.net/cat/csw/2.0.2" startPosition="1" maxRecords="500" xmlns:csw="http://www.opengis.net/cat/csw/2.0.2" xmlns:gml="http://www.opengis.net/gml" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:dct="http://purl.org/dc/terms/" xmlns:ogc="http://www.opengis.net/ogc" xmlns:ows="http://www.opengis.net/ows" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.opengis.net/cat/csw/2.0.2 http://schemas.opengis.net/csw/2.0.2/CSW-discovery.xsd"><csw:Query typeNames="csw:Record"><csw:ElementSetName>full</csw:ElementSetName></csw:Query></csw:GetRecords>""",
        """<invalid_xml
""")
    lyr.ResetReading()
    gdal.PushErrorHandler()
    f = lyr.GetNextFeature()
    gdal.PopErrorHandler()
    if f is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    gdal.FileFromMemBuffer(
        """/vsimem/csw_endpoint&POSTFIELDS=<?xml version="1.0" encoding="UTF-8"?><csw:GetRecords resultType="results" service="CSW" version="2.0.2" outputSchema="http://www.opengis.net/cat/csw/2.0.2" startPosition="1" maxRecords="500" xmlns:csw="http://www.opengis.net/cat/csw/2.0.2" xmlns:gml="http://www.opengis.net/gml" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:dct="http://purl.org/dc/terms/" xmlns:ogc="http://www.opengis.net/ogc" xmlns:ows="http://www.opengis.net/ows" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.opengis.net/cat/csw/2.0.2 http://schemas.opengis.net/csw/2.0.2/CSW-discovery.xsd"><csw:Query typeNames="csw:Record"><csw:ElementSetName>full</csw:ElementSetName></csw:Query></csw:GetRecords>""",
        """<csw:GetRecordsResponse/>""")
    lyr.ResetReading()
    gdal.PushErrorHandler()
    f = lyr.GetNextFeature()
    gdal.PopErrorHandler()
    if f is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    gdal.FileFromMemBuffer(
        """/vsimem/csw_endpoint&POSTFIELDS=<?xml version="1.0" encoding="UTF-8"?><csw:GetRecords resultType="results" service="CSW" version="2.0.2" outputSchema="http://www.opengis.net/cat/csw/2.0.2" startPosition="1" maxRecords="500" xmlns:csw="http://www.opengis.net/cat/csw/2.0.2" xmlns:gml="http://www.opengis.net/gml" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:dct="http://purl.org/dc/terms/" xmlns:ogc="http://www.opengis.net/ogc" xmlns:ows="http://www.opengis.net/ows" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.opengis.net/cat/csw/2.0.2 http://schemas.opengis.net/csw/2.0.2/CSW-discovery.xsd"><csw:Query typeNames="csw:Record"><csw:ElementSetName>full</csw:ElementSetName></csw:Query></csw:GetRecords>""",
        """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<csw:GetRecordsResponse
    xmlns:dc="http://purl.org/dc/elements/1.1/"
    xmlns:dct="http://purl.org/dc/terms/"
    xmlns:ows="http://www.opengis.net/ows"
    xmlns:csw="http://www.opengis.net/cat/csw/2.0.2"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    version="2.0.2"
    xsi:schemaLocation="http://www.opengis.net/cat/csw/2.0.2 http://schemas.opengis.net/csw/2.0.2/CSW-discovery.xsd">
  <csw:SearchStatus timestamp="2015-04-27T00:46:35Z"/>
  <csw:SearchResults nextRecord="0" numberOfRecordsMatched="1" numberOfRecordsReturned="1" recordSchema="http://www.isotc211.org/2005/gmd" elementSet="full">
    <csw:Record>
    <!-- lots of missing stuff -->
        <ows:BoundingBox crs="urn:x-ogc:def:crs:EPSG:6.11:4326" dimensions="2">
            <ows:LowerCorner>-90 -180</ows:LowerCorner>
            <ows:UpperCorner>90 180</ows:UpperCorner>
        </ows:BoundingBox>
    <!-- lots of missing stuff -->
    </csw:Record>
  </csw:SearchResults>
</csw:GetRecordsResponse>
""")
    lyr.ResetReading()
    f = lyr.GetNextFeature()
    if f['raw_xml'].find('<csw:Record') != 0 or \
       f['boundingbox'].ExportToWkt() != 'POLYGON ((-180 -90,-180 90,180 90,180 -90,-180 -90))':
        gdaltest.post_reason('fail')
        f.DumpReadable()
        return 'fail'

    return 'success'
Example No. 20
0
def _raster_calculator_worker(block_offset_queue, base_canonical_arg_list,
                              local_op, stats_worker_queue, nodata_target,
                              target_raster_path, write_lock, processing_state,
                              result_array_shared_memory):
    """Process a single block of an array for raster_calculation.

    Args:
        block_offset_queue (multiprocessing.Queue): queue of block offset
            dicts, each containing 'xoff', 'yoff', 'win_xsize', 'win_ysize'
            keys that can be passed directly to Band.ReadAsArray; a None
            value signals this worker to terminate.
        base_canonical_arg_list (list): list of RasterPathBand, numpy arrays,
            or 'raw' objects to pass to the ``local_op``.
        local_op (function): a callable that must take in as many
            parameters as there are elements in
            ``base_canonical_arg_list``. A full description can be found in
            the public facing ``raster_calculator`` docstring.
        stats_worker_queue (queue): queue used to pass ``local_op`` results
            (via shared memory when available) to the statistics worker if
            stats are being calculated. None otherwise.
        nodata_target (numeric or None): desired target raster nodata
        target_raster_path (str): path to target raster.
        write_lock (multiprocessing.Lock): Lock object used to coordinate
            writes to raster_path.
        processing_state (multiprocessing.Manager.dict): a global object to
            pass to ``__block_success_handler`` for this execution context.
        result_array_shared_memory (multiprocessing.shared_memory): If
            Python version >= 3.8, this is a shared
            memory object used to pass data to the stats worker process if
            required. Should be pre-allocated with enough data to hold the
            largest result from ``local_op`` given any ``block_offset`` from
            ``block_offset_queue``. None otherwise.

    Returns:
        None.

    """
    # read input blocks
    while True:
        block_offset = block_offset_queue.get()
        if block_offset is None:
            # indicates this worker should terminate
            return

        offset_list = (block_offset['yoff'], block_offset['xoff'])
        blocksize = (block_offset['win_ysize'], block_offset['win_xsize'])
        data_blocks = []
        for value in base_canonical_arg_list:
            if isinstance(value, RasterPathBand):
                raster = gdal.OpenEx(value.path, gdal.OF_RASTER)
                band = raster.GetRasterBand(value.band_id)
                data_blocks.append(band.ReadAsArray(**block_offset))
                # I've encountered the following error when a gdal raster
                # is corrupt, often from multiple threads writing to the
                # same file. This helps to catch the error early rather
                # than lead to confusing values of ``data_blocks`` later.
                if not isinstance(data_blocks[-1], numpy.ndarray):
                    raise ValueError(
                        f"got a {data_blocks[-1]} when trying to read "
                        f"{band.GetDataset().GetFileList()} at "
                        f"{block_offset}, expected numpy.ndarray.")
                raster = None
                band = None
            elif isinstance(value, numpy.ndarray):
                # must be numpy array and all have been conditioned to be
                # 2d, so start with 0:1 slices and expand if possible
                slice_list = [slice(0, 1)] * 2
                tile_dims = list(blocksize)
                for dim_index in [0, 1]:
                    if value.shape[dim_index] > 1:
                        slice_list[dim_index] = slice(
                            offset_list[dim_index],
                            offset_list[dim_index] + blocksize[dim_index],
                        )
                        tile_dims[dim_index] = 1
                data_blocks.append(
                    numpy.tile(value[tuple(slice_list)], tile_dims))
            else:
                # must be a raw tuple
                data_blocks.append(value[0])

        target_block = local_op(*data_blocks)

        if (not isinstance(target_block, numpy.ndarray)
                or target_block.shape != blocksize):
            raise ValueError(
                "Expected `local_op` to return a numpy.ndarray of "
                "shape %s but got this instead: %s" %
                (blocksize, target_block))

        with write_lock:
            target_raster = gdal.OpenEx(target_raster_path,
                                        gdal.OF_RASTER | gdal.GA_Update)
            target_band = target_raster.GetRasterBand(1)
            target_band.WriteArray(target_block,
                                   yoff=block_offset['yoff'],
                                   xoff=block_offset['xoff'])
            _block_success_handler(processing_state)
            target_band = None
            target_raster = None

        # send result to stats calculator
        if not stats_worker_queue:
            continue

        # Construct shared memory object to pass to stats worker
        if nodata_target is not None:
            target_block = target_block[target_block != nodata_target]
        target_block = target_block.astype(numpy.float64).flatten()

        if result_array_shared_memory:
            shared_memory_array = numpy.ndarray(
                target_block.shape,
                dtype=target_block.dtype,
                buffer=result_array_shared_memory.buf)
            shared_memory_array[:] = target_block[:]

            stats_worker_queue.put(
                (shared_memory_array.shape, shared_memory_array.dtype,
                 result_array_shared_memory))
        else:
            stats_worker_queue.put(target_block)
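For reference, a hedged sketch of how a consumer such as the stats worker could rebuild the array from the (shape, dtype, shared_memory) payload queued above (Python >= 3.8); ``stats_worker_queue`` is assumed to be the same queue used by the worker:

import numpy

shape, dtype, shm = stats_worker_queue.get()  # payload from a worker
block = numpy.ndarray(shape, dtype=dtype, buffer=shm.buf).copy()
shm.close()  # the producing side is responsible for unlink()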
Example No. 21
0
def flowlinesInWBD(hucid=None,
                   keyFieldName=None,
                   shpFile=None,
                   lyrName=None,
                   wbdShpFile=None,
                   wbdLyrName=None,
                   of=None):

    # open boundary shp file
    bds = gdal.OpenEx(wbdShpFile, gdal.OF_VECTOR | gdal.OF_READONLY)
    if bds is None:
        print("flowlinesInWBD(): ERROR Open failed: " + str(wbdShpFile))
        sys.exit(1)
    blyr = bds.GetLayerByName(wbdLyrName)
    if blyr is None:
        print("flowlinesInWBD(): ERROR fetch layer: " + str(wbdLyrName))
        sys.exit(1)
    blyr.ResetReading()
    bnd = None
    bndbbox = None
    bbb = None
    for f in blyr:
        bnd = f.GetGeometryRef().Clone()
        bbb = bnd.GetEnvelope()  # minX, maxX, minY, maxY
        bndbbox = ogr.CreateGeometryFromWkt(
            "POLYGON ((%f %f, %f %f, %f %f, %f %f, %f %f))" % (
                bbb[0], bbb[2],
                bbb[0], bbb[3],
                bbb[1], bbb[3],
                bbb[1], bbb[2],
                bbb[0], bbb[2]))
        #print("fetched boundary geometry:")
        #print(bnd.ExportToWkt())
        break  # assume the first feature has the boundary polygon
    bds = None

    # open flowline shape/db file
    ds = gdal.OpenEx(shpFile, gdal.OF_VECTOR | gdal.OF_READONLY)
    if ds is None:
        print("flowlinesInWBD(): ERROR Open failed: " + str(shpFile))
        sys.exit(1)
    lyr = ds.GetLayerByName(lyrName)
    if lyr is None:
        print("flowlinesInWBD(): ERROR fetch layer: " + str(lyrName))
        sys.exit(1)
    lyr.ResetReading()
    num_records = lyr.GetFeatureCount()
    lyr_defn = lyr.GetLayerDefn()
    srs = lyr.GetSpatialRef()
    geomType = lyr.GetGeomType()
    print("flowline shp has " + str(num_records) + " lines")

    o = []  # hold filtered features

    count_codematch = 0
    count_geommatch = 0
    #count = 0
    fi_comid = lyr_defn.GetFieldIndex(keyFieldName)
    for f in lyr:
        key = f.GetFieldAsString(fi_comid)
        #print("key: " + key)
        if key.startswith(hucid):
            o.append(f)
            count_codematch += 1
        else:
            geom = f.GetGeometryRef()
            #print(geom.ExportToWkt())
            # bb in bbb?
            bb = geom.GetEnvelope()  # minX, maxX, minY, maxY
            fineCheck = False
            # if any envelope corner is inside the boundary bbox, do a
            # finer geometry check
            for x in range(0, 2):
                for y in range(2, 4):
                    if bbb[0] < bb[x] < bbb[1] and bbb[2] < bb[y] < bbb[3]:
                        fineCheck = True
                        break
                if fineCheck:
                    break
            if fineCheck:
                if bndbbox.Contains(geom) or geom.Intersects(bndbbox):
                    if bnd.Contains(geom) or geom.Intersects(bnd):
                        o.append(f)
                        count_geommatch += 1
            #geom.Destroy()
        #count += 1
        #if count % 100000 == 0:
        #    print("progress --> %d %d %d" % (count, count_codematch, count_geommatch))
    bndbbox.Destroy()
    print("selected " + str(len(o)) + " features, " + str(count_codematch) +
          " code matched, " + str(count_geommatch) + " geom matched")

    # output flowline shp file
    driverName = "ESRI Shapefile"
    drv = gdal.GetDriverByName(driverName)
    if drv is None:
        print("flowlinesInWBD(): ERROR %s driver not available.\n" %
              (driverName))
        sys.exit(1)
    ods = drv.Create(of, 0, 0, 0, gdal.GDT_Unknown)
    if ods is None:
        print("flowlinesInWBD(): ERROR Creation of output file failed: " + of)
        sys.exit(1)
    oLyrName, ext = os.path.splitext(os.path.basename(of))
    olyr = ods.CreateLayer(oLyrName, srs, geomType)
    if olyr is None:
        print("flowlinesInWBD(): ERROR Layer creation failed: " + oLyrName)
        sys.exit(1)
    # create fields
    for i in range(lyr_defn.GetFieldCount()):
        if (olyr.CreateField(lyr_defn.GetFieldDefn(i)) != 0):
            print("flowlinesInWBD(): ERROR Creating fields in output .")
            sys.exit(1)

    for f in o:
        if olyr.CreateFeature(f) != 0:
            print("flowlinesInWBD(): ERROR Creating fields in output .")
            sys.exit(1)

    ods = None
    ds = None
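A hedged usage sketch; every path, layer, and field name below is hypothetical (NHD/WBD-style inputs are implied by the function's naming):

flowlinesInWBD(
    hucid='1802', keyFieldName='REACHCODE',
    shpFile='NHDFlowline.shp', lyrName='NHDFlowline',
    wbdShpFile='WBDHU4.shp', wbdLyrName='WBDHU4',
    of='flowlines_1802.shp')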
Example No. 22
0
def gdal_edit(datasetname=None, srs=None, ulx=None, uly=None, lrx=None,
              lry=None, nodata=None, unsetnodata=False, xres=None,
              yres=None, unsetgt=False, unsetstats=False, stats=False,
              approx_stats=False, unsetmd=False, ro=False, molist=[],
              gcp_list=[], open_options=[], offset=None, scale=None):

    if datasetname is None:
        return Usage()

    if (srs is None and lry is None and yres is None and not unsetgt
            and not unsetstats and not stats and nodata is None
            and len(molist) == 0 and not unsetmd and len(gcp_list) == 0
            and not unsetnodata
            and scale is None and offset is None):
        print('No option specified')
        print('')
        return Usage()

    exclusive_option = 0
    if lry is not None:
        exclusive_option = exclusive_option + 1
    if yres is not None:
        exclusive_option = exclusive_option + 1
    if unsetgt:
        exclusive_option = exclusive_option + 1
    if exclusive_option > 1:
        print('-a_ullr, -tr and -unsetgt options are exclusive.')
        print('')
        return Usage()

    if unsetstats and stats:
        print('-unsetstats and either -stats or -approx_stats options are exclusive.')
        print('')
        return Usage()

    if unsetnodata and nodata is not None:
        print('-unsetnodata and -nodata options are exclusive.')
        print('')
        return Usage()

    if open_options is not None:
        if ro:
            ds = gdal.OpenEx(datasetname, gdal.OF_RASTER, open_options = open_options)
        else:
            ds = gdal.OpenEx(datasetname, gdal.OF_RASTER | gdal.OF_UPDATE, open_options = open_options)
    # GDAL 1.X compat
    elif ro:
        ds = gdal.Open(datasetname)
    else:
        ds = gdal.Open(datasetname, gdal.GA_Update)
    if ds is None:
        return -1

    wkt = None
    if srs == '' or srs == 'None':
        ds.SetProjection('')
    elif srs is not None:
        sr = osr.SpatialReference()
        if sr.SetFromUserInput(srs) != 0:
            print('Failed to process SRS definition: %s' % srs)
            return -1
        wkt = sr.ExportToWkt()
        if len(gcp_list) == 0:
            ds.SetProjection(wkt)

    if ulx is not None and uly is not None and lrx is not None \
            and lry is not None:
        gt = [ulx, (lrx - ulx) / ds.RasterXSize, 0,
              uly, 0, (lry - uly) / ds.RasterYSize]
        ds.SetGeoTransform(gt)

    if xres is not None and yres is not None:
        gt = ds.GetGeoTransform()
        # Doh ! why is gt a tuple and not an array...
        gt = [gt[j] for j in range(6)]
        gt[1] = xres
        gt[5] = yres
        ds.SetGeoTransform(gt)

    if unsetgt:
        ds.SetGeoTransform([0, 1, 0, 0, 0, 1])

    if len(gcp_list) > 0:
        if wkt is None:
            wkt = ds.GetGCPProjection()
        if wkt is None:
            wkt = ''
        ds.SetGCPs(gcp_list, wkt)

    if nodata is not None:
        for i in range(ds.RasterCount):
            ds.GetRasterBand(i+1).SetNoDataValue(nodata)
    elif unsetnodata:
        for i in range(ds.RasterCount):
            ds.GetRasterBand(i+1).DeleteNoDataValue()

    if scale is not None:
        for i in range(ds.RasterCount):
            ds.GetRasterBand(i+1).SetScale(scale)

    if offset is not None:
        for i in range(ds.RasterCount):
            ds.GetRasterBand(i + 1).SetOffset(offset)

    if unsetstats:
        for i in range(ds.RasterCount):
            band = ds.GetRasterBand(i+1)
            for key in band.GetMetadata().keys():
                if key.startswith('STATISTICS_'):
                    band.SetMetadataItem(key, None)

    if stats:
        for i in range(ds.RasterCount):
            ds.GetRasterBand(i+1).ComputeStatistics(approx_stats)

    if len(molist) != 0:
        if unsetmd:
            md = {}
        else:
            md = ds.GetMetadata()
        for moitem in molist:
            equal_pos = moitem.find('=')
            if equal_pos > 0:
                md[moitem[0:equal_pos]] = moitem[equal_pos+1:]
        ds.SetMetadata(md)
    elif unsetmd:
        ds.SetMetadata({})

    ds = band = None

    return 0
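A hedged usage sketch for the function above; 'input.tif' is a hypothetical path and would need to exist:

if gdal_edit(datasetname='input.tif', srs='EPSG:4326', nodata=0) != 0:
    print('gdal_edit failed')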
Example No. 23
0
    def test_calculate_land_to_grid_distance(self):
        """WindEnergy: testing 'point_to_polygon_distance' function."""
        from natcap.invest import wind_energy

        # Setup parameters for creating polygon and point shapefiles
        fields = {'vec_id': ogr.OFTInteger}
        attr_pt = [{'vec_id': 1}, {'vec_id': 2}, {'vec_id': 3}, {'vec_id': 4}]
        attr_poly = [{'vec_id': 1}, {'vec_id': 2}]

        srs = osr.SpatialReference()
        srs.ImportFromEPSG(3157)
        projection_wkt = srs.ExportToWkt()
        origin = (443723.127327877911739, 4956546.905980412848294)
        pos_x = origin[0]
        pos_y = origin[1]

        poly_geoms = {
            'poly_1': [(pos_x + 200, pos_y), (pos_x + 250, pos_y),
                       (pos_x + 250, pos_y - 100), (pos_x + 200, pos_y - 100),
                       (pos_x + 200, pos_y)],
            'poly_2': [(pos_x, pos_y - 150), (pos_x + 100, pos_y - 150),
                       (pos_x + 100, pos_y - 200), (pos_x, pos_y - 200),
                       (pos_x, pos_y - 150)]
        }

        poly_geometries = [
            Polygon(poly_geoms['poly_1']),
            Polygon(poly_geoms['poly_2'])
        ]
        poly_vector_path = os.path.join(self.workspace_dir, 'poly_shape.shp')
        # Create polygon shapefile to use as testing input
        pygeoprocessing.shapely_geometry_to_vector(
            poly_geometries,
            poly_vector_path,
            projection_wkt,
            'ESRI Shapefile',
            fields=fields,
            attribute_list=attr_poly,
            ogr_geom_type=ogr.wkbPolygon)

        point_geometries = [
            Point(pos_x, pos_y),
            Point(pos_x + 100, pos_y),
            Point(pos_x, pos_y - 100),
            Point(pos_x + 100, pos_y - 100)
        ]
        point_vector_path = os.path.join(self.workspace_dir, 'point_shape.shp')
        # Create point shapefile to use as testing input
        pygeoprocessing.shapely_geometry_to_vector(point_geometries,
                                                   point_vector_path,
                                                   projection_wkt,
                                                   'ESRI Shapefile',
                                                   fields=fields,
                                                   attribute_list=attr_pt,
                                                   ogr_geom_type=ogr.wkbPoint)

        target_point_vector_path = os.path.join(self.workspace_dir,
                                                'target_point.shp')
        # Call function to test
        field_name = 'L2G'
        wind_energy._calculate_land_to_grid_distance(point_vector_path,
                                                     poly_vector_path,
                                                     field_name,
                                                     target_point_vector_path)

        exp_results = [.15, .1, .05, .05]

        point_vector = gdal.OpenEx(target_point_vector_path)
        point_layer = point_vector.GetLayer()
        field_index = point_layer.GetFeature(0).GetFieldIndex(field_name)
        for i, point_feat in enumerate(point_layer):
            result_val = point_feat.GetField(field_index)
            numpy.testing.assert_allclose(result_val, exp_results[i])
Example No. 24
0
File: init.py Project: craigds/sno
    @classmethod
    def _ogr_open(cls, ogr_source, **open_kwargs):
        return gdal.OpenEx(
            ogr_source,
            gdal.OF_VECTOR | gdal.OF_VERBOSE_ERROR | gdal.OF_READONLY,
            **open_kwargs,
        )
Example No. 25
0
    def test_point_snapping(self):
        """DelineateIt: test point snapping."""

        from natcap.invest import delineateit

        srs = osr.SpatialReference()
        srs.ImportFromEPSG(32731)  # WGS84/UTM zone 31s
        wkt = srs.ExportToWkt()

        # need stream layer, points
        stream_matrix = numpy.array(
            [[0, 1, 0, 0, 0, 0],
             [0, 1, 0, 0, 0, 0],
             [0, 1, 0, 0, 0, 0],
             [0, 1, 0, 0, 0, 0],
             [0, 1, 1, 1, 1, 1],
             [0, 1, 0, 0, 0, 0],
             [0, 1, 0, 0, 0, 0]], dtype=numpy.int8)
        stream_raster_path = os.path.join(self.workspace_dir, 'streams.tif')
        pygeoprocessing.testing.create_raster_on_disk(
            [stream_matrix],
            origin=(2, -2),
            pixel_size=(2, -2),
            projection_wkt=wkt,
            nodata=255,  # byte datatype
            filename=stream_raster_path)

        source_points_path = os.path.join(self.workspace_dir,
                                          'source_features.geojson')
        source_features = [
            Point(-1, -1),  # off the edge of the stream raster.
            Point(3, -5),
            Point(7, -9),
            Point(13, -5),
            box(-2, -2, -1, -1),  # Off the edge
        ]
        pygeoprocessing.testing.create_vector_on_disk(
            source_features, wkt,
            fields={'foo': 'int',
                    'bar': 'string'},
            attributes=[
                {'foo': 0, 'bar': 0.1},
                {'foo': 1, 'bar': 1.1},
                {'foo': 2, 'bar': 2.1},
                {'foo': 3, 'bar': 3.1},
                {'foo': 4, 'bar': 4.1}],
            filename=source_points_path)

        snapped_points_path = os.path.join(self.workspace_dir,
                                           'snapped_points.gpkg')

        snap_distance = -1
        with self.assertRaises(ValueError) as cm:
            delineateit.snap_points_to_nearest_stream(
                source_points_path, (stream_raster_path, 1),
                snap_distance, snapped_points_path)
        self.assertTrue('must be >= 0' in str(cm.exception))

        snap_distance = 10  # large enough to get multiple streams per point.
        delineateit.snap_points_to_nearest_stream(
            source_points_path, (stream_raster_path, 1),
            snap_distance, snapped_points_path)

        snapped_points_vector = gdal.OpenEx(snapped_points_path,
                                            gdal.OF_VECTOR)
        snapped_points_layer = snapped_points_vector.GetLayer()

        # snapped layer will include 3 valid points and one polygon.
        self.assertEqual(4, snapped_points_layer.GetFeatureCount())

        expected_geometries_and_fields = [
            (Point(5, -5), {'foo': 1, 'bar': '1.1'}),
            (Point(5, -9), {'foo': 2, 'bar': '2.1'}),
            (Point(13, -11), {'foo': 3, 'bar': '3.1'}),
        ]
        for feature, (expected_geom, expected_fields) in zip(
                snapped_points_layer, expected_geometries_and_fields):
            shapely_feature = shapely.wkb.loads(
                feature.GetGeometryRef().ExportToWkb())

            self.assertTrue(shapely_feature.equals(expected_geom))
            self.assertEqual(expected_fields, feature.items())
Example No. 26
0
if outfile is None:
    Usage()

if schema and feature_count:
    sys.stderr.write('Ignoring -feature_count when used with -schema.\n')
    feature_count = 0

if schema and extent:
    sys.stderr.write('Ignoring -extent when used with -schema.\n')
    extent = 0

#############################################################################
# Open the datasource to read.

src_ds = gdal.OpenEx(infile, gdal.OF_VECTOR, open_options=openoptions)

if schema:
    infile = '@dummy@'

if not layer_list:
    for lyr_idx in range(src_ds.GetLayerCount()):
        layer_list.append(src_ds.GetLayer(lyr_idx).GetLayerDefn().GetName())

#############################################################################
# Start the VRT file.

vrt = '<OGRVRTDataSource>\n'

#############################################################################
# Metadata
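The snippet is truncated after the Metadata header; as a hedged sketch, the per-layer elements such a script eventually appends look roughly like this (the real ogr2vrt logic emits more attributes per layer):

for name in layer_list:
    vrt += '  <OGRVRTLayer name="%s">\n' % name
    vrt += '    <SrcDataSource>%s</SrcDataSource>\n' % infile
    vrt += '  </OGRVRTLayer>\n'
vrt += '</OGRVRTDataSource>\n'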
Example No. 27
0
def sample_data(time_domain_mask_list, predictor_lookup, sample_rate,
                edge_index, sample_point_vector_path):
    """Sample data stack.

    All input rasters are aligned.

    Args:
        response_path (str): path to response raster
        predictor_lookup (dict): dictionary with keys 'predictor' and
            'time_predictor'. 'time_predictor' are either a tuple of rasters
            or a single multiband raster with indexes that conform to the
            bands in ``response_path``.
        edge_index (int): this is the edge raster in the predictor stack
            that should be used to randomly select samples from.


    """
    raster_info = pygeoprocessing.get_raster_info(
        predictor_lookup['predictor'][0][0])
    inv_gt = gdal.InvGeoTransform(raster_info['geotransform'])

    raster_srs = osr.SpatialReference()
    raster_srs.ImportFromWkt(raster_info['projection_wkt'])
    raster_srs.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)

    gpkg_driver = gdal.GetDriverByName('GPKG')
    sample_point_vector = gpkg_driver.Create(sample_point_vector_path, 0, 0, 0,
                                             gdal.GDT_Unknown)
    sample_point_layer = sample_point_vector.CreateLayer(
        'sample_points', raster_srs, ogr.wkbPoint)
    sample_point_layer.StartTransaction()

    LOGGER.info('building sample data')
    predictor_band_nodata_list = []
    raster_list = []
    # simple lookup to map predictor band/nodata to a list
    for predictor_path, nodata in predictor_lookup['predictor']:
        predictor_raster = gdal.OpenEx(predictor_path, gdal.OF_RASTER)
        raster_list.append(predictor_raster)
        predictor_band = predictor_raster.GetRasterBand(1)

        if nodata is None:
            nodata = predictor_band.GetNoDataValue()
        predictor_band_nodata_list.append((predictor_band, nodata))

    # create a dictionary that maps time index to list of predictor band/nodata
    # values, will be used to create a stack of data with the previous
    # collection and one per timestep on this one
    time_predictor_lookup = collections.defaultdict(list)
    for payload in predictor_lookup['time_predictor']:
        # time predictors could either be a tuple of rasters or a single
        # raster with multiple bands
        if isinstance(payload, tuple):
            time_predictor_path, nodata = payload
            time_predictor_raster = gdal.OpenEx(time_predictor_path,
                                                gdal.OF_RASTER)
            raster_list.append(time_predictor_raster)
            for index in range(time_predictor_raster.RasterCount):
                time_predictor_band = time_predictor_raster.GetRasterBand(
                    index + 1)
                if nodata is None:
                    nodata = time_predictor_band.GetNoDataValue()
                time_predictor_lookup[index].append(
                    (time_predictor_band, nodata))
        elif isinstance(payload, list):
            for index, (time_predictor_path, nodata) in enumerate(payload):
                time_predictor_raster = gdal.OpenEx(time_predictor_path,
                                                    gdal.OF_RASTER)
                raster_list.append(time_predictor_raster)
                time_predictor_band = time_predictor_raster.GetRasterBand(1)
                if nodata is None:
                    nodata = time_predictor_band.GetNoDataValue()
                time_predictor_lookup[index].append(
                    (time_predictor_band, nodata))
        else:
            raise ValueError(f'expected str or tuple but got {payload}')
    mask_band_list = []
    for time_domain_mask_raster_path in time_domain_mask_list:
        mask_raster = gdal.OpenEx(time_domain_mask_raster_path, gdal.OF_RASTER)
        raster_list.append(mask_raster)
        mask_band = mask_raster.GetRasterBand(1)
        mask_band_list.append(mask_band)

    # build up an array of predictor stack
    response_raster = gdal.OpenEx(predictor_lookup['response'], gdal.OF_RASTER)
    raster_list.append(response_raster)

    y_list = []
    x_vector = None
    last_time = time.time()
    total_pixels = predictor_raster.RasterXSize * predictor_raster.RasterYSize
    for offset_dict in pygeoprocessing.iterblocks(
        (predictor_lookup['response'], 1),
            offset_only=True,
            largest_block=2**20):
        if time.time() - last_time > 5.0:
            n_pixels_processed = offset_dict[
                'xoff'] + offset_dict['yoff'] * predictor_raster.RasterXSize
            LOGGER.info(
                f"processed {100*n_pixels_processed/total_pixels:.3f}% so far ({n_pixels_processed}) (x/y {offset_dict['xoff']}/{offset_dict['yoff']}) y_list size {len(y_list)}"
            )
            last_time = time.time()
        predictor_stack = []  # N elements long
        valid_array = numpy.ones(
            (offset_dict['win_ysize'], offset_dict['win_xsize']), dtype=bool)
        # load all the regular predictors
        for predictor_band, predictor_nodata in predictor_band_nodata_list:
            predictor_array = predictor_band.ReadAsArray(**offset_dict)
            if predictor_nodata is not None:
                valid_array &= predictor_array != predictor_nodata
            predictor_stack.append(predictor_array)

        if not numpy.any(valid_array):
            continue

        # load the time based predictors
        for index, time_predictor_band_nodata_list in \
                time_predictor_lookup.items():
            if index > MAX_TIME_INDEX:
                break
            mask_array = mask_band_list[index].ReadAsArray(**offset_dict)
            valid_time_array = valid_array & (mask_array == 1)
            predictor_time_stack = []
            predictor_time_stack.extend(predictor_stack)
            for predictor_index, (predictor_band, predictor_nodata) in \
                    enumerate(time_predictor_band_nodata_list):
                predictor_array = predictor_band.ReadAsArray(**offset_dict)
                if predictor_nodata is not None:
                    valid_time_array &= predictor_array != predictor_nodata
                predictor_time_stack.append(predictor_array)

            # load the time based responses
            response_band = response_raster.GetRasterBand(index + 1)
            response_nodata = response_band.GetNoDataValue()
            response_array = response_band.ReadAsArray(**offset_dict)
            if response_nodata is not None:
                valid_time_array &= response_array != response_nodata

            if not numpy.any(valid_time_array):
                break

            sample_mask = numpy.random.rand(
                numpy.count_nonzero(valid_time_array)) < sample_rate

            X2D, Y2D = numpy.meshgrid(range(valid_time_array.shape[1]),
                                      range(valid_time_array.shape[0]))

            for i, j in zip((X2D[valid_time_array])[sample_mask],
                            (Y2D[valid_time_array])[sample_mask]):

                sample_point = ogr.Feature(sample_point_layer.GetLayerDefn())
                sample_geom = ogr.Geometry(ogr.wkbPoint)
                x, y = gdal.ApplyGeoTransform(inv_gt,
                                              i + 0.5 + offset_dict['xoff'],
                                              j + 0.5 + offset_dict['yoff'])
                sample_geom.AddPoint(x, y)
                sample_point.SetGeometry(sample_geom)
                sample_point_layer.CreateFeature(sample_point)

            # all of predictor_time_stack and response_array are valid;
            # clip and add to the sample set
            local_x_list = []
            # each element in array should correspond with an element in y
            for array in predictor_time_stack:
                local_x_list.append((array[valid_time_array])[sample_mask])
            if x_vector is None:
                x_vector = numpy.array(local_x_list)
            else:
                local_x_vector = numpy.array(local_x_list)
                # THE LAST ELEMENT IS THE FLOW ACCUMULATION THAT I WANT LOGGED
                x_vector = numpy.append(x_vector, local_x_vector, axis=1)
            y_list.extend(list(
                (response_array[valid_time_array])[sample_mask]))

    y_vector = numpy.array(y_list)
    LOGGER.debug(f'got all done {x_vector.shape} {y_vector.shape}')
    sample_point_layer.CommitTransaction()
    return (x_vector.T).astype(numpy.float32), (y_vector.astype(numpy.float32))
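The coordinate-sampling core above, isolated as a hedged sketch: draw a random subset of valid pixel coordinates at a given rate, using the same mask/meshgrid pattern.

import numpy

valid = numpy.array([[True, False], [True, True]])  # toy validity mask
sample_rate = 0.5
mask = numpy.random.rand(numpy.count_nonzero(valid)) < sample_rate
X2D, Y2D = numpy.meshgrid(range(valid.shape[1]), range(valid.shape[0]))
coords = list(zip(X2D[valid][mask], Y2D[valid][mask]))  # (col, row) pairs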
Example No. 28
0
import sys

from osgeo import gdal
from osgeo import ogr
from osgeo import osr

if int(gdal.VersionInfo('VERSION_NUM')) < 2020000:
    print('Requires GDAL >= 2.2(dev)')
    sys.exit(1)

sr4326 = osr.SpatialReference()
sr4326.SetFromUserInput('WGS84')

sr32631 = osr.SpatialReference()
sr32631.ImportFromEPSG(32631)

byte_src_ds = gdal.OpenEx(
    'https://raw.githubusercontent.com/OSGeo/gdal/master/autotest/gcore/data/byte.tif',
    allowed_drivers=['GTIFF', 'HTTP'])
elev_src_ds = gdal.OpenEx(
    'https://raw.githubusercontent.com/OSGeo/gdal/master/autotest/gdrivers/data/n43.dt0',
    allowed_drivers=['DTED', 'HTTP'])

gdal.SetConfigOption('CREATE_METADATA_TABLES', 'NO')

for (out_filename, options) in [
    ('gdal_sample_v1.2_no_extensions.gpkg', {}),
    ('gdal_sample_v1.2_no_extensions_with_gpkg_ogr_contents.gpkg', {
        'gpkg_ogr_contents': True
    }), ('gdal_sample_v1.2_spatial_index_extension.gpkg', {
        'spi': True
    }),
    ('gdal_sample_v1.2_spi_nonlinear_webp_elevation.gpkg', {
Example No. 29
0
def test_wcs_6():
    driver = gdal.GetDriverByName('WCS')
    if driver is None:
        pytest.skip()
    # Generating various URLs from the driver and comparing them to ones
    # that have worked.
    first_call = True
    size = 60
    cache = 'CACHE=wcs_cache'
    global urls
    urls = read_urls()
    (process, port) = webserver.launch(handler=WCSHTTPHandler)
    url = "http://127.0.0.1:" + str(port)
    setup = setupFct()
    servers = []
    for server in setup:
        servers.append(server)
    for server in sorted(servers):
        for i, v in enumerate(setup[server]['Versions']):
            version = str(int(v / 100)) + '.' + str(int(
                v % 100 / 10)) + '.' + str((v % 10))
            if server + '-' + version not in urls:
                print("Error: " + server + '-' + version + " not in urls")
                global wcs_6_ok
                wcs_6_ok = False
                continue
            options = [cache]
            if first_call:
                options.append('CLEAR_CACHE')
                first_call = False
            query = 'server=' + server + '&version=' + version
            ds = gdal.OpenEx(utf8_path="WCS:" + url + "/?" + query,
                             open_options=options)

            coverage = setup[server]['Coverage']
            if isinstance(coverage, list):
                coverage = coverage[i]
            if isinstance(coverage, numbers.Number):
                coverage = str(coverage)
            query += '&coverage=' + coverage

            options = [cache]
            if isinstance(setup[server]['Options'], list):
                oo = setup[server]['Options'][i]
            else:
                oo = setup[server]['Options']
            oo = oo.split()
            for o in oo:
                if o != '-oo':
                    options.append(o)
            options.append('GetCoverageExtra=test=none')
            ds = gdal.OpenEx(utf8_path="WCS:" + url + "/?" + query,
                             open_options=options)
            ds = None
            options = [cache]
            options.append('GetCoverageExtra=test=scaled')
            options.append('INTERLEAVE=PIXEL')
            ds = gdal.OpenEx(utf8_path="WCS:" + url + "/?" + query,
                             open_options=options)
            if not ds:
                print("OpenEx failed: WCS:" + url + "/?" + query)
                wcs_6_ok = False
                break
            projwin = setup[server]['Projwin'].replace('-projwin ', '').split()
            for i, c in enumerate(projwin):
                projwin[i] = int(c)
            options = [cache]
            tmpfile = "tmp/" + server + version + ".tiff"
            gdal.Translate(tmpfile,
                           ds,
                           projWin=projwin,
                           width=size,
                           options=options)
            os.remove(tmpfile)

            if os.path.isfile('data/wcs/' + server + '-' + version +
                              '-non_scaled.tiff'):
                options = [cache]
                options.append('GetCoverageExtra=test=non_scaled')
                options.append('INTERLEAVE=PIXEL')
                ds = gdal.OpenEx(utf8_path="WCS:" + url + "/?" + query,
                                 open_options=options)
                if not ds:
                    print("OpenEx failed: WCS:" + url + "/?" + query)
                    wcs_6_ok = False
                    break
                options = [cache]
                gdal.Translate(tmpfile,
                               ds,
                               srcWin=[0, 0, 2, 2],
                               options=options)
                os.remove(tmpfile)
            else:
                print(server + ' ' + version +
                      ' non_scaled skipped (no response file)')
    webserver.server_stop(process, port)

    assert wcs_6_ok
Example No. 30
def plmosaic_17():

    if gdaltest.plmosaic_drv is None:
        return 'skip'

    gdal.SetConfigOption('PL_URL', '/vsimem/root')
    ds = gdal.OpenEx(
        'PLMosaic:',
        gdal.OF_RASTER,
        open_options=['API_KEY=foo', 'MOSAIC=my_mosaic', 'CACHE_PATH=tmp'])
    gdal.SetConfigOption('PL_URL', None)
    if ds is None:
        gdaltest.post_reason('fail')
        return 'fail'
    if ds.GetMetadata() != {
            'LAST_ACQUIRED': 'last_date',
            'NAME': 'my_mosaic',
            'FIRST_ACQUIRED': 'first_date'
    }:
        gdaltest.post_reason('fail')
        print(ds.GetMetadata())
        return 'fail'
    if ds.GetProjectionRef().find('3857') < 0:
        gdaltest.post_reason('fail')
        print(ds.GetProjectionRef())
        return 'fail'
    if ds.RasterXSize != 8388608:
        gdaltest.post_reason('fail')
        print(ds.RasterXSize)
        return 'fail'
    if ds.RasterYSize != 8388608:
        gdaltest.post_reason('fail')
        print(ds.RasterYSize)
        return 'fail'
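    # 8388608 px at ~4.777314 m/px equals 40075016.68 m, the full
    # EPSG:3857 Web Mercator extent, so the mosaic spans the whole world.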
    got_gt = ds.GetGeoTransform()
    expected_gt = (-20037508.34, 4.7773142671600004, 0.0, 20037508.34, 0.0,
                   -4.7773142671600004)
    for i in range(6):
        if abs(got_gt[i] - expected_gt[i]) > 1e-8:
            gdaltest.post_reason('fail')
            print(ds.GetGeoTransform())
            return 'fail'
    if ds.GetMetadataItem('INTERLEAVE', 'IMAGE_STRUCTURE') != 'PIXEL':
        gdaltest.post_reason('fail')
        print(ds.GetMetadata('IMAGE_STRUCTURE'))
        print(ds.GetMetadataItem('INTERLEAVE', 'IMAGE_STRUCTURE'))
        return 'fail'
    if ds.GetRasterBand(1).GetOverviewCount() != 15:
        gdaltest.post_reason('fail')
        print(ds.GetRasterBand(1).GetOverviewCount())
        return 'fail'
    if ds.GetRasterBand(1).GetOverview(-1) is not None:
        gdaltest.post_reason('fail')
        return 'fail'
    if ds.GetRasterBand(1).GetOverview(
            ds.GetRasterBand(1).GetOverviewCount()) is not None:
        gdaltest.post_reason('fail')
        return 'fail'
    if ds.GetRasterBand(1).GetOverview(0) is None:
        gdaltest.post_reason('fail')
        return 'fail'

    try:
        shutil.rmtree('tmp/plmosaic_cache')
    except OSError:
        pass

    for i in range(12):
        # Read one pixel in each of the 12 top-row quads; the quads do not
        # exist on the server, which must not raise an error.
        ds.GetRasterBand(1).ReadRaster(4096 * i, 0, 1, 1)
        if gdal.GetLastErrorMsg() != '':
            gdaltest.post_reason('fail')
            return 'fail'
    for i in range(11, -1, -1):
        # Same quads again, in reverse order and in a different block,
        # to exercise the block cache.
        ds.GetRasterBand(1).ReadRaster(4096 * i + 256, 0, 1, 1)
        if gdal.GetLastErrorMsg() != '':
            gdaltest.post_reason('fail')
            return 'fail'
    for i in range(12):
        # Same quads once more, in yet another block, to exercise the cache.
        ds.GetRasterBand(1).ReadRaster(4096 * i + 512, 256, 1, 1)
        if gdal.GetLastErrorMsg() != '':
            gdaltest.post_reason('fail')
            return 'fail'

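    # Flush the in-memory block cache so subsequent reads go back to the
    # simulated server or to the on-disk cache.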
    ds.FlushCache()

    # Invalid tile content
    gdal.FileFromMemBuffer('/vsimem/root/my_mosaic_id/quads/0-2047/full',
                           'garbage')
    gdal.PushErrorHandler()
    ds.GetRasterBand(1).ReadRaster(0, 0, 1, 1)
    gdal.PopErrorHandler()

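    # os.stat() raises if the quad was not written to the on-disk cache,
    # i.e. this asserts the cache file exists even after a bad download.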
    os.stat('tmp/plmosaic_cache/my_mosaic/my_mosaic_0-2047.tif')

    ds.FlushCache()
    shutil.rmtree('tmp/plmosaic_cache')

    # GeoTIFF but with wrong dimensions
    gdal.GetDriverByName('GTiff').Create(
        '/vsimem/root/my_mosaic_id/quads/0-2047/full', 1, 1, 1)
    gdal.PushErrorHandler()
    ds.GetRasterBand(1).ReadRaster(0, 0, 1, 1)
    gdal.PopErrorHandler()

    os.stat('tmp/plmosaic_cache/my_mosaic/my_mosaic_0-2047.tif')

    ds.FlushCache()
    shutil.rmtree('tmp/plmosaic_cache')

    # Good GeoTIFF
    tmp_ds = gdal.GetDriverByName('GTiff').Create(
        '/vsimem/root/my_mosaic_id/quads/0-2047/full',
        4096,
        4096,
        4,
        options=['INTERLEAVE=BAND', 'SPARSE_OK=YES'])
    tmp_ds.GetRasterBand(1).Fill(255)
    tmp_ds = None

    val = ds.GetRasterBand(1).ReadRaster(0, 0, 1, 1)
    val = struct.unpack('B', val)[0]
    if val != 255:
        gdaltest.post_reason('fail')
        print(val)
        return 'fail'

    os.stat('tmp/plmosaic_cache/my_mosaic/my_mosaic_0-2047.tif')

    ds.FlushCache()

    # Read again from the file cache.
    # We change the file behind the scenes (without changing its size)
    # to demonstrate that the cached tile is still used.
    tmp_ds = gdal.GetDriverByName('GTiff').Create(
        '/vsimem/root/my_mosaic_id/quads/0-2047/full',
        4096,
        4096,
        4,
        options=['INTERLEAVE=BAND', 'SPARSE_OK=YES'])
    tmp_ds.GetRasterBand(1).Fill(1)
    tmp_ds = None
    val = ds.GetRasterBand(1).ReadRaster(0, 0, 1, 1)
    val = struct.unpack('B', val)[0]
    if val != 255:
        gdaltest.post_reason('fail')
        print(val)
        return 'fail'

    ds = None

    # Read again from the file cache, but with TRUST_CACHE=YES,
    # deleting the full GeoTIFF on the server first.
    gdal.Unlink('/vsimem/root/my_mosaic_id/quads/0-2047/full')
    gdal.SetConfigOption('PL_URL', '/vsimem/root')
    ds = gdal.OpenEx(
        'PLMosaic:API_KEY=foo,MOSAIC=my_mosaic,CACHE_PATH=tmp,TRUST_CACHE=YES',
        gdal.OF_RASTER)
    gdal.SetConfigOption('PL_URL', None)
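    # With TRUST_CACHE=YES the driver serves quads straight from the local
    # cache without revalidating them against the server, so the read below
    # succeeds even though the remote file was just deleted.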

    val = ds.GetRasterBand(1).ReadRaster(0, 0, 1, 1)
    val = struct.unpack('B', val)[0]
    if val != 255:
        gdaltest.post_reason('fail')
        print(val)
        return 'fail'
    ds = None

    # Read again from the file cache, but this time the metatile changes
    # on the server in between.
    gdal.SetConfigOption('PL_URL', '/vsimem/root')
    ds = gdal.OpenEx(
        'PLMosaic:',
        gdal.OF_RASTER,
        open_options=['API_KEY=foo', 'MOSAIC=my_mosaic', 'CACHE_PATH=tmp'])
    gdal.SetConfigOption('PL_URL', None)

    tmp_ds = gdal.GetDriverByName('GTiff').Create(
        '/vsimem/root/my_mosaic_id/quads/0-2047/full',
        4096,
        4096,
        4,
        options=['INTERLEAVE=BAND', 'SPARSE_OK=YES'])
    tmp_ds.SetMetadataItem('foo', 'bar')
    tmp_ds.GetRasterBand(1).Fill(254)
    tmp_ds = None
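    # The extra metadata item changes the remote file's size, so the driver
    # notices the metatile changed and refreshes its cached copy.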

    val = ds.ReadRaster(0, 0, 1, 1)
    val = struct.unpack('B' * 4, val)
    if val != (254, 0, 0, 0):
        gdaltest.post_reason('fail')
        print(val)
        return 'fail'

    return 'success'