def test_ogr_index_creating_index_in_separate_steps_works():
    """Indices dropped and then re-created across separate dataset openings
    must both end up recorded in the .idm file."""
    with create_join_t_test_file(create_index=True):
        s_ds = ogr.OpenShared('join_t.dbf', update=1)
        s_ds.ExecuteSQL('DROP INDEX ON join_t USING value')
        s_ds.ExecuteSQL('DROP INDEX ON join_t USING skey')
        s_ds.Release()

        # Re-create the first index in a fresh session.
        s_ds = ogr.OpenShared('join_t.dbf', update=1)
        s_ds.ExecuteSQL('CREATE INDEX ON join_t USING value')
        s_ds.Release()

        # Close the dataset and re-open.  At this point the .ind was opened
        # read-only; it will now be re-opened in read-write mode.
        s_ds = ogr.OpenShared('join_t.dbf', update=1)
        s_ds.ExecuteSQL('CREATE INDEX ON join_t USING skey')
        s_ds.Release()

        with open('join_t.idm', 'rt') as f:
            xml = f.read()
        assert 'VALUE' in xml, 'VALUE column is not indexed (2)'
        assert 'SKEY' in xml, 'SKEY column is not indexed (2)'
def test_ogr_index_9():
    """Dropping both indices removes the index files; re-creating them one
    at a time re-populates join_t.idm incrementally."""
    gdaltest.s_ds.ExecuteSQL('DROP INDEX ON join_t USING value')
    gdaltest.s_ds.ExecuteSQL('DROP INDEX ON join_t USING skey')

    gdaltest.s_lyr.SetAttributeFilter('SKEY = 5')
    expect = ['Value 5']
    tr = ogrtest.check_features_against_list(gdaltest.s_lyr, 'VALUE', expect)
    assert tr
    gdaltest.s_ds.Release()

    # After dataset closing, check that the index files do not exist after
    # dropping the index.
    # FIX: pytest.raises(..., message=...) was removed in pytest 5.0; an
    # explicit existence assertion says the same thing without os.stat.
    for filename in ['join_t.idm', 'join_t.ind']:
        assert not os.path.exists(filename), "%s should not exist" % filename

    # Re-create an index
    gdaltest.s_ds = ogr.OpenShared('join_t.dbf', update=1)
    gdaltest.s_ds.ExecuteSQL('CREATE INDEX ON join_t USING value')
    gdaltest.s_ds.Release()

    for filename in ['join_t.idm', 'join_t.ind']:
        assert os.path.exists(filename), "%s should exist" % filename

    # FIX: use context managers instead of bare open()/close() pairs.
    with open('join_t.idm', 'rt') as f:
        xml = f.read()
    assert xml.find('VALUE') != -1, 'VALUE column is not indexed (1)'

    # Close the dataset and re-open.
    gdaltest.s_ds = ogr.OpenShared('join_t.dbf', update=1)
    # At this point the .ind was opened in read-only. Now it
    # will be re-opened in read-write mode.
    gdaltest.s_ds.ExecuteSQL('CREATE INDEX ON join_t USING skey')
    gdaltest.s_ds.Release()

    with open('join_t.idm', 'rt') as f:
        xml = f.read()
    assert xml.find('VALUE') != -1, 'VALUE column is not indexed (2)'
    assert xml.find('SKEY') != -1, 'SKEY column is not indexed (2)'
def test_ogr_refcount_1():
    """Opening two distinct datasources with OpenShared gives each a
    reference count of 1."""
    gdaltest.ds_1 = ogr.OpenShared('data/idlink.dbf')
    gdaltest.ds_2 = ogr.OpenShared('data/poly.shp')

    # FIX: pytest ignores return values from test functions, so the legacy
    # gdaltest post_reason()/return 'failed' idiom could never fail the
    # test; assert instead.
    assert gdaltest.ds_1.GetRefCount() == 1 and gdaltest.ds_2.GetRefCount() == 1, \
        'Reference count not 1 on one of datasources.'
def queryByPoint(fileshp, x=0, y=0, point_srs=None, mode="single"):
    """queryByPoint - return the features of *fileshp* intersecting (x, y).

    point_srs - optional EPSG code of the point coordinates; the point is
                reprojected when it differs from the layer SRS.
    mode      - "single" stops at the first hit; anything else collects all.
    """
    matches = []
    point = ogr.Geometry(ogr.wkbPoint)
    point.AddPoint(x, y)
    dataset = ogr.OpenShared(fileshp)
    if dataset:
        layer = dataset.GetLayer(0)
        layer_srs = layer.GetSpatialRef()
        if point_srs:
            psrs = osr.SpatialReference()
            psrs.ImportFromEPSG(int(point_srs))
            if not psrs.IsSame(layer_srs):
                point.Transform(osr.CoordinateTransformation(psrs, layer_srs))
        stop_at_first = (mode.lower() == "single")
        for feature in layer:
            if point.Intersects(feature.GetGeometryRef()):
                matches.append(feature)
                if stop_at_first:
                    break
        dataset = None
    return matches
def ogr_index_2():
    """Create the join_t DBF test table (SKEY/VALUE, 20 rows) and leave it
    opened shared in update mode in gdaltest.s_ds / gdaltest.s_lyr."""
    drv = ogr.GetDriverByName('ESRI Shapefile')
    gdaltest.s_ds = drv.CreateDataSource('join_t.dbf')
    gdaltest.s_lyr = gdaltest.s_ds.CreateLayer('join_t', geom_type=ogr.wkbNone)
    ogrtest.quick_create_layer_def(
        gdaltest.s_lyr,
        [('SKEY', ogr.OFTInteger), ('VALUE', ogr.OFTString, 16)])

    for i in range(20):
        ogrtest.quick_create_feature(gdaltest.s_lyr, [i, 'Value ' + str(i)], None)

    if gdaltest.s_lyr.GetFeatureCount() != 20:
        gdaltest.post_reason('FeatureCount wrong')
        return 'fail'

    # Release the writer handles, then re-open shared for the later tests.
    gdaltest.s_ds.Release()
    gdaltest.s_lyr = None
    gdaltest.s_ds = None
    gdaltest.s_ds = ogr.OpenShared('join_t.dbf', update=1)
    gdaltest.s_lyr = gdaltest.s_ds.GetLayerByName('join_t')
    return 'success'
def GetSpatialRef(filename):
    """GetSpatialRef - resolve *filename* into an osr.SpatialReference.

    Accepts an existing SpatialReference, an EPSG integer, an "epsg:NNNN"
    string, a .shp path (layer SRS) or a .tif path (projection WKT).
    Returns an empty SpatialReference when nothing can be resolved.
    """
    # FIX: initialise srs up front.  Previously, when a .shp/.tif path
    # matched but the dataset failed to open, `return srs` raised
    # UnboundLocalError.
    srs = osr.SpatialReference()
    if isinstance(filename, osr.SpatialReference):
        srs = filename
    elif isinstance(filename, int):
        srs.ImportFromEPSG(filename)
    elif isinstance(filename, str) and filename.lower().startswith("epsg:"):
        srs.ImportFromEPSG(int(filename.split(":")[1]))
    elif isinstance(filename, str) and os.path.isfile(
            filename) and filename.lower().endswith(".shp"):
        ds = ogr.OpenShared(filename)
        if ds:
            srs = ds.GetLayer().GetSpatialRef()
            ds = None
    elif isinstance(filename, str) and os.path.isfile(
            filename) and filename.lower().endswith(".tif"):
        ds = gdal.Open(filename, gdalconst.GA_ReadOnly)
        if ds:
            srs.ImportFromWkt(ds.GetProjection())
            ds = None
    return srs
def test_ogr_index_recreating_index_causes_index_to_be_populated():
    """An index created after a drop must record its column in .idm."""
    with create_join_t_test_file(create_index=True):
        s_ds = ogr.OpenShared('join_t.dbf', update=1)
        s_ds.ExecuteSQL('DROP INDEX ON join_t USING value')
        s_ds.ExecuteSQL('DROP INDEX ON join_t USING skey')
        s_ds.Release()

        # Re-create an index in a fresh session.
        s_ds = ogr.OpenShared('join_t.dbf', update=1)
        s_ds.ExecuteSQL('CREATE INDEX ON join_t USING value')
        s_ds.Release()

        with open('join_t.idm', 'rt') as f:
            xml = f.read()
        assert 'VALUE' in xml, 'VALUE column is not indexed (1)'
def queryByShape(fileshp, feature, feature_epsg=None, mode="single"):
    """queryByShape - return the features of *fileshp* intersecting *feature*.

    feature      - an ogr.Feature or ogr.Geometry used as the query shape.
    feature_epsg - optional EPSG code of the query shape; reprojected to
                   the layer SRS when they differ.
    mode         - "single" stops at the first hit.
    """
    res = []
    if not feature:
        return []
    dataset = ogr.OpenShared(fileshp)
    if dataset:
        layer = dataset.GetLayer(0)
        srs = layer.GetSpatialRef()
        qshape = feature.GetGeometryRef() if isinstance(feature, ogr.Feature) else feature
        if feature_epsg:
            qsrs = osr.SpatialReference()
            qsrs.ImportFromEPSG(int(feature_epsg))
            if not qsrs.IsSame(srs):
                # transform the query feature in layer srs
                transform = osr.CoordinateTransformation(qsrs, srs)
                qshape.Transform(transform)

        fileidx = forceext(fileshp, ".idx")
        if os.path.isfile(fileidx):
            # 1) Rtree index approach.
            # FIX: `index` was never defined here (NameError); open the
            # on-disk rtree the same way CreateSpatialIndex names it
            # (basename = path with extension stripped).
            spatial_index = rtree.index.Index(forceext(fileidx, ""))
            minx, miny, maxx, maxy = qshape.GetEnvelope()
            for fid in list(spatial_index.intersection((minx, maxx, miny, maxy))):
                hit = layer.GetFeature(fid)
                if hit:
                    res.append(hit)
                    if mode.lower() == "single":
                        break
        else:
            # 2) Spatial filter approach.
            # FIX: removed a stray `geom = feature.GetGeometryRef()` that
            # referenced an undefined loop variable.
            layer.SetSpatialFilter(qshape)
            layer.ResetReading()
            for hit in layer:
                res.append(hit)
                if mode.lower() == "single":
                    break
    dataset = None
    return res
def GetFeatureByFid(fileshp, layername=0, fid=0):
    """GetFeatureByFid - return the feature with id *fid*, or None."""
    feature = None
    dataset = ogr.OpenShared(fileshp)
    if dataset:
        feature = dataset.GetLayer(layername).GetFeature(fid)
        dataset = None
    return feature
def test_ogr_index_unimplemented_range_query_works():
    """A range predicate (not served by the index) must still return the
    correct rows."""
    with create_join_t_test_file(create_index=True):
        s_ds = ogr.OpenShared('join_t.dbf', update=1)
        s_lyr = s_ds.GetLayerByName('join_t')
        s_lyr.SetAttributeFilter('SKEY < 3')
        assert ogrtest.check_features_against_list(s_lyr, 'SKEY', [0, 1, 2])
def test_ogr_index_indexed_single_string_works():
    """Equality lookup on the indexed VALUE string column."""
    with create_join_t_test_file(create_index=True):
        s_ds = ogr.OpenShared('join_t.dbf', update=1)
        s_lyr = s_ds.GetLayerByName('join_t')
        s_lyr.SetAttributeFilter("VALUE='Value 5'")
        assert ogrtest.check_features_against_list(s_lyr, 'SKEY', [5])
def test_ogr_index_recreating_index_causes_index_files_to_be_created():
    """Re-creating an index after a drop must write join_t.idm/.ind back."""
    with create_join_t_test_file(create_index=True):
        s_ds = ogr.OpenShared('join_t.dbf', update=1)
        s_ds.ExecuteSQL('DROP INDEX ON join_t USING value')
        s_ds.ExecuteSQL('DROP INDEX ON join_t USING skey')
        s_ds.Release()

        # Re-create an index
        s_ds = ogr.OpenShared('join_t.dbf', update=1)
        s_ds.ExecuteSQL('CREATE INDEX ON join_t USING value')
        s_ds.Release()

        # FIX: FileNotFoundError is a subclass of OSError, so catching both
        # was redundant; a plain existence assertion is clearer still.
        for filename in ['join_t.idm', 'join_t.ind']:
            assert os.path.exists(filename), "%s should exist" % filename
def tryopends(filepath):
    """Try to open *filepath* with both GDAL and OGR.

    Returns (gdal_dataset, ogr_datasource) — either element may be None
    when the corresponding driver cannot read the file — or False when an
    open call raised.
    """
    # FIX: neither gdal.GDALError nor ogr.OGRError exists in the osgeo
    # bindings, so the original except clauses themselves raised
    # AttributeError.  OpenShared normally signals failure by returning
    # None; with gdal.UseExceptions() active it raises RuntimeError, so
    # catch that instead.
    dsogr, dsgdal = False, False
    try:
        dsgdal = gdal.OpenShared(filepath)
    except RuntimeError:
        return False
    try:
        dsogr = ogr.OpenShared(filepath)
    except RuntimeError:
        return False
    return dsgdal, dsogr
def test_ogr_index_6():
    """Re-open the shared dataset and query the indexed VALUE column."""
    gdaltest.s_ds.Release()
    gdaltest.s_ds = ogr.OpenShared('join_t.dbf', update=1)
    gdaltest.s_lyr = gdaltest.s_ds.GetLayerByName('join_t')

    gdaltest.s_lyr.SetAttributeFilter("VALUE='Value 5'")
    assert ogrtest.check_features_against_list(gdaltest.s_lyr, 'SKEY', [5])
def GetFeatures(fileshp):
    """GetFeatures - return every feature of the first layer of *fileshp*."""
    features = []
    dataset = ogr.OpenShared(fileshp)
    if dataset:
        # An OGR layer is iterable, so materialising it collects all features.
        features = list(dataset.GetLayer(0))
        dataset = None
    return features
def GetFieldNames(fileshp):
    """GetFieldNames - return the attribute-field names of *fileshp*'s
    first layer."""
    res = []
    ds = ogr.OpenShared(fileshp)
    if ds:
        defn = ds.GetLayer().GetLayerDefn()
        res = [defn.GetFieldDefn(j).GetName() for j in range(defn.GetFieldCount())]
        # FIX: release the shared handle, consistent with the other helpers
        # in this module (the original leaked the open datasource).
        ds = None
    return res
def test_ogr_index_drop_index_removes_files():
    """DROP INDEX on both columns must delete join_t.idm and join_t.ind
    once the dataset is closed."""
    with create_join_t_test_file(create_index=True):
        s_ds = ogr.OpenShared('join_t.dbf', update=1)
        for column in ('value', 'skey'):
            s_ds.ExecuteSQL('DROP INDEX ON join_t USING ' + column)
        s_ds.Release()

        # After dataset closing, check that the index files do not exist
        # after dropping the index.
        for filename in ('join_t.idm', 'join_t.ind'):
            assert not os.path.exists(filename)
def test_ogr_index_attribute_filter_works_after_drop_index():
    """Attribute filters must keep working once the indices are dropped."""
    with create_join_t_test_file(create_index=True):
        s_ds = ogr.OpenShared('join_t.dbf', update=1)
        s_lyr = s_ds.GetLayerByName('join_t')
        s_ds.ExecuteSQL('DROP INDEX ON join_t USING value')
        s_ds.ExecuteSQL('DROP INDEX ON join_t USING skey')

        s_lyr.SetAttributeFilter('SKEY = 5')
        assert ogrtest.check_features_against_list(s_lyr, 'VALUE', ['Value 5'])
def ogr_index_6():
    """Attribute query on the indexed VALUE string column."""
    gdaltest.s_ds.Release()
    gdaltest.s_ds = ogr.OpenShared('join_t.dbf', update=1)
    gdaltest.s_lyr = gdaltest.s_ds.GetLayerByName('join_t')

    # FIX: in OGR SQL double quotes delimit identifiers; a string literal
    # needs single quotes ('VALUE="Value 5"' compared the column against
    # an identifier, not the string).  Same filter as test_ogr_index_6.
    gdaltest.s_lyr.SetAttributeFilter("VALUE='Value 5'")

    expect = [5]
    tr = ogrtest.check_features_against_list(gdaltest.s_lyr, 'SKEY', expect)
    if tr:
        return 'success'
    return 'fail'
def queryByAttributes(fileshp, fieldname, fieldvalues, mode="multiple"):
    """queryByAttributes - return features whose *fieldname* value is in
    *fieldvalues* (scalar or list).

    mode == "single" stops at the first match.
    """
    res = []
    dataset = ogr.OpenShared(fileshp)
    if dataset:
        layer = dataset.GetLayer(0)
        # FIX: hoist loop-invariant work — listify(fieldvalues) and the
        # mode comparison were recomputed for every feature.
        values = listify(fieldvalues)
        single = mode.lower() == "single"
        for feature in layer:
            if feature.GetFieldIndex(fieldname) >= 0 and \
                    feature.GetField(fieldname) in values:
                res.append(feature)
                if single:
                    break
        dataset = None
    return res
def test_ogr_index_can_join_without_index():
    """LEFT JOIN across datasources must work even with no index present."""
    expect = ['Value 5', 'Value 10', 'Value 9', 'Value 4', 'Value 3', 'Value 1']
    with create_index_p_test_file(), create_join_t_test_file():
        p_ds = ogr.OpenShared('index_p.mif', update=0)
        sql_lyr = p_ds.ExecuteSQL(
            'SELECT * FROM index_p p '
            + 'LEFT JOIN "join_t.dbf".join_t j ON p.PKEY = j.SKEY ')
        tr = ogrtest.check_features_against_list(sql_lyr, 'VALUE', expect)
        # Release the result set before asserting so the dataset is clean
        # even on failure reporting.
        p_ds.ReleaseResultSet(sql_lyr)
        assert tr
def GetAttributeTableByFid(fileshp, layername=0, fid=0):
    """GetAttributeTableByFid - return {fieldname: value} plus the WKT
    geometry (under key "geometry") of the feature *fid*.

    Returns an empty dict when the file cannot be opened or the feature
    does not exist.
    """
    res = {}
    dataset = ogr.OpenShared(fileshp)
    if dataset:
        layer = dataset.GetLayer(layername)
        feature = layer.GetFeature(fid)
        # FIX: guard against a missing fid (GetFeature returns None) and
        # against geometry-less layers such as plain DBF tables — both
        # previously raised AttributeError.
        if feature:
            geom = feature.GetGeometryRef()
            res["geometry"] = geom.ExportToWkt() if geom else None
            layerDefinition = layer.GetLayerDefn()
            for j in range(layerDefinition.GetFieldCount()):
                fieldname = layerDefinition.GetFieldDefn(j).GetName()
                res[fieldname] = feature.GetField(j)
        dataset = None
    return res
def RasterizeAs(file_shp, px, py=0, epsg=None, dtype=np.float32, nodata=0,
                file_tif="", burn_fieldname=""):
    """RasterizeAs - rasterize *file_shp* onto a new GTiff.

    px, py         - pixel size; py defaults to px.
    epsg           - optional SRS override for the output.
    dtype          - numpy dtype mapped onto a GDAL data type.
    nodata         - nodata value written to the band.
    burn_fieldname - burn this attribute (upper-cased) instead of 1.
    Returns the output path, or None on failure.
    """
    GDT = {
        'uint8': gdal.GDT_Byte,
        'uint16': gdal.GDT_UInt16,
        'uint32': gdal.GDT_UInt32,
        'int16': gdal.GDT_Int16,
        'int32': gdal.GDT_Int32,
        'float32': gdal.GDT_Float32,
        'float64': gdal.GDT_Float64
    }
    dtype = str(np.dtype(dtype)).lower()
    fmt = GDT[dtype] if dtype in GDT else gdal.GDT_Float64
    file_tif = file_tif if file_tif else forceext(file_shp, "tif")

    vector = ogr.OpenShared(file_shp)
    if px and vector:
        srs = GetSpatialRef(epsg) if epsg else GetSpatialRef(file_shp)
        minx, miny, maxx, maxy = GetExtent(file_shp)
        py = py if py else px
        # FIX: ceil must apply to the extent/pixel-size quotient —
        # math.ceil(maxy-miny)/py rounded the extent first and then the
        # int() truncated the quotient, losing up to a full row/column.
        m = abs(int(math.ceil((maxy - miny) / py)))
        n = abs(int(math.ceil((maxx - minx) / px)))

        # Open the data source and read in the extent
        layer = vector.GetLayer()

        # Create the destination data source
        CO = ["BIGTIFF=YES", "TILED=YES", "BLOCKXSIZE=256", "BLOCKYSIZE=256",
              'COMPRESS=LZW']
        target_ds = gdal.GetDriverByName('GTiff').Create(file_tif, n, m, 1, fmt, CO)
        gt = (minx, px, 0, maxy, 0, -abs(py))
        target_ds.SetGeoTransform(gt)
        target_ds.SetProjection(srs.ExportToWkt())
        band = target_ds.GetRasterBand(1)
        band.SetNoDataValue(nodata)

        # Rasterize: burn an attribute when requested, else the constant 1.
        if burn_fieldname:
            gdal.RasterizeLayer(target_ds, [1], layer,
                                options=["ATTRIBUTE=%s" % (burn_fieldname.upper())])
        else:
            gdal.RasterizeLayer(target_ds, [1], layer, burn_values=[1])

        # Close everything so the GTiff is flushed to disk.
        vector, target_ds = None, None
        return file_tif if os.path.isfile(file_tif) else None
    return None
def ogr_index_1():
    """Create the index_p MIF test file (PKEY column, 6 rows) and leave it
    opened shared read-only in gdaltest.p_ds / gdaltest.p_lyr."""
    from osgeo import gdal

    # Delete leftovers from a previous run; quiet GDAL errors while doing so.
    gdal.PushErrorHandler('CPLQuietErrorHandler')
    try:
        ogr.GetDriverByName('MapInfo File').DeleteDataSource('index_p.mif')
    except Exception:  # FIX: bare except also swallowed KeyboardInterrupt/SystemExit
        pass
    try:
        ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('join_t.dbf')
    except Exception:
        pass
    gdal.PopErrorHandler()

    drv = ogr.GetDriverByName('MapInfo File')
    gdaltest.p_ds = drv.CreateDataSource('index_p.mif')
    gdaltest.p_lyr = gdaltest.p_ds.CreateLayer('index_p')

    ogrtest.quick_create_layer_def(gdaltest.p_lyr, [('PKEY', ogr.OFTInteger)])
    for pkey in (5, 10, 9, 4, 3, 1):
        ogrtest.quick_create_feature(gdaltest.p_lyr, [pkey], None)

    # It turns out mapinfo format doesn't allow GetFeatureCount() calls while
    # writing ... it just blows an assert!

    # Close and reopen, since it seems the .mif driver does not allow reading
    # from a newly created (updatable) file.
    gdaltest.p_ds = None
    gdaltest.p_ds = ogr.OpenShared('index_p.mif', update=0)
    gdaltest.p_lyr = gdaltest.p_ds.GetLayerByName('index_p')
    return 'success'
def test_ogr_refcount_2():
    """Re-opening a shared datasource returns the same handle with its
    reference count bumped to 2."""
    ds_3 = ogr.OpenShared('data/idlink.dbf')

    # This pointer comparison only works with the old bindings, which
    # exposed ._o; the new bindings raise AttributeError, which we skip.
    try:
        assert ds_3._o == gdaltest.ds_1._o, 'We did not get the expected pointer.'
    except AttributeError:
        pass

    # FIX: pytest ignores return values, so the legacy post_reason()/
    # return 'failed' idiom could never fail the test; assert instead.
    assert ds_3.GetRefCount() == 2, 'Refcount not 2 after reopened.'

    gdaltest.ds_3 = ds_3
def open_vector(path, with_geopandas=False, read_only=True):
    """Open a vector dataset using OGR or GeoPandas.

    Parameters
    ----------
    path : str
        Path to vector file.
    with_geopandas : bool
        Set to True to open with geopandas, else use OGR.
    read_only : bool
        If opening with OGR, set to False to open in "update" mode.

    Returns
    -------
    GeoDataFrame if ``with_geopandas`` else OGR datasource.
    """
    if with_geopandas:
        return gpd.read_file(path)
    # FIX (idiom): `not read_only` replaces the roundabout
    # `False if read_only else True`.
    return ogr.OpenShared(path, update=not read_only)
def GetFeatureBy(fileshp, layername=0, attrname="ogr_id", attrvalue=0):
    """GetFeatureByAttr - get the first feature with attrname=attrvalue"""
    dataset = ogr.OpenShared(fileshp)
    if dataset:
        layer = dataset.GetLayer(layername)
        defn = layer.GetLayerDefn()
        fieldnames = [defn.GetFieldDefn(j).GetName().lower()
                      for j in range(defn.GetFieldCount())]
        if attrname.lower() in fieldnames:
            for feature in layer:
                if feature.GetField(attrname) == attrvalue:
                    dataset = None
                    # patch geometry that sometime is invalid
                    # create a buffer of 0 meters
                    buff0m = feature.GetGeometryRef().Buffer(0)
                    feature.SetGeometry(buff0m)
                    return feature
        dataset = None
    return None
def CreateSpatialIndex(fileshp):
    """CreateSpatialIndex

    Build (or open, when the .idx files already exist) an on-disk rtree
    spatial index for *fileshp*.  The index basename is the .idx path with
    its extension stripped (forceext(fileidx, "")), so rtree writes its
    file pair next to the shapefile.  Returns the rtree index, or None
    when the shapefile cannot be opened.
    """
    fileidx = forceext(fileshp,"idx")
    dataset = ogr.OpenShared(fileshp)
    if dataset:
        indexname = forceext(fileidx,"")
        if not os.path.isfile(fileidx):
            # No index on disk yet: build one from every feature envelope.
            index = rtree.index.Index(indexname)
            layer = dataset.GetLayer(0)
            layer.ResetReading()
            for feature in layer:
                # Skip geometry-less features (e.g. attribute-only rows).
                if feature.GetGeometryRef():
                    minx,miny,maxx,maxy = feature.GetGeometryRef().GetEnvelope()
                    # NOTE(review): this inserts (minx, maxx, miny, maxy);
                    # rtree's default interleaved order is
                    # (minx, miny, maxx, maxy).  The query side in
                    # queryByShape uses the same order, so reader and
                    # writer agree — confirm against the rtree docs that
                    # this ordering is intended.
                    index.insert(feature.GetFID(), (minx,maxx,miny,maxy))
        else:
            # Index files already exist: just open them.
            try:
                index = rtree.index.Index(indexname)
            except Exception as ex:
                print("CreateSpatialIndex:",ex)
                index=None
        # NOTE(review): the dataset handle is not released and the freshly
        # built index is returned without being closed — rtree flushes to
        # disk on close, so callers presumably must close it; verify.
        return index
    return None
def RasterizeLike(file_shp, file_dem, file_tif="", burn_fieldname=""):
    """RasterizeLike - rasterize *file_shp* on the grid of *file_dem*.

    The output GTiff inherits size, geotransform, projection, data type
    and nodata from the DEM.  When *burn_fieldname* is set its
    (upper-cased) values are burned, otherwise the constant 1.
    """
    file_tif = file_tif if file_tif else forceext(file_shp, "tif")
    dataset = gdal.Open(file_dem, gdalconst.GA_ReadOnly)
    vector = ogr.OpenShared(file_shp)
    if dataset and vector:
        band = dataset.GetRasterBand(1)
        m, n = dataset.RasterYSize, dataset.RasterXSize
        gt, prj = dataset.GetGeoTransform(), dataset.GetProjection()
        nodata = band.GetNoDataValue()
        # (removed unused locals bandtype, px, py)

        # Open the data source and read in the extent
        layer = vector.GetLayer()

        # Create the destination data source
        CO = ["BIGTIFF=YES", "TILED=YES", "BLOCKXSIZE=256", "BLOCKYSIZE=256",
              'COMPRESS=LZW']
        target_ds = gdal.GetDriverByName('GTiff').Create(file_tif, n, m, 1,
                                                         band.DataType, CO)
        if gt is not None:
            target_ds.SetGeoTransform(gt)
        if prj is not None:
            target_ds.SetProjection(prj)
        out_band = target_ds.GetRasterBand(1)
        # FIX: GetNoDataValue() returns None when the DEM declares no
        # nodata; SetNoDataValue(None) raises a TypeError, so guard it.
        if nodata is not None:
            out_band.SetNoDataValue(nodata)

        # Rasterize: burn an attribute when requested, else the constant 1.
        if burn_fieldname:
            gdal.RasterizeLayer(target_ds, [1], layer,
                                options=["ATTRIBUTE=%s" % (burn_fieldname.upper())])
        else:
            gdal.RasterizeLayer(target_ds, [1], layer, burn_values=[1])

        # Close everything so the GTiff is flushed to disk.
        dataset, vector, target_ds = None, None, None
def get_ogr_datasource(path, use_streaming=False):
    """Open *path* (resolved via get_path) as a read-only shared OGR
    datasource; raise PyspatialIOError when it cannot be read."""
    resolved = get_path(path, use_streaming=use_streaming)
    datasource = ogr.OpenShared(resolved, update=False)
    if datasource is None:
        raise PyspatialIOError("Unable to read path: %s" % resolved)
    return datasource