def vsis3_6():
    """Test /vsis3/ multipart (chunked) upload against the local fake S3 server.

    Exercises three scenarios with a write of 1 MB + 1 bytes (one byte over the
    forced 1 MB chunk size, so the multipart code path is taken):
      1. nominal upload: write succeeds and close emits no error;
      2. failures in the 'initiate multipart upload' step (403, empty/invalid
         XML response, missing uploadId): the write must fail (ret == 0) and
         the close must stay silent;
      3. failures in the 'upload part' step (403, missing ETag): same
         expectations as 2.

    Returns 'success', 'fail' or 'skip' per the classic gdaltest convention.
    """
    if gdaltest.webserver_port == 0:
        return 'skip'

    # Force a 1 MB chunk size so a (1 MB + 1) write triggers multipart upload.
    gdal.SetConfigOption('VSIS3_CHUNK_SIZE', '1')  # 1 MB
    f = gdal.VSIFOpenL('/vsis3/s3_fake_bucket4/large_file.bin', 'wb')
    gdal.SetConfigOption('VSIS3_CHUNK_SIZE', None)
    if f is None:
        gdaltest.post_reason('fail')
        return 'fail'
    size = 1024 * 1024 + 1
    # 'a' * size builds the same payload as the former
    # ''.join('a' for i in range(size)) but at C speed, without a
    # Python-level generator loop.
    ret = gdal.VSIFWriteL('a' * size, 1, size, f)
    if ret != size:
        gdaltest.post_reason('fail')
        return 'fail'
    gdal.ErrorReset()
    gdal.VSIFCloseL(f)
    if gdal.GetLastErrorMsg() != '':
        gdaltest.post_reason('fail')
        return 'fail'

    # Errors in the 'initiate multipart upload' request.
    for filename in [
            '/vsis3/s3_fake_bucket4/large_file_initiate_403_error.bin',
            '/vsis3/s3_fake_bucket4/large_file_initiate_empty_result.bin',
            '/vsis3/s3_fake_bucket4/large_file_initiate_invalid_xml_result.bin',
            '/vsis3/s3_fake_bucket4/large_file_initiate_no_uploadId.bin'
    ]:
        gdal.SetConfigOption('VSIS3_CHUNK_SIZE', '1')  # 1 MB
        f = gdal.VSIFOpenL(filename, 'wb')
        gdal.SetConfigOption('VSIS3_CHUNK_SIZE', None)
        if f is None:
            gdaltest.post_reason('fail')
            return 'fail'
        size = 1024 * 1024 + 1
        with gdaltest.error_handler():
            ret = gdal.VSIFWriteL('a' * size, 1, size, f)
        # The write must fail since the upload could not even be initiated.
        if ret != 0:
            gdaltest.post_reason('fail')
            print(ret)
            return 'fail'
        gdal.ErrorReset()
        gdal.VSIFCloseL(f)
        if gdal.GetLastErrorMsg() != '':
            gdaltest.post_reason('fail')
            return 'fail'

    # Errors in the 'upload part' request.
    for filename in [
            '/vsis3/s3_fake_bucket4/large_file_upload_part_403_error.bin',
            '/vsis3/s3_fake_bucket4/large_file_upload_part_no_etag.bin'
    ]:
        gdal.SetConfigOption('VSIS3_CHUNK_SIZE', '1')  # 1 MB
        f = gdal.VSIFOpenL(filename, 'wb')
        gdal.SetConfigOption('VSIS3_CHUNK_SIZE', None)
        if f is None:
            gdaltest.post_reason('fail')
            return 'fail'
        size = 1024 * 1024 + 1
        with gdaltest.error_handler():
            ret = gdal.VSIFWriteL('a' * size, 1, size, f)
        if ret != 0:
            gdaltest.post_reason('fail')
            print(ret)
            return 'fail'
        gdal.ErrorReset()
        gdal.VSIFCloseL(f)
        if gdal.GetLastErrorMsg() != '':
            gdaltest.post_reason('fail')
            return 'fail'

    return 'success'
def ogr_sql_28():
    """Verify that a large set of malformed OGR SQL statements all fail.

    Builds two in-memory layers ('my_layer' with a named geometry field plus a
    string and an integer field; 'my_layer2' with two string fields), then runs
    every query in ``queries`` and requires ExecuteSQL() to both return None
    and raise a CPL error.

    Fix over the previous revision: a missing comma between
    "...strfield = 'a" and "...strfield = 'a' ORDER " made Python concatenate
    the two adjacent string literals into a single query, so the two intended
    test cases were never exercised individually. Both are invalid SQL, so the
    expected outcome (error) is unchanged.
    """
    ds = ogr.GetDriverByName("Memory").CreateDataSource("my_ds")
    lyr = ds.CreateLayer("my_layer")
    lyr.GetLayerDefn().GetGeomFieldDefn(0).SetName(
        'geom')  # a bit border line but OK for Memory driver...
    field_defn = ogr.FieldDefn("strfield", ogr.OFTString)
    lyr.CreateField(field_defn)
    field_defn = ogr.FieldDefn("intfield", ogr.OFTInteger)
    lyr.CreateField(field_defn)

    lyr = ds.CreateLayer("my_layer2")
    field_defn = ogr.FieldDefn("strfield", ogr.OFTString)
    lyr.CreateField(field_defn)
    field_defn = ogr.FieldDefn("strfield2", ogr.OFTString)
    lyr.CreateField(field_defn)

    # A NULL query must be rejected (the SWIG bindings raise for None).
    try:
        sql_lyr = ds.ExecuteSQL(None)
        gdaltest.post_reason('expected error on NULL query')
        return 'fail'
    except Exception:
        pass

    queries = [
        '',
        '1',
        '*',
        'SELECT',
        "SELECT ' FROM my_layer",
        'SELECT + FROM my_layer',
        'SELECT (1 FROM my_layer',
        'SELECT (1)) FROM my_layer',
        'SELECT (1,) FROM my_layer',
        'SELECT 1 + FROM my_layer',
        "SELECT 1 + 'a' FROM my_layer",
        'SELECT 1 - FROM my_layer',
        'SELECT 1 * FROM my_layer',
        'SELECT 1 % FROM my_layer',
        'SELECT *',
        'SELECT * FROM',
        'SELECT * FROM foo',
        'SELECT FROM my_layer',
        'SELECT FROM FROM my_layer',
        "SELECT ('strfield'",
        "SELECT 'strfield' +",
        "SELECT 'strfield' 'strfield'",
        "SELECT CONCAT('strfield')",
        'SELECT foo(strfield) FROM my_layer',  # Undefined function 'foo' used.
        'SELECT strfield, FROM my_layer',
        'SELECT strfield, foo FROM my_layer',
        'SELECT strfield AS FROM my_layer',
        'SELECT strfield AS 1 FROM my_layer',
        'SELECT strfield AS strfield2 FROM',
        'SELECT strfield + intfield FROM my_layer',
        'SELECT CAST',
        'SELECT CAST(',
        'SELECT CAST(strfield',
        'SELECT CAST(strfield AS',
        'SELECT CAST(strfield AS foo',
        'SELECT CAST(strfield AS foo)',
        'SELECT CAST(strfield AS foo) FROM',
        'SELECT CAST(strfield AS foo) FROM my_layer',
        'SELECT CAST(strfield AS CHARACTER',
        'SELECT CAST(strfield AS CHARACTER)',
        'SELECT CAST(strfield AS CHARACTER) FROM',
        'SELECT CAST(strfield AS CHARACTER) FROM foo',
        'SELECT CAST(strfield AS CHARACTER(',
        'SELECT CAST(strfield AS CHARACTER(2',
        'SELECT CAST(strfield AS CHARACTER(2)',
        'SELECT CAST(strfield AS CHARACTER(2))',
        'SELECT CAST(strfield AS CHARACTER(2)) FROM',
        'SELECT CAST(strfield AS CHARACTER(2)) FROM foo',
        'SELECT CAST(strfield AS 1) FROM my_layer',
        'SELECT * FROM my_layer WHERE',
        # 'SELECT * FROM my_layer WHERE strfield',
        'SELECT * FROM my_layer WHERE strfield = ',
        'SELECT * FROM my_layer WHERE strfield = foo',
        "SELECT * FROM my_layer WHERE foo = 'a'",
        # NOTE: the comma after the next entry was missing, silently
        # concatenating it with the following literal.
        "SELECT * FROM my_layer WHERE strfield = 'a",
        "SELECT * FROM my_layer WHERE strfield = 'a' ORDER ",
        "SELECT * FROM my_layer WHERE strfield = 'a' ORDER BY",
        "SELECT * FROM my_layer WHERE strfield = 'a' ORDER BY foo",
        "SELECT * FROM my_layer WHERE strfield = 'a' ORDER BY strfield UNK",
        "SELECT * FROM my_layer ORDER BY geom",  # Cannot use geometry field 'geom' in a ORDER BY clause
        "SELECT FOO(*) FROM my_layer",
        "SELECT FOO(*) AS bar FROM my_layer",
        "SELECT COUNT",
        "SELECT COUNT(",
        "SELECT COUNT() FROM my_layer",
        "SELECT COUNT(*",
        "SELECT COUNT(*)",
        "SELECT COUNT(*) FROM",
        "SELECT COUNT(*) AS foo FROM",
        "SELECT COUNT(* FROM my_layer",
        "SELECT COUNT(FOO intfield) FROM my_layer",
        "SELECT COUNT(DISTINCT intfield FROM my_layer",
        "SELECT COUNT(DISTINCT *) FROM my_layer",
        "SELECT FOO(DISTINCT intfield) FROM my_layer",
        "SELECT FOO(DISTINCT intfield) as foo FROM my_layer",
        "SELECT DISTINCT foo FROM my_layer",
        "SELECT DISTINCT foo AS 'id' 'id2' FROM",
        "SELECT DISTINCT foo AS id id2 FROM",
        "SELECT DISTINCT FROM my_layer",
        "SELECT DISTINCT strfield, COUNT(DISTINCT intfield) FROM my_layer",
        "SELECT MIN(intfield*2) FROM my_layer",
        "SELECT MIN(intfield,2) FROM my_layer",
        "SELECT MIN(foo) FROM my_layer",
        "SELECT MAX(foo) FROM my_layer",
        "SELECT SUM(foo) FROM my_layer",
        "SELECT AVG(foo) FROM my_layer",
        "SELECT MIN(strfield) FROM my_layer",
        "SELECT MAX(strfield) FROM my_layer",
        "SELECT SUM(strfield) FROM my_layer",
        "SELECT AVG(strfield) FROM my_layer",
        "SELECT AVG(intfield, intfield) FROM my_layer",
        "SELECT * FROM my_layer WHERE AVG(intfield) = 1",
        "SELECT * FROM 'foo' foo",
        "SELECT * FROM my_layer WHERE strfield =",
        "SELECT * FROM my_layer WHERE strfield = foo",
        "SELECT * FROM my_layer WHERE strfield = intfield",
        "SELECT * FROM my_layer WHERE strfield = 1",
        "SELECT * FROM my_layer WHERE strfield = '1' AND",
        # "SELECT * FROM my_layer WHERE 1 AND 2" ,
        "SELECT * FROM my_layer WHERE strfield LIKE",
        "SELECT * FROM my_layer WHERE strfield LIKE 1",
        "SELECT * FROM my_layer WHERE strfield IS",
        "SELECT * FROM my_layer WHERE strfield IS NOT",
        "SELECT * FROM my_layer WHERE strfield IS foo",
        "SELECT * FROM my_layer WHERE strfield IS NOT foo",
        "SELECT * FROM my_layer WHERE (strfield IS NOT NULL",
        "SELECT * FROM my_layer WHERE strfield IN",
        "SELECT * FROM my_layer WHERE strfield IN(",
        "SELECT * FROM my_layer WHERE strfield IN()",
        "SELECT * FROM my_layer WHERE strfield IN('a'",
        "SELECT * FROM my_layer WHERE strfield IN('a',",
        "SELECT * FROM my_layer WHERE strfield IN('a','b'",
        "SELECT * FROM my_layer WHERE strfield IN('a','b'))",
        "SELECT * FROM my_layer LEFT",
        "SELECT * FROM my_layer LEFT JOIN",
        "SELECT * FROM my_layer LEFT JOIN foo",
        "SELECT * FROM my_layer LEFT JOIN foo ON my_layer.strfield = my_layer2.strfield",
        "SELECT * FROM my_layer LEFT JOIN my_layer2 ON my_layer.strfield = foo.strfield",
        "SELECT * FROM my_layer LEFT JOIN my_layer2 ON my_layer.strfield = my_layer2.foo",
        # "SELECT * FROM my_layer LEFT JOIN my_layer2 ON my_layer.strfield != my_layer2.strfield",
        "SELECT *, my_layer2. FROM my_layer LEFT JOIN my_layer2 ON my_layer.strfield = my_layer2.strfield",
        "SELECT *, my_layer2.foo FROM my_layer LEFT JOIN my_layer2 ON my_layer.strfield = my_layer2.strfield",
        "SELECT * FROM my_layer UNION",
        "SELECT * FROM my_layer UNION ALL",
        "SELECT * FROM my_layer UNION ALL SELECT",
        "SELECT * FROM my_layer UNION ALL SELECT *",
        "SELECT * FROM my_layer UNION ALL SELECT * FROM",
    ]

    for query in queries:
        gdal.ErrorReset()
        # print query
        gdal.PushErrorHandler('CPLQuietErrorHandler')
        sql_lyr = ds.ExecuteSQL(query)
        gdal.PopErrorHandler()
        # Each query is invalid: ExecuteSQL must return None AND set an error.
        if sql_lyr is not None:
            gdaltest.post_reason('expected None result on "%s"' % query)
            ds.ReleaseResultSet(sql_lyr)
            return 'fail'
        if gdal.GetLastErrorType() == 0:
            gdaltest.post_reason('expected error on "%s"' % query)
            return 'fail'

    ds = None
    return 'success'
def ogr_rfc41_6():
    """Test OGR SQL support for geometry fields (RFC 41).

    Covers: implicit/explicit selection of a geometry column, CAST to and
    from GEOMETRY (with type and SRID modifiers), COUNT() on a geometry,
    rejection of unsupported geometry operations, geometry predicates in
    WHERE, spatial filters on result layers, and layers/queries with several
    geometry fields.  Returns 'success' or 'fail' (gdaltest convention).
    """
    ds = ogr.GetDriverByName('memory').CreateDataSource('')
    sr = osr.SpatialReference()
    lyr = ds.CreateLayer('poly', geom_type=ogr.wkbPolygon, srs=sr)
    lyr.GetLayerDefn().GetGeomFieldDefn(0).SetName('geomfield')
    lyr.CreateField(ogr.FieldDefn('intfield', ogr.OFTInteger))
    lyr.CreateField(ogr.FieldDefn('wkt', ogr.OFTString))
    # First feature: all fields set, empty polygon geometry.
    feat = ogr.Feature(lyr.GetLayerDefn())
    feat.SetField('intfield', 1)
    feat.SetField('wkt', 'POINT (0 0)')
    feat.SetGeometryDirectly(ogr.CreateGeometryFromWkt('POLYGON EMPTY'))
    lyr.CreateFeature(feat)
    # Second feature: completely unset (NULL fields and geometry).
    feat = ogr.Feature(lyr.GetLayerDefn())
    lyr.CreateFeature(feat)
    feat = None
    # Test implicit geometry column (since poly has one single geometry column)
    # then explicit geometry column
    for sql in [
            'SELECT intfield FROM poly', 'SELECT * FROM poly',
            'SELECT intfield, geomfield FROM poly',
            'SELECT geomfield, intfield FROM poly'
    ]:
        sql_lyr = ds.ExecuteSQL(sql)
        # Result layer must propagate the geometry type and the SRS.
        if sql_lyr.GetLayerDefn().GetGeomFieldDefn(
                0).GetType() != ogr.wkbPolygon:
            gdaltest.post_reason('fail')
            return 'fail'
        if sql_lyr.GetLayerDefn().GetGeomFieldDefn(0).GetSpatialRef() is None:
            gdaltest.post_reason('fail')
            return 'fail'
        feat = sql_lyr.GetNextFeature()
        if feat.GetField('intfield') != 1:
            gdaltest.post_reason('fail')
            return 'fail'
        if feat.GetGeomFieldRef('geomfield') is None:
            gdaltest.post_reason('fail')
            return 'fail'
        feat = sql_lyr.GetNextFeature()
        # Second feature has no geometry.
        if feat.GetGeomFieldRef('geomfield') is not None:
            gdaltest.post_reason('fail')
            return 'fail'
        feat = None
        ds.ReleaseResultSet(sql_lyr)
    # Test CAST(geometry_field AS GEOMETRY)
    sql_lyr = ds.ExecuteSQL(
        'SELECT CAST(geomfield AS GEOMETRY) AS mygeom FROM poly WHERE CAST(geomfield AS GEOMETRY) IS NOT NULL'
    )
    # A plain GEOMETRY cast yields wkbUnknown and drops the SRS.
    if sql_lyr.GetLayerDefn().GetGeomFieldDefn(0).GetType() != ogr.wkbUnknown:
        gdaltest.post_reason('fail')
        return 'fail'
    if sql_lyr.GetLayerDefn().GetGeomFieldDefn(0).GetSpatialRef() is not None:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = sql_lyr.GetNextFeature()
    if feat.GetGeomFieldRef('mygeom') is None:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = None
    ds.ReleaseResultSet(sql_lyr)
    # Test CAST(xxx AS GEOMETRY(POLYGON))
    sql_lyr = ds.ExecuteSQL(
        'SELECT CAST(geomfield AS GEOMETRY(POLYGON)) AS mygeom FROM poly WHERE CAST(geomfield AS GEOMETRY(POLYGON)) IS NOT NULL'
    )
    if sql_lyr.GetLayerDefn().GetGeomFieldDefn(0).GetType() != ogr.wkbPolygon:
        gdaltest.post_reason('fail')
        return 'fail'
    if sql_lyr.GetLayerDefn().GetGeomFieldDefn(0).GetSpatialRef() is not None:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = sql_lyr.GetNextFeature()
    if feat.GetGeomFieldRef('mygeom') is None:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = None
    ds.ReleaseResultSet(sql_lyr)
    # Test CAST(xxx AS GEOMETRY(POLYGON,4326))
    sql_lyr = ds.ExecuteSQL(
        'SELECT CAST(geomfield AS GEOMETRY(POLYGON,4326)) AS mygeom FROM poly WHERE CAST(geomfield AS GEOMETRY(POLYGON,4326)) IS NOT NULL'
    )
    if sql_lyr.GetLayerDefn().GetGeomFieldDefn(0).GetType() != ogr.wkbPolygon:
        gdaltest.post_reason('fail')
        return 'fail'
    # The SRID modifier must assign EPSG:4326 to the result layer.
    if sql_lyr.GetLayerDefn().GetGeomFieldDefn(
            0).GetSpatialRef().ExportToWkt().find('4326') < 0:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = sql_lyr.GetNextFeature()
    if feat.GetGeomFieldRef('mygeom') is None:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = None
    ds.ReleaseResultSet(sql_lyr)
    # Test CAST(a_multipolygon AS GEOMETRY(POLYGON))
    sql_lyr = ds.ExecuteSQL(
        "SELECT CAST('MULTIPOLYGON (((0 0,0 1,1 1,1 0,0 0)))' AS GEOMETRY(POLYGON)) AS mygeom FROM poly"
    )
    feat = sql_lyr.GetNextFeature()
    # Single-part multipolygon is demoted to a polygon.
    if feat.GetGeomFieldRef(
            'mygeom').ExportToWkt() != 'POLYGON ((0 0,0 1,1 1,1 0,0 0))':
        gdaltest.post_reason('fail')
        return 'fail'
    feat = None
    ds.ReleaseResultSet(sql_lyr)
    # Test CAST(a_polygon AS GEOMETRY(MULTIPOLYGON))
    sql_lyr = ds.ExecuteSQL(
        "SELECT CAST('POLYGON ((0 0,0 1,1 1,1 0,0 0))' AS GEOMETRY(MULTIPOLYGON)) AS mygeom FROM poly"
    )
    feat = sql_lyr.GetNextFeature()
    # Polygon is promoted to a multipolygon.
    if feat.GetGeomFieldRef('mygeom').ExportToWkt(
    ) != 'MULTIPOLYGON (((0 0,0 1,1 1,1 0,0 0)))':
        gdaltest.post_reason('fail')
        return 'fail'
    feat = None
    ds.ReleaseResultSet(sql_lyr)
    # Test CAST(a_multilinestring AS GEOMETRY(LINESTRING))
    sql_lyr = ds.ExecuteSQL(
        "SELECT CAST('MULTILINESTRING ((0 0,0 1,1 1,1 0,0 0))' AS GEOMETRY(LINESTRING)) AS mygeom FROM poly"
    )
    feat = sql_lyr.GetNextFeature()
    if feat.GetGeomFieldRef(
            'mygeom').ExportToWkt() != 'LINESTRING (0 0,0 1,1 1,1 0,0 0)':
        gdaltest.post_reason('fail')
        return 'fail'
    feat = None
    ds.ReleaseResultSet(sql_lyr)
    # Test CAST(a_linestring AS GEOMETRY(MULTILINESTRING))
    sql_lyr = ds.ExecuteSQL(
        "SELECT CAST('LINESTRING (0 0,0 1,1 1,1 0,0 0)' AS GEOMETRY(MULTILINESTRING)) AS mygeom FROM poly"
    )
    feat = sql_lyr.GetNextFeature()
    if feat.GetGeomFieldRef('mygeom').ExportToWkt(
    ) != 'MULTILINESTRING ((0 0,0 1,1 1,1 0,0 0))':
        gdaltest.post_reason('fail')
        return 'fail'
    feat = None
    ds.ReleaseResultSet(sql_lyr)
    # Test expression with cast CHARACTER <--> GEOMETRY
    sql_lyr = ds.ExecuteSQL(
        'SELECT CAST(CAST(geomfield AS CHARACTER) AS GEOMETRY) AS mygeom, intfield FROM poly'
    )
    if sql_lyr.GetLayerDefn().GetGeomFieldDefn(0).GetType() != ogr.wkbUnknown:
        gdaltest.post_reason('fail')
        return 'fail'
    if sql_lyr.GetLayerDefn().GetGeomFieldDefn(0).GetSpatialRef() is not None:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = sql_lyr.GetNextFeature()
    if feat.GetField('intfield') != 1:
        gdaltest.post_reason('fail')
        return 'fail'
    if feat.GetGeomFieldRef('mygeom') is None:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = None
    ds.ReleaseResultSet(sql_lyr)
    # Test CAST(NULL AS GEOMETRY)
    sql_lyr = ds.ExecuteSQL('SELECT CAST(NULL AS GEOMETRY) FROM poly')
    if sql_lyr.GetLayerDefn().GetGeomFieldDefn(0).GetType() != ogr.wkbUnknown:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = sql_lyr.GetNextFeature()
    # Unnamed geometry column; the value must be NULL.
    if feat.GetGeomFieldRef('') is not None:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = None
    ds.ReleaseResultSet(sql_lyr)
    # Test CAST(stringfield AS GEOMETRY)
    sql_lyr = ds.ExecuteSQL('SELECT CAST(wkt AS GEOMETRY) FROM poly')
    if sql_lyr.GetLayerDefn().GetGeomFieldDefn(0).GetType() != ogr.wkbUnknown:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = sql_lyr.GetNextFeature()
    # The 'wkt' string field of the first feature holds 'POINT (0 0)'.
    if feat.GetGeomFieldRef('wkt').ExportToWkt() != 'POINT (0 0)':
        gdaltest.post_reason('fail')
        return 'fail'
    feat = None
    ds.ReleaseResultSet(sql_lyr)
    # Test COUNT(geometry)
    sql_lyr = ds.ExecuteSQL('SELECT COUNT(geomfield) FROM poly')
    feat = sql_lyr.GetNextFeature()
    if feat is None:
        gdaltest.post_reason('fail')
        return 'fail'
    # Only one of the two features has a non-NULL geometry.
    if feat.GetField(0) != 1:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = None
    ds.ReleaseResultSet(sql_lyr)
    # Each of these statements must fail with the paired error message.
    wrong_sql_list = [
        ('SELECT DISTINCT geomfield FROM poly',
         'SELECT DISTINCT on a geometry not supported'),
        ('SELECT COUNT(DISTINCT geomfield) FROM poly',
         'SELECT COUNT DISTINCT on a geometry not supported'),
        ('SELECT MAX(geomfield) FROM poly',
         'Use of field function MAX() on geometry field'),
        ('SELECT CAST(5 AS GEOMETRY) FROM poly',
         'Cannot cast integer to geometry'),
        ('SELECT CAST(geomfield AS integer) FROM poly',
         'Cannot cast geometry to integer'),
        ('SELECT CAST(geomfield AS GEOMETRY(2)) FROM poly',
         'First argument of CAST operator should be an geometry type identifier'
         ),
        ('SELECT CAST(geomfield AS GEOMETRY(UNSUPPORTED_TYPE)) FROM poly',
         'SQL Expression Parsing Error: syntax error'),
        ('SELECT CAST(geomfield AS GEOMETRY(UNSUPPORTED_TYPE,5)) FROM poly',
         'SQL Expression Parsing Error: syntax error'),
    ]
    for (sql, error_msg) in wrong_sql_list:
        gdal.ErrorReset()
        gdal.PushErrorHandler('CPLQuietErrorHandler')
        sql_lyr = ds.ExecuteSQL(sql)
        gdal.PopErrorHandler()
        # The emitted error must start with the expected message.
        if gdal.GetLastErrorMsg().find(error_msg) != 0:
            gdaltest.post_reason('fail')
            print('For %s, expected error %s, got %s' %
                  (sql, error_msg, gdal.GetLastErrorMsg()))
            return 'fail'
        if sql_lyr is not None:
            gdaltest.post_reason('fail')
            return 'fail'
    # Test invalid expressions with geometry
    for sql in [
            "SELECT geomfield + 'a' FROM poly",
            "SELECT geomfield * 'a' FROM poly",
            "SELECT geomfield + 'a' FROM poly",
            "SELECT geomfield - 'a' FROM poly",
            "SELECT geomfield % 'a' FROM poly",
            "SELECT CONCAT(geomfield, 'a') FROM poly",
            "SELECT SUBSTR(geomfield, 0, 1) FROM poly",
            "SELECT * FROM poly WHERE geomfield = CAST('POINT EMPTY' AS GEOMETRY)",
            "SELECT * FROM poly WHERE geomfield LIKE 'a'",
            "SELECT * FROM poly WHERE geomfield IN( 'a' )"
    ]:
        gdal.ErrorReset()
        gdal.PushErrorHandler('CPLQuietErrorHandler')
        sql_lyr = ds.ExecuteSQL(sql)
        gdal.PopErrorHandler()
        if gdal.GetLastErrorMsg().find(
                'Cannot use geometry field in this operation') != 0:
            gdaltest.post_reason('fail')
            print(gdal.GetLastErrorMsg())
            return 'fail'
        if sql_lyr is not None:
            gdaltest.post_reason('fail')
            return 'fail'
    # Test expression with geometry in WHERE
    sql_lyr = ds.ExecuteSQL('SELECT * FROM poly WHERE geomfield IS NOT NULL')
    feat = sql_lyr.GetNextFeature()
    if feat.GetField('intfield') != 1:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = sql_lyr.GetNextFeature()
    if feat is not None:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = None
    ds.ReleaseResultSet(sql_lyr)
    sql_lyr = ds.ExecuteSQL('SELECT * FROM poly WHERE geomfield IS NULL')
    feat = sql_lyr.GetNextFeature()
    # The NULL-geometry feature also has all attribute fields unset.
    if feat.IsFieldSet(0):
        gdaltest.post_reason('fail')
        return 'fail'
    feat = sql_lyr.GetNextFeature()
    if feat is not None:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = None
    ds.ReleaseResultSet(sql_lyr)
    sql_lyr = ds.ExecuteSQL(
        "SELECT * FROM poly WHERE CAST(geomfield AS CHARACTER) = 'POLYGON EMPTY'"
    )
    feat = sql_lyr.GetNextFeature()
    if feat is None:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = sql_lyr.GetNextFeature()
    if feat is not None:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = None
    ds.ReleaseResultSet(sql_lyr)
    sql_lyr = ds.ExecuteSQL(
        'SELECT count(*) FROM poly WHERE geomfield IS NULL')
    feat = sql_lyr.GetNextFeature()
    if feat.GetField(0) != 1:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = None
    ds.ReleaseResultSet(sql_lyr)
    sql_lyr = ds.ExecuteSQL(
        'SELECT count(*) FROM poly WHERE geomfield IS NOT NULL')
    feat = sql_lyr.GetNextFeature()
    if feat.GetField(0) != 1:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = None
    ds.ReleaseResultSet(sql_lyr)
    # Test spatial filter
    # Replace feature 0's geometry with POINT(1 2) and drop feature 1 so
    # exactly one feature with a known location remains.
    feat = lyr.GetFeature(0)
    feat.SetGeometryDirectly(ogr.CreateGeometryFromWkt('POINT(1 2)'))
    lyr.SetFeature(feat)
    feat = None
    lyr.DeleteFeature(1)
    sql_lyr = ds.ExecuteSQL("SELECT * FROM poly")
    # Rect away from the point: no feature expected.
    sql_lyr.SetSpatialFilterRect(0, 0, 0, 0)
    feat = sql_lyr.GetNextFeature()
    if feat is not None:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = None
    # Rect on geometry field 0 covering the point: one feature expected.
    sql_lyr.SetSpatialFilterRect(0, 1, 2, 1, 2)
    feat = sql_lyr.GetNextFeature()
    if feat is None:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = None
    # Test invalid spatial filter index
    gdal.ErrorReset()
    gdal.PushErrorHandler('CPLQuietErrorHandler')
    sql_lyr.SetSpatialFilterRect(2, 0, 0, 0, 0)
    gdal.PopErrorHandler()
    if gdal.GetLastErrorMsg() == '':
        gdaltest.post_reason('fail')
        return 'fail'
    # Test invalid geometry field index
    gdal.ErrorReset()
    gdal.PushErrorHandler('CPLQuietErrorHandler')
    sql_lyr.GetExtent(geom_field=2)
    gdal.PopErrorHandler()
    if gdal.GetLastErrorMsg() == '':
        gdaltest.post_reason('fail')
        return 'fail'
    ds.ReleaseResultSet(sql_lyr)
    # Test querying several geometry fields
    sql_lyr = ds.ExecuteSQL(
        'SELECT geomfield as geom1, geomfield as geom2 FROM poly')
    feat = sql_lyr.GetNextFeature()
    if feat is None:
        gdaltest.post_reason('fail')
        return 'fail'
    if feat.GetGeomFieldRef('geom1') is None:
        gdaltest.post_reason('fail')
        return 'fail'
    if feat.GetGeomFieldRef('geom2') is None:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = None
    ds.ReleaseResultSet(sql_lyr)
    # Test querying a layer with several geometry fields
    lyr.CreateGeomField(ogr.GeomFieldDefn('secondarygeom', ogr.wkbPoint))
    lyr.ResetReading()
    feat = lyr.GetNextFeature()
    feat.SetGeomField('secondarygeom',
                      ogr.CreateGeometryFromWkt('POINT (10 100)'))
    lyr.SetFeature(feat)
    feat = None
    for sql in [
            'SELECT * FROM poly', 'SELECT geomfield, secondarygeom FROM poly',
            'SELECT secondarygeom, geomfield FROM poly'
    ]:
        sql_lyr = ds.ExecuteSQL(sql)
        feat = sql_lyr.GetNextFeature()
        if feat.GetGeomFieldRef('geomfield').ExportToWkt() != 'POINT (1 2)':
            gdaltest.post_reason('fail')
            return 'fail'
        if feat.GetGeomFieldRef(
                'secondarygeom').ExportToWkt() != 'POINT (10 100)':
            gdaltest.post_reason('fail')
            return 'fail'
        feat = None
        ds.ReleaseResultSet(sql_lyr)
    # Check that we don't get an implicit geometry field
    sql_lyr = ds.ExecuteSQL('SELECT intfield FROM poly')
    if sql_lyr.GetLayerDefn().GetGeomFieldCount() != 0:
        gdaltest.post_reason('fail')
        return 'fail'
    ds.ReleaseResultSet(sql_lyr)
    # Check GetExtent() and SetSpatialFilter()
    sql_lyr = ds.ExecuteSQL('SELECT * FROM poly')
    if sql_lyr.GetExtent(geom_field=0) != (1.0, 1.0, 2.0, 2.0):
        gdaltest.post_reason('fail')
        return 'fail'
    if sql_lyr.GetExtent(geom_field=1) != (10.0, 10.0, 100.0, 100.0):
        gdaltest.post_reason('fail')
        return 'fail'
    # Filter on each geometry field in turn: hit then miss.
    sql_lyr.SetSpatialFilterRect(0, 0.5, 1.5, 1.5, 2.5)
    if sql_lyr.GetFeatureCount() != 1:
        gdaltest.post_reason('fail')
        return 'fail'
    sql_lyr.SetSpatialFilterRect(0, 0, 0, 0.5, 0.5)
    if sql_lyr.GetFeatureCount() != 0:
        gdaltest.post_reason('fail')
        return 'fail'
    sql_lyr.SetSpatialFilterRect(1, 9, 99, 11, 101)
    if sql_lyr.GetFeatureCount() != 1:
        gdaltest.post_reason('fail')
        return 'fail'
    sql_lyr.SetSpatialFilterRect(1, 0, 0, 0.5, 0.5)
    if sql_lyr.GetFeatureCount() != 0:
        gdaltest.post_reason('fail')
        return 'fail'
    ds.ReleaseResultSet(sql_lyr)
    ds = None
    return 'success'
def test_pds4_14():
    """Test PDS4 driver error paths.

    Feeds the driver a series of deliberately broken PDS4 label files
    (invalid XML, bad Array/Axis_Array descriptions, oversized dimensions),
    then exercises invalid creation options and broken label templates.
    Every open/create is expected to fail; pytest asserts are used.

    NOTE(review): the XML label bodies below were reconstructed from a
    whitespace-collapsed source; inter-element whitespace is insignificant
    to the XML parser.
    """
    filename = '/vsimem/test.xml'
    # Not valid XML at all.
    gdal.FileFromMemBuffer(
        filename, "Product_Observational http://pds.nasa.gov/pds4/pds/v1")
    with gdaltest.error_handler():
        ds = gdal.Open(filename)
    assert ds is None

    # Structurally valid label, but every File_Area_Observational is broken:
    # missing File, missing file_name target, missing axis_index_order,
    # and Axis_Array entries with missing/zero/duplicate/out-of-range
    # sequence_number or elements.
    gdal.FileFromMemBuffer(
        filename, """
<Product_Observational xmlns="http://pds.nasa.gov/pds4/pds/v1">
  <File_Area_Observational/>
  <File_Area_Observational>
    <File/>
  </File_Area_Observational>
  <File_Area_Observational>
    <File>
      <file_name>i_do_not_exist.img</file_name>
    </File>
    <Array>
      <axes>3</axes>
    </Array>
  </File_Area_Observational>
  <File_Area_Observational>
    <File>
      <file_name>i_do_not_exist.img</file_name>
    </File>
    <Array>
      <axes>3</axes>
      <axis_index_order>Last Index Fastest</axis_index_order>
    </Array>
  </File_Area_Observational>
  <File_Area_Observational>
    <File>
      <file_name>i_do_not_exist.img</file_name>
    </File>
    <Array>
      <axes>3</axes>
      <axis_index_order>Last Index Fastest</axis_index_order>
      <Element_Array>
        <data_type>SignedByte</data_type>
      </Element_Array>
      <Axis_Array>
      </Axis_Array>
      <Axis_Array>
        <axis_name>x</axis_name>
        <elements>1</elements>
        <sequence_number>1</sequence_number>
      </Axis_Array>
      <Axis_Array>
        <axis_name>Band</axis_name>
        <elements>0</elements>
        <sequence_number>1</sequence_number>
      </Axis_Array>
      <Axis_Array>
        <axis_name>Band</axis_name>
        <elements>1</elements>
        <sequence_number>0</sequence_number>
      </Axis_Array>
      <Axis_Array>
        <axis_name>Band</axis_name>
        <elements>1</elements>
        <sequence_number>4</sequence_number>
      </Axis_Array>
      <Axis_Array>
        <axis_name>Band</axis_name>
        <elements>1</elements>
        <sequence_number>1</sequence_number>
      </Axis_Array>
      <Axis_Array>
        <axis_name>Band</axis_name>
        <elements>1</elements>
        <sequence_number>1</sequence_number>
      </Axis_Array>
    </Array>
  </File_Area_Observational>
</Product_Observational>""")
    with gdaltest.error_handler():
        ds = gdal.Open(filename)
    assert ds is None

    # Too many bands (65537) in an Array_3D.
    gdal.FileFromMemBuffer(
        filename, """
<Product_Observational xmlns="http://pds.nasa.gov/pds4/pds/v1">
  <File_Area_Observational>
    <File>
      <file_name>i_do_not_exist.img</file_name>
    </File>
    <Array_3D>
      <axes>3</axes>
      <axis_index_order>Last Index Fastest</axis_index_order>
      <Element_Array>
        <data_type>UnsignedByte</data_type>
      </Element_Array>
      <Axis_Array>
        <axis_name>Band</axis_name>
        <elements>65537</elements>
        <sequence_number>1</sequence_number>
      </Axis_Array>
      <Axis_Array>
        <axis_name>Line</axis_name>
        <elements>1</elements>
        <sequence_number>2</sequence_number>
      </Axis_Array>
      <Axis_Array>
        <axis_name>Sample</axis_name>
        <elements>1</elements>
        <sequence_number>3</sequence_number>
      </Axis_Array>
    </Array_3D>
  </File_Area_Observational>
</Product_Observational>""")
    with gdaltest.error_handler():
        ds = gdal.Open(filename)
    assert ds is None

    # Array_2D with SignedByte data type: rejected.
    gdal.FileFromMemBuffer(
        filename, """
<Product_Observational xmlns="http://pds.nasa.gov/pds4/pds/v1">
  <File_Area_Observational>
    <File>
      <file_name>i_do_not_exist.img</file_name>
    </File>
    <Array_2D>
      <axes>2</axes>
      <axis_index_order>Last Index Fastest</axis_index_order>
      <Element_Array>
        <data_type>SignedByte</data_type>
      </Element_Array>
      <Axis_Array>
        <axis_name>Line</axis_name>
        <elements>1</elements>
        <sequence_number>1</sequence_number>
      </Axis_Array>
      <Axis_Array>
        <axis_name>Sample</axis_name>
        <elements>1</elements>
        <sequence_number>2</sequence_number>
      </Axis_Array>
    </Array_2D>
  </File_Area_Observational>
</Product_Observational>""")
    with gdaltest.error_handler():
        ds = gdal.Open(filename)
    assert ds is None

    # Oversized Sample dimension (2e9) with a 16-byte data type.
    gdal.FileFromMemBuffer(
        filename, """
<Product_Observational xmlns="http://pds.nasa.gov/pds4/pds/v1">
  <File_Area_Observational>
    <File>
      <file_name>i_do_not_exist.img</file_name>
    </File>
    <Array_2D>
      <axes>2</axes>
      <axis_index_order>Last Index Fastest</axis_index_order>
      <Element_Array>
        <data_type>ComplexMSB16</data_type>
      </Element_Array>
      <Axis_Array>
        <axis_name>Line</axis_name>
        <elements>1</elements>
        <sequence_number>1</sequence_number>
      </Axis_Array>
      <Axis_Array>
        <axis_name>Sample</axis_name>
        <elements>2000000000</elements>
        <sequence_number>2</sequence_number>
      </Axis_Array>
    </Array_2D>
  </File_Area_Observational>
</Product_Observational>""")
    with gdaltest.error_handler():
        ds = gdal.Open(filename)
    assert ds is None

    # Same overflow, but on the Line dimension.
    gdal.FileFromMemBuffer(
        filename, """
<Product_Observational xmlns="http://pds.nasa.gov/pds4/pds/v1">
  <File_Area_Observational>
    <File>
      <file_name>i_do_not_exist.img</file_name>
    </File>
    <Array_2D>
      <axes>2</axes>
      <axis_index_order>Last Index Fastest</axis_index_order>
      <Element_Array>
        <data_type>ComplexMSB16</data_type>
      </Element_Array>
      <Axis_Array>
        <axis_name>Sample</axis_name>
        <elements>1</elements>
        <sequence_number>1</sequence_number>
      </Axis_Array>
      <Axis_Array>
        <axis_name>Line</axis_name>
        <elements>2000000000</elements>
        <sequence_number>2</sequence_number>
      </Axis_Array>
    </Array_2D>
  </File_Area_Observational>
</Product_Observational>""")
    with gdaltest.error_handler():
        ds = gdal.Open(filename)
    assert ds is None

    gdal.Unlink(filename)

    # Invalid value for INTERLEAVE
    with gdaltest.error_handler():
        ds = gdal.GetDriverByName('PDS4').Create(
            '/vsimem/out.xml', 1, 1, options=['INTERLEAVE=INVALID'])
    assert ds is None

    # INTERLEAVE=BIL not supported for GeoTIFF in PDS4
    with gdaltest.error_handler():
        ds = gdal.GetDriverByName('PDS4').Create(
            '/vsimem/out.xml', 1, 1,
            options=['INTERLEAVE=BIL', 'IMAGE_FORMAT=GEOTIFF'])
    assert ds is None

    # Cannot create GeoTIFF file
    with gdaltest.error_handler():
        ds = gdal.GetDriverByName('PDS4').Create(
            '/i/do_not/exist.xml', 1, 1, options=['IMAGE_FORMAT=GEOTIFF'])
    assert ds is None

    gdal.Translate('/vsimem/test.tif', 'data/byte.tif')
    # Output file has same name as input file
    with gdaltest.error_handler():
        ds = gdal.Translate('/vsimem/test.xml', '/vsimem/test.tif',
                            format='PDS4',
                            creationOptions=['IMAGE_FORMAT=GEOTIFF'])
    assert ds is None
    gdal.Unlink('/vsimem/test.tif')

    template = '/vsimem/template.xml'

    # Missing Product_Observational root
    gdal.FileFromMemBuffer(template, """<foo/>""")
    ds = gdal.GetDriverByName('PDS4').Create(filename, 1, 1,
                                             options=['TEMPLATE=' + template])
    gdal.ErrorReset()
    # The template is only consulted when the dataset is flushed/closed.
    with gdaltest.error_handler():
        ds = None
    assert gdal.GetLastErrorMsg(
    ) == 'Cannot find Product_Observational element in template'

    # Missing Target_Identification
    gdal.FileFromMemBuffer(
        template, """
<Product_Observational xmlns="http://pds.nasa.gov/pds4/pds/v1"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://pds.nasa.gov/pds4/pds/v1 https://pds.nasa.gov/pds4/pds/v1/PDS4_PDS_1800.xsd">
</Product_Observational>""")
    ds = gdal.GetDriverByName('PDS4').Create(filename, 1, 1,
                                             options=['TEMPLATE=' + template])
    # Georeference the dataset so that the Target_Identification lookup is
    # actually triggered at close time.
    sr = osr.SpatialReference()
    sr.ImportFromProj4('+proj=longlat +R=2439400 +no_defs')
    ds.SetProjection(sr.ExportToWkt())
    ds.SetGeoTransform([2, 1, 0, 49, 0, -2])
    gdal.ErrorReset()
    with gdaltest.error_handler():
        ds = None
    assert gdal.GetLastErrorMsg(
    ) == 'Cannot find Target_Identification element in template'

    # Missing Observation_Area
    gdal.FileFromMemBuffer(
        template, """
<Product_Observational xmlns="http://pds.nasa.gov/pds4/pds/v1"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://pds.nasa.gov/pds4/pds/v1 https://pds.nasa.gov/pds4/pds/v1/PDS4_PDS_1800.xsd">
</Product_Observational>""")
    ds = gdal.GetDriverByName('PDS4').Create(filename, 1, 1,
                                             options=['TEMPLATE=' + template])
    gdal.ErrorReset()
    with gdaltest.error_handler():
        ds = None
    assert gdal.GetLastErrorMsg() == 'Cannot find Observation_Area in template'

    # Unexpected content found after Observation_Area in template
    gdal.FileFromMemBuffer(
        template, """
<Product_Observational xmlns="http://pds.nasa.gov/pds4/pds/v1"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://pds.nasa.gov/pds4/pds/v1 https://pds.nasa.gov/pds4/pds/v1/PDS4_PDS_1800.xsd">
  <Observation_Area/>
  <!-- -->
  <foo/>
</Product_Observational>""")
    ds = gdal.GetDriverByName('PDS4').Create(filename, 1, 1,
                                             options=['TEMPLATE=' + template])
    gdal.ErrorReset()
    with gdaltest.error_handler():
        ds = None
    assert gdal.GetLastErrorMsg(
    ) == 'Unexpected content found after Observation_Area in template'

    # Cleanup of all in-memory artifacts.
    gdal.Unlink(template)
    gdal.Unlink(filename)
    gdal.Unlink('/vsimem/test.img')
def pam_11():
    """Check PAM behaviour when the .aux.xml cannot be written.

    Makes a read-only directory holding a copy of byte.tif, verifies that
    computing statistics warns (instead of crashing) when the .aux.xml save
    fails and that no statistics are persisted, then runs the external
    pamproxydb.py script twice to test creating and reloading a PAM proxy db
    (external process needed so GDAL_PAM_PROXY_DIR is set at startup).

    Returns 'success', 'fail' or 'skip' per the gdaltest convention.
    """
    # Create a read-only directory (remove any leftover from a previous run).
    try:
        os.chmod('tmpdirreadonly', stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
        shutil.rmtree('tmpdirreadonly')
    except OSError:
        pass
    os.mkdir('tmpdirreadonly')
    shutil.copy('data/byte.tif', 'tmpdirreadonly/byte.tif')

    # FIXME: how do we create a read-only dir on windows ?
    # The following has no effect
    os.chmod('tmpdirreadonly', stat.S_IRUSR | stat.S_IXUSR)

    # Test that the directory is really read-only; if we can still create a
    # file (e.g. on Windows or as root), the test cannot be meaningful.
    try:
        f = open('tmpdirreadonly/test', 'w')
        f.close()
        return 'skip'
    except IOError:
        pass

    # Compute statistics --> the saving as .aux.xml should fail
    ds = gdal.Open('tmpdirreadonly/byte.tif')
    stats = ds.GetRasterBand(1).ComputeStatistics(False)
    if stats[0] != 74:
        gdaltest.post_reason('did not get expected minimum')
        return 'fail'
    gdal.ErrorReset()
    ds = None
    error_msg = gdal.GetLastErrorMsg()
    # 'auxilary' [sic]: deliberate, matches GDAL's actual message text.
    if error_msg.find('Unable to save auxilary information') != 0:
        gdaltest.post_reason('warning was expected at that point')
        return 'fail'

    # Check that we actually have no saved statistics
    ds = gdal.Open('tmpdirreadonly/byte.tif')
    stats = ds.GetRasterBand(1).GetStatistics(False, False)
    if stats[3] != -1:
        gdaltest.post_reason('did not expected to have stats at that point')
        return 'fail'
    ds = None

    # This must be run as an external process so we can override GDAL_PAM_PROXY_DIR
    # at the beginning of the process
    import test_py_scripts
    ret = test_py_scripts.run_py_script_as_external_script(
        '.', 'pamproxydb', '-test1')
    # print(ret)
    if ret.find('success') == -1:
        gdaltest.post_reason('pamproxydb.py -test1 failed')
        print(ret)
        return 'fail'

    # Test loading an existing proxydb
    ret = test_py_scripts.run_py_script_as_external_script(
        '.', 'pamproxydb', '-test2')
    # print(ret)
    if ret.find('success') == -1:
        gdaltest.post_reason('pamproxydb.py -test2 failed')
        print(ret)
        return 'fail'

    return 'success'
def main( argv = None ):
    """Python port of the gdalinfo utility.

    Opens the raster named on the command line and prints driver, size,
    projection, geotransform, GCPs, metadata (default and extra domains),
    corner coordinates and per-band details (stats, histogram, checksum,
    nodata, overviews, mask, color table, ...).

    Returns a process exit code (0 on success, 1 on open failure, or the
    value of Usage() on bad arguments).
    """
    bComputeMinMax = False
    bShowGCPs = True
    bShowMetadata = True
    bShowRAT=True
    bStats = False
    bApproxStats = True
    bShowColorTable = True
    bComputeChecksum = False
    bReportHistograms = False
    pszFilename = None
    papszExtraMDDomains = [ ]
    pszProjection = None
    hTransform = None
    bShowFileList = True

    # Must process GDAL_SKIP before GDALAllRegister(), but we can't call
    # GDALGeneralCmdLineProcessor before it needs the drivers to be registered
    # for the --format or --formats options
    #for( i = 1; i < argc; i++ )
    #{
    #    if EQUAL(argv[i],"--config") and i + 2 < argc and EQUAL(argv[i + 1], "GDAL_SKIP"):
    #    {
    #        CPLSetConfigOption( argv[i+1], argv[i+2] );
    #
    #        i += 2;
    #    }
    #}
    #
    #GDALAllRegister();

    if argv is None:
        argv = sys.argv

    argv = gdal.GeneralCmdLineProcessor( argv )

    if argv is None:
        return 1

    nArgc = len(argv)

    # --------------------------------------------------------------------
    #      Parse arguments.
    # --------------------------------------------------------------------
    i = 1
    while i < nArgc:

        if EQUAL(argv[i], "--utility_version"):
            print("%s is running against GDAL %s" %
                  (argv[0], gdal.VersionInfo("RELEASE_NAME")))
            return 0
        elif EQUAL(argv[i], "-mm"):
            bComputeMinMax = True
        elif EQUAL(argv[i], "-hist"):
            bReportHistograms = True
        elif EQUAL(argv[i], "-stats"):
            bStats = True
            bApproxStats = False
        elif EQUAL(argv[i], "-approx_stats"):
            bStats = True
            bApproxStats = True
        elif EQUAL(argv[i], "-checksum"):
            bComputeChecksum = True
        elif EQUAL(argv[i], "-nogcp"):
            bShowGCPs = False
        elif EQUAL(argv[i], "-nomd"):
            bShowMetadata = False
        elif EQUAL(argv[i], "-norat"):
            bShowRAT = False
        elif EQUAL(argv[i], "-noct"):
            bShowColorTable = False
        elif EQUAL(argv[i], "-mdd") and i < nArgc-1:
            # -mdd consumes the next argument (an extra metadata domain name).
            i = i + 1
            papszExtraMDDomains.append( argv[i] )
        elif EQUAL(argv[i], "-nofl"):
            bShowFileList = False
        elif argv[i][0] == '-':
            return Usage()
        elif pszFilename is None:
            pszFilename = argv[i]
        else:
            return Usage()

        i = i + 1

    if pszFilename is None:
        return Usage()

    # --------------------------------------------------------------------
    #      Open dataset.
    # --------------------------------------------------------------------
    hDataset = gdal.Open( pszFilename, gdal.GA_ReadOnly )

    if hDataset is None:
        print("gdalinfo failed - unable to open '%s'." % pszFilename )
        return 1

    # --------------------------------------------------------------------
    #      Report general info.
    # --------------------------------------------------------------------
    hDriver = hDataset.GetDriver();
    print( "Driver: %s/%s" % ( \
            hDriver.ShortName, \
            hDriver.LongName ))

    papszFileList = hDataset.GetFileList();
    if papszFileList is None or len(papszFileList) == 0:
        print( "Files: none associated" )
    else:
        print( "Files: %s" % papszFileList[0] )
        if bShowFileList:
            for i in range(1, len(papszFileList)):
                print( "       %s" % papszFileList[i] )

    print( "Size is %d, %d" % (hDataset.RasterXSize, hDataset.RasterYSize))

    # --------------------------------------------------------------------
    #      Report projection.
    # --------------------------------------------------------------------
    pszProjection = hDataset.GetProjectionRef()
    if pszProjection is not None:

        hSRS = osr.SpatialReference()
        if hSRS.ImportFromWkt(pszProjection ) == gdal.CE_None:
            pszPrettyWkt = hSRS.ExportToPrettyWkt(False)
            print( "Coordinate System is:\n%s" % pszPrettyWkt )
        else:
            print( "Coordinate System is `%s'" % pszProjection )

    # --------------------------------------------------------------------
    #      Report Geotransform.
    # --------------------------------------------------------------------
    adfGeoTransform = hDataset.GetGeoTransform(can_return_null = True)
    if adfGeoTransform is not None:

        # North-up images get the compact Origin/Pixel Size form; rotated
        # or sheared geotransforms get the full 6-coefficient dump.
        if adfGeoTransform[2] == 0.0 and adfGeoTransform[4] == 0.0:
            print( "Origin = (%.15f,%.15f)" % ( \
                    adfGeoTransform[0], adfGeoTransform[3] ))

            print( "Pixel Size = (%.15f,%.15f)" % ( \
                    adfGeoTransform[1], adfGeoTransform[5] ))

        else:
            print( "GeoTransform =\n" \
                    "  %.16g, %.16g, %.16g\n" \
                    "  %.16g, %.16g, %.16g" % ( \
                    adfGeoTransform[0], \
                    adfGeoTransform[1], \
                    adfGeoTransform[2], \
                    adfGeoTransform[3], \
                    adfGeoTransform[4], \
                    adfGeoTransform[5] ))

    # --------------------------------------------------------------------
    #      Report GCPs.
    # --------------------------------------------------------------------
    if bShowGCPs and hDataset.GetGCPCount() > 0:

        pszProjection = hDataset.GetGCPProjection()
        if pszProjection is not None:

            hSRS = osr.SpatialReference()
            if hSRS.ImportFromWkt(pszProjection ) == gdal.CE_None:
                pszPrettyWkt = hSRS.ExportToPrettyWkt(False)
                print( "GCP Projection = \n%s" % pszPrettyWkt )

            else:
                print( "GCP Projection = %s" % \
                        pszProjection )

        gcps = hDataset.GetGCPs()
        i = 0
        for gcp in gcps:

            print( "GCP[%3d]: Id=%s, Info=%s\n" \
                    "          (%.15g,%.15g) -> (%.15g,%.15g,%.15g)" % ( \
                    i, gcp.Id, gcp.Info, \
                    gcp.GCPPixel, gcp.GCPLine, \
                    gcp.GCPX, gcp.GCPY, gcp.GCPZ ))
            i = i + 1

    # --------------------------------------------------------------------
    #      Report metadata.
    # --------------------------------------------------------------------
    if bShowMetadata:
        papszMetadata = hDataset.GetMetadata_List()
    else:
        papszMetadata = None
    if bShowMetadata and papszMetadata is not None and len(papszMetadata) > 0 :
        print( "Metadata:" )
        for metadata in papszMetadata:
            print( "  %s" % metadata )

    if bShowMetadata:
        for extra_domain in papszExtraMDDomains:
            papszMetadata = hDataset.GetMetadata_List(extra_domain)
            if papszMetadata is not None and len(papszMetadata) > 0 :
                print( "Metadata (%s):" % extra_domain)
                for metadata in papszMetadata:
                    print( "  %s" % metadata )

    # --------------------------------------------------------------------
    #      Report "IMAGE_STRUCTURE" metadata.
    # --------------------------------------------------------------------
    if bShowMetadata:
        papszMetadata = hDataset.GetMetadata_List("IMAGE_STRUCTURE")
    else:
        papszMetadata = None
    if bShowMetadata and papszMetadata is not None and len(papszMetadata) > 0 :
        print( "Image Structure Metadata:" )
        for metadata in papszMetadata:
            print( "  %s" % metadata )

    # --------------------------------------------------------------------
    #      Report subdatasets.
    # --------------------------------------------------------------------
    papszMetadata = hDataset.GetMetadata_List("SUBDATASETS")
    if papszMetadata is not None and len(papszMetadata) > 0 :
        print( "Subdatasets:" )
        for metadata in papszMetadata:
            print( "  %s" % metadata )

    # --------------------------------------------------------------------
    #      Report geolocation.
    # --------------------------------------------------------------------
    if bShowMetadata:
        papszMetadata = hDataset.GetMetadata_List("GEOLOCATION")
    else:
        papszMetadata = None
    if bShowMetadata and papszMetadata is not None and len(papszMetadata) > 0 :
        print( "Geolocation:" )
        for metadata in papszMetadata:
            print( "  %s" % metadata )

    # --------------------------------------------------------------------
    #      Report RPCs
    # --------------------------------------------------------------------
    if bShowMetadata:
        papszMetadata = hDataset.GetMetadata_List("RPC")
    else:
        papszMetadata = None
    if bShowMetadata and papszMetadata is not None and len(papszMetadata) > 0 :
        print( "RPC Metadata:" )
        for metadata in papszMetadata:
            print( "  %s" % metadata )

    # --------------------------------------------------------------------
    #      Setup projected to lat/long transform if appropriate.
    # --------------------------------------------------------------------
    if pszProjection is not None and len(pszProjection) > 0:
        hProj = osr.SpatialReference( pszProjection )
        if hProj is not None:
            hLatLong = hProj.CloneGeogCS()

        if hLatLong is not None:
            # Silence errors: a missing PROJ.4 library is tolerated and just
            # disables the lat/long corner reporting.
            gdal.PushErrorHandler( 'CPLQuietErrorHandler' )
            hTransform = osr.CoordinateTransformation( hProj, hLatLong )
            gdal.PopErrorHandler()
            if gdal.GetLastErrorMsg().find( 'Unable to load PROJ.4 library' ) != -1:
                hTransform = None

    # --------------------------------------------------------------------
    #      Report corners.
    # --------------------------------------------------------------------
    print( "Corner Coordinates:" )
    GDALInfoReportCorner( hDataset, hTransform, "Upper Left", \
                          0.0, 0.0 );
    GDALInfoReportCorner( hDataset, hTransform, "Lower Left", \
                          0.0, hDataset.RasterYSize);
    GDALInfoReportCorner( hDataset, hTransform, "Upper Right", \
                          hDataset.RasterXSize, 0.0 );
    GDALInfoReportCorner( hDataset, hTransform, "Lower Right", \
                          hDataset.RasterXSize, \
                          hDataset.RasterYSize );
    GDALInfoReportCorner( hDataset, hTransform, "Center", \
                          hDataset.RasterXSize/2.0, \
                          hDataset.RasterYSize/2.0 );

    # ====================================================================
    #      Loop over bands.
    # ====================================================================
    for iBand in range(hDataset.RasterCount):

        hBand = hDataset.GetRasterBand(iBand+1 )

        #if( bSample )
        #{
        #    float afSample[10000];
        #    int   nCount;
        #
        #    nCount = GDALGetRandomRasterSample( hBand, 10000, afSample );
        #    print( "Got %d samples.\n", nCount );
        #}

        (nBlockXSize, nBlockYSize) = hBand.GetBlockSize()
        print( "Band %d Block=%dx%d Type=%s, ColorInterp=%s" % ( iBand+1, \
                nBlockXSize, nBlockYSize, \
                gdal.GetDataTypeName(hBand.DataType), \
                gdal.GetColorInterpretationName( \
                    hBand.GetRasterColorInterpretation()) ))

        if hBand.GetDescription() is not None \
            and len(hBand.GetDescription()) > 0 :
            print( "  Description = %s" % hBand.GetDescription() )

        dfMin = hBand.GetMinimum()
        dfMax = hBand.GetMaximum()
        if dfMin is not None or dfMax is not None or bComputeMinMax:

            line =  "  "
            if dfMin is not None:
                line = line + ("Min=%.3f " % dfMin)
            if dfMax is not None:
                line = line + ("Max=%.3f " % dfMax)

            if bComputeMinMax:
                gdal.ErrorReset()
                adfCMinMax = hBand.ComputeRasterMinMax(False)
                if gdal.GetLastErrorType() == gdal.CE_None:
                    line = line + ( "  Computed Min/Max=%.3f,%.3f" % ( \
                          adfCMinMax[0], adfCMinMax[1] ))

            print( line )

        stats = hBand.GetStatistics( bApproxStats, bStats)
        # Dirty hack to recognize if stats are valid. If invalid, the returned
        # stddev is negative
        if stats[3] >= 0.0:
            print( "  Minimum=%.3f, Maximum=%.3f, Mean=%.3f, StdDev=%.3f" % ( \
                    stats[0], stats[1], stats[2], stats[3] ))

        if bReportHistograms:

            hist = hBand.GetDefaultHistogram(force = True, callback = gdal.TermProgress)
            if hist is not None:
                dfMin = hist[0]
                dfMax = hist[1]
                nBucketCount = hist[2]
                panHistogram = hist[3]

                print( "  %d buckets from %g to %g:" % ( \
                        nBucketCount, dfMin, dfMax ))
                line = '  '
                for bucket in panHistogram:
                    line = line + ("%d " % bucket)
                print(line)

        if bComputeChecksum:
            print( "  Checksum=%d" % hBand.Checksum())

        dfNoData = hBand.GetNoDataValue()
        if dfNoData is not None:
            # NaN is the only value not equal to itself.
            if dfNoData != dfNoData:
                print( "  NoData Value=nan" )
            else:
                print( "  NoData Value=%.18g" % dfNoData )

        if hBand.GetOverviewCount() > 0:

            line = "  Overviews: "
            for iOverview in range(hBand.GetOverviewCount()):

                if iOverview != 0 :
                    line = line +  ", "

                hOverview = hBand.GetOverview( iOverview );
                if hOverview is not None:

                    line = line + ( "%dx%d" % (hOverview.XSize, hOverview.YSize))

                    pszResampling = \
                        hOverview.GetMetadataItem( "RESAMPLING", "" )

                    # "AVERAGE_BIT2GRAYSCALE" overviews are flagged with '*'.
                    if pszResampling is not None \
                       and len(pszResampling) >= 12 \
                       and EQUAL(pszResampling[0:12],"AVERAGE_BIT2"):
                        line = line + "*"

                else:
                    line = line + "(null)"

            print(line)

        if bComputeChecksum:

            line = "  Overviews checksum: "
            for iOverview in range(hBand.GetOverviewCount()):

                if iOverview != 0:
                    line = line +  ", "

                hOverview = hBand.GetOverview( iOverview );
                if hOverview is not None:
                    line = line + ( "%d" % hOverview.Checksum())
                else:
                    line = line + "(null)"
            print(line)

        if hBand.HasArbitraryOverviews():
            print( "  Overviews: arbitrary" )

        nMaskFlags = hBand.GetMaskFlags()
        if (nMaskFlags & (gdal.GMF_NODATA|gdal.GMF_ALL_VALID)) == 0:

            hMaskBand = hBand.GetMaskBand()

            line = "  Mask Flags: "
            if (nMaskFlags & gdal.GMF_PER_DATASET) != 0:
                line = line + "PER_DATASET "
            if (nMaskFlags & gdal.GMF_ALPHA) != 0:
                line = line + "ALPHA "
            if (nMaskFlags & gdal.GMF_NODATA) != 0:
                line = line + "NODATA "
            if (nMaskFlags & gdal.GMF_ALL_VALID) != 0:
                line = line + "ALL_VALID "
            print(line)

            if hMaskBand is not None and \
                hMaskBand.GetOverviewCount() > 0:

                line = "  Overviews of mask band: "
                for iOverview in range(hMaskBand.GetOverviewCount()):

                    if iOverview != 0:
                        line = line +  ", "

                    hOverview = hMaskBand.GetOverview( iOverview );
                    if hOverview is not None:
                        line = line + ( "%d" % hOverview.Checksum())
                    else:
                        line = line + "(null)"
                # Fix: the assembled mask-overview line was built but never
                # emitted; print it like the other overview report lines.
                print(line)

        if len(hBand.GetUnitType()) > 0:
            print( "  Unit Type: %s" % hBand.GetUnitType())

        papszCategories = hBand.GetRasterCategoryNames()
        if papszCategories is not None:

            print( "  Categories:" );
            i = 0
            for category in papszCategories:
                print( "    %3d: %s" % (i, category) )
                i = i + 1

        if hBand.GetScale() != 1.0 or hBand.GetOffset() != 0.0:
            print( "  Offset: %.15g,   Scale:%.15g" % \
                        ( hBand.GetOffset(), hBand.GetScale()))

        if bShowMetadata:
            papszMetadata = hBand.GetMetadata_List()
        else:
            papszMetadata = None
        if bShowMetadata and papszMetadata is not None and len(papszMetadata) > 0 :
            print( "  Metadata:" )
            for metadata in papszMetadata:
                print( "    %s" % metadata )

        if bShowMetadata:
            papszMetadata = hBand.GetMetadata_List("IMAGE_STRUCTURE")
        else:
            papszMetadata = None
        if bShowMetadata and papszMetadata is not None and len(papszMetadata) > 0 :
            print( "  Image Structure Metadata:" )
            for metadata in papszMetadata:
                print( "    %s" % metadata )

        hTable = hBand.GetRasterColorTable()
        if hBand.GetRasterColorInterpretation() == gdal.GCI_PaletteIndex  \
            and hTable is not None:

            print( "  Color Table (%s with %d entries)" % (\
                    gdal.GetPaletteInterpretationName( \
                        hTable.GetPaletteInterpretation(  )), \
                    hTable.GetCount() ))

            if bShowColorTable:

                for i in range(hTable.GetCount()):
                    sEntry = hTable.GetColorEntry(i)
                    print( "  %3d: %d,%d,%d,%d" % ( \
                            i, \
                            sEntry[0],\
                            sEntry[1],\
                            sEntry[2],\
                            sEntry[3] ))

        if bShowRAT:
            # RAT dumping is not implemented in this Python port.
            pass
            #hRAT = hBand.GetDefaultRAT()
            #GDALRATDumpReadable( hRAT, None );

    return 0
def ogr_cartodb_vsimem(): if ogrtest.cartodb_drv is None: return 'skip' ogrtest.cartodb_api_key_ori = gdal.GetConfigOption('CARTODB_API_KEY') gdal.SetConfigOption('CARTODB_API_URL', '/vsimem/cartodb') gdal.SetConfigOption('CPL_CURL_ENABLE_VSIMEM', 'YES') gdal.PushErrorHandler() ds = ogr.Open('CARTODB:foo') gdal.PopErrorHandler() if ds is not None: gdaltest.post_reason('fail') return 'fail' gdal.FileFromMemBuffer( '/vsimem/cartodb&POSTFIELDS=q=SELECT current_schema() LIMIT 500 OFFSET 0', """Content-Type: text/html\r Error""") gdal.PushErrorHandler() ds = ogr.Open('CARTODB:foo') gdal.PopErrorHandler() if ds is not None or gdal.GetLastErrorMsg().find('HTML error page') < 0: gdaltest.post_reason('fail') print(gdal.GetLastErrorMsg()) return 'fail' gdal.FileFromMemBuffer( '/vsimem/cartodb&POSTFIELDS=q=SELECT current_schema() LIMIT 500 OFFSET 0', """""") ds = ogr.Open('CARTODB:foo') if ds is not None: gdaltest.post_reason('fail') print(gdal.GetLastErrorMsg()) return 'fail' gdal.FileFromMemBuffer( '/vsimem/cartodb&POSTFIELDS=q=SELECT current_schema() LIMIT 500 OFFSET 0', """{""") gdal.PushErrorHandler() ds = ogr.Open('CARTODB:foo') gdal.PopErrorHandler() if ds is not None or gdal.GetLastErrorMsg().find('JSON parsing error') < 0: gdaltest.post_reason('fail') print(gdal.GetLastErrorMsg()) return 'fail' gdal.FileFromMemBuffer( '/vsimem/cartodb&POSTFIELDS=q=SELECT current_schema() LIMIT 500 OFFSET 0', """ "not_expected_json" """) ds = ogr.Open('CARTODB:foo') if ds is not None: gdaltest.post_reason('fail') print(gdal.GetLastErrorMsg()) return 'fail' gdal.FileFromMemBuffer( '/vsimem/cartodb&POSTFIELDS=q=SELECT current_schema() LIMIT 500 OFFSET 0', """{ "error" : [ "bla"] }""") gdal.PushErrorHandler() ds = ogr.Open('CARTODB:foo') gdal.PopErrorHandler() if ds is not None or gdal.GetLastErrorMsg().find( 'Error returned by server : bla') < 0: gdaltest.post_reason('fail') print(gdal.GetLastErrorMsg()) return 'fail' gdal.FileFromMemBuffer( '/vsimem/cartodb&POSTFIELDS=q=SELECT 
current_schema() LIMIT 500 OFFSET 0', """{ "fields" : null } """) ds = ogr.Open('CARTODB:foo') if ds is not None: gdaltest.post_reason('fail') print(gdal.GetLastErrorMsg()) return 'fail' gdal.FileFromMemBuffer( '/vsimem/cartodb&POSTFIELDS=q=SELECT current_schema() LIMIT 500 OFFSET 0', """{ "fields" : "invalid" } """) ds = ogr.Open('CARTODB:foo') if ds is not None: gdaltest.post_reason('fail') print(gdal.GetLastErrorMsg()) return 'fail' gdal.FileFromMemBuffer( '/vsimem/cartodb&POSTFIELDS=q=SELECT current_schema() LIMIT 500 OFFSET 0', """{ "fields" : {} } """) ds = ogr.Open('CARTODB:foo') if ds is not None: gdaltest.post_reason('fail') print(gdal.GetLastErrorMsg()) return 'fail' gdal.FileFromMemBuffer( '/vsimem/cartodb&POSTFIELDS=q=SELECT current_schema() LIMIT 500 OFFSET 0', """{ "fields" : { "foo": "invalid" } } """) ds = ogr.Open('CARTODB:foo') if ds is not None: gdaltest.post_reason('fail') print(gdal.GetLastErrorMsg()) return 'fail' gdal.FileFromMemBuffer( '/vsimem/cartodb&POSTFIELDS=q=SELECT current_schema() LIMIT 500 OFFSET 0', """{ "fields" : { "foo": {} } } """) ds = ogr.Open('CARTODB:foo') if ds is not None: gdaltest.post_reason('fail') print(gdal.GetLastErrorMsg()) return 'fail' gdal.FileFromMemBuffer( '/vsimem/cartodb&POSTFIELDS=q=SELECT current_schema() LIMIT 500 OFFSET 0', """{ "fields" : { "foo": { "type" : null } } } """) ds = ogr.Open('CARTODB:foo') if ds is not None: gdaltest.post_reason('fail') print(gdal.GetLastErrorMsg()) return 'fail' gdal.FileFromMemBuffer( '/vsimem/cartodb&POSTFIELDS=q=SELECT current_schema() LIMIT 500 OFFSET 0', """{ "fields" : { "foo": { "type" : {} } } } """) ds = ogr.Open('CARTODB:foo') if ds is not None: gdaltest.post_reason('fail') print(gdal.GetLastErrorMsg()) return 'fail' gdal.FileFromMemBuffer( '/vsimem/cartodb&POSTFIELDS=q=SELECT current_schema() LIMIT 500 OFFSET 0', """{ "fields" : { "foo": { "type" : "string" } } } """) ds = ogr.Open('CARTODB:foo') if ds is not None: gdaltest.post_reason('fail') 
print(gdal.GetLastErrorMsg()) return 'fail' gdal.FileFromMemBuffer( '/vsimem/cartodb&POSTFIELDS=q=SELECT current_schema() LIMIT 500 OFFSET 0', """{"rows":[ {"field1": "foo", "field2": "bar"} ],"fields":{"field1":{"type":"string"}, "field2":{"type":"string"}}}""" ) ds = ogr.Open('CARTODB:foo') if ds is not None: gdaltest.post_reason('fail') print(gdal.GetLastErrorMsg()) return 'fail' gdal.FileFromMemBuffer( '/vsimem/cartodb&POSTFIELDS=q=SELECT current_schema() LIMIT 500 OFFSET 0', """{"rows":[],"fields":{"current_schema":{"type":"string"}}}""") gdal.PushErrorHandler() ds = ogr.Open('CARTODB:foo') gdal.PopErrorHandler() if ds is not None: gdaltest.post_reason('fail') print(gdal.GetLastErrorMsg()) return 'fail' gdal.FileFromMemBuffer( '/vsimem/cartodb&POSTFIELDS=q=SELECT current_schema() LIMIT 500 OFFSET 0', """{"rows":[{"current_schema":"public"}],"fields":{"current_schema":{"type":"unknown(19)"}}}""" ) gdal.PushErrorHandler() ds = ogr.Open('CARTODB:foo') gdal.PopErrorHandler() if ds is not None: gdaltest.post_reason('fail') print(gdal.GetLastErrorMsg()) return 'fail' gdal.FileFromMemBuffer( '/vsimem/cartodb&POSTFIELDS=q=SELECT CDB_UserTables() LIMIT 500 OFFSET 0', """{"rows":[{"cdb_usertables":"table1"}],"fields":{"cdb_usertables":{"type":"string"}}}""" ) ds = ogr.Open('CARTODB:foo') if ds is None or ds.GetLayerCount() != 1: gdaltest.post_reason('fail') print(gdal.GetLastErrorMsg()) return 'fail' gdal.PushErrorHandler() lyr_defn = ds.GetLayer(0).GetLayerDefn() gdal.PopErrorHandler() if lyr_defn.GetFieldCount() != 0: gdaltest.post_reason('fail') return 'fail' # Empty layer gdal.FileFromMemBuffer( '/vsimem/cartodb&POSTFIELDS=q=SELECT * FROM "table1" LIMIT 0', """{"rows":[],"fields":{}}""") ds = ogr.Open('CARTODB:foo') lyr = ds.GetLayer(0) lyr_defn = lyr.GetLayerDefn() if lyr_defn.GetFieldCount() != 0: gdaltest.post_reason('fail') return 'fail' gdal.FileFromMemBuffer( '/vsimem/cartodb&POSTFIELDS=q=SELECT * FROM "table1" LIMIT 500 OFFSET 0', 
"""{"rows":[{}],"fields":{}}}""") f = lyr.GetNextFeature() if f.GetFID() != 0: gdaltest.post_reason('fail') f.DumpReadable() return 'fail' # Layer without geometry or primary key gdal.FileFromMemBuffer( '/vsimem/cartodb&POSTFIELDS=q=SELECT * FROM "table1" LIMIT 0', """{"rows":[],"fields":{"strfield":{"type":"string"}, "realfield":{"type":"number"}, "boolfield":{"type":"boolean"}, "datefield":{"type":"date"}}}""" ) ds = ogr.Open('CARTODB:foo') lyr = ds.GetLayer(0) lyr_defn = lyr.GetLayerDefn() if lyr_defn.GetFieldCount() != 4: gdaltest.post_reason('fail') return 'fail' if lyr_defn.GetFieldDefn(0).GetName() != 'strfield' or \ lyr_defn.GetFieldDefn(0).GetType() != ogr.OFTString: gdaltest.post_reason('fail') return 'fail' if lyr_defn.GetFieldDefn(1).GetName() != 'realfield' or \ lyr_defn.GetFieldDefn(1).GetType() != ogr.OFTReal: gdaltest.post_reason('fail') return 'fail' if lyr_defn.GetFieldDefn(2).GetName() != 'boolfield' or \ lyr_defn.GetFieldDefn(2).GetType() != ogr.OFTInteger or \ lyr_defn.GetFieldDefn(2).GetSubType() != ogr.OFSTBoolean: gdaltest.post_reason('fail') return 'fail' if lyr_defn.GetFieldDefn(3).GetName() != 'datefield' or \ lyr_defn.GetFieldDefn(3).GetType() != ogr.OFTDateTime: gdaltest.post_reason('fail') return 'fail' gdal.FileFromMemBuffer( '/vsimem/cartodb&POSTFIELDS=q=SELECT "strfield", "realfield", "boolfield", "datefield" FROM "table1" LIMIT 500 OFFSET 0', """{"rows":[{ "strfield": "foo", "realfield": 1.23, "boolfield": true, "datefield": "2015-04-24T12:34:56.123Z" }],"fields":{"strfield":{"type":"string"}, "realfield":{"type":"number"}, "boolfield":{"type":"boolean"}, "datefield":{"type":"date"}}}""" ) f = lyr.GetNextFeature() if f['strfield'] != 'foo' or f['realfield'] != 1.23 or f['boolfield'] != 1 or \ f['datefield'] != '2015/04/24 12:34:56.123+00': gdaltest.post_reason('fail') f.DumpReadable() return 'fail' gdal.SetConfigOption('CARTODB_API_KEY', 'foo') gdal.FileFromMemBuffer( '/vsimem/cartodb&POSTFIELDS=q=SELECT current_schema() LIMIT 500 
OFFSET 0&api_key=foo', """{"rows":[{"current_schema":"public"}],"fields":{"current_schema":{"type":"unknown(19)"}}}""" ) gdal.FileFromMemBuffer( '/vsimem/cartodb&POSTFIELDS=q=SELECT CDB_UserTables() LIMIT 500 OFFSET 0&api_key=foo', """{"rows":[{"cdb_usertables":"table1"}],"fields":{"cdb_usertables":{"type":"string"}}}""" ) ds = ogr.Open('CARTODB:foo') gdal.PushErrorHandler() lyr_defn = ds.GetLayer(0).GetLayerDefn() gdal.PopErrorHandler() if lyr_defn.GetFieldCount() != 0: gdaltest.post_reason('fail') return 'fail' get_full_details_fields_url = """/vsimem/cartodb&POSTFIELDS=q=SELECT a.attname, t.typname, a.attlen, format_type(a.atttypid,a.atttypmod), a.attnum, a.attnotnull, i.indisprimary, pg_get_expr(def.adbin, c.oid) AS defaultexpr, postgis_typmod_dims(a.atttypmod) dim, postgis_typmod_srid(a.atttypmod) srid, postgis_typmod_type(a.atttypmod)::text geomtyp, srtext FROM pg_class c JOIN pg_attribute a ON a.attnum > 0 AND a.attrelid = c.oid AND c.relname = 'table1' JOIN pg_type t ON a.atttypid = t.oid JOIN pg_namespace n ON c.relnamespace=n.oid AND n.nspname= 'public' LEFT JOIN pg_index i ON c.oid = i.indrelid AND i.indisprimary = 't' AND a.attnum = ANY(i.indkey) LEFT JOIN pg_attrdef def ON def.adrelid = c.oid AND def.adnum = a.attnum LEFT JOIN spatial_ref_sys srs ON srs.srid = postgis_typmod_srid(a.atttypmod) ORDER BY a.attnum LIMIT 500 OFFSET 0&api_key=foo""" gdal.FileFromMemBuffer(get_full_details_fields_url, '') ds = ogr.Open('CARTODB:foo') gdal.PushErrorHandler() lyr_defn = ds.GetLayer(0).GetLayerDefn() gdal.PopErrorHandler() if lyr_defn.GetFieldCount() != 0: gdaltest.post_reason('fail') return 'fail' gdal.FileFromMemBuffer( get_full_details_fields_url, """{"rows":[{"attname":"foo"}], "fields":{"attname":{"type":"string"}}}""" ) ds = ogr.Open('CARTODB:foo') lyr = ds.GetLayer(0) gdal.PushErrorHandler() lyr_defn = lyr.GetLayerDefn() gdal.PopErrorHandler() if lyr_defn.GetFieldCount() != 1: gdaltest.post_reason('fail') return 'fail' gdal.PushErrorHandler() f = 
lyr.GetFeature(0) gdal.PopErrorHandler() if f is not None: gdaltest.post_reason('fail') return 'fail' gdal.FileFromMemBuffer( get_full_details_fields_url, """{"rows":[{"attname":"strfield", "typname":"varchar", "attnotnull": true, "defaultexpr": "def_value"}, {"attname":"intfield", "typname":"int4"}, {"attname":"doublefield", "typname":"float"}, {"attname":"boolfield", "typname":"bool"}, {"attname":"datetimefield", "typname":"timestamp"}, {"attname":"cartodb_id","typname":"int4","indisprimary":true}, {"attname":"created_at","typname":"date"}, {"attname":"updated_at","typname":"date"}, {"attname":"my_geom","typname":"geometry","dim":3,"srid":4326,"geomtyp":"Point", "srtext":"GEOGCS[\\"WGS 84\\",DATUM[\\"WGS_1984\\",SPHEROID[\\"WGS 84\\",6378137,298.257223563,AUTHORITY[\\"EPSG\\",\\"7030\\"]],AUTHORITY[\\"EPSG\\",\\"6326\\"]],PRIMEM[\\"Greenwich\\",0,AUTHORITY[\\"EPSG\\",\\"8901\\"]],UNIT[\\"degree\\",0.0174532925199433,AUTHORITY[\\"EPSG\\",\\"9122\\"]],AUTHORITY[\\"EPSG\\",\\"4326\\"]]"}, {"attname":"the_geom_webmercator","typname":"geometry"}], "fields":{"attname":{"type":"string"}, "typname":{"type":"string"}, "attlen":{"type":"number"}, "format_type":{"type":"string"}, "attnum":{"type":"number"}, "attnotnull":{"type":"boolean"}, "indisprimary":{"type":"boolean"}, "defaultexpr":{"type":"string"}, "dim":{"type":"number"}, "srid":{"type":"number"}, "geomtyp":{"type":"string"}, "srtext":{"type":"string"}}}""") ds = ogr.Open('CARTODB:foo') lyr = ds.GetLayer(0) lyr_defn = lyr.GetLayerDefn() if lyr_defn.GetFieldCount() != 5: gdaltest.post_reason('fail') return 'fail' if lyr_defn.GetFieldDefn(0).GetName() != 'strfield' or \ lyr_defn.GetFieldDefn(0).GetType() != ogr.OFTString or \ lyr_defn.GetFieldDefn(0).IsNullable() or \ lyr_defn.GetFieldDefn(0).GetDefault() != 'def_value': gdaltest.post_reason('fail') return 'fail' if lyr_defn.GetGeomFieldCount() != 1: gdaltest.post_reason('fail') return 'fail' if lyr_defn.GetGeomFieldDefn(0).GetName() != 'my_geom': 
gdaltest.post_reason('fail') return 'fail' if lyr_defn.GetGeomFieldDefn(0).GetType() != ogr.wkbPoint25D: gdaltest.post_reason('fail') return 'fail' if lyr_defn.GetGeomFieldDefn(0).GetSpatialRef().ExportToWkt().find( '4326') < 0: gdaltest.post_reason('fail') return 'fail' gdal.PushErrorHandler() fc = lyr.GetFeatureCount() gdal.PopErrorHandler() if fc != 0: gdaltest.post_reason('fail') return 'fail' gdal.FileFromMemBuffer( """/vsimem/cartodb&POSTFIELDS=q=SELECT COUNT(*) FROM "table1"&api_key=foo""", """{"rows":[{"foo":1}], "fields":{"foo":{"type":"number"}}}""") gdal.PushErrorHandler() fc = lyr.GetFeatureCount() gdal.PopErrorHandler() if fc != 0: gdaltest.post_reason('fail') return 'fail' gdal.FileFromMemBuffer( """/vsimem/cartodb&POSTFIELDS=q=SELECT COUNT(*) FROM "table1"&api_key=foo""", """{"rows":[{"count":9876543210}], "fields":{"count":{"type":"number"}}}""") if lyr.GetFeatureCount() != 9876543210: gdaltest.post_reason('fail') return 'fail' gdal.PushErrorHandler() extent = lyr.GetExtent() gdal.PopErrorHandler() if extent != (0, 0, 0, 0): gdaltest.post_reason('fail') return 'fail' gdal.FileFromMemBuffer( """/vsimem/cartodb&POSTFIELDS=q=SELECT ST_Extent("my_geom") FROM "table1"&api_key=foo""", """{"rows":[{"foo":1}], "fields":{"foo":{"type":"number"}}}""") gdal.PushErrorHandler() extent = lyr.GetExtent() gdal.PopErrorHandler() if extent != (0, 0, 0, 0): gdaltest.post_reason('fail') return 'fail' gdal.FileFromMemBuffer( """/vsimem/cartodb&POSTFIELDS=q=SELECT ST_Extent("my_geom") FROM "table1"&api_key=foo""", """{"rows":[{"st_extent":""}], "fields":{"st_extent":{"type":"string"}}}""") gdal.ErrorReset() gdal.PushErrorHandler() lyr.GetExtent() gdal.PopErrorHandler() if gdal.GetLastErrorMsg() == '': gdaltest.post_reason('fail') return 'fail' gdal.FileFromMemBuffer( """/vsimem/cartodb&POSTFIELDS=q=SELECT ST_Extent("my_geom") FROM "table1"&api_key=foo""", """{"rows":[{"st_extent":"("}], "fields":{"st_extent":{"type":"string"}}}""") gdal.ErrorReset() 
gdal.PushErrorHandler() lyr.GetExtent() gdal.PopErrorHandler() if gdal.GetLastErrorMsg() == '': gdaltest.post_reason('fail') return 'fail' gdal.FileFromMemBuffer( """/vsimem/cartodb&POSTFIELDS=q=SELECT ST_Extent("my_geom") FROM "table1"&api_key=foo""", """{"rows":[{"st_extent":"BOX()"}], "fields":{"st_extent":{"type":"string"}}}""") gdal.ErrorReset() gdal.PushErrorHandler() lyr.GetExtent() gdal.PopErrorHandler() if gdal.GetLastErrorMsg() == '': gdaltest.post_reason('fail') return 'fail' gdal.FileFromMemBuffer( """/vsimem/cartodb&POSTFIELDS=q=SELECT ST_Extent("my_geom") FROM "table1"&api_key=foo""", """{"rows":[{"st_extent":"BOX(0,1,2,3)"}], "fields":{"st_extent":{"type":"string"}}}""") if lyr.GetExtent() != (0.0, 2.0, 1.0, 3.0): gdaltest.post_reason('fail') print(lyr.GetExtent()) return 'fail' gdal.PushErrorHandler() f = lyr.GetFeature(0) gdal.PopErrorHandler() if f is not None: gdaltest.post_reason('fail') return 'fail' gdal.FileFromMemBuffer( """/vsimem/cartodb&POSTFIELDS=q=SELECT "cartodb_id", "my_geom", "strfield", "intfield", "doublefield", "boolfield", "datetimefield" FROM "table1" WHERE "cartodb_id" = 0&api_key=foo""", """""") gdal.PushErrorHandler() f = lyr.GetFeature(0) gdal.PopErrorHandler() if f is not None: gdaltest.post_reason('fail') return 'fail' gdal.FileFromMemBuffer( """/vsimem/cartodb&POSTFIELDS=q=SELECT "cartodb_id", "my_geom", "strfield", "intfield", "doublefield", "boolfield", "datetimefield" FROM "table1" WHERE "cartodb_id" = 0&api_key=foo""", """{"rows":[{"st_extent":"BOX(0,1,2,3)"}], "fields":{"st_extent":{"type":"string"}}}""") f = lyr.GetFeature(0) if f.GetFID() != -1 or f.IsFieldSet(0): gdaltest.post_reason('fail') return 'fail' gdal.FileFromMemBuffer( """/vsimem/cartodb&POSTFIELDS=q=SELECT "cartodb_id", "my_geom", "strfield", "intfield", "doublefield", "boolfield", "datetimefield" FROM "table1" WHERE "cartodb_id" = 0&api_key=foo""", """{"rows":[{"cartodb_id":0}], "fields":{"cartodb_id":{"type":"numeric"}}}""") f = lyr.GetFeature(0) if 
f.GetFID() != 0: gdaltest.post_reason('fail') return 'fail' lyr.ResetReading() gdal.PushErrorHandler() f = lyr.GetNextFeature() gdal.PopErrorHandler() if f is not None: gdaltest.post_reason('fail') return 'fail' gdal.FileFromMemBuffer( """/vsimem/cartodb&POSTFIELDS=q=SELECT "cartodb_id", "my_geom", "strfield", "intfield", "doublefield", "boolfield", "datetimefield" FROM "table1" WHERE "cartodb_id" >= 0 ORDER BY "cartodb_id" ASC LIMIT 500&api_key=foo""", """{"rows":[{"cartodb_id":0}], "fields":{"cartodb_id":{"type":"numeric"}}}""") lyr.ResetReading() f = lyr.GetNextFeature() if f.GetFID() != 0: gdaltest.post_reason('fail') return 'fail' gdal.PushErrorHandler() f = lyr.GetNextFeature() gdal.PopErrorHandler() if f is not None: gdaltest.post_reason('fail') return 'fail' gdal.SetConfigOption('CARTODB_PAGE_SIZE', '2') gdal.FileFromMemBuffer( """/vsimem/cartodb&POSTFIELDS=q=SELECT "cartodb_id", "my_geom", "strfield", "intfield", "doublefield", "boolfield", "datetimefield" FROM "table1" WHERE "cartodb_id" >= 0 ORDER BY "cartodb_id" ASC LIMIT 2&api_key=foo""", """{"rows":[{"cartodb_id":0},{"cartodb_id":10}], "fields":{"cartodb_id":{"type":"numeric"}}}""") lyr.ResetReading() f = lyr.GetNextFeature() if f.GetFID() != 0: gdaltest.post_reason('fail') return 'fail' f = lyr.GetNextFeature() if f.GetFID() != 10: gdaltest.post_reason('fail') return 'fail' gdal.FileFromMemBuffer( """/vsimem/cartodb&POSTFIELDS=q=SELECT "cartodb_id", "my_geom", "strfield", "intfield", "doublefield", "boolfield", "datetimefield" FROM "table1" WHERE "cartodb_id" >= 11 ORDER BY "cartodb_id" ASC LIMIT 2&api_key=foo""", """{"rows":[{"cartodb_id":12}], "fields":{"cartodb_id":{"type":"numeric"}}}""") f = lyr.GetNextFeature() if f.GetFID() != 12: gdaltest.post_reason('fail') return 'fail' gdal.ErrorReset() f = lyr.GetNextFeature() if f is not None or gdal.GetLastErrorMsg() != '': gdaltest.post_reason('fail') return 'fail' lyr.SetAttributeFilter('strfield is NULL') gdal.PushErrorHandler() fc = 
lyr.GetFeatureCount() gdal.PopErrorHandler() if fc != 0: gdaltest.post_reason('fail') return 'fail' gdal.FileFromMemBuffer( """/vsimem/cartodb&POSTFIELDS=q=SELECT "cartodb_id", "my_geom", "strfield", "intfield", "doublefield", "boolfield", "datetimefield" FROM "table1" WHERE (strfield is NULL) AND "cartodb_id" >= 0 ORDER BY "cartodb_id" ASC LIMIT 2&api_key=foo""", """{"rows":[{"cartodb_id":0}], "fields":{"cartodb_id":{"type":"numeric"}}}""") lyr.ResetReading() f = lyr.GetNextFeature() if f is None: gdaltest.post_reason('fail') return 'fail' gdal.FileFromMemBuffer( """/vsimem/cartodb&POSTFIELDS=q=SELECT "cartodb_id", "my_geom", "strfield", "intfield", "doublefield", "boolfield", "datetimefield" FROM "table1" WHERE (strfield is NULL) AND "cartodb_id" >= 1 ORDER BY "cartodb_id" ASC LIMIT 2&api_key=foo""", """{"rows":[], "fields":{"cartodb_id":{"type":"numeric"}}}""") gdal.ErrorReset() f = lyr.GetNextFeature() if f is not None or gdal.GetLastErrorMsg() != '': gdaltest.post_reason('fail') return 'fail' gdal.FileFromMemBuffer( """/vsimem/cartodb&POSTFIELDS=q=SELECT COUNT(*) FROM "table1" WHERE (strfield is NULL)&api_key=foo""", """{"rows":[{"count":9876543210}], "fields":{"count":{"type":"number"}}}""") if lyr.GetFeatureCount() != 9876543210: gdaltest.post_reason('fail') return 'fail' lyr.SetSpatialFilterRect(-180, -90, 180, 90) gdal.PushErrorHandler() fc = lyr.GetFeatureCount() gdal.PopErrorHandler() if fc != 0: gdaltest.post_reason('fail') return 'fail' gdal.FileFromMemBuffer( """/vsimem/cartodb&POSTFIELDS=q=SELECT "cartodb_id", "my_geom", "strfield", "intfield", "doublefield", "boolfield", "datetimefield" FROM "table1" WHERE ("my_geom" %26%26 'BOX3D(-180 -90, 180 90)'::box3d) AND (strfield is NULL) AND "cartodb_id" >= 0 ORDER BY "cartodb_id" ASC LIMIT 2&api_key=foo""", """{"rows":[{"cartodb_id":20, "my_geom": "010100000000000000000000400000000000804840" }], "fields":{"cartodb_id":{"type":"numeric"}, "my_geom":{"type":"string"}}}""" ) lyr.ResetReading() f = 
lyr.GetNextFeature() if f is None or f.GetGeometryRef().ExportToWkt() != 'POINT (2 49)': gdaltest.post_reason('fail') return 'fail' gdal.PushErrorHandler() fc = lyr.GetFeatureCount() gdal.PopErrorHandler() if fc != 1: gdaltest.post_reason('fail') return 'fail' gdal.FileFromMemBuffer( """/vsimem/cartodb&POSTFIELDS=q=SELECT COUNT(*) FROM "table1" WHERE ("my_geom" %26%26 'BOX3D(-180 -90, 180 90)'::box3d) AND (strfield is NULL)&api_key=foo""", """{"rows":[{"count":9876543210}], "fields":{"count":{"type":"number"}}}""") if lyr.GetFeatureCount() != 9876543210: gdaltest.post_reason('fail') return 'fail' # Not permitted in read-only mode f = ogr.Feature(lyr.GetLayerDefn()) gdal.PushErrorHandler() ds.CreateLayer('foo') ds.DeleteLayer(0) lyr.CreateFeature(f) lyr.SetFeature(f) lyr.DeleteFeature(0) gdal.PopErrorHandler() ds = None gdal.FileFromMemBuffer( """/vsimem/cartodb&POSTFIELDS=q=DROP FUNCTION IF EXISTS ogr_table_metadata(TEXT,TEXT); CREATE OR REPLACE FUNCTION ogr_table_metadata(schema_name TEXT, table_name TEXT) RETURNS TABLE (attname TEXT, typname TEXT, attlen INT, format_type TEXT, attnum INT, attnotnull BOOLEAN, indisprimary BOOLEAN, defaultexpr TEXT, dim INT, srid INT, geomtyp TEXT, srtext TEXT) AS $$ SELECT a.attname::text, t.typname::text, a.attlen::int, format_type(a.atttypid,a.atttypmod)::text, a.attnum::int, a.attnotnull::boolean, i.indisprimary::boolean, pg_get_expr(def.adbin, c.oid)::text AS defaultexpr, (CASE WHEN t.typname = 'geometry' THEN postgis_typmod_dims(a.atttypmod) ELSE NULL END)::int dim, (CASE WHEN t.typname = 'geometry' THEN postgis_typmod_srid(a.atttypmod) ELSE NULL END)::int srid, (CASE WHEN t.typname = 'geometry' THEN postgis_typmod_type(a.atttypmod) ELSE NULL END)::text geomtyp, srtext FROM pg_class c JOIN pg_attribute a ON a.attnum > 0 AND a.attrelid = c.oid AND c.relname = $2 AND c.relname IN (SELECT CDB_UserTables())JOIN pg_type t ON a.atttypid = t.oid JOIN pg_namespace n ON c.relnamespace=n.oid AND n.nspname = $1 LEFT JOIN pg_index i ON 
c.oid = i.indrelid AND i.indisprimary = 't' AND a.attnum = ANY(i.indkey) LEFT JOIN pg_attrdef def ON def.adrelid = c.oid AND def.adnum = a.attnum LEFT JOIN spatial_ref_sys srs ON srs.srid = postgis_typmod_srid(a.atttypmod) ORDER BY a.attnum $$ LANGUAGE SQL&api_key=foo""", """""" "") gdal.SetConfigOption('CARTODB_PAGE_SIZE', None) ds = ogr.Open('CARTODB:foo', update=1) lyr = ds.CreateLayer('MY_LAYER') gdal.ErrorReset() gdal.PushErrorHandler() lyr.GetNextFeature() gdal.PopErrorHandler() if gdal.GetLastErrorMsg() == '': gdaltest.post_reason('fail') return 'fail' ds = ogr.Open('CARTODB:foo', update=1) gdal.SetConfigOption('CARTODB_MAX_CHUNK_SIZE', '0') sr = osr.SpatialReference() sr.ImportFromEPSG(4326) lyr = ds.CreateLayer('MY_LAYER', srs=sr) fld_defn = ogr.FieldDefn('STRFIELD', ogr.OFTString) fld_defn.SetNullable(0) fld_defn.SetDefault("'DEFAULT VAL'") fld_defn.SetWidth(20) lyr.CreateField(fld_defn) gdal.FileFromMemBuffer( """/vsimem/cartodb&POSTFIELDS=q=CREATE TABLE "my_layer" ( cartodb_id SERIAL,the_geom GEOMETRY(GEOMETRY, 4326), the_geom_webmercator GEOMETRY(GEOMETRY, 3857),"strfield" VARCHAR NOT NULL DEFAULT 'DEFAULT VAL',PRIMARY KEY (cartodb_id) );DROP SEQUENCE IF EXISTS "my_layer_cartodb_id_seq" CASCADE;CREATE SEQUENCE "my_layer_cartodb_id_seq" START 1;ALTER TABLE "my_layer" ALTER COLUMN cartodb_id SET DEFAULT nextval('"my_layer_cartodb_id_seq"')&api_key=foo""", """{"rows":[], "fields":{}}""") gdal.FileFromMemBuffer( """/vsimem/cartodb&POSTFIELDS=q=SELECT cdb_cartodbfytable('my_layer')&api_key=foo""", """{"rows":[], "fields":{}}""") f = ogr.Feature(lyr.GetLayerDefn()) gdal.PushErrorHandler() ret = lyr.CreateFeature(f) gdal.PopErrorHandler() if ret == 0: gdaltest.post_reason('fail') return 'fail' f = None fld_defn = ogr.FieldDefn('INTFIELD', ogr.OFTInteger) gdal.PushErrorHandler() ret = lyr.CreateField(fld_defn) gdal.PopErrorHandler() if ret == 0: gdaltest.post_reason('fail') return 'fail' gdal.FileFromMemBuffer( """/vsimem/cartodb&POSTFIELDS=q=ALTER TABLE 
"my_layer" ADD COLUMN "intfield" INTEGER&api_key=foo""", """{"rows":[], "fields":{}}""") if lyr.CreateField(fld_defn) != 0: gdaltest.post_reason('fail') return 'fail' fld_defn = ogr.FieldDefn('boolfield', ogr.OFTInteger) fld_defn.SetSubType(ogr.OFSTBoolean) gdal.FileFromMemBuffer( """/vsimem/cartodb&POSTFIELDS=q=ALTER TABLE "my_layer" ADD COLUMN "boolfield" BOOLEAN&api_key=foo""", """{"rows":[], "fields":{}}""") if lyr.CreateField(fld_defn) != 0: gdaltest.post_reason('fail') return 'fail' f = ogr.Feature(lyr.GetLayerDefn()) gdal.PushErrorHandler() ret = lyr.CreateFeature(f) gdal.PopErrorHandler() if ret == 0: gdaltest.post_reason('fail') return 'fail' f = ogr.Feature(lyr.GetLayerDefn()) f.SetField('strfield', 'foo') f.SetField('intfield', 1) f.SetField('boolfield', 1) f.SetGeometry(ogr.CreateGeometryFromWkt('POINT(2 49)')) gdal.FileFromMemBuffer( """/vsimem/cartodb&POSTFIELDS=q=INSERT INTO "my_layer" ("strfield", "intfield", "boolfield", "the_geom", "cartodb_id") VALUES ('foo', 1, 't', '0101000020E610000000000000000000400000000000804840', nextval('my_layer_cartodb_id_seq')) RETURNING "cartodb_id"&api_key=foo""", """{"rows":[ {"cartodb_id": 1} ], "fields":{"cartodb_id":{"type":"integer"}}}""") ret = lyr.CreateFeature(f) if ret != 0 or f.GetFID() != 1: gdaltest.post_reason('fail') return 'fail' f.SetFID(-1) gdal.PushErrorHandler() ret = lyr.SetFeature(f) gdal.PopErrorHandler() if ret == 0: gdaltest.post_reason('fail') return 'fail' f.SetFID(3) gdal.PushErrorHandler() ret = lyr.SetFeature(f) gdal.PopErrorHandler() if ret == 0: gdaltest.post_reason('fail') return 'fail' gdal.FileFromMemBuffer( """/vsimem/cartodb&POSTFIELDS=q=UPDATE "my_layer" SET "strfield" = 'foo', "intfield" = 1, "boolfield" = 't', "the_geom" = '0101000020E610000000000000000000400000000000804840' WHERE "cartodb_id" = 3&api_key=foo""", """{"total_rows": 0}""") ret = lyr.SetFeature(f) if ret != ogr.OGRERR_NON_EXISTING_FEATURE: gdaltest.post_reason('fail') return 'fail' gdal.FileFromMemBuffer( 
"""/vsimem/cartodb&POSTFIELDS=q=UPDATE "my_layer" SET "strfield" = 'foo', "intfield" = 1, "boolfield" = 't', "the_geom" = '0101000020E610000000000000000000400000000000804840' WHERE "cartodb_id" = 3&api_key=foo""", """{"total_rows": 1}""") ret = lyr.SetFeature(f) if ret != 0: gdaltest.post_reason('fail') return 'fail' f = ogr.Feature(lyr.GetLayerDefn()) gdal.PushErrorHandler() ret = lyr.CreateFeature(f) gdal.PopErrorHandler() if ret == 0: gdaltest.post_reason('fail') return 'fail' gdal.FileFromMemBuffer( """/vsimem/cartodb&POSTFIELDS=q=INSERT INTO "my_layer" DEFAULT VALUES RETURNING "cartodb_id"&api_key=foo""", """{"rows":[ {"cartodb_id": 4} ], "fields":{"cartodb_id":{"type":"integer"}}}""") ret = lyr.CreateFeature(f) if ret != 0 or f.GetFID() != 4: gdaltest.post_reason('fail') return 'fail' gdal.PushErrorHandler() ret = lyr.DeleteFeature(0) gdal.PopErrorHandler() if ret == 0: gdaltest.post_reason('fail') return 'fail' gdal.FileFromMemBuffer( """/vsimem/cartodb&POSTFIELDS=q=DELETE FROM "my_layer" WHERE "cartodb_id" = 0&api_key=foo""", """{"total_rows": 0}""") ret = lyr.DeleteFeature(0) if ret != ogr.OGRERR_NON_EXISTING_FEATURE: gdaltest.post_reason('fail') return 'fail' gdal.FileFromMemBuffer( """/vsimem/cartodb&POSTFIELDS=q=DELETE FROM "my_layer" WHERE "cartodb_id" = 0&api_key=foo""", """{"total_rows": 1}""") ret = lyr.DeleteFeature(0) if ret != 0: gdaltest.post_reason('fail') return 'fail' gdal.SetConfigOption('CARTODB_MAX_CHUNK_SIZE', None) ds = ogr.Open('CARTODB:foo', update=1) lyr = ds.GetLayer(0) gdal.FileFromMemBuffer( """/vsimem/cartodb&POSTFIELDS=q=SELECT nextval('table1_cartodb_id_seq') AS nextid&api_key=foo""", """{"rows":[{"nextid":11}],"fields":{"nextid":{"type":"number"}}}""") f = ogr.Feature(lyr.GetLayerDefn()) f.SetField('strfield', 'foo') ret = lyr.CreateFeature(f) if ret != 0 or f.GetFID() != 11: gdaltest.post_reason('fail') return 'fail' f = ogr.Feature(lyr.GetLayerDefn()) ret = lyr.CreateFeature(f) if ret != 0 or f.GetFID() != 12: 
gdaltest.post_reason('fail') return 'fail' gdal.FileFromMemBuffer( """/vsimem/cartodb&POSTFIELDS=q=BEGIN;INSERT INTO "table1" ("strfield", "cartodb_id") VALUES ('foo', 11);INSERT INTO "table1" ("cartodb_id") VALUES (nextval('table1_cartodb_id_seq'));COMMIT;&api_key=foo""", """{"rows":[], "fields":{}}""") gdal.ErrorReset() ds = None if gdal.GetLastErrorMsg() != '': gdaltest.post_reason('fail') return 'fail' ds = ogr.Open('CARTODB:foo', update=1) gdal.PushErrorHandler() lyr = ds.CreateLayer('table1') gdal.PopErrorHandler() if lyr is not None: gdaltest.post_reason('fail') return 'fail' gdal.FileFromMemBuffer( """/vsimem/cartodb&POSTFIELDS=q=DROP TABLE "table1"&api_key=foo""", """{"rows":[], "fields":{}}""") lyr = ds.CreateLayer('table1', geom_type=ogr.wkbPolygon, options=['OVERWRITE=YES', 'CARTODBFY=NO']) gdal.Unlink( """/vsimem/cartodb&POSTFIELDS=q=DROP TABLE "table1"&api_key=foo""") gdal.FileFromMemBuffer( """/vsimem/cartodb&POSTFIELDS=q=CREATE TABLE "table1" ( cartodb_id SERIAL,the_geom GEOMETRY(MULTIPOLYGON, 0), the_geom_webmercator GEOMETRY(MULTIPOLYGON, 3857),PRIMARY KEY (cartodb_id) );DROP SEQUENCE IF EXISTS "table1_cartodb_id_seq" CASCADE;CREATE SEQUENCE "table1_cartodb_id_seq" START 1;ALTER TABLE "table1" ALTER COLUMN cartodb_id SET DEFAULT nextval('"table1_cartodb_id_seq"')&api_key=foo""", """{"rows":[], "fields":{}}""") f = ogr.Feature(lyr.GetLayerDefn()) f.SetGeometry(ogr.CreateGeometryFromWkt('POLYGON((0 0,0 1,1 0,0 0))')) if lyr.CreateFeature(f) != 0: gdaltest.post_reason('fail') return 'fail' gdal.FileFromMemBuffer( """/vsimem/cartodb&POSTFIELDS=q=BEGIN;INSERT INTO "table1" ("the_geom", "cartodb_id") VALUES ('0106000020E61000000100000001030000000100000004000000000000000000000000000000000000000000000000000000000000000000F03F000000000000F03F000000000000000000000000000000000000000000000000', nextval('table1_cartodb_id_seq'));COMMIT;&api_key=foo""", """{"rows":[], "fields":{}}""") gdal.ErrorReset() ds = None if gdal.GetLastErrorMsg() != '': 
gdaltest.post_reason('fail') return 'fail' ds = ogr.Open('CARTODB:foo', update=1) gdal.PushErrorHandler() ret = ds.DeleteLayer(0) gdal.PopErrorHandler() if ret == 0: gdaltest.post_reason('fail') return 'fail' ds = ogr.Open('CARTODB:foo', update=1) gdal.FileFromMemBuffer( """/vsimem/cartodb&POSTFIELDS=q=DROP TABLE "table1"&api_key=foo""", """{"rows":[], "fields":{}}""") gdal.ErrorReset() ds.ExecuteSQL('DELLAYER:table1') if gdal.GetLastErrorMsg() != '' or ds.GetLayerByName('table1') is not None: gdaltest.post_reason('fail') return 'fail' gdal.FileFromMemBuffer( '/vsimem/cartodb&POSTFIELDS=q=SELECT current_schema() LIMIT 500 OFFSET 0&api_key=foo', """{"rows":[{"current_schema":"my_schema"}],"fields":{"current_schema":{"type":"unknown(19)"}}}""" ) gdal.FileFromMemBuffer( '/vsimem/cartodb&POSTFIELDS=q=SELECT CDB_UserTables() LIMIT 500 OFFSET 0&api_key=foo', """{"rows":[],"fields":{"cdb_usertables":{"type":"string"}}}""") gdal.FileFromMemBuffer( """/vsimem/cartodb&POSTFIELDS=q=SELECT c.relname FROM pg_class c, pg_namespace n WHERE c.relkind in ('r', 'v') AND c.relname !~ '^pg_' AND c.relnamespace=n.oid AND n.nspname = 'my_schema' LIMIT 500 OFFSET 0&api_key=foo""", """{"rows":[{"relname": "a_layer"}],"fields":{"relname":{"type":"string"}}}""" ) ds = ogr.Open('CARTODB:foo') if ds.GetLayerByName('a_layer') is None: gdaltest.post_reason('fail') return 'fail' return 'success'
def test_vsiaz_extra_1():
    """Extra /vsiaz/ tests against a real Azure blob container.

    Driven by the AZ_RESOURCE config option.  When AZ_RESOURCE names a bare
    container (no '/'), exercises directory-level operations and returns
    early; otherwise reads one byte from the named object through several
    virtual file systems.
    """
    if not gdaltest.built_against_curl():
        pytest.skip()

    az_resource = gdal.GetConfigOption('AZ_RESOURCE')
    if az_resource is None:
        pytest.skip('Missing AZ_RESOURCE')

    if '/' not in az_resource:
        # AZ_RESOURCE is a container name: run the directory-oriented tests.
        path = '/vsiaz/' + az_resource
        statres = gdal.VSIStatL(path)
        assert statres is not None and stat.S_ISDIR(statres.mode), \
            ('%s is not a valid bucket' % path)
        readdir = gdal.ReadDir(path)
        assert readdir is not None, 'ReadDir() should not return empty list'
        for filename in readdir:
            if filename != '.':
                subpath = path + '/' + filename
                assert gdal.VSIStatL(subpath) is not None, \
                    ('Stat(%s) should not return an error' % subpath)

        # Mkdir/Rmdir round-trip on a scratch subdirectory.
        unique_id = 'vsiaz_test'
        subpath = path + '/' + unique_id
        ret = gdal.Mkdir(subpath, 0)
        assert ret >= 0, ('Mkdir(%s) should not return an error' % subpath)
        readdir = gdal.ReadDir(path)
        assert unique_id in readdir, \
            ('ReadDir(%s) should contain %s' % (path, unique_id))
        # Creating the same directory twice must fail.
        ret = gdal.Mkdir(subpath, 0)
        assert ret != 0, ('Mkdir(%s) repeated should return an error' % subpath)
        ret = gdal.Rmdir(subpath)
        assert ret >= 0, ('Rmdir(%s) should not return an error' % subpath)
        readdir = gdal.ReadDir(path)
        assert unique_id not in readdir, \
            ('ReadDir(%s) should not contain %s' % (path, unique_id))
        # Removing it twice must fail too.
        ret = gdal.Rmdir(subpath)
        assert ret != 0, ('Rmdir(%s) repeated should return an error' % subpath)

        # Write a file into the directory; Rmdir on non-empty must fail.
        ret = gdal.Mkdir(subpath, 0)
        assert ret >= 0, ('Mkdir(%s) should not return an error' % subpath)
        f = gdal.VSIFOpenL(subpath + '/test.txt', 'wb')
        assert f is not None
        gdal.VSIFWriteL('hello', 1, 5, f)
        gdal.VSIFCloseL(f)
        ret = gdal.Rmdir(subpath)
        assert ret != 0, \
            ('Rmdir(%s) on non empty directory should return an error' % subpath)

        # Read the content back, rename it, read again, then clean up.
        f = gdal.VSIFOpenL(subpath + '/test.txt', 'rb')
        assert f is not None
        data = gdal.VSIFReadL(1, 5, f).decode('utf-8')
        assert data == 'hello'
        gdal.VSIFCloseL(f)

        assert gdal.Rename(subpath + '/test.txt', subpath + '/test2.txt') == 0

        f = gdal.VSIFOpenL(subpath + '/test2.txt', 'rb')
        assert f is not None
        data = gdal.VSIFReadL(1, 5, f).decode('utf-8')
        assert data == 'hello'
        gdal.VSIFCloseL(f)

        ret = gdal.Unlink(subpath + '/test2.txt')
        assert ret >= 0, \
            ('Unlink(%s) should not return an error' % (subpath + '/test2.txt'))

        ret = gdal.Rmdir(subpath)
        assert ret >= 0, ('Rmdir(%s) should not return an error' % subpath)

        return

    # AZ_RESOURCE names an object: read one byte through /vsiaz/.
    f = open_for_read('/vsiaz/' + az_resource)
    assert f is not None
    ret = gdal.VSIFReadL(1, 1, f)
    gdal.VSIFCloseL(f)
    assert len(ret) == 1

    # Same with /vsiaz_streaming/
    f = open_for_read('/vsiaz_streaming/' + az_resource)
    assert f is not None
    ret = gdal.VSIFReadL(1, 1, f)
    gdal.VSIFCloseL(f)
    assert len(ret) == 1

    if False:  # pylint: disable=using-constant-test
        # we actually try to read at read() time and bSetError = false
        # Invalid bucket : "The specified bucket does not exist"
        gdal.ErrorReset()
        f = open_for_read('/vsiaz/not_existing_bucket/foo')
        with gdaltest.error_handler():
            gdal.VSIFReadL(1, 1, f)
        gdal.VSIFCloseL(f)
        assert gdal.VSIGetLastErrorMsg() != ''

    # Invalid resource: opening must fail outright.
    gdal.ErrorReset()
    f = open_for_read('/vsiaz_streaming/' + az_resource +
                      '/invalid_resource.baz')
    assert f is None, gdal.VSIGetLastErrorMsg()

    # Test GetSignedURL(): the signed URL must be readable via /vsicurl_streaming/.
    signed_url = gdal.GetSignedURL('/vsiaz/' + az_resource)
    f = open_for_read('/vsicurl_streaming/' + signed_url)
    assert f is not None
    ret = gdal.VSIFReadL(1, 1, f)
    gdal.VSIFCloseL(f)
    assert len(ret) == 1
def wms_8():
    """Test the TMS tile cache: population, reuse, GDAL_DEFAULT_WMS_CACHE_PATH
    and the <Expires> tag, against the OpenStreetMap tile server.

    Old-style gdaltest function: returns 'skip'/'fail'/'success'.
    """
    if gdaltest.wms_drv is None:
        return 'skip'

    # Previous variant of this test targeted the metacarta tilecache server;
    # kept for reference.
    # server_url = 'http://tilecache.osgeo.org/wms-c/Basic.py'
    # wmstms_version = '/1.0.0/basic'
    # zero_tile = wmstms_version + '/0/0/0.png'
    # server_url_mask = server_url
    # ovr_upper_level = 18
    # tms = """<GDAL_WMS>
    #     <Service name="TMS">
    #         <ServerUrl>%s</ServerUrl>
    #         <Layer>basic</Layer>
    #         <Format>png</Format>
    #     </Service>
    #     <DataWindow>
    #         <UpperLeftX>-180.0</UpperLeftX>
    #         <UpperLeftY>90.0</UpperLeftY>
    #         <LowerRightX>180.0</LowerRightX>
    #         <LowerRightY>-90.0</LowerRightY>
    #         <TileLevel>19</TileLevel>
    #         <TileCountX>2</TileCountX>
    #         <TileCountY>1</TileCountY>
    #     </DataWindow>
    #     <Projection>EPSG:4326</Projection>
    #     <BlockSizeX>256</BlockSizeX>
    #     <BlockSizeY>256</BlockSizeY>
    #     <BandsCount>3</BandsCount>
    #     <Cache><Path>./tmp/gdalwmscache</Path></Cache>
    # </GDAL_WMS>""" % server_url_mask
    # tms_nocache = """<GDAL_WMS>
    #     <Service name="TMS">
    #         <ServerUrl>%s</ServerUrl>
    #         <Layer>basic</Layer>
    #         <Format>png</Format>
    #     </Service>
    #     <DataWindow>
    #         <UpperLeftX>-180.0</UpperLeftX>
    #         <UpperLeftY>90.0</UpperLeftY>
    #         <LowerRightX>180.0</LowerRightX>
    #         <LowerRightY>-90.0</LowerRightY>
    #         <TileLevel>19</TileLevel>
    #         <TileCountX>2</TileCountX>
    #         <TileCountY>1</TileCountY>
    #     </DataWindow>
    #     <Projection>EPSG:4326</Projection>
    #     <BlockSizeX>256</BlockSizeX>
    #     <BlockSizeY>256</BlockSizeY>
    #     <BandsCount>3</BandsCount>
    #     <Cache/> <!-- this is needed for GDAL_DEFAULT_WMS_CACHE_PATH to be triggered -->
    # </GDAL_WMS>""" % server_url_mask

    server_url = 'http://tile.openstreetmap.org'
    wmstms_version = ''
    zero_tile = '/0/0/0.png'
    server_url_mask = server_url + '/${z}/${x}/${y}.png'
    ovr_upper_level = 16
    tms = """<GDAL_WMS>
    <Service name="TMS">
        <ServerUrl>%s</ServerUrl>
    </Service>
    <DataWindow>
        <UpperLeftX>-20037508.34</UpperLeftX>
        <UpperLeftY>20037508.34</UpperLeftY>
        <LowerRightX>20037508.34</LowerRightX>
        <LowerRightY>-20037508.34</LowerRightY>
        <TileLevel>18</TileLevel>
        <TileCountX>1</TileCountX>
        <TileCountY>1</TileCountY>
        <YOrigin>top</YOrigin>
    </DataWindow>
    <Projection>EPSG:3857</Projection>
    <BlockSizeX>256</BlockSizeX>
    <BlockSizeY>256</BlockSizeY>
    <BandsCount>3</BandsCount>
    <Cache><Path>./tmp/gdalwmscache</Path></Cache>
</GDAL_WMS>""" % server_url_mask

    tms_nocache = """<GDAL_WMS>
    <Service name="TMS">
        <ServerUrl>%s</ServerUrl>
    </Service>
    <DataWindow>
        <UpperLeftX>-20037508.34</UpperLeftX>
        <UpperLeftY>20037508.34</UpperLeftY>
        <LowerRightX>20037508.34</LowerRightX>
        <LowerRightY>-20037508.34</LowerRightY>
        <TileLevel>18</TileLevel>
        <TileCountX>1</TileCountX>
        <TileCountY>1</TileCountY>
        <YOrigin>top</YOrigin>
    </DataWindow>
    <Projection>EPSG:3857</Projection>
    <BlockSizeX>256</BlockSizeX>
    <BlockSizeY>256</BlockSizeY>
    <BandsCount>3</BandsCount>
    <Cache/> <!-- this is needed for GDAL_DEFAULT_WMS_CACHE_PATH to be triggered -->
</GDAL_WMS>""" % server_url_mask

    if gdaltest.gdalurlopen(server_url) is None:
        return 'skip'

    # Start from an empty cache directory.
    try:
        shutil.rmtree('tmp/gdalwmscache')
    except OSError:
        pass

    ds = gdal.Open(tms)
    if ds is None:
        gdaltest.post_reason('open failed.')
        return 'fail'

    # Check cache metadata item
    cache_path = ds.GetMetadataItem("CACHE_PATH")
    if not cache_path:
        gdaltest.post_reason('did not get expected cache path metadata item')
        return 'fail'

    # Cache subfolder name is the md5 of the server URL mask.
    cache_subfolder = hashlib.md5(server_url_mask.encode('utf-8')).hexdigest()

    gdal.ErrorReset()
    data = ds.GetRasterBand(1).GetOverview(ovr_upper_level).ReadRaster(
        0, 0, 512, 512)
    if gdal.GetLastErrorMsg() != '':
        # The read failed: skip if the server itself is unreachable.
        if gdaltest.gdalurlopen(server_url + zero_tile) is None:
            return 'skip'

    ds = None

    # Expected cache filenames: md5 of each tile URL, nested one character
    # per directory level under the subfolder.
    file1 = hashlib.md5(
        (server_url + wmstms_version + '/1/0/0.png').encode('utf-8')).hexdigest()
    file2 = hashlib.md5(
        (server_url + wmstms_version + '/1/1/0.png').encode('utf-8')).hexdigest()
    file3 = hashlib.md5(
        (server_url + wmstms_version + '/1/0/1.png').encode('utf-8')).hexdigest()
    file4 = hashlib.md5(
        (server_url + wmstms_version + '/1/1/1.png').encode('utf-8')).hexdigest()
    expected_files = [
        'tmp/gdalwmscache/%s/%s/%s/%s' % (cache_subfolder, file1[0], file1[1], file1),
        'tmp/gdalwmscache/%s/%s/%s/%s' % (cache_subfolder, file2[0], file2[1], file2),
        'tmp/gdalwmscache/%s/%s/%s/%s' % (cache_subfolder, file3[0], file3[1], file3),
        'tmp/gdalwmscache/%s/%s/%s/%s' % (cache_subfolder, file4[0], file4[1], file4)
    ]
    for expected_file in expected_files:
        try:
            os.stat(expected_file)
        except OSError:
            gdaltest.post_reason('%s should exist' % expected_file)
            return 'fail'

    # Now, we should read from the cache
    ds = gdal.Open(tms)
    cached_data = ds.GetRasterBand(1).GetOverview(ovr_upper_level).ReadRaster(
        0, 0, 512, 512)
    ds = None

    if data != cached_data:
        gdaltest.post_reason('data != cached_data')
        return 'fail'

    # Replace the cache with fake data (all-zero RGB tiles).
    for expected_file in expected_files:
        ds = gdal.GetDriverByName('GTiff').Create(expected_file, 256, 256, 4)
        ds.GetRasterBand(1).Fill(0)
        ds.GetRasterBand(2).Fill(0)
        ds.GetRasterBand(3).Fill(0)
        ds.GetRasterBand(4).Fill(255)
        ds = None

    # Read again from the cache, and check that it is actually used
    # (checksum 0 proves the fake all-zero tiles were served).
    ds = gdal.Open(tms)
    cs = ds.GetRasterBand(1).GetOverview(ovr_upper_level).Checksum()
    ds = None
    if cs != 0:
        gdaltest.post_reason('cs != 0')
        return 'fail'

    # Test with GDAL_DEFAULT_WMS_CACHE_PATH
    # Now, we should read from the cache
    gdal.SetConfigOption("GDAL_DEFAULT_WMS_CACHE_PATH", "./tmp/gdalwmscache")
    ds = gdal.Open(tms_nocache)
    cs = ds.GetRasterBand(1).GetOverview(ovr_upper_level).Checksum()
    ds = None
    gdal.SetConfigOption("GDAL_DEFAULT_WMS_CACHE_PATH", None)
    if cs != 0:
        gdaltest.post_reason('cs != 0')
        return 'fail'

    # Check maxsize and expired tags
    tms_expires = """<GDAL_WMS>
    <Service name="TMS">
        <ServerUrl>%s</ServerUrl>
    </Service>
    <DataWindow>
        <UpperLeftX>-20037508.34</UpperLeftX>
        <UpperLeftY>20037508.34</UpperLeftY>
        <LowerRightX>20037508.34</LowerRightX>
        <LowerRightY>-20037508.34</LowerRightY>
        <TileLevel>18</TileLevel>
        <TileCountX>1</TileCountX>
        <TileCountY>1</TileCountY>
        <YOrigin>top</YOrigin>
    </DataWindow>
    <Projection>EPSG:3857</Projection>
    <BlockSizeX>256</BlockSizeX>
    <BlockSizeY>256</BlockSizeY>
    <BandsCount>3</BandsCount>
    <Cache><Path>./tmp/gdalwmscache</Path><Expires>1</Expires></Cache>
</GDAL_WMS>""" % server_url_mask

    # Remember the newest mtime among the (fake) cached tiles.
    mod_time = 0
    for expected_file in expected_files:
        tm = os.path.getmtime(expected_file)
        if tm > mod_time:
            mod_time = tm

    ds = gdal.Open(tms_expires)
    # Sleep past the 1-second <Expires> so the cached tiles are stale.
    sleep(1.05)
    data = ds.GetRasterBand(1).GetOverview(ovr_upper_level).ReadRaster(
        0, 0, 512, 512)

    # tiles should be overwritten by new ones
    for expected_file in expected_files:
        if os.path.getmtime(expected_file) <= mod_time:
            return 'fail'

    return 'success'
def main(shapefile_path, raster_path):
    """Clip the raster at raster_path with the boundary polygon(s) from
    shapefile_path and write the result to OUTPUT.tif and OUTPUT.jpg.

    Relies on module-level helpers world2Pixel(), imageToArray(), stretch()
    and OpenArray().  Output filenames are hard-coded, as in the original
    script.

    :param shapefile_path: path to the boundary shapefile.
    :param raster_path: path to the raster to clip.
    :raises ValueError: if the GTiff driver is unavailable.
    """
    # Load the source data as a gdalnumeric array
    srcArray = gdalnumeric.LoadFile(raster_path)

    # Also load as a gdal image to get geotransform (world file) info
    srcImage = gdal.Open(raster_path)
    geoTrans = srcImage.GetGeoTransform()

    # Create an OGR layer from a boundary shapefile; the layer name is the
    # shapefile base name without extension.
    shapef = ogr.Open(shapefile_path)
    lyr = shapef.GetLayer(
        os.path.split(os.path.splitext(shapefile_path)[0])[1])

    # Convert the layer extent to image pixel coordinates
    minX, maxX, minY, maxY = lyr.GetExtent()
    ulX, ulY = world2Pixel(geoTrans, minX, maxY)
    lrX, lrY = world2Pixel(geoTrans, maxX, minY)

    # Calculate the pixel size of the new image
    pxWidth = int(lrX - ulX)
    pxHeight = int(lrY - ulY)

    clip = srcArray[:, ulY:lrY, ulX:lrX]

    # Pixel offsets to pass to the new image projection info, so OUTPUT.tif
    # is georeferenced at the clip window rather than the source origin.
    xoffset = ulX
    yoffset = ulY

    # Create a new geomatrix for the image
    geoTrans = list(geoTrans)
    geoTrans[0] = minX
    geoTrans[3] = maxY

    # Map points to pixels for drawing the boundary on a blank 8-bit,
    # black and white, mask image.
    points = []
    pixels = []
    rasterPoly = Image.new("L", (pxWidth, pxHeight), 1)
    poly = lyr.GetNextFeature()
    c = 0
    while poly:
        geom = poly.GetGeometryRef()
        # NOTE(review): only ring 0 (the outer ring) is used; interior rings
        # (holes) are ignored — confirm this is intended.
        pts = geom.GetGeometryRef(0)
        # BUGFIX: the original re-converted the *cumulative* point list on
        # every feature, appending duplicate pixels (quadratic work and a
        # distorted mask polygon).  Convert only this feature's vertices.
        feature_points = [(pts.GetX(p), pts.GetY(p))
                          for p in range(pts.GetPointCount())]
        points.extend(feature_points)
        pixels.extend(world2Pixel(geoTrans, x, y) for x, y in feature_points)
        poly = lyr.GetNextFeature()
        # BUGFIX: was a Python 2 print statement (syntax error on Python 3).
        print(c, len(points), len(pixels))
        c += 1

    rasterize = ImageDraw.Draw(rasterPoly)
    rasterize.polygon(pixels, 0)
    mask = imageToArray(rasterPoly)

    # Clip the image using the mask
    clip = gdalnumeric.choose(mask, (clip, 0)).astype(gdalnumeric.uint8)

    # This image has 3 bands so we stretch each one to make them
    # visually brighter
    for i in range(3):
        clip[i, :, :] = stretch(clip[i, :, :])

    # Save new tiff.  Instead of gdalnumeric.SaveArray, go through the GTiff
    # driver explicitly so we can override the offset of the destination
    # raster via OpenArray().
    gtiffDriver = gdal.GetDriverByName('GTiff')
    if gtiffDriver is None:
        raise ValueError("Can't find GeoTiff Driver")
    gtiffDriver.CreateCopy(
        "OUTPUT.tif",
        OpenArray(clip, prototype_ds=raster_path, xoff=xoffset, yoff=yoffset))

    # Save as an 8-bit jpeg for an easy, quick preview
    clip = clip.astype(gdalnumeric.uint8)
    gdalnumeric.SaveArray(clip, "OUTPUT.jpg", format="JPEG")

    gdal.ErrorReset()
def vsizip_1():
    """Test writing and reading /vsizip/ archives in /vsimem/, including
    directory creation, write-lock errors, the {...} alternate URI syntax
    and nested zip-in-zip access.

    Old-style gdaltest function: returns 'fail'/'success'.
    """
    # We can keep the handle open during all the ZIP writing
    hZIP = gdal.VSIFOpenL("/vsizip/vsimem/test.zip", "wb")
    if hZIP is None:
        gdaltest.post_reason('fail 1')
        return 'fail'

    # One way to create a directory
    f = gdal.VSIFOpenL("/vsizip/vsimem/test.zip/subdir2/", "wb")
    if f is None:
        gdaltest.post_reason('fail 2')
        return 'fail'
    gdal.VSIFCloseL(f)

    # A more natural one
    gdal.Mkdir("/vsizip/vsimem/test.zip/subdir1", 0)

    # Create 1st file
    f2 = gdal.VSIFOpenL("/vsizip/vsimem/test.zip/subdir3/abcd", "wb")
    if f2 is None:
        gdaltest.post_reason('fail 3')
        return 'fail'
    gdal.VSIFWriteL("abcd", 1, 4, f2)
    gdal.VSIFCloseL(f2)

    # Test that we cannot read a zip file being written
    gdal.ErrorReset()
    gdal.PushErrorHandler('CPLQuietErrorHandler')
    f = gdal.VSIFOpenL("/vsizip/vsimem/test.zip/subdir3/abcd", "rb")
    gdal.PopErrorHandler()
    if gdal.GetLastErrorMsg() != 'Cannot read a zip file being written':
        gdaltest.post_reason('expected error')
        print(gdal.GetLastErrorMsg())
        return 'fail'
    if f is not None:
        gdaltest.post_reason('should not have been successful 1')
        return 'fail'

    # Create 2nd file
    f3 = gdal.VSIFOpenL("/vsizip/vsimem/test.zip/subdir3/efghi", "wb")
    if f3 is None:
        gdaltest.post_reason('fail 4')
        return 'fail'
    gdal.VSIFWriteL("efghi", 1, 5, f3)

    # Try creating a 3d file: only one file may be written at a time
    gdal.ErrorReset()
    gdal.PushErrorHandler('CPLQuietErrorHandler')
    f4 = gdal.VSIFOpenL("/vsizip/vsimem/test.zip/that_wont_work", "wb")
    gdal.PopErrorHandler()
    if gdal.GetLastErrorMsg(
    ) != 'Cannot create that_wont_work while another file is being written in the .zip':
        gdaltest.post_reason('expected error')
        print(gdal.GetLastErrorMsg())
        return 'fail'
    if f4 is not None:
        gdaltest.post_reason('should not have been successful 2')
        return 'fail'

    gdal.VSIFCloseL(f3)

    # Now we can close the main handle
    gdal.VSIFCloseL(hZIP)

    # Read back the first file's content.
    f = gdal.VSIFOpenL("/vsizip/vsimem/test.zip/subdir3/abcd", "rb")
    if f is None:
        gdaltest.post_reason('fail 5')
        return 'fail'
    data = gdal.VSIFReadL(1, 4, f)
    gdal.VSIFCloseL(f)
    if data.decode('ASCII') != 'abcd':
        gdaltest.post_reason('fail')
        print(data)
        return 'fail'

    # Test alternate uri syntax: /vsizip/{mainfile}/subfile
    gdal.Rename("/vsimem/test.zip", "/vsimem/test.xxx")
    f = gdal.VSIFOpenL("/vsizip/{/vsimem/test.xxx}/subdir3/abcd", "rb")
    if f is None:
        gdaltest.post_reason('fail')
        return 'fail'
    data = gdal.VSIFReadL(1, 4, f)
    gdal.VSIFCloseL(f)
    if data.decode('ASCII') != 'abcd':
        gdaltest.post_reason('fail')
        print(data)
        return 'fail'

    # With a trailing slash
    f = gdal.VSIFOpenL("/vsizip/{/vsimem/test.xxx}/subdir3/abcd/", "rb")
    if f is None:
        gdaltest.post_reason('fail')
        return 'fail'
    gdal.VSIFCloseL(f)

    # Test ReadDir(): the archive root should hold the 3 subdirs
    if len(gdal.ReadDir("/vsizip/{/vsimem/test.xxx}")) != 3:
        gdaltest.post_reason('fail')
        print(gdal.ReadDir("/vsizip/{/vsimem/test.xxx}"))
        return 'fail'

    # Unbalanced curls
    f = gdal.VSIFOpenL("/vsizip/{/vsimem/test.xxx", "rb")
    if f is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    # Non existing subfile in an existing mainfile
    f = gdal.VSIFOpenL("/vsizip/{/vsimem/test.xxx}/bla", "rb")
    if f is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    # Non existing mainfile
    f = gdal.VSIFOpenL("/vsizip/{/vsimem/test.zzz}/bla", "rb")
    if f is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    # Wrong syntax: trailing text after the closing brace
    f = gdal.VSIFOpenL("/vsizip/{/vsimem/test.xxx}.aux.xml", "rb")
    if f is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    # Test nested { { } }: copy test.xxx into a zip inside another zip
    hZIP = gdal.VSIFOpenL("/vsizip/{/vsimem/zipinzip.yyy}", "wb")
    if hZIP is None:
        gdaltest.post_reason('fail 1')
        return 'fail'
    f = gdal.VSIFOpenL("/vsizip/{/vsimem/zipinzip.yyy}/test.xxx", "wb")
    f_src = gdal.VSIFOpenL("/vsimem/test.xxx", "rb")
    data = gdal.VSIFReadL(1, 10000, f_src)
    gdal.VSIFCloseL(f_src)
    gdal.VSIFWriteL(data, 1, len(data), f)
    gdal.VSIFCloseL(f)
    gdal.VSIFCloseL(hZIP)

    f = gdal.VSIFOpenL(
        "/vsizip/{/vsizip/{/vsimem/zipinzip.yyy}/test.xxx}/subdir3/abcd/", "rb")
    if f is None:
        gdaltest.post_reason('fail')
        return 'fail'
    data = gdal.VSIFReadL(1, 4, f)
    gdal.VSIFCloseL(f)
    if data.decode('ASCII') != 'abcd':
        gdaltest.post_reason('fail')
        print(data)
        return 'fail'

    # Clean up the in-memory files.
    gdal.Unlink("/vsimem/test.xxx")
    gdal.Unlink("/vsimem/zipinzip.yyy")

    return 'success'
def wms_8():
    """Test the TMS local tile cache against the metacarta tilecache server.

    NOTE(review): another function named wms_8 appears earlier in this file;
    at import time the later definition shadows the earlier one — confirm
    that is intended.

    Old-style gdaltest function: returns 'skip'/'fail'/'success'.
    """
    if gdaltest.wms_drv is None:
        return 'skip'

    if gdaltest.metacarta_tms is not True:
        return 'skip'

    tms = """<GDAL_WMS>
    <Service name="TMS">
        <ServerUrl>http://tilecache.osgeo.org/wms-c/Basic.py</ServerUrl>
        <Layer>basic</Layer>
        <Format>png</Format>
    </Service>
    <DataWindow>
        <UpperLeftX>-180.0</UpperLeftX>
        <UpperLeftY>90.0</UpperLeftY>
        <LowerRightX>180.0</LowerRightX>
        <LowerRightY>-90.0</LowerRightY>
        <TileLevel>19</TileLevel>
        <TileCountX>2</TileCountX>
        <TileCountY>1</TileCountY>
    </DataWindow>
    <Projection>EPSG:4326</Projection>
    <BlockSizeX>256</BlockSizeX>
    <BlockSizeY>256</BlockSizeY>
    <BandsCount>3</BandsCount>
    <Cache><Path>./tmp/gdalwmscache</Path></Cache>
</GDAL_WMS>"""

    # Start from an empty cache directory.
    try:
        shutil.rmtree('tmp/gdalwmscache')
    except OSError:
        # Was a bare "except:": rmtree only raises OSError here (missing dir).
        pass

    ds = gdal.Open(tms)
    if ds is None:
        gdaltest.post_reason('open failed.')
        return 'fail'

    gdal.ErrorReset()
    data = ds.GetRasterBand(1).GetOverview(18).ReadRaster(0, 0, 512, 256)
    if gdal.GetLastErrorMsg() != '':
        # Read failed: skip if the server itself is unreachable.
        if gdaltest.gdalurlopen(
                'http://tilecache.osgeo.org/wms-c/Basic.py/1.0.0/basic/0/0/0.png'
        ) is None:
            return 'skip'

    ds = None

    expected_files = [
        'tmp/gdalwmscache/d/b/dbbfe17f22c9d54f2c45ec7dc5042bc8',
        'tmp/gdalwmscache/5/4/548f0e98b56a8c104cfe2df9f7ef8685'
    ]
    for expected_file in expected_files:
        try:
            os.stat(expected_file)
        except OSError:
            # Was a bare "except:": os.stat raises OSError for a missing file.
            gdaltest.post_reason('%s should exist' % expected_file)
            return 'fail'

    # Now, we should read from the cache
    ds = gdal.Open(tms)
    cached_data = ds.GetRasterBand(1).GetOverview(18).ReadRaster(
        0, 0, 512, 256)
    ds = None

    if data != cached_data:
        gdaltest.post_reason('data != cached_data')
        return 'fail'

    # Replace the cache with fake data (all-zero RGB tiles).
    for expected_file in expected_files:
        ds = gdal.GetDriverByName('GTiff').Create(expected_file, 256, 256, 4)
        ds.GetRasterBand(1).Fill(0)
        ds.GetRasterBand(2).Fill(0)
        ds.GetRasterBand(3).Fill(0)
        ds.GetRasterBand(4).Fill(255)
        ds = None

    # Read again from the cache, and check that it is actually used
    # (checksum 0 proves the fake all-zero tiles were served).
    ds = gdal.Open(tms)
    cs = ds.GetRasterBand(1).GetOverview(18).Checksum()
    ds = None
    if cs != 0:
        gdaltest.post_reason('cs != 0')
        return 'fail'

    # Test with GDAL_DEFAULT_WMS_CACHE_PATH
    tms_nocache = """<GDAL_WMS>
    <Service name="TMS">
        <ServerUrl>http://tilecache.osgeo.org/wms-c/Basic.py</ServerUrl>
        <Layer>basic</Layer>
        <Format>png</Format>
    </Service>
    <DataWindow>
        <UpperLeftX>-180.0</UpperLeftX>
        <UpperLeftY>90.0</UpperLeftY>
        <LowerRightX>180.0</LowerRightX>
        <LowerRightY>-90.0</LowerRightY>
        <TileLevel>19</TileLevel>
        <TileCountX>2</TileCountX>
        <TileCountY>1</TileCountY>
    </DataWindow>
    <Projection>EPSG:4326</Projection>
    <BlockSizeX>256</BlockSizeX>
    <BlockSizeY>256</BlockSizeY>
    <BandsCount>3</BandsCount>
    <Cache/> <!-- this is needed for GDAL_DEFAULT_WMS_CACHE_PATH to be triggered -->
</GDAL_WMS>"""

    # Now, we should read from the cache
    gdal.SetConfigOption("GDAL_DEFAULT_WMS_CACHE_PATH", "./tmp/gdalwmscache")
    ds = gdal.Open(tms_nocache)
    cs = ds.GetRasterBand(1).GetOverview(18).Checksum()
    ds = None
    gdal.SetConfigOption("GDAL_DEFAULT_WMS_CACHE_PATH", None)
    if cs != 0:
        gdaltest.post_reason('cs != 0')
        return 'fail'

    return 'success'
def test_vsigs_1():
    """Test /vsigs/ error paths when credentials are missing or invalid.

    Uses pytest style.  CPL_GCE_SKIP=YES disables the GCE instance-metadata
    credential fallback so the credential errors are deterministic.

    NOTE(review): this test mutates GS_SECRET_ACCESS_KEY/GS_ACCESS_KEY_ID via
    gdal.SetConfigOption without restoring them — presumably a later fixture
    or test cleans up; confirm.
    """
    if not gdaltest.built_against_curl():
        pytest.skip()

    # Invalid header filename
    gdal.ErrorReset()
    with gdaltest.config_option('GDAL_HTTP_HEADER_FILE', '/i_dont/exist.py'):
        with gdaltest.config_option('CPL_GCE_SKIP', 'YES'):
            with gdaltest.error_handler():
                f = open_for_read('/vsigs/foo/bar')
    if f is not None:
        gdal.VSIFCloseL(f)
        pytest.fail()
    last_err = gdal.GetLastErrorMsg()
    assert 'Cannot read' in last_err

    # Invalid content for header file
    with gdaltest.config_option('GDAL_HTTP_HEADER_FILE', 'vsigs.py'):
        with gdaltest.config_option('CPL_GCE_SKIP', 'YES'):
            f = open_for_read('/vsigs/foo/bar')
    if f is not None:
        gdal.VSIFCloseL(f)
        pytest.fail()

    # Missing GS_SECRET_ACCESS_KEY
    gdal.ErrorReset()
    with gdaltest.config_option('CPL_GCE_SKIP', 'YES'):
        with gdaltest.error_handler():
            f = open_for_read('/vsigs/foo/bar')
    assert f is None and gdal.VSIGetLastErrorMsg().find(
        'GS_SECRET_ACCESS_KEY') >= 0

    # Same check through the streaming file system.
    gdal.ErrorReset()
    with gdaltest.config_option('CPL_GCE_SKIP', 'YES'):
        with gdaltest.error_handler():
            f = open_for_read('/vsigs_streaming/foo/bar')
    assert f is None and gdal.VSIGetLastErrorMsg().find(
        'GS_SECRET_ACCESS_KEY') >= 0

    gdal.SetConfigOption('GS_SECRET_ACCESS_KEY', 'GS_SECRET_ACCESS_KEY')

    # Missing GS_ACCESS_KEY_ID
    gdal.ErrorReset()
    with gdaltest.error_handler():
        f = open_for_read('/vsigs/foo/bar')
    assert f is None and gdal.VSIGetLastErrorMsg().find(
        'GS_ACCESS_KEY_ID') >= 0

    gdal.SetConfigOption('GS_ACCESS_KEY_ID', 'GS_ACCESS_KEY_ID')

    # ERROR 1: The User Id you provided does not exist in our records.
    gdal.ErrorReset()
    with gdaltest.error_handler():
        f = open_for_read('/vsigs/foo/bar.baz')
    if f is not None or gdal.VSIGetLastErrorMsg() == '':
        if f is not None:
            gdal.VSIFCloseL(f)
        # On AppVeyor the network behaves differently: tolerate and bail out.
        if gdal.GetConfigOption('APPVEYOR') is not None:
            return
        pytest.fail(gdal.VSIGetLastErrorMsg())

    gdal.ErrorReset()
    with gdaltest.error_handler():
        f = open_for_read('/vsigs_streaming/foo/bar.baz')
    assert f is None and gdal.VSIGetLastErrorMsg() != ''
def vsis3_extra_1():
    """Extra /vsis3/ tests against a real S3 resource.

    Requires AWS_SECRET_ACCESS_KEY, AWS_ACCESS_KEY_ID and S3_RESOURCE to be
    set in the environment/config; returns 'skip' otherwise.
    Returns 'success' or 'fail' (old-style gdaltest convention).
    """
    # Narrowed from a bare except: only guard against the bindings raising.
    try:
        drv = gdal.GetDriverByName('HTTP')
    except Exception:
        drv = None
    if drv is None:
        return 'skip'

    if gdal.GetConfigOption('AWS_SECRET_ACCESS_KEY') is None:
        print('Missing AWS_SECRET_ACCESS_KEY for running gdaltest_list_extra')
        return 'skip'
    elif gdal.GetConfigOption('AWS_ACCESS_KEY_ID') is None:
        print('Missing AWS_ACCESS_KEY_ID for running gdaltest_list_extra')
        return 'skip'
    elif gdal.GetConfigOption('S3_RESOURCE') is None:
        print('Missing S3_RESOURCE for running gdaltest_list_extra')
        return 'skip'

    # Read one byte from the configured resource through /vsis3/.
    f = gdal.VSIFOpenL('/vsis3/' + gdal.GetConfigOption('S3_RESOURCE'), 'rb')
    if f is None:
        gdaltest.post_reason('fail')
        return 'fail'
    ret = gdal.VSIFReadL(1, 1, f)
    gdal.VSIFCloseL(f)
    if len(ret) != 1:
        gdaltest.post_reason('fail')
        print(ret)
        return 'fail'

    # Same with /vsis3_streaming/
    f = gdal.VSIFOpenL(
        '/vsis3_streaming/' + gdal.GetConfigOption('S3_RESOURCE'), 'rb')
    if f is None:
        gdaltest.post_reason('fail')
        return 'fail'
    ret = gdal.VSIFReadL(1, 1, f)
    gdal.VSIFCloseL(f)
    if len(ret) != 1:
        gdaltest.post_reason('fail')
        print(ret)
        return 'fail'

    # Invalid bucket : "The specified bucket does not exist"
    gdal.ErrorReset()
    f = gdal.VSIFOpenL('/vsis3/not_existing_bucket/foo', 'rb')
    if f is None:
        # BUGFIX: previously the None handle was passed straight to
        # VSIFReadL()/VSIFCloseL(), crashing instead of reporting 'fail'.
        gdaltest.post_reason('fail')
        return 'fail'
    with gdaltest.error_handler():
        gdal.VSIFReadL(1, 1, f)
    gdal.VSIFCloseL(f)
    if gdal.GetLastErrorMsg() == '':
        gdaltest.post_reason('fail')
        print(gdal.GetLastErrorMsg())
        return 'fail'

    # Invalid resource: streaming open of a nonexistent object must fail.
    gdal.ErrorReset()
    f = gdal.VSIFOpenL(
        '/vsis3_streaming/' + gdal.GetConfigOption('S3_RESOURCE') +
        '/invalid_resource.baz', 'rb')
    if f is not None:
        gdaltest.post_reason('fail')
        print(gdal.GetLastErrorMsg())
        return 'fail'

    return 'success'
def test_rasterio_5():
    # Error conditions of Dataset/Band ReadRaster(): bad band numbers,
    # out-of-range access windows, integer overflow, and illegal buffer sizes.
    ds = gdal.Open('data/byte.tif')

    # Hugely negative window sizes must not crash (return value deliberately
    # ignored; we only check we survive the call).
    for obj in [ds, ds.GetRasterBand(1)]:
        obj.ReadRaster(0, 0, -2000000000, 1, 1, 1)
        obj.ReadRaster(0, 0, 1, -2000000000, 1, 1)

    # Band numbers outside [1, RasterCount] must be rejected with a message.
    for band_number in [-1, 0, 2]:
        gdal.ErrorReset()
        gdal.PushErrorHandler('CPLQuietErrorHandler')
        res = ds.ReadRaster(0, 0, 1, 1, band_list=[band_number])
        gdal.PopErrorHandler()
        error_msg = gdal.GetLastErrorMsg()
        assert res is None, 'expected None'
        assert error_msg.find('this band does not exist on dataset') != -1, \
            'did not get expected error msg'

    # Requesting the same band twice in band_list is legal.
    res = ds.ReadRaster(0, 0, 1, 1, band_list=[1, 1])
    assert res is not None, 'expected non None'

    for obj in [ds, ds.GetRasterBand(1)]:
        # 21x21 exceeds the raster extent -> access window error expected.
        gdal.ErrorReset()
        gdal.PushErrorHandler('CPLQuietErrorHandler')
        res = obj.ReadRaster(0, 0, 21, 21)
        gdal.PopErrorHandler()
        error_msg = gdal.GetLastErrorMsg()
        assert res is None, 'expected None'
        assert error_msg.find('Access window out of range in RasterIO()') != -1, \
            'did not get expected error msg (1)'

        # This should only fail on a 32bit build
        try:
            maxsize = sys.maxint  # Python 2 only; AttributeError on Python 3
        except AttributeError:
            maxsize = sys.maxsize

        # On win64, maxsize == 2147483647 and ReadRaster()
        # fails because of out of memory condition, not
        # because of integer overflow. I'm not sure on how
        # to detect win64 better.
        if maxsize == 2147483647 and sys.platform != 'win32':
            gdal.ErrorReset()
            gdal.PushErrorHandler('CPLQuietErrorHandler')
            res = obj.ReadRaster(0, 0, 1, 1, 1000000, 1000000)
            gdal.PopErrorHandler()
            error_msg = gdal.GetLastErrorMsg()
            assert res is None, 'expected None'
            assert error_msg.find('Integer overflow') != -1, \
                'did not get expected error msg (2)'

        # Zero buffer dimension must be rejected.
        gdal.ErrorReset()
        gdal.PushErrorHandler('CPLQuietErrorHandler')
        res = obj.ReadRaster(0, 0, 0, 1)
        gdal.PopErrorHandler()
        error_msg = gdal.GetLastErrorMsg()
        assert res is None, 'expected None'
        assert error_msg.find('Illegal values for buffer size') != -1, \
            'did not get expected error msg (3)'

    ds = None
def ogr_osm_10():
    # Error cases for the OSM driver: missing, empty and corrupted .osm/.pbf
    # files, plus an entity-expansion ("billion laughs") input that must be
    # rejected rather than exhaust memory.
    if ogrtest.osm_drv is None:
        return 'skip'

    # A file that does not exist.
    ds = ogr.Open('/nonexistent/foo.osm')
    if ds is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    # Empty .osm file
    f = gdal.VSIFOpenL('/vsimem/foo.osm', 'wb')
    gdal.VSIFCloseL(f)
    ds = ogr.Open('/vsimem/foo.osm')
    if ds is not None:
        gdaltest.post_reason('fail')
        return 'fail'
    gdal.Unlink('/vsimem/foo.osm')

    # Empty .pbf file
    f = gdal.VSIFOpenL('/vsimem/foo.pbf', 'wb')
    gdal.VSIFCloseL(f)
    ds = ogr.Open('/vsimem/foo.pbf')
    if ds is not None:
        gdaltest.post_reason('fail')
        return 'fail'
    gdal.Unlink('/vsimem/foo.pbf')

    # XML parsing of .osm requires the driver to be built with expat support.
    if ogrtest.osm_drv_parse_osm:
        # Invalid .osm file: open succeeds, the error surfaces on first read.
        f = gdal.VSIFOpenL('/vsimem/foo.osm', 'wb')
        data = "<osm>"
        gdal.VSIFWriteL(data, 1, len(data), f)
        gdal.VSIFCloseL(f)
        ds = ogr.Open('/vsimem/foo.osm')
        lyr = ds.GetLayer(0)
        gdal.ErrorReset()
        gdal.PushErrorHandler('CPLQuietErrorHandler')
        feat = lyr.GetNextFeature()
        gdal.PopErrorHandler()
        if gdal.GetLastErrorMsg() == '':
            gdaltest.post_reason('fail')
            return 'fail'
        ds = None
        gdal.Unlink('/vsimem/foo.osm')

    # Invalid .pbf file
    f = gdal.VSIFOpenL('/vsimem/foo.pbf', 'wb')
    data = "OSMHeader\n"
    gdal.VSIFWriteL(data, 1, len(data), f)
    gdal.VSIFCloseL(f)
    ds = ogr.Open('/vsimem/foo.pbf')
    lyr = ds.GetLayer(0)
    gdal.ErrorReset()
    gdal.PushErrorHandler('CPLQuietErrorHandler')
    feat = lyr.GetNextFeature()
    gdal.PopErrorHandler()
    if gdal.GetLastErrorMsg() == '':
        gdaltest.post_reason('fail')
        return 'fail'
    ds = None
    gdal.Unlink('/vsimem/foo.pbf')

    # Test million laugh pattern
    if ogrtest.osm_drv_parse_osm:
        ds = ogr.Open('data/billionlaugh.osm')
        lyr = ds.GetLayer(0)
        gdal.ErrorReset()
        gdal.PushErrorHandler('CPLQuietErrorHandler')
        feat = lyr.GetNextFeature()
        gdal.PopErrorHandler()
        if feat is not None or gdal.GetLastErrorMsg() == '':
            gdaltest.post_reason('fail')
            return 'fail'

    return 'success'
def Calc(calc: MaybeSequence[str], outfile: Optional[PathLikeOrStr] = None, NoDataValue: Optional[Number] = None,
         type: Optional[Union[GDALDataType, str]] = None, format: Optional[str] = None,
         creation_options: Optional[Sequence[str]] = None, allBands: str = '', overwrite: bool = False,
         hideNoData: bool = False, projectionCheck: bool = False,
         color_table: Optional[ColorTableLike] = None,
         extent: Optional[Extent] = None, projwin: Optional[Union[Tuple, GeoRectangle]] = None,
         user_namespace: Optional[Dict] = None,
         debug: bool = False, quiet: bool = False, **input_files):
    """Evaluate raster calculation(s) over input raster bands and write a raster.

    calc: one expression, or a list of expressions (one per output band),
        evaluated with numpy/gdal_array functions in scope; input rasters are
        referenced by their alpha keyword names from **input_files.
    outfile: output filename (may be omitted for MEM format).
    NoDataValue: output nodata value; the string 'none' means "do not set one";
        None selects the default for the output datatype.
    type: output datatype (GDALDataType or name); default is the union of the
        input datatypes.
    format/creation_options: output driver and its creation options.
    allBands: alpha of the input whose every band should be processed with a
        single calc expression.
    overwrite/hideNoData/projectionCheck/color_table/extent/projwin: see
        gdal_calc documentation.
    user_namespace: extra names made visible to the eval'd expressions.
    **input_files: alpha -> filename/Dataset (or list thereof), plus optional
        '<alpha>_band' -> band number entries.
    Returns the output Dataset.

    NOTE: the expressions are passed to eval() — do not feed untrusted input.
    """
    if debug:
        print(f"gdal_calc.py starting calculation {calc}")

    # Refuse options that imply re-creating an output file we must not touch.
    if outfile and os.path.isfile(outfile) and not overwrite:
        if type or format or creation_options or hideNoData or extent or projwin:
            raise Exception("One or several options implying file creation have been provided but Output file exists, must use --overwrite option!")

    # Single calc value compatibility (was a dead `calc = calc` self-assignment)
    if not isinstance(calc, (list, tuple)):
        calc = [calc]
    calc = [c.strip('"') for c in calc]

    creation_options = creation_options or []

    # set up global namespace for eval with all functions of gdal_array, numpy
    global_namespace = {key: getattr(module, key)
                        for module in [gdal_array, numpy]
                        for key in dir(module) if not key.startswith('__')}

    if user_namespace:
        global_namespace.update(user_namespace)

    if not calc:
        raise Exception("No calculation provided.")
    elif not outfile and (format is None or format.upper() != 'MEM'):
        # BUGFIX: previously `format.upper()` raised AttributeError when
        # format was None; now that case reports the missing output file.
        raise Exception("No output file provided.")

    if format is None:
        format = GetOutputDriverFor(outfile)

    # Normalize the extent/projwin arguments into an Extent or GeoRectangle.
    if isinstance(extent, GeoRectangle):
        pass
    elif projwin:
        if isinstance(projwin, GeoRectangle):
            extent = projwin
        else:
            extent = GeoRectangle.from_lurd(*projwin)
    elif not extent:
        extent = Extent.IGNORE
    else:
        extent = extent_util.parse_extent(extent)

    compatible_gt_eps = 0.000001
    # Which GeoTransform differences are tolerated between inputs.
    gt_diff_support = {
        GT.INCOMPATIBLE_OFFSET: extent != Extent.FAIL,
        GT.INCOMPATIBLE_PIXEL_SIZE: False,
        GT.INCOMPATIBLE_ROTATION: False,
        GT.NON_ZERO_ROTATION: False,
    }
    gt_diff_error = {
        GT.INCOMPATIBLE_OFFSET: 'different offset',
        GT.INCOMPATIBLE_PIXEL_SIZE: 'different pixel size',
        GT.INCOMPATIBLE_ROTATION: 'different rotation',
        GT.NON_ZERO_ROTATION: 'non zero rotation',
    }

    ################################################################
    # fetch details of input layers
    ################################################################

    # set up some lists to store data for each band
    myFileNames = []           # input filenames
    myFiles = []               # input DataSets
    myBands = []               # input bands
    myAlphaList = []           # input alpha letter that represents each input file
    myDataType = []            # string representation of the datatype of each input file
    myDataTypeNum = []         # datatype of each input file
    myNDV = []                 # nodatavalue for each input file
    DimensionsCheck = None     # dimensions of the output
    Dimensions = []            # Dimensions of input files
    ProjectionCheck = None     # projection of the output
    GeoTransformCheck = None   # GeoTransform of the output
    GeoTransforms = []         # GeoTransform of each input file
    GeoTransformDiffer = False # True if we have inputs with different GeoTransforms
    myTempFileNames = []       # vrt filename from each input file
    myAlphaFileLists = []      # list of the Alphas which holds a list of inputs

    # loop through input files - checking dimensions
    for alphas, filenames in input_files.items():
        if isinstance(filenames, (list, tuple)):
            # alpha is a list of files
            myAlphaFileLists.append(alphas)
        elif is_path_like(filenames) or isinstance(filenames, gdal.Dataset):
            # alpha is a single filename or a Dataset
            filenames = [filenames]
            alphas = [alphas]
        else:
            # I guess this alphas should be in the global_namespace,
            # It would have been better to pass it as user_namespace, but I'll accept it anyway
            global_namespace[alphas] = filenames
            continue
        for alpha, filename in zip(alphas * len(filenames), filenames):
            if not alpha.endswith("_band"):
                # check if we have asked for a specific band...
                alpha_band = f"{alpha}_band"
                if alpha_band in input_files:
                    myBand = input_files[alpha_band]
                else:
                    myBand = 1

                myF_is_ds = not is_path_like(filename)
                if myF_is_ds:
                    myFile = filename
                    filename = None
                else:
                    myFile = open_ds(filename)
                if not myFile:
                    # BUGFIX: the message previously contained a "(unknown)"
                    # placeholder instead of the offending filename.
                    raise IOError(f"No such file or directory: '{filename}'")

                myFileNames.append(filename)
                myFiles.append(myFile)
                myBands.append(myBand)
                myAlphaList.append(alpha)
                dt = myFile.GetRasterBand(myBand).DataType
                myDataType.append(gdal.GetDataTypeName(dt))
                myDataTypeNum.append(dt)
                myNDV.append(None if hideNoData else myFile.GetRasterBand(myBand).GetNoDataValue())

                # check that the dimensions of each layer are the same
                myFileDimensions = [myFile.RasterXSize, myFile.RasterYSize]
                if DimensionsCheck:
                    if DimensionsCheck != myFileDimensions:
                        GeoTransformDiffer = True
                        if extent in [Extent.IGNORE, Extent.FAIL]:
                            raise Exception(
                                f"Error! Dimensions of file {filename} ({myFileDimensions[0]:d}, "
                                f"{myFileDimensions[1]:d}) are different from other files "
                                f"({DimensionsCheck[0]:d}, {DimensionsCheck[1]:d}). Cannot proceed")
                else:
                    DimensionsCheck = myFileDimensions

                # check that the Projection of each layer are the same
                myProjection = myFile.GetProjection()
                if ProjectionCheck:
                    if projectionCheck and ProjectionCheck != myProjection:
                        raise Exception(
                            f"Error! Projection of file {filename} {myProjection} "
                            f"are different from other files {ProjectionCheck}. Cannot proceed")
                else:
                    ProjectionCheck = myProjection

                # check that the GeoTransforms of each layer are the same
                myFileGeoTransform = myFile.GetGeoTransform(can_return_null=True)
                if extent == Extent.IGNORE:
                    GeoTransformCheck = myFileGeoTransform
                else:
                    Dimensions.append(myFileDimensions)
                    GeoTransforms.append(myFileGeoTransform)
                    if not GeoTransformCheck:
                        GeoTransformCheck = myFileGeoTransform
                    else:
                        my_gt_diff = extent_util.gt_diff(GeoTransformCheck, myFileGeoTransform,
                                                         eps=compatible_gt_eps,
                                                         diff_support=gt_diff_support)
                        if my_gt_diff not in [GT.SAME, GT.ALMOST_SAME]:
                            GeoTransformDiffer = True
                            if my_gt_diff != GT.COMPATIBLE_DIFF:
                                raise Exception(
                                    f"Error! GeoTransform of file {filename} {myFileGeoTransform} is incompatible "
                                    f"({gt_diff_error[my_gt_diff]}), first file GeoTransform is {GeoTransformCheck}. "
                                    f"Cannot proceed")
                if debug:
                    print(
                        f"file {alpha}: {filename}, dimensions: "
                        f"{DimensionsCheck[0]}, {DimensionsCheck[1]}, type: {myDataType[-1]}")

    # process allBands option
    allBandsIndex = None
    allBandsCount = 1
    if allBands:
        if len(calc) > 1:
            raise Exception("Error! --allBands implies a single --calc")
        try:
            allBandsIndex = myAlphaList.index(allBands)
        except ValueError:
            raise Exception(f"Error! allBands option was given but Band {allBands} not found. Cannot proceed")
        allBandsCount = myFiles[allBandsIndex].RasterCount
        if allBandsCount <= 1:
            allBandsIndex = None
    else:
        allBandsCount = len(calc)

    if extent not in [Extent.IGNORE, Extent.FAIL] and (
            GeoTransformDiffer or isinstance(extent, GeoRectangle)):
        # mixing different GeoTransforms/Extents: compute the common grid and
        # wrap every input in a temporary VRT aligned to it.
        GeoTransformCheck, DimensionsCheck, ExtentCheck = extent_util.calc_geotransform_and_dimensions(
            GeoTransforms, Dimensions, extent)
        if GeoTransformCheck is None:
            raise Exception("Error! The requested extent is empty. Cannot proceed")
        for i in range(len(myFileNames)):
            temp_vrt_filename, temp_vrt_ds = extent_util.make_temp_vrt(myFiles[i], ExtentCheck)
            myTempFileNames.append(temp_vrt_filename)
            myFiles[i] = None  # close original ds
            myFiles[i] = temp_vrt_ds  # replace original ds with vrt_ds
            # update the new precise dimensions and gt from the new ds
            GeoTransformCheck = temp_vrt_ds.GetGeoTransform()
            DimensionsCheck = [temp_vrt_ds.RasterXSize, temp_vrt_ds.RasterYSize]
            temp_vrt_ds = None

    ################################################################
    # set up output file
    ################################################################

    # open output file exists
    if outfile and os.path.isfile(outfile) and not overwrite:
        if allBandsIndex is not None:
            raise Exception("Error! allBands option was given but Output file exists, must use --overwrite option!")
        if len(calc) > 1:
            raise Exception(
                "Error! multiple calc options were given but Output file exists, must use --overwrite option!")
        if debug:
            print(f"Output file {outfile} exists - filling in results into file")

        myOut = open_ds(outfile, access_mode=gdal.OF_UPDATE | gdal.OF_RASTER)
        if myOut is None:
            error = 'but cannot be opened for update'
        elif [myOut.RasterXSize, myOut.RasterYSize] != DimensionsCheck:
            error = 'but is the wrong size'
        elif ProjectionCheck and ProjectionCheck != myOut.GetProjection():
            error = 'but is the wrong projection'
        elif GeoTransformCheck and GeoTransformCheck != myOut.GetGeoTransform(can_return_null=True):
            error = 'but is the wrong geotransform'
        else:
            error = None
        if error:
            raise Exception(
                f"Error! Output exists, {error}. Use the --overwrite option "
                f"to automatically overwrite the existing file")

        myOutB = myOut.GetRasterBand(1)
        myOutNDV = myOutB.GetNoDataValue()
        myOutType = myOutB.DataType
    else:
        if outfile:
            # remove existing file and regenerate
            if os.path.isfile(outfile):
                os.remove(outfile)
            # create a new file
            if debug:
                print(f"Generating output file {outfile}")
        else:
            outfile = ''

        # find data type to use
        if not type:
            # use the largest type of the input files
            if hasattr(gdal, 'DataTypeUnion'):
                myOutType = myDataTypeNum[0]
                for dt in myDataTypeNum:
                    myOutType = gdal.DataTypeUnion(myOutType, dt)
            else:
                # GDAL < 3.5: not super reliable as it depends on the values of the GDALDataType enumeration ...
                myOutType = max(myDataTypeNum)
        else:
            myOutType = type
            if isinstance(myOutType, str):
                myOutType = gdal.GetDataTypeByName(myOutType)

        # create file
        myOutDrv = gdal.GetDriverByName(format)
        myOut = myOutDrv.Create(
            os.fspath(outfile), DimensionsCheck[0], DimensionsCheck[1], allBandsCount,
            myOutType, creation_options)
        if myOut is None:
            raise Exception(f"Error! Could not create output file {outfile}")

        # set output geo info based on first input layer
        if not GeoTransformCheck:
            GeoTransformCheck = myFiles[0].GetGeoTransform(can_return_null=True)
        if GeoTransformCheck:
            myOut.SetGeoTransform(GeoTransformCheck)

        if not ProjectionCheck:
            ProjectionCheck = myFiles[0].GetProjection()
        if ProjectionCheck:
            myOut.SetProjection(ProjectionCheck)

        if NoDataValue is None:
            myOutNDV = DefaultNDVLookup[myOutType]  # use the default noDataValue for this datatype
        elif isinstance(NoDataValue, str) and NoDataValue.lower() == 'none':
            myOutNDV = None  # not to set any noDataValue
        else:
            myOutNDV = NoDataValue  # use the given noDataValue

        for i in range(1, allBandsCount + 1):
            myOutB = myOut.GetRasterBand(i)
            if myOutNDV is not None:
                myOutB.SetNoDataValue(myOutNDV)
            if color_table:
                # set color table and color interpretation
                if is_path_like(color_table):
                    color_table = get_color_table(color_table)
                myOutB.SetRasterColorTable(color_table)
                myOutB.SetRasterColorInterpretation(gdal.GCI_PaletteIndex)
            myOutB = None  # write to band

    if hideNoData:
        myOutNDV = None

    myOutTypeName = gdal.GetDataTypeName(myOutType)
    if debug:
        print(f"output file: {outfile}, dimensions: {myOut.RasterXSize}, {myOut.RasterYSize}, type: {myOutTypeName}")

    ################################################################
    # find block size to chop grids into bite-sized chunks
    ################################################################

    # use the block size of the first layer to read efficiently
    myBlockSize = myFiles[0].GetRasterBand(myBands[0]).GetBlockSize()
    # find total x and y blocks to be read (integer ceiling division)
    nXBlocks = (DimensionsCheck[0] + myBlockSize[0] - 1) // myBlockSize[0]
    nYBlocks = (DimensionsCheck[1] + myBlockSize[1] - 1) // myBlockSize[1]
    myBufSize = myBlockSize[0] * myBlockSize[1]

    if debug:
        print(f"using blocksize {myBlockSize[0]} x {myBlockSize[1]}")

    # variables for displaying progress
    ProgressCt = -1
    ProgressMk = -1
    ProgressEnd = nXBlocks * nYBlocks * allBandsCount

    ################################################################
    # start looping through each band in allBandsCount
    ################################################################
    for bandNo in range(1, allBandsCount + 1):

        ################################################################
        # start looping through blocks of data
        ################################################################

        # store these numbers in variables that may change later
        nXValid = myBlockSize[0]
        nYValid = myBlockSize[1]

        # For list-valued alphas, count inputs and find the widest datatype so
        # one stacked buffer per alpha can be allocated per block.
        count_file_per_alpha = {}
        largest_datatype_per_alpha = {}
        for i, Alpha in enumerate(myAlphaList):
            if Alpha in myAlphaFileLists:
                # populate lettered arrays with values
                if allBandsIndex is not None and allBandsIndex == i:
                    myBandNo = bandNo
                else:
                    myBandNo = myBands[i]
                band = myFiles[i].GetRasterBand(myBandNo)
                if Alpha not in count_file_per_alpha:
                    count_file_per_alpha[Alpha] = 1
                    largest_datatype_per_alpha[Alpha] = band.DataType
                else:
                    count_file_per_alpha[Alpha] += 1
                    if hasattr(gdal, 'DataTypeUnion'):
                        largest_datatype_per_alpha[Alpha] = gdal.DataTypeUnion(
                            largest_datatype_per_alpha[Alpha], band.DataType)

        # loop through X-lines
        for X in range(0, nXBlocks):

            # in case the blocks don't fit perfectly
            # change the block size of the final piece
            if X == nXBlocks - 1:
                nXValid = DimensionsCheck[0] - X * myBlockSize[0]

            # find X offset
            myX = X * myBlockSize[0]

            # reset buffer size for start of Y loop
            nYValid = myBlockSize[1]
            myBufSize = nXValid * nYValid

            # loop through Y lines
            for Y in range(0, nYBlocks):
                ProgressCt += 1
                if 10 * ProgressCt / ProgressEnd % 10 != ProgressMk and not quiet:
                    ProgressMk = 10 * ProgressCt / ProgressEnd % 10
                    print("%d.." % (10 * ProgressMk), end=" ")

                # change the block size of the final piece
                if Y == nYBlocks - 1:
                    nYValid = DimensionsCheck[1] - Y * myBlockSize[1]
                    myBufSize = nXValid * nYValid

                # find Y offset
                myY = Y * myBlockSize[1]

                # create empty buffer to mark where nodata occurs
                myNDVs = None

                # make local namespace for calculation
                local_namespace = {}

                # Create destination numpy arrays for each alpha
                numpy_arrays = {}
                counter_per_alpha = {}
                for Alpha in count_file_per_alpha:
                    dtype = gdal_array.GDALTypeCodeToNumericTypeCode(largest_datatype_per_alpha[Alpha])
                    if count_file_per_alpha[Alpha] == 1:
                        numpy_arrays[Alpha] = numpy.empty((nYValid, nXValid), dtype=dtype)
                    else:
                        numpy_arrays[Alpha] = numpy.empty(
                            (count_file_per_alpha[Alpha], nYValid, nXValid), dtype=dtype)
                    counter_per_alpha[Alpha] = 0

                # fetch data for each input layer
                for i, Alpha in enumerate(myAlphaList):

                    # populate lettered arrays with values
                    if allBandsIndex is not None and allBandsIndex == i:
                        myBandNo = bandNo
                    else:
                        myBandNo = myBands[i]
                    if Alpha in myAlphaFileLists:
                        if count_file_per_alpha[Alpha] == 1:
                            buf_obj = numpy_arrays[Alpha]
                        else:
                            buf_obj = numpy_arrays[Alpha][counter_per_alpha[Alpha]]
                        myval = gdal_array.BandReadAsArray(
                            myFiles[i].GetRasterBand(myBandNo),
                            xoff=myX, yoff=myY,
                            win_xsize=nXValid, win_ysize=nYValid,
                            buf_obj=buf_obj)
                        counter_per_alpha[Alpha] += 1
                    else:
                        myval = gdal_array.BandReadAsArray(
                            myFiles[i].GetRasterBand(myBandNo),
                            xoff=myX, yoff=myY,
                            win_xsize=nXValid, win_ysize=nYValid)
                    if myval is None:
                        # BUGFIX: previously indexed the loop variable
                        # `filename` (a string or None) as `filename[i]`;
                        # the per-input name lives in myFileNames.
                        raise Exception(f'Input block reading failed from filename {myFileNames[i]}')

                    # fill in nodata values
                    if myNDV[i] is not None:
                        # myNDVs is a boolean buffer.
                        # a cell equals to 1 if there is NDV in any of the corresponding cells in input raster bands.
                        if myNDVs is None:
                            # this is the first band that has NDV set. we initializes myNDVs to a zero buffer
                            # as we didn't see any NDV value yet.
                            myNDVs = numpy.zeros(myBufSize)
                            myNDVs.shape = (nYValid, nXValid)
                        myNDVs = 1 * numpy.logical_or(myNDVs == 1, myval == myNDV[i])

                    # add an array of values for this block to the eval namespace
                    if Alpha not in myAlphaFileLists:
                        local_namespace[Alpha] = myval
                    myval = None

                for lst in myAlphaFileLists:
                    local_namespace[lst] = numpy_arrays[lst]

                # try the calculation on the array blocks
                this_calc = calc[bandNo - 1 if len(calc) > 1 else 0]
                try:
                    myResult = eval(this_calc, global_namespace, local_namespace)
                except:
                    print(f"evaluation of calculation {this_calc} failed")
                    raise

                # Propagate nodata values (set nodata cells to zero
                # then add nodata value to these cells).
                if myNDVs is not None and myOutNDV is not None:
                    myResult = ((1 * (myNDVs == 0)) * myResult) + (myOutNDV * myNDVs)
                elif not isinstance(myResult, numpy.ndarray):
                    myResult = numpy.ones((nYValid, nXValid)) * myResult

                # write data block to the output file
                myOutB = myOut.GetRasterBand(bandNo)
                if gdal_array.BandWriteArray(myOutB, myResult, xoff=myX, yoff=myY) != 0:
                    raise Exception('Block writing failed')
                myOutB = None  # write to band

    # remove temp files
    for idx, tempFile in enumerate(myTempFileNames):
        myFiles[idx] = None
        os.remove(tempFile)

    gdal.ErrorReset()
    myOut.FlushCache()
    if gdal.GetLastErrorMsg() != '':
        raise Exception('Dataset writing failed')
    if not quiet:
        print("100 - Done")

    return myOut
if __name__ == "__main__":
    # Benchmark WKT -> geometry -> WKB round-trips: GDAL/OGR vs Shapely.
    # Usage: script.py <wkt> [<num_runs>]
    if len(sys.argv) < 2:
        # BUGFIX: converted Python 2 `print >> sys.stderr` / `print "..."`
        # statements to Python 3 print() calls throughout this block.
        print("Usage:", sys.argv[0], "<wkt> [<num_runs>]", file=sys.stderr)
        sys.exit(1)
    if len(sys.argv) == 3:
        num_runs = int(sys.argv[2])
    else:
        num_runs = 1000

    # Run everything twice so the second pass measures warm caches.
    for k in range(2):
        print("GDAL/OGR ....")
        ogr.UseExceptions()
        gdal.ErrorReset()
        # time.clock() was removed in Python 3.8; perf_counter() is the
        # documented replacement for interval timing.
        start = time.perf_counter()
        for i in range(num_runs):
            geometry = ogr.CreateGeometryFromWkt(sys.argv[1])
            wkb = geometry.ExportToWkb()
            geometry.Destroy()
        elapsed = time.perf_counter() - start
        print("elapsed=", elapsed, file=sys.stderr)

        print("Shapely ....")
        start = time.perf_counter()
        for i in range(num_runs):
            geometry = loads(sys.argv[1])
            wkb = dumps(geometry)
        elapsed = time.perf_counter() - start
        # BUGFIX: the Shapely timing was computed but never reported.
        print("elapsed=", elapsed, file=sys.stderr)
def ogr_rfc28_44():
    """RFC 28: SQL handling of layer/field names containing dots.

    Checks required double-quoting, tolerated missing/unnecessary quoting,
    and the error raised when an unquoted dotted name is ambiguous.
    Returns 'success' or 'fail' (old-style gdaltest convention).
    """
    ds = ogr.GetDriverByName('Memory').CreateDataSource('')
    lyr = ds.CreateLayer('lyr.withpoint')
    fld_defn = ogr.FieldDefn('field.withpoint', ogr.OFTInteger)
    lyr.CreateField(fld_defn)
    fld_defn = ogr.FieldDefn('foo', ogr.OFTInteger)
    lyr.CreateField(fld_defn)

    feat = ogr.Feature(lyr.GetLayerDefn())
    feat.SetField(0, -1)
    lyr.CreateFeature(feat)

    feat = ogr.Feature(lyr.GetLayerDefn())
    feat.SetField(0, 1)
    feat.SetField(1, 2)
    lyr.CreateFeature(feat)

    # Properly quoted dotted field name: no error, one matching feature.
    gdal.ErrorReset()
    sql_lyr = ds.ExecuteSQL(
        "SELECT * FROM \"lyr.withpoint\" WHERE \"field.withpoint\" = 1")
    if gdal.GetLastErrorMsg() != '':
        gdaltest.post_reason('fail')
        return 'fail'
    f = sql_lyr.GetNextFeature()
    if f is None:
        gdaltest.post_reason('fail')
        return 'fail'
    # BUGFIX: result sets must be released on the datasource that produced
    # them; this previously called gdaltest.ds.ReleaseResultSet().
    ds.ReleaseResultSet(sql_lyr)

    # Fully qualified "layer"."field" references.
    gdal.ErrorReset()
    sql_lyr = ds.ExecuteSQL(
        "SELECT \"lyr.withpoint\".\"field.withpoint\", \"field.withpoint\" FROM \"lyr.withpoint\" WHERE \"lyr.withpoint\".\"field.withpoint\" = 1"
    )
    if gdal.GetLastErrorMsg() != '':
        gdaltest.post_reason('fail')
        return 'fail'
    f = sql_lyr.GetNextFeature()
    if f is None:
        gdaltest.post_reason('fail')
        return 'fail'
    ds.ReleaseResultSet(sql_lyr)

    # Test our tolerance against lack of necessary quoting
    gdal.ErrorReset()
    gdal.PushErrorHandler()
    sql_lyr = ds.ExecuteSQL(
        "SELECT * FROM \"lyr.withpoint\" WHERE field.withpoint = 1")
    gdal.PopErrorHandler()
    if gdal.GetLastErrorMsg(
    ) != 'Passed field name field.withpoint should have been surrounded by double quotes. Accepted since there is no ambiguity...':
        gdaltest.post_reason('fail')
        return 'fail'
    f = sql_lyr.GetNextFeature()
    if f is None:
        gdaltest.post_reason('fail')
        return 'fail'
    ds.ReleaseResultSet(sql_lyr)

    # Again, but in a situation where there IS ambiguity: a layer named
    # 'field' makes the unquoted 'field.withpoint' unresolvable.
    lyr = ds.CreateLayer('field')
    fld_defn = ogr.FieldDefn('id', ogr.OFTInteger)
    lyr.CreateField(fld_defn)

    gdal.ErrorReset()
    gdal.PushErrorHandler()
    sql_lyr = ds.ExecuteSQL(
        "SELECT * FROM \"lyr.withpoint\" JOIN field ON \"lyr.withpoint\".foo = field.id WHERE field.withpoint = 1"
    )
    gdal.PopErrorHandler()
    if gdal.GetLastErrorMsg(
    ) != '"field"."withpoint" not recognised as an available field.':
        gdaltest.post_reason('fail')
        return 'fail'
    if sql_lyr is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    # Test our tolerance against unnecessary quoting
    gdal.ErrorReset()
    gdal.PushErrorHandler()
    sql_lyr = ds.ExecuteSQL(
        "SELECT * FROM \"lyr.withpoint\" f WHERE \"f.foo\" = 2")
    gdal.PopErrorHandler()
    if gdal.GetLastErrorMsg(
    ) != 'Passed field name f.foo should NOT have been surrounded by double quotes. Accepted since there is no ambiguity...':
        gdaltest.post_reason('fail')
        return 'fail'
    f = sql_lyr.GetNextFeature()
    if f is None:
        gdaltest.post_reason('fail')
        return 'fail'
    ds.ReleaseResultSet(sql_lyr)

    return 'success'
def test_ogr_mongodb_2(): if ogrtest.mongodb_drv is None: pytest.skip() ogrtest.mongodb_ds = ogr.Open(ogrtest.mongodb_test_uri, update=1) assert ogrtest.mongodb_ds.GetLayerByName('not_existing') is None assert ogrtest.mongodb_ds.TestCapability(ogr.ODsCCreateLayer) == 1 assert ogrtest.mongodb_ds.TestCapability(ogr.ODsCDeleteLayer) == 1 assert ogrtest.mongodb_ds.TestCapability(ogr.ODsCCreateGeomFieldAfterCreateLayer) == 1 # Create layer a_uuid = str(uuid.uuid1()).replace('-', '_') ogrtest.mongodb_layer_name = 'test_' + a_uuid srs = osr.SpatialReference() srs.ImportFromEPSG(4258) # ETRS 89 will reproject identically to EPSG:4326 lyr = ogrtest.mongodb_ds.CreateLayer(ogrtest.mongodb_layer_name, geom_type=ogr.wkbPolygon, srs=srs, options=['GEOMETRY_NAME=location.mygeom', 'FID=']) gdal.PushErrorHandler() ret = lyr.CreateGeomField(ogr.GeomFieldDefn('location.mygeom', ogr.wkbPoint)) gdal.PopErrorHandler() assert ret != 0 ret = lyr.CreateField(ogr.FieldDefn('str', ogr.OFTString)) assert ret == 0 gdal.PushErrorHandler() ret = lyr.CreateField(ogr.FieldDefn('str', ogr.OFTString)) gdal.PopErrorHandler() assert ret != 0 lyr.CreateField(ogr.FieldDefn('location.name', ogr.OFTString)) bool_field = ogr.FieldDefn('bool', ogr.OFTInteger) bool_field.SetSubType(ogr.OFSTBoolean) lyr.CreateField(bool_field) lyr.CreateField(ogr.FieldDefn('int', ogr.OFTInteger)) lyr.CreateField(ogr.FieldDefn('int64', ogr.OFTInteger64)) lyr.CreateField(ogr.FieldDefn('real', ogr.OFTReal)) lyr.CreateField(ogr.FieldDefn('dt', ogr.OFTDateTime)) lyr.CreateField(ogr.FieldDefn('embed.str', ogr.OFTString)) lyr.CreateField(ogr.FieldDefn('binary', ogr.OFTBinary)) lyr.CreateField(ogr.FieldDefn('strlist', ogr.OFTStringList)) lyr.CreateField(ogr.FieldDefn('intlist', ogr.OFTIntegerList)) lyr.CreateField(ogr.FieldDefn('int64list', ogr.OFTInteger64List)) lyr.CreateField(ogr.FieldDefn('realist', ogr.OFTRealList)) lyr.CreateField(ogr.FieldDefn('embed.embed2.int', ogr.OFTInteger)) 
lyr.CreateField(ogr.FieldDefn('embed.embed2.real', ogr.OFTReal)) lyr.CreateField(ogr.FieldDefn('str_is_null', ogr.OFTString)) lyr.CreateField(ogr.FieldDefn('str_is_unset', ogr.OFTString)) # Test CreateFeature() f = ogr.Feature(lyr.GetLayerDefn()) f['str'] = 'str' f['location.name'] = 'Paris' f['bool'] = 1 f['int'] = 1 f['int64'] = 1234567890123456 # put a number larger than 1 << 40 so that fromjson() doesn't pick double f['real'] = 1.23 f['dt'] = '1234/12/31 23:59:59.123+00' f.SetFieldBinaryFromHexString('binary', '00FF') f['strlist'] = ['a', 'b'] f['intlist'] = [1, 2] f['int64list'] = [1234567890123456, 1234567890123456] f['realist'] = [1.23, 4.56] f['embed.str'] = 'foo' f['embed.embed2.int'] = 3 f['embed.embed2.real'] = 3.45 f.SetFieldNull('str_is_null') f.SetGeometryDirectly(ogr.CreateGeometryFromWkt('POLYGON((2 49,2 50,3 50,3 49,2 49))')) assert lyr.CreateFeature(f) == 0 assert f['_id'] is not None f_ref = f.Clone() # Test GetFeatureCount() assert lyr.GetFeatureCount() == 1 # Test GetNextFeature() lyr.ResetReading() f = lyr.GetNextFeature() if not f.Equal(f_ref): f.DumpReadable() pytest.fail() f = lyr.GetNextFeature() if f is not None: f.DumpReadable() pytest.fail() # Test GetFeature() f = lyr.GetFeature(1) if not f.Equal(f_ref): f.DumpReadable() pytest.fail() # Test SetFeature() f['bool'] = 0 assert lyr.SetFeature(f) == 0 f_ref = f.Clone() f = lyr.GetFeature(1) if f['bool'] != 0: f.DumpReadable() pytest.fail() # Test (not working) DeleteFeature() gdal.PushErrorHandler() ret = lyr.DeleteFeature(1) gdal.PopErrorHandler() assert ret != 0 # Test Mongo filter lyr.SetAttributeFilter('{ "int": 1 }') lyr.ResetReading() f = lyr.GetNextFeature() if not f.Equal(f_ref): f.DumpReadable() pytest.fail() lyr.SetAttributeFilter('{ "int": 2 }') lyr.ResetReading() f = lyr.GetNextFeature() if f is not None: f.DumpReadable() pytest.fail() # Test OGR filter lyr.SetAttributeFilter('int = 1') lyr.ResetReading() f = lyr.GetNextFeature() if not f.Equal(f_ref): f.DumpReadable() 
pytest.fail() lyr.SetAttributeFilter('int = 2') lyr.ResetReading() f = lyr.GetNextFeature() if f is not None: f.DumpReadable() pytest.fail() # Test geometry filter lyr.SetAttributeFilter(None) lyr.SetSpatialFilterRect(2.1, 49.1, 2.9, 49.9) lyr.ResetReading() f = lyr.GetNextFeature() if not f.Equal(f_ref): f.DumpReadable() pytest.fail() lyr.SetSpatialFilterRect(1.1, 49.1, 1.9, 49.9) lyr.ResetReading() f = lyr.GetNextFeature() if f is not None: f.DumpReadable() pytest.fail() f = f_ref.Clone() f.SetFID(-1) f.SetGeometryDirectly(None) assert lyr.CreateFeature(f) == 0 # Duplicate key gdal.PushErrorHandler() ret = lyr.SyncToDisk() gdal.PopErrorHandler() assert ret != 0 f['_id'] = None lyr.CreateFeature(f) ret = lyr.SyncToDisk() assert ret == 0 # Missing _id f.UnsetField('_id') gdal.PushErrorHandler() ret = lyr.SetFeature(f) gdal.PopErrorHandler() assert ret != 0 # MongoDB dialect of ExecuteSQL() with invalid JSON gdal.PushErrorHandler() sql_lyr = ogrtest.mongodb_ds.ExecuteSQL('{', dialect='MongoDB') gdal.PopErrorHandler() # MongoDB dialect of ExecuteSQL() with nonexistent command. 
sql_lyr = ogrtest.mongodb_ds.ExecuteSQL('{ "foo": 1 }', dialect='MongoDB') assert sql_lyr is not None ogrtest.mongodb_ds.ReleaseResultSet(sql_lyr) # MongoDB dialect of ExecuteSQL() with existing commnand sql_lyr = ogrtest.mongodb_ds.ExecuteSQL('{ "listCommands" : 1 }', dialect='MongoDB') assert sql_lyr is not None f = sql_lyr.GetNextFeature() assert f is not None f = sql_lyr.GetNextFeature() assert f is None sql_lyr.GetLayerDefn() sql_lyr.ResetReading() sql_lyr.TestCapability('') ogrtest.mongodb_ds.ReleaseResultSet(sql_lyr) # Regular ExecuteSQL() sql_lyr = ogrtest.mongodb_ds.ExecuteSQL('SELECT * FROM ' + ogrtest.mongodb_layer_name) assert sql_lyr is not None ogrtest.mongodb_ds.ReleaseResultSet(sql_lyr) # Test CreateLayer again with same name gdal.PushErrorHandler() lyr = ogrtest.mongodb_ds.CreateLayer(ogrtest.mongodb_layer_name) gdal.PopErrorHandler() assert lyr is None ogrtest.mongodb_ds = gdal.OpenEx(ogrtest.mongodb_test_uri, gdal.OF_UPDATE, open_options=['FEATURE_COUNT_TO_ESTABLISH_FEATURE_DEFN=-1', 'BULK_INSERT=NO', 'JSON_FIELD=TRUE']) # Check after reopening lyr = ogrtest.mongodb_ds.GetLayerByName(ogrtest.mongodb_layer_name) assert lyr.TestCapability(ogr.OLCFastSpatialFilter) != 0 f = lyr.GetNextFeature() json_field = f['_json'] # We cannot use feature.Equal() has the C++ layer defn has changed for i in range(f_ref.GetDefnRef().GetFieldCount()): if f.GetField(i) != f_ref.GetField(i) or \ f.GetFieldDefnRef(i).GetType() != f_ref.GetFieldDefnRef(i).GetType() or \ f.GetFieldDefnRef(i).GetSubType() != f_ref.GetFieldDefnRef(i).GetSubType(): f.DumpReadable() f_ref.DumpReadable() pytest.fail() for i in range(f_ref.GetDefnRef().GetGeomFieldCount()): if not f.GetGeomFieldRef(i).Equals(f_ref.GetGeomFieldRef(i)) or \ f.GetGeomFieldDefnRef(i).GetName() != f_ref.GetGeomFieldDefnRef(i).GetName() or \ f.GetGeomFieldDefnRef(i).GetType() != f_ref.GetGeomFieldDefnRef(i).GetType(): f.DumpReadable() f_ref.DumpReadable() pytest.fail() lyr.SetSpatialFilterRect(2.1, 49.1, 2.9, 49.9) 
lyr.ResetReading() if f is None: f.DumpReadable() pytest.fail() # Create a feature only from its _json content and do not store any ogr metadata related to the layer ogrtest.mongodb_layer_name_no_ogr_metadata = ogrtest.mongodb_layer_name + "_no_ogr_metadata" lyr = ogrtest.mongodb_ds.CreateLayer(ogrtest.mongodb_layer_name_no_ogr_metadata, options=['GEOMETRY_NAME=location.mygeom', 'FID=', 'WRITE_OGR_METADATA=NO']) f = ogr.Feature(lyr.GetLayerDefn()) f['_json'] = json_field assert lyr.CreateFeature(f) == 0 ogrtest.mongodb_layer_name_guess_types = ogrtest.mongodb_layer_name + "_guess_types" lyr = ogrtest.mongodb_ds.CreateLayer(ogrtest.mongodb_layer_name_guess_types, geom_type=ogr.wkbNone, options=['FID=', 'WRITE_OGR_METADATA=NO']) f = ogr.Feature(lyr.GetLayerDefn()) f['_json'] = '{' f['_json'] += '"int": 2, ' f['_json'] += '"int64": { "$numberLong" : "1234567890123456" }, ' f['_json'] += '"real": 2.34, ' f['_json'] += '"intlist" : [2], ' f['_json'] += '"reallist" : [2.34], ' f['_json'] += '"int64list" : [{ "$numberLong" : "1234567890123456" }], ' f['_json'] += '"int_str" : 2, ' f['_json'] += '"str_int" : "2", ' f['_json'] += '"int64_str" : { "$numberLong" : "1234567890123456" }, ' f['_json'] += '"str_int64" : "2", ' f['_json'] += '"int_int64": 2, ' f['_json'] += '"int64_int": { "$numberLong" : "1234567890123456" }, ' f['_json'] += '"int_real": 2, ' f['_json'] += '"real_int": 3.45, ' f['_json'] += '"int64_real": { "$numberLong" : "1234567890123456" }, ' f['_json'] += '"real_int64": 3.45, ' f['_json'] += '"real_str": 3.45, ' f['_json'] += '"str_real": "3.45", ' f['_json'] += '"int_bool" : 2, ' f['_json'] += '"bool_int" : true, ' f['_json'] += '"intlist_strlist" : [2], ' f['_json'] += '"strlist_intlist" : ["2"], ' f['_json'] += '"intlist_int64list": [2], ' f['_json'] += '"int64list_intlist": [{ "$numberLong" : "1234567890123456" }], ' f['_json'] += '"intlist_reallist": [2], ' f['_json'] += '"reallist_intlist": [3.45], ' f['_json'] += '"int64list_reallist": [{ 
"$numberLong" : "1234567890123456" }], ' f['_json'] += '"reallist_int64list": [3.45], ' f['_json'] += '"intlist_boollist" : [2], ' f['_json'] += '"boollist_intlist" : [true], ' f['_json'] += '"mixedlist": [true,1,{ "$numberLong" : "1234567890123456" },3.45],' f['_json'] += '"mixedlist2": [true,1,{ "$numberLong" : "1234567890123456" },3.45,"str"]' f['_json'] += '}' assert lyr.CreateFeature(f) == 0 f = ogr.Feature(lyr.GetLayerDefn()) f['_json'] = '{' f['_json'] += '"int_str" : "3", ' f['_json'] += '"str_int" : 3, ' f['_json'] += '"int64_str" : "2", ' f['_json'] += '"str_int64" : { "$numberLong" : "1234567890123456" }, ' f['_json'] += '"int_int64": { "$numberLong" : "1234567890123456" }, ' f['_json'] += '"int64_int": 2, ' f['_json'] += '"int_real" : 3.45, ' f['_json'] += '"real_int": 2, ' f['_json'] += '"int64_real": 3.45, ' f['_json'] += '"real_int64": { "$numberLong" : "1234567890123456" }, ' f['_json'] += '"real_str": "3.45", ' f['_json'] += '"str_real": 3.45, ' f['_json'] += '"int_bool" : true, ' f['_json'] += '"bool_int" : 2, ' f['_json'] += '"intlist_strlist" : ["3"], ' f['_json'] += '"strlist_intlist" : [3], ' f['_json'] += '"intlist_int64list": [{ "$numberLong" : "1234567890123456" }], ' f['_json'] += '"int64list_intlist": [2], ' f['_json'] += '"intlist_reallist": [3.45], ' f['_json'] += '"reallist_intlist": [2], ' f['_json'] += '"int64list_reallist": [3.45], ' f['_json'] += '"reallist_int64list": [{ "$numberLong" : "1234567890123456" }], ' f['_json'] += '"intlist_boollist" : [true], ' f['_json'] += '"boollist_intlist" : [2]' f['_json'] += '}' assert lyr.CreateFeature(f) == 0 # This new features will not be taken into account by below the FEATURE_COUNT_TO_ESTABLISH_FEATURE_DEFN=2 f = ogr.Feature(lyr.GetLayerDefn()) f['_json'] = '{' f['_json'] += '"int": { "$minKey": 1 }, ' f['_json'] += '"int64": { "$minKey": 1 }, ' f['_json'] += '"real": { "$minKey": 1 }, ' f['_json'] += '"intlist" : [1, "1", { "$minKey": 1 },{ "$maxKey": 1 },{ "$numberLong" : 
"-1234567890123456" }, { "$numberLong" : "1234567890123456" }, -1234567890123456.1, 1234567890123456.1, { "$numberLong" : "1" }, 1.23 ], ' f['_json'] += '"int64list" : [1, { "$numberLong" : "1234567890123456" }, "1", { "$minKey": 1 },{ "$maxKey": 1 }, -1e300, 1e300, 1.23 ], ' f['_json'] += '"reallist" : [1, { "$numberLong" : "1234567890123456" }, 1.0, "1", { "$minKey": 1 },{ "$maxKey": 1 }, { "$numberLong" : "1234567890123456" } ] ' f['_json'] += '}' assert lyr.CreateFeature(f) == 0 f = ogr.Feature(lyr.GetLayerDefn()) f['_json'] = '{' f['_json'] += '"int": { "$maxKey": 1 }, ' f['_json'] += '"int64": { "$maxKey": 1 }, ' f['_json'] += '"real": { "$maxKey": 1 } ' f['_json'] += '}' assert lyr.CreateFeature(f) == 0 ogrtest.mongodb_layer_name_with_2d_index = ogrtest.mongodb_layer_name + "_with_2d_index" gdal.SetConfigOption('OGR_MONGODB_SPAT_INDEX_TYPE', '2d') lyr = ogrtest.mongodb_ds.CreateLayer(ogrtest.mongodb_layer_name_with_2d_index, geom_type=ogr.wkbPoint, options=['FID=', 'WRITE_OGR_METADATA=NO']) gdal.SetConfigOption('OGR_MONGODB_SPAT_INDEX_TYPE', None) f = ogr.Feature(lyr.GetLayerDefn()) f.SetGeometryDirectly(ogr.CreateGeometryFromWkt('POINT(2 49)')) assert lyr.CreateFeature(f) == 0 ogrtest.mongodb_layer_name_no_spatial_index = ogrtest.mongodb_layer_name + "_no_spatial_index" for i in range(2): lyr = ogrtest.mongodb_ds.CreateLayer(ogrtest.mongodb_layer_name_no_spatial_index, options=['SPATIAL_INDEX=NO', 'OVERWRITE=YES']) f = ogr.Feature(lyr.GetLayerDefn()) f.SetGeometryDirectly(ogr.CreateGeometryFromWkt('POINT(2 49)')) assert lyr.CreateFeature(f) == 0 ogrtest.mongodb_ds.ExecuteSQL('WRITE_OGR_METADATA ' + ogrtest.mongodb_layer_name_no_spatial_index) # Open "ghost" layer lyr = ogrtest.mongodb_ds.GetLayerByName('_ogr_metadata') assert lyr is not None lyr.SetAttributeFilter("layer LIKE '%s%%'" % ogrtest.mongodb_layer_name) assert lyr.GetFeatureCount() == 2 assert ogrtest.mongodb_ds.DeleteLayer(-1) != 0 lyr = 
ogrtest.mongodb_ds.GetLayerByName(ogrtest.mongodb_test_dbname + '.' + '_ogr_metadata') assert lyr is not None ogrtest.mongodb_ds = None # Reopen in read-only ogrtest.mongodb_ds = gdal.OpenEx(ogrtest.mongodb_test_uri, 0, open_options=['FEATURE_COUNT_TO_ESTABLISH_FEATURE_DEFN=2', 'JSON_FIELD=TRUE']) lyr = ogrtest.mongodb_ds.GetLayerByName(ogrtest.mongodb_layer_name_no_ogr_metadata) assert lyr.TestCapability(ogr.OLCFastSpatialFilter) == 0 f = lyr.GetNextFeature() for i in range(f_ref.GetDefnRef().GetFieldCount()): if f_ref.GetFieldDefnRef(i).GetNameRef() == "str_is_null": continue if f_ref.GetFieldDefnRef(i).GetNameRef() == "str_is_unset": continue # Order might be a bit different... j = f.GetDefnRef().GetFieldIndex(f_ref.GetFieldDefnRef(i).GetNameRef()) if f.GetField(j) != f_ref.GetField(i) or \ f.GetFieldDefnRef(j).GetType() != f_ref.GetFieldDefnRef(i).GetType() or \ f.GetFieldDefnRef(j).GetSubType() != f_ref.GetFieldDefnRef(i).GetSubType(): f.DumpReadable() f_ref.DumpReadable() pytest.fail() for i in range(f_ref.GetDefnRef().GetGeomFieldCount()): # Order might be a bit different... 
j = f.GetDefnRef().GetGeomFieldIndex(f_ref.GetGeomFieldDefnRef(i).GetNameRef()) if not f.GetGeomFieldRef(j).Equals(f_ref.GetGeomFieldRef(i)) or \ f.GetGeomFieldDefnRef(j).GetName() != f_ref.GetGeomFieldDefnRef(i).GetName() or \ f.GetGeomFieldDefnRef(j).GetType() != f_ref.GetGeomFieldDefnRef(i).GetType(): f.DumpReadable() f_ref.DumpReadable() print(f_ref.GetGeomFieldDefnRef(i).GetType()) pytest.fail(f.GetGeomFieldDefnRef(j).GetType()) lyr.SetSpatialFilterRect(2.1, 49.1, 2.9, 49.9) lyr.ResetReading() if f is None: f.DumpReadable() pytest.fail() lyr = ogrtest.mongodb_ds.GetLayerByName(ogrtest.mongodb_layer_name_guess_types) expected_fields = [ ("int", ogr.OFTInteger), ("int64", ogr.OFTInteger64), ("real", ogr.OFTReal), ("intlist", ogr.OFTIntegerList), ("reallist", ogr.OFTRealList), ("int64list", ogr.OFTInteger64List), ("int_str", ogr.OFTString), ("str_int", ogr.OFTString), ("int64_str", ogr.OFTString), ("str_int64", ogr.OFTString), ("int_int64", ogr.OFTInteger64), ("int64_int", ogr.OFTInteger64), ("int_real", ogr.OFTReal), ("real_int", ogr.OFTReal), ("int64_real", ogr.OFTReal), ("real_int64", ogr.OFTReal), ("real_str", ogr.OFTString), ("str_real", ogr.OFTString), ("int_bool", ogr.OFTInteger), ("bool_int", ogr.OFTInteger), ("intlist_strlist", ogr.OFTStringList), ("strlist_intlist", ogr.OFTStringList), ("intlist_int64list", ogr.OFTInteger64List), ("int64list_intlist", ogr.OFTInteger64List), ("intlist_reallist", ogr.OFTRealList), ("reallist_intlist", ogr.OFTRealList), ("int64list_reallist", ogr.OFTRealList), ("reallist_int64list", ogr.OFTRealList), ("intlist_boollist", ogr.OFTIntegerList), ("boollist_intlist", ogr.OFTIntegerList), ("mixedlist", ogr.OFTRealList), ("mixedlist2", ogr.OFTStringList)] for (fieldname, fieldtype) in expected_fields: fld_defn = lyr.GetLayerDefn().GetFieldDefn(lyr.GetLayerDefn().GetFieldIndex(fieldname)) assert fld_defn.GetType() == fieldtype, fieldname assert fld_defn.GetSubType() == ogr.OFSTNone f = lyr.GetNextFeature() f = lyr.GetNextFeature() 
f = lyr.GetNextFeature() if f['intlist'] != [1, 1, -2147483648, 2147483647, -2147483648, 2147483647, -2147483648, 2147483647, 1, 1] or \ f['int64list'] != [1, 1234567890123456, 1, -9223372036854775808, 9223372036854775807, -9223372036854775808, 9223372036854775807, 1] or \ f['int'] != -2147483648 or f['int64'] != -9223372036854775808 or f['real'] - 1 != f['real']: f.DumpReadable() pytest.fail() f = lyr.GetNextFeature() if f['int'] != 2147483647 or f['int64'] != 9223372036854775807 or f['real'] + 1 != f['real']: f.DumpReadable() pytest.fail() lyr = ogrtest.mongodb_ds.GetLayerByName(ogrtest.mongodb_layer_name_with_2d_index) assert lyr.TestCapability(ogr.OLCFastSpatialFilter) != 0 lyr.SetSpatialFilterRect(1.9, 48.9, 2.1, 49.1) lyr.ResetReading() f = lyr.GetNextFeature() assert f is not None lyr.SetSpatialFilterRect(1.9, 48.9, 1.95, 48.95) lyr.ResetReading() f = lyr.GetNextFeature() assert f is None lyr = ogrtest.mongodb_ds.GetLayerByName(ogrtest.mongodb_layer_name_no_spatial_index) assert lyr.TestCapability(ogr.OLCFastSpatialFilter) == 0 lyr.SetSpatialFilterRect(1.9, 48.9, 2.1, 49.1) lyr.ResetReading() f = lyr.GetNextFeature() assert f is not None gdal.PushErrorHandler() lyr = ogrtest.mongodb_ds.CreateLayer('foo') gdal.PopErrorHandler() assert lyr is None gdal.ErrorReset() gdal.PushErrorHandler() ogrtest.mongodb_ds.ExecuteSQL('WRITE_OGR_METADATA ' + ogrtest.mongodb_layer_name) gdal.PopErrorHandler() assert gdal.GetLastErrorMsg() != '' lyr_count_before = ogrtest.mongodb_ds.GetLayerCount() gdal.PushErrorHandler() ogrtest.mongodb_ds.ExecuteSQL('DELLAYER:' + ogrtest.mongodb_layer_name) gdal.PopErrorHandler() assert ogrtest.mongodb_ds.GetLayerCount() == lyr_count_before lyr = ogrtest.mongodb_ds.GetLayerByName(ogrtest.mongodb_layer_name) gdal.PushErrorHandler() ret = lyr.CreateField(ogr.FieldDefn('foo', ogr.OFTString)) gdal.PopErrorHandler() assert ret != 0 gdal.PushErrorHandler() ret = lyr.CreateGeomField(ogr.GeomFieldDefn('foo', ogr.wkbPoint)) gdal.PopErrorHandler() 
assert ret != 0 f = ogr.Feature(lyr.GetLayerDefn()) gdal.PushErrorHandler() ret = lyr.CreateFeature(f) gdal.PopErrorHandler() assert ret != 0 gdal.PushErrorHandler() ret = lyr.SetFeature(f) gdal.PopErrorHandler() assert ret != 0 gdal.PushErrorHandler() ret = lyr.DeleteFeature(1) gdal.PopErrorHandler() assert ret != 0
def misc_6_internal(datatype, nBands, setDriversDone):
    """Exercise CreateCopy() of every registered raster driver for one
    (datatype, band count) combination.

    For each driver supporting creation, this: (1) creates a small in-memory
    source dataset, (2) copies it into a per-driver temp directory, (3) if the
    copy succeeded and the driver was not yet tested, checks that truncated
    writes to /vsimem/ are detected as errors, and (4) checks that a progress
    callback can interrupt CreateCopy().

    Parameters:
        datatype: GDAL data type (e.g. gdal.GDT_Byte) for the source bands.
        nBands: number of bands of the source dataset (0 is allowed).
        setDriversDone: set of driver short names already exercised; mutated
            in place so each driver's heavy tests run only once per session.

    Returns 'success' or 'fail' (old-style gdaltest protocol).
    """
    # 10x10 MEM source with georeferencing, projection and metadata so that
    # drivers exercising those code paths get non-trivial input.
    ds = gdal.GetDriverByName('MEM').Create('', 10, 10, nBands, datatype)
    if nBands > 0:
        ds.GetRasterBand(1).Fill(255)
    ds.SetGeoTransform([2, 1.0 / 10, 0, 49, 0, -1.0 / 10])
    ds.SetProjection(
        'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["degree",0.01745329251994328]]'
    )
    ds.SetMetadata(['a'])

    for i in range(gdal.GetDriverCount()):
        drv = gdal.GetDriver(i)
        md = drv.GetMetadata()
        # Only raster drivers that can create or copy datasets.
        if ('DCAP_CREATECOPY' in md or 'DCAP_CREATE' in md) and 'DCAP_RASTER' in md:
            # print('drv = %s, nBands = %d, datatype = %s' % (drv.ShortName, nBands, gdal.GetDataTypeName(datatype)))
            skip = False
            # FIXME: A few cases that crashes and should be investigated
            if drv.ShortName == 'JPEG2000':
                if (nBands == 2 or nBands >= 5) or \
                   not (datatype == gdal.GDT_Byte or datatype == gdal.GDT_Int16 or datatype == gdal.GDT_UInt16):
                    skip = True
            if skip is False:
                # Per-driver scratch directory so concurrent/failed runs don't collide.
                dirname = 'tmp/tmp/tmp_%s_%d_%s' % (
                    drv.ShortName, nBands, gdal.GetDataTypeName(datatype))
                try:
                    os.mkdir(dirname)
                except:
                    try:
                        os.stat(dirname)
                        # Hum the directory already exists... Not expected, but let's try to go on
                    except:
                        reason = 'Cannot create %s before drv = %s, nBands = %d, datatype = %s' % (
                            dirname, drv.ShortName, nBands, gdal.GetDataTypeName(datatype))
                        gdaltest.post_reason(reason)
                        return 'fail'

                filename = get_filename(drv, dirname)

                dst_ds = drv.CreateCopy(filename, ds)
                has_succeeded = dst_ds is not None
                dst_ds = None

                # Record the produced file size before removing the directory;
                # it drives the truncation test below.
                size = 0
                stat = gdal.VSIStatL(filename)
                if stat is not None:
                    size = stat.size

                try:
                    shutil.rmtree(dirname)
                except:
                    reason = 'Cannot remove %s after drv = %s, nBands = %d, datatype = %s' % (
                        dirname, drv.ShortName, nBands, gdal.GetDataTypeName(datatype))
                    gdaltest.post_reason(reason)
                    return 'fail'

                # Heavy tests run once per driver (tracked via setDriversDone).
                if has_succeeded and not drv.ShortName in setDriversDone and nBands > 0:
                    setDriversDone.add(drv.ShortName)

                    # The first list of drivers fail to detect short writing
                    # The second one is because they are verbose in stderr
                    if 'DCAP_VIRTUALIO' in md and size != 0 and \
                       drv.ShortName not in ['JPEG2000', 'KMLSUPEROVERLAY', 'HF2', 'ZMap', 'DDS'] and \
                       drv.ShortName not in ['GIF', 'JP2ECW', 'JP2Lura']:
                        for j in range(10):
                            # NOTE(review): true division here yields a float under
                            # Python 3; '%d' below truncates it — confirm intended.
                            truncated_size = (size * j) / 10
                            # ||maxlength=N|| makes /vsimem/ fail writes past N bytes.
                            vsimem_filename = (
                                '/vsimem/test_truncate/||maxlength=%d||' % truncated_size) + get_filename(drv, '')[1:]
                            # print('drv = %s, nBands = %d, datatype = %s, truncated_size = %d' % (drv.ShortName, nBands, gdal.GetDataTypeName(datatype), truncated_size))
                            dst_ds = drv.CreateCopy(vsimem_filename, ds)
                            error_detected = False
                            if dst_ds is None:
                                error_detected = True
                            else:
                                # The error may only surface at dataset close time.
                                gdal.ErrorReset()
                                dst_ds = None
                                if gdal.GetLastErrorMsg() != '':
                                    error_detected = True
                            if not error_detected:
                                msg = 'write error not decteded with with drv = %s, nBands = %d, datatype = %s, truncated_size = %d' % (
                                    drv.ShortName, nBands, gdal.GetDataTypeName(datatype), truncated_size)
                                print(msg)
                                gdaltest.post_reason(msg)

                            # Clean whatever the driver left in /vsimem/.
                            fl = gdal.ReadDirRecursive('/vsimem/test_truncate')
                            if fl is not None:
                                for myf in fl:
                                    gdal.Unlink('/vsimem/test_truncate/' + myf)
                                fl = gdal.ReadDirRecursive('/vsimem/test_truncate')
                                if fl is not None:
                                    print(fl)

                    # Interruption test: callback returning failure must abort
                    # CreateCopy(). Listed drivers are excluded (presumably they
                    # ignore the callback or crash — TODO confirm).
                    if not drv.ShortName in [
                            'ECW', 'JP2ECW', 'VRT', 'XPM', 'JPEG2000', 'FIT',
                            'RST', 'INGR', 'USGSDEM', 'KMLSUPEROVERLAY', 'GMT'
                    ]:
                        dst_ds = drv.CreateCopy(
                            filename, ds, callback=misc_6_interrupt_callback_class().cbk)
                        if dst_ds is not None:
                            gdaltest.post_reason(
                                'interruption did not work with drv = %s, nBands = %d, datatype = %s' %
                                (drv.ShortName, nBands, gdal.GetDataTypeName(datatype)))
                            dst_ds = None
                            try:
                                shutil.rmtree(dirname)
                            except:
                                pass
                            return 'fail'

                        dst_ds = None

                        try:
                            shutil.rmtree(dirname)
                        except:
                            pass
                        # Recreate the directory for whatever follow-up uses it.
                        try:
                            os.mkdir(dirname)
                        except:
                            reason = 'Cannot create %s before drv = %s, nBands = %d, datatype = %s' % (
                                dirname, drv.ShortName, nBands, gdal.GetDataTypeName(datatype))
                            gdaltest.post_reason(reason)
                            return 'fail'
    ds = None

    return 'success'
def main(argv=None):
    """gdalinfo-like command-line entry point reporting per-band statistics.

    Recognized flags: --utility_version, -mm (compute min/max), -unscale
    (apply band scale/offset), -stats, -hist (histogram dump); one positional
    dataset filename. Returns 0/True on success, 1 on open failure, or the
    result of Usage() on bad arguments.
    """
    bReportHistograms = False
    bApproxStats = False
    scale = 1.0
    offset = 0.0
    bComputeMinMax = False
    # NOTE(review): the following flags are initialized but never read in this
    # function — presumably leftovers from the full gdalinfo port.
    bSample = False
    bShowGCPs = True
    bShowMetadata = True
    bShowRAT = True
    bStats = False
    bScale = False
    bShowColorTable = True
    pszFilename = None
    papszExtraMDDomains = []
    pszProjection = None
    hTransform = None

    if argv is None:
        argv = sys.argv

    # Let GDAL strip its own generic options (--config, --debug, ...).
    argv = gdal.GeneralCmdLineProcessor(argv)

    if argv is None:
        return 1

    nArgc = len(argv)

    # /* -------------------------------------------------------------------- */
    # /*      Parse arguments.                                                */
    # /* -------------------------------------------------------------------- */
    i = 1
    while i < nArgc:
        if EQUAL(argv[i], "--utility_version"):
            print("%s is running against GDAL %s" %
                  (argv[0], gdal.VersionInfo("RELEASE_NAME")))
            return 0
        elif EQUAL(argv[i], "-mm"):
            bComputeMinMax = True
        elif EQUAL(argv[i], "-unscale"):
            bScale = True
        elif EQUAL(argv[i], "-stats"):
            bStats = True
        elif EQUAL(argv[i], "-hist"):
            bReportHistograms = True
        elif argv[i][0] == '-':
            # Unknown switch.
            return Usage()
        elif pszFilename is None:
            pszFilename = argv[i]
        else:
            # More than one positional argument.
            return Usage()

        i = i + 1

    if pszFilename is None:
        return Usage()
    # At least one action flag is required.
    if not (bComputeMinMax or bScale or bStats or bReportHistograms):
        return Usage()

    # /* -------------------------------------------------------------------- */
    # /*      Open dataset.                                                   */
    # /* -------------------------------------------------------------------- */
    hDataset = gdal.Open(pszFilename, gdal.GA_ReadOnly)

    if hDataset is None:
        print("gdalinfo failed - unable to open '%s'." % pszFilename)
        return 1

    # /* ==================================================================== */
    # /*      Loop over bands.                                                */
    # /* ==================================================================== */
    for iBand in range(hDataset.RasterCount):
        hBand = hDataset.GetRasterBand(iBand + 1)
        (nBlockXSize, nBlockYSize) = hBand.GetBlockSize()

        # With -unscale, fold the band's scale/offset into every reported value.
        if bScale:
            offset = hBand.GetOffset()
            if offset is None:
                offset = 0.0
            scale = hBand.GetScale()
            if scale is None:
                scale = 1.0

        if (hDataset.RasterCount > 1):
            print("Band %d Block=%dx%d Type=%s, ColorInterp=%s" % (iBand + 1, \
                  nBlockXSize, nBlockYSize, \
                  gdal.GetDataTypeName(hBand.DataType), \
                  gdal.GetColorInterpretationName( \
                      hBand.GetRasterColorInterpretation())))

        dfMin = hBand.GetMinimum()
        dfMax = hBand.GetMaximum()
        if dfMin is not None or dfMax is not None or bComputeMinMax:
            line = ""
            if dfMin is not None:
                dfMin = (dfMin * scale) + offset
                line = line + ("Min=%.3f " % (dfMin))
            if dfMax is not None:
                dfMax = (dfMax * scale) + offset
                line = line + ("Max=%.3f " % (dfMax))

            if bComputeMinMax:
                gdal.ErrorReset()
                adfCMinMax = hBand.ComputeRasterMinMax(True)
                if gdal.GetLastErrorType() == gdal.CE_None:
                    line = line + (" Computed Min/Max=%.3f,%.3f" % ( \
                        ((adfCMinMax[0] * scale) + offset), \
                        ((adfCMinMax[1] * scale) + offset)))

            print(line)

        # if bStats:
        #     print(line)

        stats = hBand.GetStatistics(bApproxStats, bStats)
        # inType = gdal.GetDataTypeName(hBand.DataType)

        # Dirty hack to recognize if stats are valid. If invalid, the returned
        # stddev is negative
        if stats[3] >= 0.0:
            if bStats:
                mean = (stats[2] * scale) + offset;
                stdev = (stats[3] * scale) + offset;
                rms = math.sqrt((mean * mean) + (stdev * stdev))
                print("Min=%.2f, Max=%.2f, Mean=%.2f, StdDev=%.2f, RMS=%.2f" \
                      % ((stats[0] * scale) + offset, (stats[1] * scale) + offset, \
                         mean, stdev, rms))

        if bReportHistograms:
            print("level\tvalue\tcount\tcumulative")
            # Histogram call not returning exact min and max.
            # ...Workaround run gdalinfo -stats and then use min/max from above
            hist = hBand.GetDefaultHistogram(force=True)
            # hist = hBand.GetDefaultHistogram(force = True, callback = gdal.TermProgress)
            cnt = 0
            # NOTE(review): 'sum' shadows the builtin of the same name.
            sum = 0
            sumTotal = 0
            if hist is not None:
                # use dfMin and dfMax from previous calls when possible
                if dfMin is None:
                    dfMin = (hist[0] * scale) + offset
                if dfMax is None:
                    dfMax = (hist[1] * scale) + offset
                nBucketCount = hist[2]
                panHistogram = hist[3]
                # print( "  %d buckets from %g to %g:" % ( \
                #     nBucketCount, dfMin, dfMax ))
                # print( "scale: %g, offset: %g" % (scale, offset))
                increment = round(((dfMax - dfMin) / nBucketCount), 2)
                value = dfMin
                # get total to normalize (below)
                for bucket in panHistogram:
                    sumTotal = sumTotal + bucket
                # One output row per bucket: index, bucket start value, count,
                # cumulative normalized fraction.
                for bucket in panHistogram:
                    sum = sum + bucket
                    # normalize cumulative
                    nsum = sum / float(sumTotal)
                    line = "%d\t%0.2f\t%d\t%0.6f" % (cnt, value, bucket, nsum)
                    print(line)
                    cnt = cnt + 1
                    value = value + increment

    return True
def doit(opts, args):
    """Run a gdal_calc-style raster calculation.

    Evaluates the expression in ``opts.calc`` (via eval, with all gdalnumeric
    names available plus one array per input letter) over the inputs block by
    block, writing the result to ``opts.outF``. Nodata cells of any input are
    propagated to the output nodata value.

    Parameters:
        opts: parsed options object (calc, outF, format, input_files,
            allBands, overwrite, type, creation_options, NoDataValue,
            debug, quiet, ...).
        args: unused (kept for the optparse callback signature).

    Raises Exception/IOError on missing inputs, dimension mismatches or
    read/write failures.
    """
    # pylint: disable=unused-argument
    if opts.debug:
        print("gdal_calc.py starting calculation %s" % (opts.calc))

    # set up global namespace for eval with all functions of gdalnumeric
    global_namespace = dict([(key, getattr(gdalnumeric, key))
                             for key in dir(gdalnumeric) if not key.startswith('__')])

    if not opts.calc:
        raise Exception("No calculation provided.")
    elif not opts.outF:
        raise Exception("No output file provided.")

    if opts.format is None:
        opts.format = GetOutputDriverFor(opts.outF)

    ################################################################
    # fetch details of input layers
    ################################################################

    # set up some lists to store data for each band
    myFiles = []
    myBands = []
    myAlphaList = []
    myDataType = []
    myDataTypeNum = []
    myNDV = []
    DimensionsCheck = None

    # loop through input files - checking dimensions
    for myI, myF in opts.input_files.items():
        # "<letter>_band" entries select a band for letter <letter>; they are
        # consumed below, not opened as files themselves.
        if not myI.endswith("_band"):
            # check if we have asked for a specific band...
            if "%s_band" % myI in opts.input_files:
                myBand = opts.input_files["%s_band" % myI]
            else:
                myBand = 1

            myFile = gdal.Open(myF, gdal.GA_ReadOnly)
            if not myFile:
                raise IOError("No such file or directory: '%s'" % myF)

            myFiles.append(myFile)
            myBands.append(myBand)
            myAlphaList.append(myI)
            myDataType.append(gdal.GetDataTypeName(myFile.GetRasterBand(myBand).DataType))
            myDataTypeNum.append(myFile.GetRasterBand(myBand).DataType)
            myNDV.append(myFile.GetRasterBand(myBand).GetNoDataValue())

            # check that the dimensions of each layer are the same
            if DimensionsCheck:
                if DimensionsCheck != [myFile.RasterXSize, myFile.RasterYSize]:
                    raise Exception(
                        "Error! Dimensions of file %s (%i, %i) are different from other files (%i, %i). Cannot proceed" %
                        (myF, myFile.RasterXSize, myFile.RasterYSize,
                         DimensionsCheck[0], DimensionsCheck[1]))
            else:
                DimensionsCheck = [myFile.RasterXSize, myFile.RasterYSize]

            if opts.debug:
                print("file %s: %s, dimensions: %s, %s, type: %s" %
                      (myI, myF, DimensionsCheck[0], DimensionsCheck[1], myDataType[-1]))

    # process allBands option
    allBandsIndex = None
    allBandsCount = 1
    if opts.allBands:
        try:
            allBandsIndex = myAlphaList.index(opts.allBands)
        except ValueError:
            raise Exception(
                "Error! allBands option was given but Band %s not found. Cannot proceed" % (opts.allBands))
        allBandsCount = myFiles[allBandsIndex].RasterCount
        if allBandsCount <= 1:
            allBandsIndex = None

    ################################################################
    # set up output file
    ################################################################

    # open output file exists
    if os.path.isfile(opts.outF) and not opts.overwrite:
        if allBandsIndex is not None:
            raise Exception("Error! allBands option was given but Output file exists, must use --overwrite option!")
        if opts.debug:
            print("Output file %s exists - filling in results into file" % (opts.outF))
        myOut = gdal.Open(opts.outF, gdal.GA_Update)
        if [myOut.RasterXSize, myOut.RasterYSize] != DimensionsCheck:
            raise Exception(
                "Error! Output exists, but is the wrong size. Use the --overwrite option to automatically overwrite the existing file")
        myOutB = myOut.GetRasterBand(1)
        myOutNDV = myOutB.GetNoDataValue()
        myOutType = gdal.GetDataTypeName(myOutB.DataType)
    else:
        # remove existing file and regenerate
        if os.path.isfile(opts.outF):
            os.remove(opts.outF)
        # create a new file
        if opts.debug:
            print("Generating output file %s" % (opts.outF))
        # find data type to use
        if not opts.type:
            # use the largest type of the input files
            myOutType = gdal.GetDataTypeName(max(myDataTypeNum))
        else:
            myOutType = opts.type
        # create file
        myOutDrv = gdal.GetDriverByName(opts.format)
        myOut = myOutDrv.Create(
            opts.outF, DimensionsCheck[0], DimensionsCheck[1], allBandsCount,
            gdal.GetDataTypeByName(myOutType), opts.creation_options)
        # set output geo info based on first input layer
        myOut.SetGeoTransform(myFiles[0].GetGeoTransform())
        myOut.SetProjection(myFiles[0].GetProjection())
        if opts.NoDataValue is not None:
            myOutNDV = opts.NoDataValue
        else:
            myOutNDV = DefaultNDVLookup[myOutType]
        for i in range(1, allBandsCount + 1):
            myOutB = myOut.GetRasterBand(i)
            myOutB.SetNoDataValue(myOutNDV)
            # write to band
            myOutB = None

    if opts.debug:
        print("output file: %s, dimensions: %s, %s, type: %s" %
              (opts.outF, myOut.RasterXSize, myOut.RasterYSize, myOutType))

    ################################################################
    # find block size to chop grids into bite-sized chunks
    ################################################################

    # use the block size of the first layer to read efficiently
    myBlockSize = myFiles[0].GetRasterBand(myBands[0]).GetBlockSize()
    # find total x and y blocks to be read (ceiling division)
    nXBlocks = (int)((DimensionsCheck[0] + myBlockSize[0] - 1) / myBlockSize[0])
    nYBlocks = (int)((DimensionsCheck[1] + myBlockSize[1] - 1) / myBlockSize[1])
    myBufSize = myBlockSize[0] * myBlockSize[1]

    if opts.debug:
        print("using blocksize %s x %s" % (myBlockSize[0], myBlockSize[1]))

    # variables for displaying progress
    ProgressCt = -1
    ProgressMk = -1
    ProgressEnd = nXBlocks * nYBlocks * allBandsCount

    ################################################################
    # start looping through each band in allBandsCount
    ################################################################
    for bandNo in range(1, allBandsCount + 1):

        ################################################################
        # start looping through blocks of data
        ################################################################

        # store these numbers in variables that may change later
        nXValid = myBlockSize[0]
        nYValid = myBlockSize[1]

        # loop through X-lines
        for X in range(0, nXBlocks):

            # in case the blocks don't fit perfectly
            # change the block size of the final piece
            if X == nXBlocks - 1:
                nXValid = DimensionsCheck[0] - X * myBlockSize[0]

            # find X offset
            myX = X * myBlockSize[0]

            # reset buffer size for start of Y loop
            nYValid = myBlockSize[1]
            myBufSize = nXValid * nYValid

            # loop through Y lines
            for Y in range(0, nYBlocks):
                ProgressCt += 1
                # NOTE(review): under Python 3 these are float computations;
                # the progress decile logic presumably relied on py2 integer
                # division — confirm intended behavior.
                if 10 * ProgressCt / ProgressEnd % 10 != ProgressMk and not opts.quiet:
                    ProgressMk = 10 * ProgressCt / ProgressEnd % 10
                    from sys import version_info
                    # exec() used so the py2 print statement stays parseable on py3.
                    if version_info >= (3, 0, 0):
                        exec('print("%d.." % (10*ProgressMk), end=" ")')
                    else:
                        exec('print 10*ProgressMk, "..",')

                # change the block size of the final piece
                if Y == nYBlocks - 1:
                    nYValid = DimensionsCheck[1] - Y * myBlockSize[1]
                    myBufSize = nXValid * nYValid

                # find Y offset
                myY = Y * myBlockSize[1]

                # create empty buffer to mark where nodata occurs
                myNDVs = None

                # make local namespace for calculation
                local_namespace = {}

                # fetch data for each input layer
                for i, Alpha in enumerate(myAlphaList):

                    # populate lettered arrays with values
                    if allBandsIndex is not None and allBandsIndex == i:
                        myBandNo = bandNo
                    else:
                        myBandNo = myBands[i]
                    myval = gdalnumeric.BandReadAsArray(myFiles[i].GetRasterBand(myBandNo),
                                                        xoff=myX, yoff=myY,
                                                        win_xsize=nXValid, win_ysize=nYValid)
                    if myval is None:
                        raise Exception('Input block reading failed')

                    # fill in nodata values
                    if myNDV[i] is not None:
                        # myNDVs accumulates, per cell, whether ANY input was nodata.
                        if myNDVs is None:
                            myNDVs = numpy.zeros(myBufSize)
                            myNDVs.shape = (nYValid, nXValid)
                        myNDVs = 1 * numpy.logical_or(myNDVs == 1, myval == myNDV[i])

                    # add an array of values for this block to the eval namespace
                    local_namespace[Alpha] = myval
                    myval = None

                # try the calculation on the array blocks
                try:
                    # NOTE: eval of a user-supplied expression — gdal_calc's
                    # documented design; do not feed untrusted input.
                    myResult = eval(opts.calc, global_namespace, local_namespace)
                except:
                    print("evaluation of calculation %s failed" % (opts.calc))
                    raise

                # Propagate nodata values (set nodata cells to zero
                # then add nodata value to these cells).
                if myNDVs is not None:
                    myResult = ((1 * (myNDVs == 0)) * myResult) + (myOutNDV * myNDVs)
                elif not isinstance(myResult, numpy.ndarray):
                    # Scalar result: broadcast to a full block.
                    myResult = numpy.ones((nYValid, nXValid)) * myResult

                # write data block to the output file
                myOutB = myOut.GetRasterBand(bandNo)
                if gdalnumeric.BandWriteArray(myOutB, myResult, xoff=myX, yoff=myY) != 0:
                    raise Exception('Block writing failed')

    gdal.ErrorReset()
    myOut.FlushCache()
    myOut = None
    if gdal.GetLastErrorMsg() != '':
        raise Exception('Dataset writing failed')

    if not opts.quiet:
        print("100 - Done")
def test_vsiswift_extra_1():
    """Live-network test of the /vsiswift/ virtual filesystem.

    Requires libcurl support and a SWIFT_RESOURCE config option. If the
    resource is a bare bucket name (no '/'), exercises the full directory
    API (Stat, ReadDir, Mkdir, Rmdir, file create/read/delete); otherwise
    only reads the named object via /vsiswift/ and /vsiswift_streaming/.
    """
    if not gdaltest.built_against_curl():
        pytest.skip()

    swift_resource = gdal.GetConfigOption('SWIFT_RESOURCE')
    if swift_resource is None:
        pytest.skip('Missing SWIFT_RESOURCE')

    if '/' not in swift_resource:
        # SWIFT_RESOURCE names a bucket: run the directory-level checks.
        path = '/vsiswift/' + swift_resource
        statres = gdal.VSIStatL(path)
        assert statres is not None and stat.S_ISDIR(statres.mode), \
            ('%s is not a valid bucket' % path)

        readdir = gdal.ReadDir(path)
        assert readdir is not None, 'ReadDir() should not return empty list'
        for filename in readdir:
            if filename != '.':
                subpath = path + '/' + filename
                assert gdal.VSIStatL(subpath) is not None, \
                    ('Stat(%s) should not return an error' % subpath)

        unique_id = 'vsiswift_test'
        subpath = path + '/' + unique_id
        ret = gdal.Mkdir(subpath, 0)
        assert ret >= 0, ('Mkdir(%s) should not return an error' % subpath)

        readdir = gdal.ReadDir(path)
        assert unique_id in readdir, \
            ('ReadDir(%s) should contain %s' % (path, unique_id))

        # Creating the same directory twice must fail.
        ret = gdal.Mkdir(subpath, 0)
        assert ret != 0, ('Mkdir(%s) repeated should return an error' % subpath)

        ret = gdal.Rmdir(subpath)
        assert ret >= 0, ('Rmdir(%s) should not return an error' % subpath)

        readdir = gdal.ReadDir(path)
        assert unique_id not in readdir, \
            ('ReadDir(%s) should not contain %s' % (path, unique_id))

        # Removing a directory twice must fail.
        ret = gdal.Rmdir(subpath)
        assert ret != 0, ('Rmdir(%s) repeated should return an error' % subpath)

        ret = gdal.Mkdir(subpath, 0)
        assert ret >= 0, ('Mkdir(%s) should not return an error' % subpath)

        f = gdal.VSIFOpenL(subpath + '/test.txt', 'wb')
        assert f is not None
        gdal.VSIFWriteL('hello', 1, 5, f)
        gdal.VSIFCloseL(f)

        # Non-empty directory: Rmdir must refuse.
        ret = gdal.Rmdir(subpath)
        assert ret != 0, \
            ('Rmdir(%s) on non empty directory should return an error' % subpath)

        # Read back the file we just wrote.
        f = gdal.VSIFOpenL(subpath + '/test.txt', 'rb')
        assert f is not None
        data = gdal.VSIFReadL(1, 5, f).decode('utf-8')
        assert data == 'hello'
        gdal.VSIFCloseL(f)

        ret = gdal.Unlink(subpath + '/test.txt')
        assert ret >= 0, \
            ('Unlink(%s) should not return an error' % (subpath + '/test.txt'))

        ret = gdal.Rmdir(subpath)
        assert ret >= 0, ('Rmdir(%s) should not return an error' % subpath)

        return

    # SWIFT_RESOURCE names an object: just read one byte from it.
    f = open_for_read('/vsiswift/' + swift_resource)
    assert f is not None
    ret = gdal.VSIFReadL(1, 1, f)
    gdal.VSIFCloseL(f)

    assert len(ret) == 1

    # Same with /vsiswift_streaming/
    f = open_for_read('/vsiswift_streaming/' + swift_resource)
    assert f is not None
    ret = gdal.VSIFReadL(1, 1, f)
    gdal.VSIFCloseL(f)

    assert len(ret) == 1

    # Invalid resource
    gdal.ErrorReset()
    f = open_for_read('/vsiswift_streaming/' + swift_resource + '/invalid_resource.baz')
    assert f is None, gdal.VSIGetLastErrorMsg()
def rasterlite_2():
    """Open data/rasterlite.sqlite and check bands, checksums and georef.

    Returns 'skip' when the Rasterlite driver is missing or the sqlite3
    library lacks rtree / rasterlite support, 'fail' on mismatch,
    'success' otherwise.
    """
    if gdaltest.rasterlite_drv is None:
        return 'skip'

    # Test if SQLite3 supports rtrees
    try:
        os.remove('tmp/testrtree.sqlite')
    except OSError:
        # File may legitimately not exist; only ignore filesystem errors,
        # not everything (the original bare "except:" hid real bugs).
        pass
    ds2 = ogr.GetDriverByName('SQLite').CreateDataSource(
        'tmp/testrtree.sqlite')
    gdal.ErrorReset()
    ds2.ExecuteSQL(
        'CREATE VIRTUAL TABLE testrtree USING rtree(id,minX,maxX,minY,maxY)')
    ds2.Destroy()
    try:
        os.remove('tmp/testrtree.sqlite')
    except OSError:
        pass
    if 'rtree' in gdal.GetLastErrorMsg():
        gdaltest.rasterlite_drv = None
        gdaltest.post_reason(
            'Please upgrade your sqlite3 library to be able to read Rasterlite DBs (needs rtree support)!'
        )
        return 'skip'

    gdal.ErrorReset()
    ds = gdal.Open('data/rasterlite.sqlite')
    if ds is None:
        if 'unsupported file format' in gdal.GetLastErrorMsg():
            gdaltest.rasterlite_drv = None
            gdaltest.post_reason(
                'Please upgrade your sqlite3 library to be able to read Rasterlite DBs!'
            )
            return 'skip'
        return 'fail'

    if ds.RasterCount != 3:
        gdaltest.post_reason('expected 3 bands')
        return 'fail'

    if ds.GetRasterBand(1).GetOverviewCount() != 0:
        gdaltest.post_reason('did not expect overview')
        return 'fail'

    # Accepted checksums per band: the first value is the canonical one,
    # the alternatives come from different libjpeg/libpng versions.
    accepted_checksums = [(11746, 11751),
                          (19843, 20088, 20083),
                          (48911, 47978)]
    for band, accepted in enumerate(accepted_checksums, start=1):
        cs = ds.GetRasterBand(band).Checksum()
        if cs not in accepted:
            gdaltest.post_reason(
                'for band %d, cs = %d, different from expected_cs = %d' %
                (band, cs, accepted[0]))
            return 'fail'

    if ds.GetProjectionRef().find('WGS_1984') == -1:
        gdaltest.post_reason('projection_ref = %s' % ds.GetProjectionRef())
        return 'fail'

    # Geotransform should cover the whole globe
    gt = ds.GetGeoTransform()
    expected_gt = (-180.0, 360. / ds.RasterXSize, 0.0, 90.0, 0.0,
                   -180. / ds.RasterYSize)
    for i in range(6):
        if abs(gt[i] - expected_gt[i]) > 1e-15:
            print(gt)
            print(expected_gt)
            return 'fail'

    ds = None

    return 'success'
def vsizip_2(): fmain = gdal.VSIFOpenL("/vsizip/vsimem/test2.zip/foo.bar", "wb") if fmain is None: gdaltest.post_reason('fail 1') return 'fail' gdal.VSIFWriteL("12345", 1, 5, fmain) gdal.VSIFCloseL(fmain) content = gdal.ReadDir("/vsizip/vsimem/test2.zip") if content != ['foo.bar']: gdaltest.post_reason('bad content 1') print(content) return 'fail' # Now append a second file fmain = gdal.VSIFOpenL("/vsizip/vsimem/test2.zip/bar.baz", "wb") if fmain is None: gdaltest.post_reason('fail 2') return 'fail' gdal.VSIFWriteL("67890", 1, 5, fmain) gdal.ErrorReset() gdal.PushErrorHandler('CPLQuietErrorHandler') content = gdal.ReadDir("/vsizip/vsimem/test2.zip") gdal.PopErrorHandler() if gdal.GetLastErrorMsg() != 'Cannot read a zip file being written': gdaltest.post_reason('expected error') print(gdal.GetLastErrorMsg()) return 'fail' if content != None: gdaltest.post_reason('bad content 2') print(content) return 'fail' gdal.VSIFCloseL(fmain) content = gdal.ReadDir("/vsizip/vsimem/test2.zip") if content != ['foo.bar', 'bar.baz']: gdaltest.post_reason('bad content 3') print(content) return 'fail' fmain = gdal.VSIFOpenL("/vsizip/vsimem/test2.zip/foo.bar", "rb") if fmain is None: gdaltest.post_reason('fail 3') return 'fail' data = gdal.VSIFReadL(1, 5, fmain) gdal.VSIFCloseL(fmain) if data.decode('ASCII') != '12345': print(data) return 'fail' fmain = gdal.VSIFOpenL("/vsizip/vsimem/test2.zip/bar.baz", "rb") if fmain is None: gdaltest.post_reason('fail 4') return 'fail' data = gdal.VSIFReadL(1, 5, fmain) gdal.VSIFCloseL(fmain) if data.decode('ASCII') != '67890': print(data) return 'fail' gdal.Unlink("/vsimem/test2.zip") return 'success'
def ogr_rfc41_4():
    """Test RFC 41 multiple-geometry-field support on the Memory driver.

    Exercises creating a second geometry field, per-field get/set of
    geometries, GetExtent() and spatial filters on a non-default geometry
    field, error reporting for invalid field indices, and CopyLayer().
    Returns 'success' or 'fail' (gdaltest convention).
    """
    ds = ogr.GetDriverByName('memory').CreateDataSource('')
    if ds.TestCapability(ogr.ODsCCreateGeomFieldAfterCreateLayer) == 0:
        gdaltest.post_reason('fail')
        return 'fail'
    sr = osr.SpatialReference()
    lyr = ds.CreateLayer('test', geom_type=ogr.wkbPoint, srs=sr)
    if lyr.TestCapability(ogr.OLCCreateGeomField) == 0:
        gdaltest.post_reason('fail')
        return 'fail'
    # The SRS passed at creation must be visible both on the layer and on
    # its first geometry field definition.
    if lyr.GetSpatialRef().IsSame(sr) == 0:
        gdaltest.post_reason('fail')
        return 'fail'
    if lyr.GetLayerDefn().GetGeomFieldDefn(0).GetSpatialRef().IsSame(sr) == 0:
        gdaltest.post_reason('fail')
        return 'fail'
    lyr.GetLayerDefn().GetGeomFieldDefn(0).SetName('a_name')
    feat = ogr.Feature(lyr.GetLayerDefn())
    feat.SetGeometry(ogr.CreateGeometryFromWkt('POINT (1 2)'))
    lyr.CreateFeature(feat)
    lyr.ResetReading()
    feat = lyr.GetNextFeature()
    geom = feat.GetGeometryRef()
    # The feature's geometry must inherit the layer SRS
    if geom.GetSpatialReference().IsSame(sr) == 0:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = None
    # Add a second geometry field after the layer already has features
    lyr.CreateGeomField(ogr.GeomFieldDefn('another_geom_field',
                                          ogr.wkbPolygon))
    lyr.ResetReading()
    feat = lyr.GetNextFeature()
    feat.SetGeomField(
        1,
        ogr.CreateGeometryFromWkt('POLYGON ((10 10,10 11,11 11,11 10,10 10))'))
    lyr.SetFeature(feat)
    lyr.ResetReading()
    feat = lyr.GetNextFeature()
    geom = feat.GetGeomFieldRef(0)
    if geom.ExportToWkt() != 'POINT (1 2)':
        gdaltest.post_reason('fail')
        return 'fail'
    # Geometry fields are addressable by name as well as by index
    geom = feat.GetGeomFieldRef('another_geom_field')
    if geom.ExportToWkt() != 'POLYGON ((10 10,10 11,11 11,11 10,10 10))':
        gdaltest.post_reason('fail')
        return 'fail'

    # Test GetExtent()
    got_extent = lyr.GetExtent(geom_field=1)
    if got_extent != (10.0, 11.0, 10.0, 11.0):
        gdaltest.post_reason('fail')
        return 'fail'
    # Test invalid geometry field index
    gdal.ErrorReset()
    gdal.PushErrorHandler('CPLQuietErrorHandler')
    got_extent = lyr.GetExtent(geom_field=2)
    gdal.PopErrorHandler()
    if gdal.GetLastErrorMsg() == '':
        gdaltest.post_reason('fail')
        return 'fail'

    # Test SetSpatialFilter(): a filter away from the polygon selects nothing
    lyr.SetSpatialFilter(
        1,
        ogr.CreateGeometryFromWkt(
            'POLYGON ((-10 10,-10 11,-11 11,-11 10,-10 10))'))
    lyr.ResetReading()
    feat = lyr.GetNextFeature()
    if feat is not None:
        gdaltest.post_reason('fail')
        return 'fail'
    # ... while a filter covering the polygon selects the feature
    lyr.SetSpatialFilter(
        1,
        ogr.CreateGeometryFromWkt('POLYGON ((10 10,10 11,11 11,11 10,10 10))'))
    lyr.ResetReading()
    feat = lyr.GetNextFeature()
    if feat is None:
        gdaltest.post_reason('fail')
        return 'fail'
    lyr.SetSpatialFilterRect(1, 10, 10, 11, 11)
    lyr.ResetReading()
    feat = lyr.GetNextFeature()
    if feat is None:
        gdaltest.post_reason('fail')
        return 'fail'
    # Test invalid spatial filter index
    gdal.ErrorReset()
    gdal.PushErrorHandler('CPLQuietErrorHandler')
    lyr.SetSpatialFilterRect(2, 0, 0, 0, 0)
    gdal.PopErrorHandler()
    if gdal.GetLastErrorMsg() == '':
        gdaltest.post_reason('fail')
        return 'fail'

    lyr.SetSpatialFilter(None)
    # CopyLayer() must carry both geometry fields over
    another_lyr = ds.CopyLayer(lyr, 'dup_test')
    dup_feat = another_lyr.GetNextFeature()
    geom = dup_feat.GetGeomFieldRef('a_name')
    if geom.ExportToWkt() != 'POINT (1 2)':
        gdaltest.post_reason('fail')
        return 'fail'
    geom = dup_feat.GetGeomFieldRef('another_geom_field')
    if geom.ExportToWkt() != 'POLYGON ((10 10,10 11,11 11,11 10,10 10))':
        gdaltest.post_reason('fail')
        return 'fail'

    return 'success'
def vsizip_1():
    """Test writing a /vsizip archive with a main handle kept open.

    Checks directory creation, sequential member creation, and the
    errors raised when reading a zip being written or opening a second
    member for write concurrently.  Returns 'success' or 'fail'.
    """
    # We can keep the handle open during all the ZIP writing
    hZIP = gdal.VSIFOpenL("/vsizip/vsimem/test.zip", "wb")
    if hZIP is None:
        gdaltest.post_reason('fail 1')
        return 'fail'

    # One way to create a directory
    f = gdal.VSIFOpenL("/vsizip/vsimem/test.zip/subdir2/", "wb")
    if f is None:
        gdaltest.post_reason('fail 2')
        return 'fail'
    gdal.VSIFCloseL(f)

    # A more natural one
    gdal.Mkdir("/vsizip/vsimem/test.zip/subdir1", 0)

    # Create 1st file
    f2 = gdal.VSIFOpenL("/vsizip/vsimem/test.zip/subdir3/abcd", "wb")
    if f2 is None:
        gdaltest.post_reason('fail 3')
        return 'fail'
    gdal.VSIFWriteL("abcd", 1, 4, f2)
    gdal.VSIFCloseL(f2)

    # Test that we cannot read a zip file being written
    gdal.ErrorReset()
    gdal.PushErrorHandler('CPLQuietErrorHandler')
    f = gdal.VSIFOpenL("/vsizip/vsimem/test.zip/subdir3/abcd", "rb")
    gdal.PopErrorHandler()
    if gdal.GetLastErrorMsg() != 'Cannot read a zip file being written':
        gdaltest.post_reason('expected error')
        print(gdal.GetLastErrorMsg())
        return 'fail'
    if f is not None:
        gdaltest.post_reason('should not have been successful 1')
        return 'fail'

    # Create 2nd file
    f3 = gdal.VSIFOpenL("/vsizip/vsimem/test.zip/subdir3/efghi", "wb")
    if f3 is None:
        gdaltest.post_reason('fail 4')
        return 'fail'
    gdal.VSIFWriteL("efghi", 1, 5, f3)

    # Try creating a 3rd file while the 2nd is still open: must fail
    gdal.ErrorReset()
    gdal.PushErrorHandler('CPLQuietErrorHandler')
    f4 = gdal.VSIFOpenL("/vsizip/vsimem/test.zip/that_wont_work", "wb")
    gdal.PopErrorHandler()
    if gdal.GetLastErrorMsg() != 'Cannot create that_wont_work while another file is being written in the .zip':
        gdaltest.post_reason('expected error')
        print(gdal.GetLastErrorMsg())
        return 'fail'
    if f4 is not None:
        gdaltest.post_reason('should not have been successful 2')
        return 'fail'

    gdal.VSIFCloseL(f3)

    # Now we can close the main handle
    gdal.VSIFCloseL(hZIP)

    # Re-read the first member to check the archive is valid
    f = gdal.VSIFOpenL("/vsizip/vsimem/test.zip/subdir3/abcd", "rb")
    if f is None:
        gdaltest.post_reason('fail 5')
        return 'fail'
    data = gdal.VSIFReadL(1, 4, f)
    gdal.VSIFCloseL(f)

    gdal.Unlink("/vsimem/test.zip")

    if data.decode('ASCII') != 'abcd':
        print(data)
        return 'fail'

    return 'success'
def test_transformer_12():
    """Test GCP_TPS transformer behavior with degenerate GCP sets.

    Three cases on an in-memory VRT:
    - an exactly duplicated GCP: accepted (duplicate is merged);
    - two GCPs sharing pixel/line but with different X/Y: must error;
    - two GCPs sharing X/Y but with different pixel/line: must error.
    """
    # Case 1: fully duplicated GCP entry -> transformer creation succeeds
    ds = gdal.Open("""
<VRTDataset rasterXSize="20" rasterYSize="20">
  <GCPList Projection="PROJCS[&quot;NAD27 / UTM zone 11N&quot;,GEOGCS[&quot;NAD27&quot;,DATUM[&quot;North_American_Datum_1927&quot;,SPHEROID[&quot;Clarke 1866&quot;,6378206.4,294.9786982139006,AUTHORITY[&quot;EPSG&quot;,&quot;7008&quot;]],AUTHORITY[&quot;EPSG&quot;,&quot;6267&quot;]],PRIMEM[&quot;Greenwich&quot;,0],UNIT[&quot;degree&quot;,0.0174532925199433],AUTHORITY[&quot;EPSG&quot;,&quot;4267&quot;]],PROJECTION[&quot;Transverse_Mercator&quot;],PARAMETER[&quot;latitude_of_origin&quot;,0],PARAMETER[&quot;central_meridian&quot;,-117],PARAMETER[&quot;scale_factor&quot;,0.9996],PARAMETER[&quot;false_easting&quot;,500000],PARAMETER[&quot;false_northing&quot;,0],UNIT[&quot;metre&quot;,1,AUTHORITY[&quot;EPSG&quot;,&quot;9001&quot;]],AUTHORITY[&quot;EPSG&quot;,&quot;26711&quot;]]">
    <GCP Id="" Pixel="0" Line="0" X="0" Y="0"/>
    <GCP Id="" Pixel="20" Line="0" X="20" Y="0"/>
    <GCP Id="" Pixel="0" Line="20" X="0" Y="20"/>
    <GCP Id="" Pixel="20" Line="20" X="20" Y="20"/>
    <GCP Id="" Pixel="0" Line="0" X="0" Y="0"/> <!-- duplicate entry -->
  </GCPList>
  <VRTRasterBand dataType="Byte" band="1">
    <ColorInterp>Gray</ColorInterp>
    <SimpleSource>
      <SourceFilename relativeToVRT="1">data/byte.tif</SourceFilename>
    </SimpleSource>
  </VRTRasterBand>
</VRTDataset>""")
    tr = gdal.Transformer(ds, None, ['METHOD=GCP_TPS'])
    assert tr is not None

    # Case 2: same pixel/line mapping to two different X/Y -> must error
    ds = gdal.Open("""
<VRTDataset rasterXSize="20" rasterYSize="20">
  <GCPList Projection="PROJCS[&quot;NAD27 / UTM zone 11N&quot;,GEOGCS[&quot;NAD27&quot;,DATUM[&quot;North_American_Datum_1927&quot;,SPHEROID[&quot;Clarke 1866&quot;,6378206.4,294.9786982139006,AUTHORITY[&quot;EPSG&quot;,&quot;7008&quot;]],AUTHORITY[&quot;EPSG&quot;,&quot;6267&quot;]],PRIMEM[&quot;Greenwich&quot;,0],UNIT[&quot;degree&quot;,0.0174532925199433],AUTHORITY[&quot;EPSG&quot;,&quot;4267&quot;]],PROJECTION[&quot;Transverse_Mercator&quot;],PARAMETER[&quot;latitude_of_origin&quot;,0],PARAMETER[&quot;central_meridian&quot;,-117],PARAMETER[&quot;scale_factor&quot;,0.9996],PARAMETER[&quot;false_easting&quot;,500000],PARAMETER[&quot;false_northing&quot;,0],UNIT[&quot;metre&quot;,1,AUTHORITY[&quot;EPSG&quot;,&quot;9001&quot;]],AUTHORITY[&quot;EPSG&quot;,&quot;26711&quot;]]">
    <GCP Id="" Pixel="0" Line="0" X="0" Y="0"/>
    <GCP Id="" Pixel="20" Line="0" X="20" Y="0"/>
    <GCP Id="" Pixel="0" Line="20" X="0" Y="20"/>
    <GCP Id="" Pixel="20" Line="20" X="20" Y="20"/>
    <GCP Id="" Pixel="0" Line="0" X="10" Y="10"/> <!-- same pixel,line -->
  </GCPList>
  <VRTRasterBand dataType="Byte" band="1">
    <ColorInterp>Gray</ColorInterp>
    <SimpleSource>
      <SourceFilename relativeToVRT="1">data/byte.tif</SourceFilename>
    </SimpleSource>
  </VRTRasterBand>
</VRTDataset>""")
    gdal.ErrorReset()
    with gdaltest.error_handler():
        tr = gdal.Transformer(ds, None, ['METHOD=GCP_TPS'])
    assert gdal.GetLastErrorMsg() != ''

    # Case 3: same X/Y mapped from two different pixel/line -> must error
    ds = gdal.Open("""
<VRTDataset rasterXSize="20" rasterYSize="20">
  <GCPList Projection="PROJCS[&quot;NAD27 / UTM zone 11N&quot;,GEOGCS[&quot;NAD27&quot;,DATUM[&quot;North_American_Datum_1927&quot;,SPHEROID[&quot;Clarke 1866&quot;,6378206.4,294.9786982139006,AUTHORITY[&quot;EPSG&quot;,&quot;7008&quot;]],AUTHORITY[&quot;EPSG&quot;,&quot;6267&quot;]],PRIMEM[&quot;Greenwich&quot;,0],UNIT[&quot;degree&quot;,0.0174532925199433],AUTHORITY[&quot;EPSG&quot;,&quot;4267&quot;]],PROJECTION[&quot;Transverse_Mercator&quot;],PARAMETER[&quot;latitude_of_origin&quot;,0],PARAMETER[&quot;central_meridian&quot;,-117],PARAMETER[&quot;scale_factor&quot;,0.9996],PARAMETER[&quot;false_easting&quot;,500000],PARAMETER[&quot;false_northing&quot;,0],UNIT[&quot;metre&quot;,1,AUTHORITY[&quot;EPSG&quot;,&quot;9001&quot;]],AUTHORITY[&quot;EPSG&quot;,&quot;26711&quot;]]">
    <GCP Id="" Pixel="0" Line="0" X="0" Y="0"/>
    <GCP Id="" Pixel="20" Line="0" X="20" Y="0"/>
    <GCP Id="" Pixel="0" Line="20" X="0" Y="20"/>
    <GCP Id="" Pixel="20" Line="20" X="20" Y="20"/>
    <GCP Id="" Pixel="10" Line="10" X="20" Y="20"/> <!-- same X,Y -->
  </GCPList>
  <VRTRasterBand dataType="Byte" band="1">
    <ColorInterp>Gray</ColorInterp>
    <SimpleSource>
      <SourceFilename relativeToVRT="1">data/byte.tif</SourceFilename>
    </SimpleSource>
  </VRTRasterBand>
</VRTDataset>""")
    gdal.ErrorReset()
    with gdaltest.error_handler():
        tr = gdal.Transformer(ds, None, ['METHOD=GCP_TPS'])
    assert gdal.GetLastErrorMsg() != ''
def vsis3_4():
    """Test single-PUT write support of /vsis3/ against the mock S3 server.

    The mock server (gdaltest.webserver_port) serves a scripted sequence of
    responses, so the exact order of VSI calls below matters.  Returns
    'skip' when the server is absent, else 'success' / 'fail'.
    """
    if gdaltest.webserver_port == 0:
        return 'skip'

    # Opening a bucket (not a key) for write must fail
    with gdaltest.error_handler():
        f = gdal.VSIFOpenL('/vsis3/s3_fake_bucket3', 'wb')
    if f is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    # The mock server initially reports a 3-byte object
    if gdal.VSIStatL('/vsis3/s3_fake_bucket3/empty_file.bin').size != 3:
        gdaltest.post_reason('fail')
        return 'fail'

    # Empty file
    f = gdal.VSIFOpenL('/vsis3/s3_fake_bucket3/empty_file.bin', 'wb')
    if f is None:
        gdaltest.post_reason('fail')
        return 'fail'
    gdal.ErrorReset()
    gdal.VSIFCloseL(f)
    if gdal.GetLastErrorMsg() != '':
        gdaltest.post_reason('fail')
        return 'fail'
    # After writing nothing, the object must now be empty
    if gdal.VSIStatL('/vsis3/s3_fake_bucket3/empty_file.bin').size != 0:
        gdaltest.post_reason('fail')
        return 'fail'

    # Invalid seek
    f = gdal.VSIFOpenL('/vsis3/s3_fake_bucket3/empty_file.bin', 'wb')
    if f is None:
        gdaltest.post_reason('fail')
        return 'fail'
    with gdaltest.error_handler():
        ret = gdal.VSIFSeekL(f, 1, 0)
    if ret == 0:
        gdaltest.post_reason('fail')
        return 'fail'
    gdal.VSIFCloseL(f)

    # Invalid read
    f = gdal.VSIFOpenL('/vsis3/s3_fake_bucket3/empty_file.bin', 'wb')
    if f is None:
        gdaltest.post_reason('fail')
        return 'fail'
    with gdaltest.error_handler():
        ret = gdal.VSIFReadL(1, 1, f)
    if len(ret) != 0:
        gdaltest.post_reason('fail')
        return 'fail'
    gdal.VSIFCloseL(f)

    # Error case: the server rejects the PUT at close time
    f = gdal.VSIFOpenL('/vsis3/s3_fake_bucket3/empty_file_error.bin', 'wb')
    if f is None:
        gdaltest.post_reason('fail')
        return 'fail'
    gdal.ErrorReset()
    with gdaltest.error_handler():
        gdal.VSIFCloseL(f)
    if gdal.GetLastErrorMsg() == '':
        gdaltest.post_reason('fail')
        return 'fail'

    # Nominal case
    f = gdal.VSIFOpenL('/vsis3/s3_fake_bucket3/another_file.bin', 'wb')
    if f is None:
        gdaltest.post_reason('fail')
        return 'fail'
    # Seeks to the current position / relative 0 / end are no-ops and succeed
    if gdal.VSIFSeekL(f, gdal.VSIFTellL(f), 0) != 0:
        gdaltest.post_reason('fail')
        return 'fail'
    if gdal.VSIFSeekL(f, 0, 1) != 0:
        gdaltest.post_reason('fail')
        return 'fail'
    if gdal.VSIFSeekL(f, 0, 2) != 0:
        gdaltest.post_reason('fail')
        return 'fail'
    if gdal.VSIFWriteL('foo', 1, 3, f) != 3:
        gdaltest.post_reason('fail')
        return 'fail'
    if gdal.VSIFWriteL('bar', 1, 3, f) != 3:
        gdaltest.post_reason('fail')
        return 'fail'
    gdal.ErrorReset()
    gdal.VSIFCloseL(f)
    if gdal.GetLastErrorMsg() != '':
        gdaltest.post_reason('fail')
        return 'fail'

    # Redirect case: server answers the PUT with a redirect to follow
    f = gdal.VSIFOpenL('/vsis3/s3_fake_bucket3/redirect', 'wb')
    if f is None:
        gdaltest.post_reason('fail')
        return 'fail'
    if gdal.VSIFWriteL('foobar', 1, 6, f) != 6:
        gdaltest.post_reason('fail')
        return 'fail'
    gdal.ErrorReset()
    gdal.VSIFCloseL(f)
    if gdal.GetLastErrorMsg() != '':
        gdaltest.post_reason('fail')
        return 'fail'

    return 'success'