def vsifile_8():

    # octal 0666 = decimal 438
    gdal.Mkdir('/vsimem/mydir', 438)

    fp = gdal.VSIFOpenL('/vsimem/mydir/a', 'wb')
    gdal.VSIFCloseL(fp)

    gdal.Rename('/vsimem/mydir', '/vsimem/newdir'.encode('ascii').decode('ascii'))

    if gdal.VSIStatL('/vsimem/newdir') is None:
        gdaltest.post_reason('fail')
        return 'fail'

    if gdal.VSIStatL('/vsimem/newdir/a') is None:
        gdaltest.post_reason('fail')
        return 'fail'

    gdal.Unlink('/vsimem/newdir/a')
    gdal.Rmdir('/vsimem/newdir')

    return 'success'
def test_rfc30_2():

    if version_info >= (3, 0, 0):
        filename = 'tmp/yy\u4E2D\u6587.\u4E2D\u6587'
    else:
        exec("filename = u'tmp/yy\u4E2D\u6587.\u4E2D\u6587'")
        # The typemaps should accept Unicode strings directly
        # filename = filename.encode( 'utf-8' )

    fd = gdal.VSIFOpenL(filename, 'w')
    assert fd is not None, 'failed to create utf-8 named file.'

    gdal.VSIFWriteL('abc', 3, 1, fd)
    gdal.VSIFCloseL(fd)

    # rename
    if version_info >= (3, 0, 0):
        new_filename = 'tmp/yy\u4E2D\u6587.\u4E2D\u6587'
        filename_for_rename = filename
    else:
        exec("new_filename = u'tmp/yy\u4E2D\u6587.\u4E2D\u6587'")
        filename_for_rename = filename.encode('utf-8')  # FIXME ? rename should perhaps accept unicode strings
        new_filename = new_filename.encode('utf-8')  # FIXME ? rename should perhaps accept unicode strings

    assert gdal.Rename(filename_for_rename, new_filename) == 0, 'utf-8 rename failed.'

    fd = gdal.VSIFOpenL(new_filename, 'r')
    assert fd is not None, 'reopen failed with utf8'

    data = gdal.VSIFReadL(3, 1, fd)
    gdal.VSIFCloseL(fd)

    if version_info >= (3, 0, 0):
        ok = eval("data == b'abc'")
    else:
        ok = data == 'abc'
    assert ok, 'did not get expected data.'

    gdal.Unlink(new_filename)

    fd = gdal.VSIFOpenL(new_filename, 'r')
    assert fd is None, 'did unlink fail on utf8 filename?'
def test_vsiaz_fake_rename():

    if gdaltest.webserver_port == 0:
        pytest.skip()

    gdal.VSICurlClearCache()

    handler = webserver.SequentialHandler()
    handler.add('HEAD', '/azure/blob/myaccount/test/source.txt',
                200, {'Content-Length': '3'})
    handler.add('HEAD', '/azure/blob/myaccount/test/target.txt', 404)
    handler.add('GET',
                '/azure/blob/myaccount/test?comp=list&delimiter=%2F&maxresults=1&prefix=target.txt%2F&restype=container',
                200)

    def method(request):
        if request.headers['Content-Length'] != '0':
            sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
            request.send_response(400)
            return

        expected = 'http://127.0.0.1:%d/azure/blob/myaccount/test/source.txt' % gdaltest.webserver_port
        if request.headers['x-ms-copy-source'] != expected:
            sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
            request.send_response(400)
            return

        request.send_response(202)
        request.send_header('Content-Length', 0)
        request.end_headers()

    handler.add('PUT', '/azure/blob/myaccount/test/target.txt', custom_method=method)
    handler.add('DELETE', '/azure/blob/myaccount/test/source.txt', 202)

    with webserver.install_http_handler(handler):
        assert gdal.Rename('/vsiaz/test/source.txt', '/vsiaz/test/target.txt') == 0
def test_vsiadls_fake_rename():

    if gdaltest.webserver_port == 0:
        pytest.skip()

    gdal.VSICurlClearCache()

    handler = webserver.SequentialHandler()
    handler.add('HEAD', '/azure/blob/myaccount/test/source.txt', 200,
                {'Content-Length': '3',
                 'x-ms-permissions': 'rwxrwxrwx',
                 'x-ms-resource-type': 'file'})
    handler.add('PUT', '/azure/blob/myaccount/test/target.txt', 201,
                expected_headers={'x-ms-rename-source': '/test/source.txt'})
    handler.add('HEAD', '/azure/blob/myaccount/test/source.txt', 404)
    with webserver.install_http_handler(handler):
        assert gdal.Rename('/vsiadls/test/source.txt', '/vsiadls/test/target.txt') == 0
        assert gdal.VSIStatL('/vsiadls/test/source.txt') is None
def vsizip_1():

    # We can keep the handle open during all the ZIP writing
    hZIP = gdal.VSIFOpenL("/vsizip/vsimem/test.zip", "wb")
    if hZIP is None:
        gdaltest.post_reason('fail 1')
        return 'fail'

    # One way to create a directory
    f = gdal.VSIFOpenL("/vsizip/vsimem/test.zip/subdir2/", "wb")
    if f is None:
        gdaltest.post_reason('fail 2')
        return 'fail'
    gdal.VSIFCloseL(f)

    # A more natural one
    gdal.Mkdir("/vsizip/vsimem/test.zip/subdir1", 0)

    # Create 1st file
    f2 = gdal.VSIFOpenL("/vsizip/vsimem/test.zip/subdir3/abcd", "wb")
    if f2 is None:
        gdaltest.post_reason('fail 3')
        return 'fail'
    gdal.VSIFWriteL("abcd", 1, 4, f2)
    gdal.VSIFCloseL(f2)

    # Test that we cannot read a zip file being written
    gdal.ErrorReset()
    gdal.PushErrorHandler('CPLQuietErrorHandler')
    f = gdal.VSIFOpenL("/vsizip/vsimem/test.zip/subdir3/abcd", "rb")
    gdal.PopErrorHandler()
    if gdal.GetLastErrorMsg() != 'Cannot read a zip file being written':
        gdaltest.post_reason('expected error')
        print(gdal.GetLastErrorMsg())
        return 'fail'
    if f is not None:
        gdaltest.post_reason('should not have been successful 1')
        return 'fail'

    # Create 2nd file
    f3 = gdal.VSIFOpenL("/vsizip/vsimem/test.zip/subdir3/efghi", "wb")
    if f3 is None:
        gdaltest.post_reason('fail 4')
        return 'fail'
    gdal.VSIFWriteL("efghi", 1, 5, f3)

    # Try creating a 3rd file
    gdal.ErrorReset()
    gdal.PushErrorHandler('CPLQuietErrorHandler')
    f4 = gdal.VSIFOpenL("/vsizip/vsimem/test.zip/that_wont_work", "wb")
    gdal.PopErrorHandler()
    if gdal.GetLastErrorMsg() != 'Cannot create that_wont_work while another file is being written in the .zip':
        gdaltest.post_reason('expected error')
        print(gdal.GetLastErrorMsg())
        return 'fail'
    if f4 is not None:
        gdaltest.post_reason('should not have been successful 2')
        return 'fail'

    gdal.VSIFCloseL(f3)

    # Now we can close the main handle
    gdal.VSIFCloseL(hZIP)

    f = gdal.VSIFOpenL("/vsizip/vsimem/test.zip/subdir3/abcd", "rb")
    if f is None:
        gdaltest.post_reason('fail 5')
        return 'fail'
    data = gdal.VSIFReadL(1, 4, f)
    gdal.VSIFCloseL(f)

    if data.decode('ASCII') != 'abcd':
        gdaltest.post_reason('fail')
        print(data)
        return 'fail'

    # Test alternate uri syntax
    gdal.Rename("/vsimem/test.zip", "/vsimem/test.xxx")
    f = gdal.VSIFOpenL("/vsizip/{/vsimem/test.xxx}/subdir3/abcd", "rb")
    if f is None:
        gdaltest.post_reason('fail')
        return 'fail'
    data = gdal.VSIFReadL(1, 4, f)
    gdal.VSIFCloseL(f)

    if data.decode('ASCII') != 'abcd':
        gdaltest.post_reason('fail')
        print(data)
        return 'fail'

    # With a trailing slash
    f = gdal.VSIFOpenL("/vsizip/{/vsimem/test.xxx}/subdir3/abcd/", "rb")
    if f is None:
        gdaltest.post_reason('fail')
        return 'fail'
    gdal.VSIFCloseL(f)

    # Test ReadDir()
    if len(gdal.ReadDir("/vsizip/{/vsimem/test.xxx}")) != 3:
        gdaltest.post_reason('fail')
        print(gdal.ReadDir("/vsizip/{/vsimem/test.xxx}"))
        return 'fail'

    # Unbalanced curls
    f = gdal.VSIFOpenL("/vsizip/{/vsimem/test.xxx", "rb")
    if f is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    # Non existing mainfile
    f = gdal.VSIFOpenL("/vsizip/{/vsimem/test.xxx}/bla", "rb")
    if f is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    # Non existing subfile
    f = gdal.VSIFOpenL("/vsizip/{/vsimem/test.zzz}/bla", "rb")
    if f is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    # Wrong syntax
    f = gdal.VSIFOpenL("/vsizip/{/vsimem/test.xxx}.aux.xml", "rb")
    if f is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    # Test nested { { } }
    hZIP = gdal.VSIFOpenL("/vsizip/{/vsimem/zipinzip.yyy}", "wb")
    if hZIP is None:
        gdaltest.post_reason('fail 1')
        return 'fail'
    f = gdal.VSIFOpenL("/vsizip/{/vsimem/zipinzip.yyy}/test.xxx", "wb")
    f_src = gdal.VSIFOpenL("/vsimem/test.xxx", "rb")
    data = gdal.VSIFReadL(1, 10000, f_src)
    gdal.VSIFCloseL(f_src)
    gdal.VSIFWriteL(data, 1, len(data), f)
    gdal.VSIFCloseL(f)
    gdal.VSIFCloseL(hZIP)

    f = gdal.VSIFOpenL("/vsizip/{/vsizip/{/vsimem/zipinzip.yyy}/test.xxx}/subdir3/abcd/", "rb")
    if f is None:
        gdaltest.post_reason('fail')
        return 'fail'
    data = gdal.VSIFReadL(1, 4, f)
    gdal.VSIFCloseL(f)
    if data.decode('ASCII') != 'abcd':
        gdaltest.post_reason('fail')
        print(data)
        return 'fail'

    gdal.Unlink("/vsimem/test.xxx")
    gdal.Unlink("/vsimem/zipinzip.yyy")

    return 'success'
def test_vsicrypt_3():

    if not gdaltest.has_vsicrypt:
        pytest.skip()

    for options in ['sector_size=16', 'alg=AES', 'alg=DES_EDE2', 'alg=DES_EDE3',
                    'alg=SKIPJACK', 'alg=invalid', 'mode=CBC', 'mode=CFB',
                    'mode=OFB', 'mode=CTR', 'mode=CBC_CTS', 'mode=invalid',
                    'freetext=my_free_text', 'add_key_check=yes']:

        gdal.Unlink('/vsimem/file.bin')

        if options == 'alg=invalid' or options == 'mode=invalid':
            with gdaltest.error_handler():
                fp = gdal.VSIFOpenL(
                    '/vsicrypt/key=DONT_USE_IN_PRODDONT_USE_IN_PROD,%s,file=/vsimem/file.bin' % options, 'wb')
        else:
            fp = gdal.VSIFOpenL(
                '/vsicrypt/key=DONT_USE_IN_PRODDONT_USE_IN_PROD,%s,file=/vsimem/file.bin' % options, 'wb')
        assert fp is not None, options
        gdal.VSIFWriteL('hello', 1, 5, fp)
        gdal.VSIFCloseL(fp)

        fp = gdal.VSIFOpenL(
            '/vsicrypt/key=DONT_USE_IN_PRODDONT_USE_IN_PROD,file=/vsimem/file.bin', 'r')
        content = gdal.VSIFReadL(1, 5, fp).decode('latin1')
        gdal.VSIFCloseL(fp)

        assert content == 'hello', options

    # Some of those algs might be missing
    for options in ['alg=Blowfish', 'alg=Camellia', 'alg=CAST256', 'alg=MARS',
                    'alg=IDEA', 'alg=RC5', 'alg=RC6', 'alg=Serpent',
                    'alg=SHACAL2', 'alg=Twofish', 'alg=XTEA']:

        gdal.Unlink('/vsimem/file.bin')

        with gdaltest.error_handler():
            fp = gdal.VSIFOpenL(
                '/vsicrypt/key=DONT_USE_IN_PROD,%s,file=/vsimem/file.bin' % options, 'wb')
        if fp is not None:
            gdal.VSIFWriteL('hello', 1, 5, fp)
            gdal.VSIFCloseL(fp)

            fp = gdal.VSIFOpenL(
                '/vsicrypt/key=DONT_USE_IN_PROD,file=/vsimem/file.bin', 'rb')
            content = gdal.VSIFReadL(1, 5, fp).decode('latin1')
            gdal.VSIFCloseL(fp)

            assert content == 'hello', options

    # Test key generation
    # Do NOT set VSICRYPT_CRYPTO_RANDOM=NO in production. This is just to speed up tests !
    gdal.SetConfigOption("VSICRYPT_CRYPTO_RANDOM", "NO")
    fp = gdal.VSIFOpenL(
        '/vsicrypt/key=GENERATE_IT,add_key_check=yes,file=/vsimem/file.bin', 'wb')
    gdal.SetConfigOption("VSICRYPT_CRYPTO_RANDOM", None)

    # Get the generated random key
    key_b64 = gdal.GetConfigOption('VSICRYPT_KEY_B64')
    assert key_b64 is not None

    gdal.VSIFWriteL('hello', 1, 5, fp)
    gdal.VSIFCloseL(fp)

    fp = gdal.VSIFOpenL('/vsicrypt//vsimem/file.bin', 'rb')
    content = gdal.VSIFReadL(1, 5, fp).decode('latin1')
    gdal.VSIFCloseL(fp)

    assert content == 'hello', options

    gdal.SetConfigOption('VSICRYPT_KEY_B64', None)

    fp = gdal.VSIFOpenL('/vsicrypt/key_b64=%s,file=/vsimem/file.bin' % key_b64, 'rb')
    content = gdal.VSIFReadL(1, 5, fp).decode('latin1')
    gdal.VSIFCloseL(fp)

    assert content == 'hello', options

    with gdaltest.error_handler():
        statRes = gdal.VSIStatL('/vsicrypt//vsimem/file.bin')
    assert statRes is None

    ret = gdal.Rename('/vsicrypt//vsimem/file.bin',
                      '/vsicrypt//vsimem/subdir_crypt/file.bin')
    assert ret == 0

    ret = gdal.Rename('/vsicrypt//vsimem/subdir_crypt/file.bin',
                      '/vsimem/subdir_crypt/file2.bin')
    assert ret == 0

    dir_content = gdal.ReadDir('/vsicrypt//vsimem/subdir_crypt')
    assert dir_content == ['file2.bin']

    gdal.Unlink('/vsimem/subdir_crypt/file2.bin')
def test_vsiaz_extra_1():

    if not gdaltest.built_against_curl():
        pytest.skip()

    az_resource = gdal.GetConfigOption('AZ_RESOURCE')
    if az_resource is None:
        pytest.skip('Missing AZ_RESOURCE')

    if '/' not in az_resource:
        path = '/vsiaz/' + az_resource
        statres = gdal.VSIStatL(path)
        assert statres is not None and stat.S_ISDIR(statres.mode), \
            ('%s is not a valid bucket' % path)

        readdir = gdal.ReadDir(path)
        assert readdir is not None, 'ReadDir() should not return empty list'
        for filename in readdir:
            if filename != '.':
                subpath = path + '/' + filename
                assert gdal.VSIStatL(subpath) is not None, \
                    ('Stat(%s) should not return an error' % subpath)

        unique_id = 'vsiaz_test'
        subpath = path + '/' + unique_id
        ret = gdal.Mkdir(subpath, 0)
        assert ret >= 0, ('Mkdir(%s) should not return an error' % subpath)

        readdir = gdal.ReadDir(path)
        assert unique_id in readdir, \
            ('ReadDir(%s) should contain %s' % (path, unique_id))

        ret = gdal.Mkdir(subpath, 0)
        assert ret != 0, ('Mkdir(%s) repeated should return an error' % subpath)

        ret = gdal.Rmdir(subpath)
        assert ret >= 0, ('Rmdir(%s) should not return an error' % subpath)

        readdir = gdal.ReadDir(path)
        assert unique_id not in readdir, \
            ('ReadDir(%s) should not contain %s' % (path, unique_id))

        ret = gdal.Mkdir(subpath, 0)
        assert ret >= 0, ('Mkdir(%s) should not return an error' % subpath)

        f = gdal.VSIFOpenL(subpath + '/test.txt', 'wb')
        assert f is not None
        gdal.VSIFWriteL('hello', 1, 5, f)
        gdal.VSIFCloseL(f)

        ret = gdal.Rmdir(subpath)
        assert ret != 0, \
            ('Rmdir(%s) on non empty directory should return an error' % subpath)

        f = gdal.VSIFOpenL(subpath + '/test.txt', 'rb')
        assert f is not None
        data = gdal.VSIFReadL(1, 5, f).decode('utf-8')
        assert data == 'hello'
        gdal.VSIFCloseL(f)

        md = gdal.GetFileMetadata(subpath + '/test.txt', 'HEADERS')
        assert 'x-ms-blob-type' in md

        md = gdal.GetFileMetadata(subpath + '/test.txt', 'METADATA')
        assert 'ETag' in md
        assert 'x-ms-blob-type' not in md

        md = gdal.GetFileMetadata(subpath + '/test.txt', 'TAGS')
        assert md == {}

        # Change properties
        assert gdal.SetFileMetadata(subpath + '/test.txt',
                                    {'x-ms-blob-content-type': 'foo'},
                                    'PROPERTIES')

        md = gdal.GetFileMetadata(subpath + '/test.txt', 'HEADERS')
        assert md['Content-Type'] == 'foo'

        # Change metadata
        assert gdal.SetFileMetadata(subpath + '/test.txt',
                                    {'x-ms-meta-FOO': 'BAR'},
                                    'METADATA')

        md = gdal.GetFileMetadata(subpath + '/test.txt', 'METADATA')
        assert md['x-ms-meta-FOO'] == 'BAR'

        # Change tags
        assert gdal.SetFileMetadata(subpath + '/test.txt',
                                    {'BAR': 'BAZ'},
                                    'TAGS')

        md = gdal.GetFileMetadata(subpath + '/test.txt', 'TAGS')
        assert md['BAR'] == 'BAZ'

        assert gdal.Rename(subpath + '/test.txt', subpath + '/test2.txt') == 0

        f = gdal.VSIFOpenL(subpath + '/test2.txt', 'rb')
        assert f is not None
        data = gdal.VSIFReadL(1, 5, f).decode('utf-8')
        assert data == 'hello'
        gdal.VSIFCloseL(f)

        ret = gdal.Unlink(subpath + '/test2.txt')
        assert ret >= 0, \
            ('Unlink(%s) should not return an error' % (subpath + '/test2.txt'))

        ret = gdal.Rmdir(subpath)
        assert ret >= 0, ('Rmdir(%s) should not return an error' % subpath)

        return

    f = open_for_read('/vsiaz/' + az_resource)
    assert f is not None
    ret = gdal.VSIFReadL(1, 1, f)
    gdal.VSIFCloseL(f)

    assert len(ret) == 1

    # Same with /vsiaz_streaming/
    f = open_for_read('/vsiaz_streaming/' + az_resource)
    assert f is not None
    ret = gdal.VSIFReadL(1, 1, f)
    gdal.VSIFCloseL(f)

    assert len(ret) == 1

    if False:  # pylint: disable=using-constant-test
        # we actually try to read at read() time and bSetError = false
        # Invalid bucket : "The specified bucket does not exist"
        gdal.ErrorReset()
        f = open_for_read('/vsiaz/not_existing_bucket/foo')
        with gdaltest.error_handler():
            gdal.VSIFReadL(1, 1, f)
        gdal.VSIFCloseL(f)
        assert gdal.VSIGetLastErrorMsg() != ''

    # Invalid resource
    gdal.ErrorReset()
    f = open_for_read('/vsiaz_streaming/' + az_resource + '/invalid_resource.baz')
    assert f is None, gdal.VSIGetLastErrorMsg()

    # Test GetSignedURL()
    signed_url = gdal.GetSignedURL('/vsiaz/' + az_resource)
    f = open_for_read('/vsicurl_streaming/' + signed_url)
    assert f is not None
    ret = gdal.VSIFReadL(1, 1, f)
    gdal.VSIFCloseL(f)

    assert len(ret) == 1
def test_vsizip_1():

    # We can keep the handle open during all the ZIP writing
    hZIP = gdal.VSIFOpenL("/vsizip/vsimem/test.zip", "wb")
    assert hZIP is not None, 'fail 1'

    # One way to create a directory
    f = gdal.VSIFOpenL("/vsizip/vsimem/test.zip/subdir2/", "wb")
    assert f is not None, 'fail 2'
    gdal.VSIFCloseL(f)

    # A more natural one
    gdal.Mkdir("/vsizip/vsimem/test.zip/subdir1", 0)

    # Create 1st file
    f2 = gdal.VSIFOpenL("/vsizip/vsimem/test.zip/subdir3/abcd", "wb")
    assert f2 is not None, 'fail 3'
    gdal.VSIFWriteL("abcd", 1, 4, f2)
    gdal.VSIFCloseL(f2)

    # Test that we cannot read a zip file being written
    gdal.ErrorReset()
    gdal.PushErrorHandler('CPLQuietErrorHandler')
    f = gdal.VSIFOpenL("/vsizip/vsimem/test.zip/subdir3/abcd", "rb")
    gdal.PopErrorHandler()
    assert gdal.GetLastErrorMsg() == 'Cannot read a zip file being written', \
        'expected error'
    assert f is None, 'should not have been successful 1'

    # Create 2nd file
    f3 = gdal.VSIFOpenL("/vsizip/vsimem/test.zip/subdir3/efghi", "wb")
    assert f3 is not None, 'fail 4'
    gdal.VSIFWriteL("efghi", 1, 5, f3)

    # Try creating a 3rd file
    gdal.ErrorReset()
    gdal.PushErrorHandler('CPLQuietErrorHandler')
    f4 = gdal.VSIFOpenL("/vsizip/vsimem/test.zip/that_wont_work", "wb")
    gdal.PopErrorHandler()
    assert gdal.GetLastErrorMsg() == 'Cannot create that_wont_work while another file is being written in the .zip', \
        'expected error'
    assert f4 is None, 'should not have been successful 2'

    gdal.VSIFCloseL(f3)

    # Now we can close the main handle
    gdal.VSIFCloseL(hZIP)

    # ERROR 6: Support only 1 file in archive file /vsimem/test.zip when no explicit in-archive filename is specified
    gdal.ErrorReset()
    with gdaltest.error_handler():
        f = gdal.VSIFOpenL('/vsizip/vsimem/test.zip', 'rb')
    if f is not None:
        gdal.VSIFCloseL(f)
    assert gdal.GetLastErrorMsg() != '', 'expected error'

    f = gdal.VSIFOpenL("/vsizip/vsimem/test.zip/subdir3/abcd", "rb")
    assert f is not None, 'fail 5'
    data = gdal.VSIFReadL(1, 4, f)
    gdal.VSIFCloseL(f)

    assert data.decode('ASCII') == 'abcd'

    # Test alternate uri syntax
    gdal.Rename("/vsimem/test.zip", "/vsimem/test.xxx")
    f = gdal.VSIFOpenL("/vsizip/{/vsimem/test.xxx}/subdir3/abcd", "rb")
    assert f is not None
    data = gdal.VSIFReadL(1, 4, f)
    gdal.VSIFCloseL(f)

    assert data.decode('ASCII') == 'abcd'

    # With a trailing slash
    f = gdal.VSIFOpenL("/vsizip/{/vsimem/test.xxx}/subdir3/abcd/", "rb")
    assert f is not None
    gdal.VSIFCloseL(f)

    # Test ReadDir()
    assert len(gdal.ReadDir("/vsizip/{/vsimem/test.xxx}")) == 3

    # Unbalanced curls
    f = gdal.VSIFOpenL("/vsizip/{/vsimem/test.xxx", "rb")
    assert f is None

    # Non existing mainfile
    f = gdal.VSIFOpenL("/vsizip/{/vsimem/test.xxx}/bla", "rb")
    assert f is None

    # Non existing subfile
    f = gdal.VSIFOpenL("/vsizip/{/vsimem/test.zzz}/bla", "rb")
    assert f is None

    # Wrong syntax
    f = gdal.VSIFOpenL("/vsizip/{/vsimem/test.xxx}.aux.xml", "rb")
    assert f is None

    # Test nested { { } }
    hZIP = gdal.VSIFOpenL("/vsizip/{/vsimem/zipinzip.yyy}", "wb")
    assert hZIP is not None, 'fail 1'
    f = gdal.VSIFOpenL("/vsizip/{/vsimem/zipinzip.yyy}/test.xxx", "wb")
    f_src = gdal.VSIFOpenL("/vsimem/test.xxx", "rb")
    data = gdal.VSIFReadL(1, 10000, f_src)
    gdal.VSIFCloseL(f_src)
    gdal.VSIFWriteL(data, 1, len(data), f)
    gdal.VSIFCloseL(f)
    gdal.VSIFCloseL(hZIP)

    f = gdal.VSIFOpenL("/vsizip/{/vsizip/{/vsimem/zipinzip.yyy}/test.xxx}/subdir3/abcd/", "rb")
    assert f is not None
    data = gdal.VSIFReadL(1, 4, f)
    gdal.VSIFCloseL(f)
    assert data.decode('ASCII') == 'abcd'

    gdal.Unlink("/vsimem/test.xxx")
    gdal.Unlink("/vsimem/zipinzip.yyy")

    # Test VSIStatL on a non existing file
    assert gdal.VSIStatL('/vsizip//vsimem/foo.zip') is None

    # Test ReadDir on a non existing file
    assert gdal.ReadDir('/vsizip//vsimem/foo.zip') is None
def to_gdf(mask_poly, layer="waterdeel", bronhouders=None, end_registration='now',
           log_level="INFO", service='default'):

    logging.basicConfig(level=os.environ.get("LOGLEVEL", log_level))

    if end_registration == 'now':
        end_registration = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S")

    if service == 'default':
        api_url = 'https://download.pdok.io'
        url = f'{api_url}/lv/bgt/api/v1/full/custom'
    elif service == 'website':
        api_url = 'https://api.pdok.nl'
        url = f'{api_url}/lv/bgt/download/v1_0/full/custom'

    body = {"format": "gmllight", "featuretypes": [layer]}

    if mask_poly.type == 'MultiPolygon':
        xmin, ymin, xmax, ymax = mask_poly.bounds
        body["geofilter"] = "POLYGON(({}))".format(
            ",".join(['{} {}'.format(coords[0], coords[1])
                      for coords in [[xmin, ymax], [xmax, ymax], [xmax, ymin],
                                     [xmin, ymin], [xmin, ymax]]]))
    else:
        body["geofilter"] = "POLYGON(({}))".format(
            ",".join(['{} {}'.format(coords[0], coords[1])
                      for coords in mask_poly.exterior.coords]))

    headers = {'content-type': 'application/json'}

    response = requests.post(url, headers=headers, data=json.dumps(body))

    # check api-status, if completed, download
    if response.status_code in range(200, 300):
        running = True
        url = '{}{}'.format(api_url, response.json()["_links"]["status"]["href"])
        while running:
            response = requests.get(url)
            if response.status_code in range(200, 300):
                logging.info('{}% ({})'.format(response.json()["progress"],
                                               response.json()['status']))
                status = response.json()['status']
                if status == "COMPLETED":
                    running = False
                else:
                    time.sleep(2)
            else:
                logging.error(response.text)
                logging.error(url)
                running = False
    else:
        logging.error(response.text)
        logging.error(url)

    logging.info('converting to gdf')
    response = requests.get('{}{}'.format(
        api_url, response.json()["_links"]["download"]["href"]))

    vsif = fiona.ogrext.buffer_to_virtual_file(bytes(response.content))
    vsiz = vsif + '.zip'
    gdal.Rename(vsif, vsiz)

    fc = fiona.Collection(vsiz, vsi='zip')
    gdf = gpd.GeoDataFrame.from_features(
        [feature for feature in fc], crs='epsg:28992')

    # select polygons after end_registration
    gdf = gdf[(gdf['eindRegistratie'] != gdf['eindRegistratie']) |
              (gdf['eindRegistratie'] > end_registration)]

    # select polygons of specific bronhouders
    if bronhouders is not None:
        gdf = gdf[gdf['bronhouder'].isin(bronhouders)]

    # select polygons within polygon mask
    gdf = gdf[gdf.intersects(mask_poly)]

    # re-order columns
    columns = [col for col in gdf.columns if not col == 'geometry'] + ['geometry']
    gdf = gdf[columns]

    return gdf
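# Usage sketch for to_gdf() above (illustrative, not part of the original source).
# It assumes shapely is installed alongside the imports used by to_gdf (requests,
# json, fiona, gdal, geopandas as gpd); the mask coordinates are placeholder
# RD New (EPSG:28992) values, the CRS used by the BGT service.
from shapely.geometry import Polygon

# Hypothetical rectangular mask polygon.
mask = Polygon([(85000, 444000), (86000, 444000), (86000, 445000), (85000, 445000)])

# Download the BGT 'waterdeel' features intersecting the mask as a GeoDataFrame.
waterdeel = to_gdf(mask, layer="waterdeel")
print(waterdeel.head())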
def get_features(self, layer, poly=None, object_filter='', object_id=None):
    '''
    Download features from a layer for a shapely polygon.

    Parameters:
        layer: integer
            layer number
        poly: shapely Polygon object
            used as a boundary
    '''
    if object_id is None:
        properties = requests.get(
            ('{url}/{layer}/?f=pjson').format(url=self.url, layer=layer)).json()
        if "uniqueIdField" in list(properties.keys()):
            object_id = properties["uniqueIdField"]["name"]
        else:
            if "fields" in list(properties.keys()):
                field = [field['name'] for field in properties["fields"]
                         if field['name'].lower() == "objectid"]
                if len(field) == 1:
                    object_id = field[0]
    if object_id is None:
        logging.error(
            'Processing data from the following url failed: {url}/{layer}/?f=pjson'.format(
                url=self.url, layer=layer))
        logging.error(('ArcREST Layer has no Unique ID Field, script defaulted to {object_id}. '
                       'Please specify a correct object_id for this layer & administration').format(
                          object_id=object_id))
        sys.exit()

    xmin, ymin, xmax, ymax = poly.bounds

    try:
        if not object_filter == '':
            object_filter = ' and {}'.format(object_filter)
        url = ('{url}/{layer}/query?'
               'where={object_id}>=0{object_filter}'
               '&geometry={xmin},{ymin},{xmax},{ymax}'
               '&geometryType=esriGeometryEnvelope'
               '&f=json'
               '&inSR={epsg}'
               '&returnIdsOnly=true').format(url=self.url, layer=layer,
                                             object_id=object_id,
                                             xmin=xmin, ymin=ymin,
                                             xmax=xmax, ymax=ymax,
                                             epsg=self.epsg,
                                             object_filter=object_filter)
        response = requests.get(url)
        if response.status_code == 200:
            if 'objectIds' in list(response.json().keys()):
                object_ids = response.json()['objectIds']
            else:
                object_ids = response.json()['properties']['objectIds']
            if (object_ids is None) | (object_ids == []):
                return gpd.GeoDataFrame()
            else:
                object_ids.sort()
                downloads = round(len(object_ids) / self.maxRecordCount + 0.5)
                gdf_list = []
                for download in range(downloads):
                    min_object = download * self.maxRecordCount
                    max_object = min(min_object + self.maxRecordCount - 1,
                                     len(object_ids) - 1)
                    url = ('{url}/{layer}/query?'
                           'where={min_objects}<={object_id} and {max_objects}>={object_id}{object_filter}'
                           '&outFields=*'
                           '&geometry={xmin},{ymin},{xmax},{ymax}'
                           '&geometryType=esriGeometryEnvelope'
                           '&inSR={epsg}'
                           '&outSR={epsg}&f={output_format}').format(url=self.url, layer=layer,
                                                                     object_id=object_id,
                                                                     min_objects=object_ids[min_object],
                                                                     max_objects=object_ids[max_object],
                                                                     object_filter=object_filter,
                                                                     xmin=xmin, ymin=ymin,
                                                                     xmax=xmax, ymax=ymax,
                                                                     epsg=self.epsg,
                                                                     output_format=self.format)
                    response = requests.post(url)
                    gdf = gpd.GeoDataFrame()
                    if len(response.json()['features']) > 0:
                        if self.format == 'json':
                            logging.warning(
                                'reading ESRI-json format (GeoJSON is preferred)')
                            vsif = fiona.ogrext.buffer_to_virtual_file(
                                bytes(response.content))
                            vsiz = vsif + '.json'
                            gdal.Rename(vsif, vsiz)
                            fc = fiona.Collection(vsiz)
                            gdf = gpd.GeoDataFrame.from_features(
                                [feature for feature in fc], crs=self.crs)
                            columns = [col for col in gdf.columns
                                       if not col == 'geometry'] + ['geometry']
                            gdf = gdf[columns]
                        else:
                            features = response.json()['features']
                            gdf = gpd.GeoDataFrame.from_features(features)
                    else:
                        logging.warning(
                            'no features returned for url: {}'.format(url))
                    if len(gdf) > 0:
                        gdf.crs = self.crs
                        gdf = gdf[gdf.intersects(poly)]
                        gdf_list += [gdf]
                if not gdf.empty:
                    if len(gdf_list) > 1:
                        gdf = gpd.GeoDataFrame(
                            pd.concat(gdf_list, ignore_index=True))
                    else:
                        gdf = gdf_list[0]
                    layer_name = [lay['name'] for lay in self.layers
                                  if lay['id'] == layer][0]
                    gdf['layer_name'] = layer_name
                return gdf
    except Exception as e:
        logging.error(
            'Processing data from the following url failed: {} with error {}'.format(url, e))
        raise e
def get_bgt(extent, layer="waterdeel", cut_by_extent=True):
    """
    Get geometries within an extent or polygon from the Basis Registratie
    Grootschalige Topografie (BGT).

    Parameters
    ----------
    extent : list or tuple of length 4 or shapely Polygon
        The extent (xmin, xmax, ymin, ymax) or polygon for which shapes are
        requested.
    layer : string, optional
        The layer for which shapes are requested. The default is "waterdeel".
    cut_by_extent : bool, optional
        Only return the intersection with the extent if True. The default is
        True.

    Returns
    -------
    gdf : GeoPandas GeoDataFrame
        A GeoDataFrame containing all geometries and properties.
    """
    api_url = 'https://api.pdok.nl'
    url = '{}/lv/bgt/download/v1_0/full/custom'.format(api_url)

    body = {"format": "citygml", "featuretypes": [layer]}

    if isinstance(extent, Polygon):
        polygon = extent
    else:
        polygon = extent2polygon(extent)

    body['geofilter'] = polygon.to_wkt()

    headers = {'content-type': 'application/json'}

    response = requests.post(url, headers=headers, data=json.dumps(body))

    # check api-status, if completed, download
    if response.status_code in range(200, 300):
        running = True
        href = response.json()["_links"]["status"]["href"]
        url = '{}{}'.format(api_url, href)
        while running:
            response = requests.get(url)
            if response.status_code in range(200, 300):
                status = response.json()['status']
                if status == "COMPLETED":
                    running = False
                else:
                    time.sleep(2)
            else:
                running = False
    else:
        msg = 'Download of bgt-data failed: {}'.format(response.text)
        raise Exception(msg)

    href = response.json()["_links"]["download"]["href"]
    response = requests.get('{}{}'.format(api_url, href))

    vsif = fiona.ogrext.buffer_to_virtual_file(bytes(response.content))
    vsiz = vsif + '.zip'
    gdal.Rename(vsif, vsiz)

    fc = fiona.Collection(vsiz, vsi='zip')
    gdf = gpd.GeoDataFrame.from_features([feature for feature in fc],
                                         crs='epsg:28992')

    # remove double features by removing features with an eindRegistratie
    gdf = gdf[gdf['eindRegistratie'].isna()]

    # re-order columns
    columns = [col for col in gdf.columns if not col == 'geometry'] + ['geometry']
    gdf = gdf[columns]

    if cut_by_extent:
        gdf.geometry = gdf.intersection(polygon)
        gdf = gdf[~gdf.is_empty]

    return gdf
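# Usage sketch for get_bgt() above (illustrative, not part of the original source).
# It assumes the imports used by get_bgt (requests, json, fiona, gdal, geopandas
# as gpd, shapely's Polygon, and the extent2polygon helper) are available; the
# extent values are placeholder RD New (EPSG:28992) coordinates.
extent = (117850, 118180, 439550, 439900)  # (xmin, xmax, ymin, ymax), placeholder values

# Request the 'waterdeel' layer, clipped to the extent.
water = get_bgt(extent, layer="waterdeel", cut_by_extent=True)
print(water.head())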
def test_vsiadls_real_instance_tests():

    adls_resource = gdal.GetConfigOption('ADLS_RESOURCE')
    if adls_resource is None:
        pytest.skip('Missing ADLS_RESOURCE')

    if '/' not in adls_resource:
        path = '/vsiadls/' + adls_resource

        try:
            statres = gdal.VSIStatL(path)
            assert statres is not None and stat.S_ISDIR(statres.mode), \
                ('%s is not a valid bucket' % path)

            readdir = gdal.ReadDir(path)
            assert readdir is not None, 'ReadDir() should not return empty list'
            for filename in readdir:
                if filename != '.':
                    subpath = path + '/' + filename
                    assert gdal.VSIStatL(subpath) is not None, \
                        ('Stat(%s) should not return an error' % subpath)

            unique_id = 'vsiadls_test'
            subpath = path + '/' + unique_id
            ret = gdal.Mkdir(subpath, 0)
            assert ret >= 0, ('Mkdir(%s) should not return an error' % subpath)

            readdir = gdal.ReadDir(path)
            assert unique_id in readdir, \
                ('ReadDir(%s) should contain %s' % (path, unique_id))

            ret = gdal.Mkdir(subpath, 0)
            assert ret != 0, ('Mkdir(%s) repeated should return an error' % subpath)

            ret = gdal.Rmdir(subpath)
            assert ret >= 0, ('Rmdir(%s) should not return an error' % subpath)

            readdir = gdal.ReadDir(path)
            assert unique_id not in readdir, \
                ('ReadDir(%s) should not contain %s' % (path, unique_id))

            ret = gdal.Rmdir(subpath)
            assert ret != 0, ('Rmdir(%s) repeated should return an error' % subpath)

            ret = gdal.Mkdir(subpath, 0)
            assert ret >= 0, ('Mkdir(%s) should not return an error' % subpath)

            f = gdal.VSIFOpenL(subpath + '/test.txt', 'wb')
            assert f is not None
            gdal.VSIFWriteL('hello', 1, 5, f)
            gdal.VSIFCloseL(f)

            ret = gdal.Rmdir(subpath)
            assert ret != 0, \
                ('Rmdir(%s) on non empty directory should return an error' % subpath)

            f = gdal.VSIFOpenL(subpath + '/test.txt', 'rb')
            assert f is not None
            data = gdal.VSIFReadL(1, 5, f).decode('utf-8')
            assert data == 'hello'
            gdal.VSIFCloseL(f)

            assert gdal.VSIStatL(subpath + '/test.txt') is not None

            md = gdal.GetFileMetadata(subpath + '/test.txt', 'HEADERS')
            assert 'x-ms-properties' in md

            md = gdal.GetFileMetadata(subpath + '/test.txt', 'STATUS')
            assert 'x-ms-resource-type' in md
            assert 'x-ms-properties' not in md

            md = gdal.GetFileMetadata(subpath + '/test.txt', 'ACL')
            assert 'x-ms-acl' in md
            assert 'x-ms-permissions' in md

            # Change properties
            # base64.b64encode() needs bytes in Python 3, hence the encode/decode
            properties_foo_bar = 'foo=' + base64.b64encode('bar'.encode('utf-8')).decode('utf-8')
            assert gdal.SetFileMetadata(subpath + '/test.txt',
                                        {'x-ms-properties': properties_foo_bar},
                                        'PROPERTIES')

            md = gdal.GetFileMetadata(subpath + '/test.txt', 'HEADERS')
            assert 'x-ms-properties' in md
            assert md['x-ms-properties'] == properties_foo_bar

            # Change ACL
            assert gdal.SetFileMetadata(subpath + '/test.txt',
                                        {'x-ms-permissions': '0777'},
                                        'ACL')

            md = gdal.GetFileMetadata(subpath + '/test.txt', 'ACL')
            assert 'x-ms-permissions' in md
            assert md['x-ms-permissions'] == 'rwxrwxrwx'

            # Change ACL recursively
            md = gdal.GetFileMetadata(subpath, 'ACL')
            assert 'x-ms-acl' in md
            assert gdal.SetFileMetadata(subpath + '/test.txt',
                                        {'x-ms-acl': md['x-ms-acl']},
                                        'ACL',
                                        ['RECURSIVE=YES', 'MODE=set'])

            assert gdal.Rename(subpath + '/test.txt', subpath + '/test2.txt') == 0

            assert gdal.VSIStatL(subpath + '/test.txt') is None
            assert gdal.VSIStatL(subpath + '/test2.txt') is not None

            f = gdal.VSIFOpenL(subpath + '/test2.txt', 'rb')
            assert f is not None
            data = gdal.VSIFReadL(1, 5, f).decode('utf-8')
            assert data == 'hello'
            gdal.VSIFCloseL(f)

            ret = gdal.Unlink(subpath + '/test2.txt')
            assert ret >= 0, \
                ('Unlink(%s) should not return an error' % (subpath + '/test2.txt'))

            assert gdal.VSIStatL(subpath + '/test2.txt') is None

            assert gdal.Unlink(subpath + '/test2.txt') != 0, \
                "Unlink on a deleted file should return an error"

            f = gdal.VSIFOpenL(subpath + '/test2.txt', 'wb')
            assert f is not None
            gdal.VSIFCloseL(f)

            assert gdal.VSIStatL(subpath + '/test2.txt') is not None

        finally:
            assert gdal.RmdirRecursive(subpath) == 0

        return

    f = open_for_read('/vsiadls/' + adls_resource)
    assert f is not None
    ret = gdal.VSIFReadL(1, 1, f)
    gdal.VSIFCloseL(f)

    assert len(ret) == 1

    # Test GetSignedURL()
    signed_url = gdal.GetSignedURL('/vsiadls/' + adls_resource)
    f = open_for_read('/vsicurl_streaming/' + signed_url)
    assert f is not None
    ret = gdal.VSIFReadL(1, 1, f)
    gdal.VSIFCloseL(f)

    assert len(ret) == 1