def ogr_pgdump_2():
    """Create a PGDump datasource in COPY mode with SCHEMA/SRID/GEOMETRY_NAME
    layer options, load features from data/poly.shp, then verify that the
    generated SQL script contains the expected DDL and COPY statements.

    Returns 'success' or 'fail' (legacy gdaltest convention).
    """

    # Remove any leftover output from a previous run; ignore if absent.
    try:
        os.remove('tmp/tpoly.sql')
    except:
        pass

    # COPY mode: features are emitted as COPY ... FROM STDIN data lines.
    gdal.SetConfigOption('PG_USE_COPY', 'YES')

    ds = ogr.GetDriverByName('PGDump').CreateDataSource(
        'tmp/tpoly.sql', options=['LINEFORMAT=CRLF'])

    ######################################################
    # Create Layer
    lyr = ds.CreateLayer('tpoly', geom_type=ogr.wkbPolygon, options=[
        'SCHEMA=another_schema', 'SRID=4326', 'GEOMETRY_NAME=the_geom'
    ])

    ######################################################
    # Setup Schema
    ogrtest.quick_create_layer_def(lyr, [('AREA', ogr.OFTReal),
                                         ('EAS_ID', ogr.OFTInteger),
                                         ('PRFEDEA', ogr.OFTString),
                                         ('SHORTNAME', ogr.OFTString, 8)])

    ######################################################
    # Copy in poly.shp
    dst_feat = ogr.Feature(feature_def=lyr.GetLayerDefn())

    shp_ds = ogr.Open('data/poly.shp')
    shp_lyr = shp_ds.GetLayer(0)

    feat = shp_lyr.GetNextFeature()
    # Source features are kept for later tests (module-level state).
    gdaltest.poly_feat = []

    while feat is not None:
        gdaltest.poly_feat.append(feat)
        dst_feat.SetFrom(feat)
        lyr.CreateFeature(dst_feat)
        feat = shp_lyr.GetNextFeature()

    dst_feat.Destroy()
    # Destroying the datasource flushes the SQL script to disk.
    ds.Destroy()

    gdal.SetConfigOption('PG_USE_COPY', 'NO')

    f = open('tmp/tpoly.sql')
    sql = f.read()
    f.close()

    # Check that every expected statement appears in the emitted script,
    # including one COPY data row (hex-encoded EWKB geometry + attributes,
    # \N marking the NULL shortname) and the COPY terminator "\.".
    if sql.find("""DROP TABLE IF EXISTS "another_schema"."tpoly" CASCADE;""") == -1 or \
       sql.find("""DELETE FROM geometry_columns WHERE f_table_name = 'tpoly' AND f_table_schema = 'another_schema';""") == -1 or \
       sql.find("""BEGIN;""") == -1 or \
       sql.find("""CREATE TABLE "another_schema"."tpoly" ( "ogc_fid" SERIAL, CONSTRAINT "tpoly_pk" PRIMARY KEY ("ogc_fid") );""") == -1 or \
       sql.find("""SELECT AddGeometryColumn('another_schema','tpoly','the_geom',4326,'POLYGON',2);""") == -1 or \
       sql.find("""CREATE INDEX "tpoly_the_geom_geom_idx" ON "another_schema"."tpoly" USING GIST ("the_geom");""") == -1 or \
       sql.find("""ALTER TABLE "another_schema"."tpoly" ADD COLUMN "area" FLOAT8;""") == -1 or \
       sql.find("""ALTER TABLE "another_schema"."tpoly" ADD COLUMN "eas_id" INTEGER;""") == -1 or \
       sql.find("""ALTER TABLE "another_schema"."tpoly" ADD COLUMN "prfedea" VARCHAR;""") == -1 or \
       sql.find("""ALTER TABLE "another_schema"."tpoly" ADD COLUMN "shortname" VARCHAR(8);""") == -1 or \
       sql.find("""COPY "another_schema"."tpoly" ("the_geom", "area", "eas_id", "prfedea", "shortname") FROM STDIN;""") == -1 or \
       sql.find("0103000020E61000000100000005000000000000C01A481D4100000080072D524100000060AA461D4100000080FF2C52410000006060461D41000000400C2D5241000000A0DF471D4100000000142D5241000000C01A481D4100000080072D5241 5268.813 170 35043413 \\N") == -1 or \
       sql.find("""\.""") == -1 or \
       sql.find("""COMMIT;""") == -1 :
        print(sql)
        return 'fail'

    return 'success'
def eedai_2():
    """Test the EEDAI (Earth Engine Data API Image) driver against a mocked
    /vsimem/ service: asset metadata, subdataset grouping by resolution,
    NPY pixel encoding, and per-band property propagation.

    Returns 'success', 'fail' or 'skip' (legacy gdaltest convention).
    """
    if gdaltest.eedai_drv is None:
        return 'skip'

    # Fake asset description: B1 and B9 share a 60 m grid (1830x1831),
    # B2 is on a finer grid (10980x10981), so the driver should expose
    # two subdatasets grouped by grid.
    gdal.FileFromMemBuffer(
        '/vsimem/ee/assets/image',
        json.dumps({
            'type': 'IMAGE',
            'properties': {
                'foo': 'bar',
                'prop_B9': 'the_prop_B9',
                'prop_BAND_2': 'the_prop_B2'
            },
            'bands': [
                {
                    "id": "B1",
                    "dataType": {
                        "precision": "INTEGER",
                        "range": {"max": 65535}
                    },
                    "grid": {
                        "crsCode": "EPSG:32610",
                        "affineTransform": {
                            "translateX": 499980,
                            "translateY": 4200000,
                            "scaleX": 60,
                            "scaleY": -60
                        },
                        "dimensions": {"width": 1830, "height": 1831}
                    }
                },
                {
                    "id": "B2",
                    "dataType": {
                        "precision": "INTEGER",
                        "range": {"max": 65535}
                    },
                    "grid": {
                        "crsCode": "EPSG:32610",
                        "affineTransform": {
                            "translateX": 499980,
                            "translateY": 4200000,
                            "scaleX": 60,
                            "scaleY": -60
                        },
                        "dimensions": {"width": 10980, "height": 10981}
                    }
                },
                {
                    "id": "B9",
                    "dataType": {
                        "precision": "INTEGER",
                        "range": {"max": 65535}
                    },
                    "grid": {
                        "crsCode": "EPSG:32610",
                        "affineTransform": {
                            "translateX": 499980,
                            "translateY": 4200000,
                            "scaleX": 60,
                            "scaleY": -60
                        },
                        "dimensions": {"width": 1830, "height": 1831}
                    }
                },
            ]
        }))

    # To please the unregistering of the persistent connection
    gdal.FileFromMemBuffer('/vsimem/ee/', '')

    gdal.SetConfigOption('EEDA_BEARER', 'mybearer')
    gdal.SetConfigOption('EEDA_URL', '/vsimem/ee/')
    ds = gdal.OpenEx('EEDAI:image', open_options=['PIXEL_ENCODING=NPY'])
    gdal.SetConfigOption('EEDA_URL', None)

    # Expected gdal.Info(..., format='json') output for the default
    # subdataset (B1 + B9 at 60 m resolution).
    expected_info = {
        "metadata": {
            "": {"foo": "bar"},
            "SUBDATASETS": {
                "SUBDATASET_1_DESC": "Bands B1,B9 of image",
                "SUBDATASET_2_DESC": "Band B2 of image",
                "SUBDATASET_2_NAME": "EEDAI:image:B2",
                "SUBDATASET_1_NAME": "EEDAI:image:B1,B9"
            },
            "IMAGE_STRUCTURE": {"INTERLEAVE": "PIXEL"}
        },
        "size": [1830, 1831],
        "driverLongName": "Earth Engine Data API Image",
        "bands": [{
            "colorInterpretation": "Undefined",
            "block": [256, 256],
            "metadata": {},
            "band": 1,
            "description": "B1",
            "type": "UInt16",
            "overviews": [{"size": [915, 915]},
                          {"size": [457, 457]},
                          {"size": [228, 228]}]
        }, {
            "colorInterpretation": "Undefined",
            "block": [256, 256],
            "metadata": {"": {"prop": "the_prop_B9"}},
            "band": 2,
            "description": "B9",
            "type": "UInt16",
            "overviews": [{"size": [915, 915]},
                          {"size": [457, 457]},
                          {"size": [228, 228]}]
        }],
        "cornerCoordinates": {
            "center": [554880.0, 4145070.0],
            "upperRight": [609780.0, 4200000.0],
            "lowerLeft": [499980.0, 4090140.0],
            "lowerRight": [609780.0, 4090140.0],
            "upperLeft": [499980.0, 4200000.0]
        },
        "files": [],
        "description": "EEDAI:image",
        "geoTransform": [499980.0, 60.0, 0.0, 4200000.0, 0.0, -60.0]
    }
    info = gdal.Info(ds, format='json')
    # Compare key by key for better diagnostics on mismatch.
    for key in expected_info:
        if not (key in info and info[key] == expected_info[key]):
            gdaltest.post_reason('fail')
            print('Got difference for key %s' % key)
            if key in info:
                print('Got: ' + str(info[key]))
            else:
                print('Does not exist in got info')
            print('Expected: ' + str(expected_info[key]))
            print('Whole info:')
            print(json.dumps(info, indent=4))
            return 'fail'

    if ds.GetProjectionRef().find('32610') < 0:
        gdaltest.post_reason('fail')
        print(ds.GetProjectionRef())
        return 'fail'

    # Out-of-range overview indices must return None, not crash.
    if ds.GetRasterBand(1).GetOverview(-1) is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    if ds.GetRasterBand(1).GetOverview(3) is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    # Hand-build a minimal NPY (NumPy .npy v1.0) response: magic bytes,
    # header length, structured-dtype header, then 38x39 records of two
    # little-endian int16 values (12345 for B1, 23456 for B9).
    npy_serialized = struct.pack(
        'B' * 8, 0x93, ord('N'), ord('U'), ord('M'), ord('P'), ord('Y'), 1, 0)
    descr = "{'descr': [('B1', '<u2'), ('B9', '<u2')], 'fortran_order': False, 'shape': (39, 38), }".encode(
        'ascii')
    npy_serialized += struct.pack('<h', len(descr))
    npy_serialized += descr
    val = struct.pack('<h', 12345) + struct.pack('<h', 23456)
    npy_serialized += ''.encode('ascii').join(val for i in range(38 * 39))

    # The mock server is keyed on the exact POST payload the driver is
    # expected to send for a read near the bottom-right corner.
    gdal.FileFromMemBuffer(
        '/vsimem/ee/assets:getPixels&CUSTOMREQUEST=POST&POSTFIELDS={ "path": "image", "encoding": "NPY", "bandIds": [ "B1", "B9" ], "grid": { "affineTransform": { "translateX": 607500.0, "translateY": 4092480.0, "scaleX": 60.0, "scaleY": -60.0, "shearX": 0.0, "shearY": 0.0 }, "dimensions": { "width": 38, "height": 39 } } }',
        npy_serialized)
    got_data = ds.GetRasterBand(1).ReadRaster(1800, 1810, 1, 1)
    got_data = struct.unpack('h', got_data)[0]
    if got_data != 12345:
        gdaltest.post_reason('fail')
        print(got_data)
        return 'fail'
    got_data = ds.GetRasterBand(2).ReadRaster(1800, 1810, 1, 1)
    got_data = struct.unpack('h', got_data)[0]
    if got_data != 23456:
        gdaltest.post_reason('fail')
        print(got_data)
        return 'fail'

    ds = None

    # Explicit subdataset open: the two 60 m bands together...
    gdal.SetConfigOption('EEDA_URL', '/vsimem/ee/')
    sub_ds = gdal.Open('EEDAI:image:B1,B9')
    gdal.SetConfigOption('EEDA_URL', None)
    if sub_ds.RasterCount != 2:
        gdaltest.post_reason('fail')
        print(sub_ds.RasterCount)
        return 'fail'

    # ...and the lone fine-resolution band.
    gdal.SetConfigOption('EEDA_URL', '/vsimem/ee/')
    sub_ds = gdal.Open('EEDAI:image:B2')
    gdal.SetConfigOption('EEDA_URL', None)
    if sub_ds.RasterCount != 1:
        gdaltest.post_reason('fail')
        print(sub_ds.RasterCount)
        return 'fail'

    # 'prop_BAND_2' in asset properties becomes band metadata 'prop'.
    got_md = sub_ds.GetRasterBand(1).GetMetadata()
    expected_md = {'prop': 'the_prop_B2'}
    if got_md != expected_md:
        gdaltest.post_reason('fail')
        print(got_md)
        return 'fail'

    gdal.SetConfigOption('EEDA_BEARER', None)

    return 'success'
# vsicurl_2, # vsicurl_3, vsicurl_4, vsicurl_5, #vsicurl_6_disabled, vsicurl_7, # vsicurl_8, vsicurl_9, vsicurl_10, vsicurl_11, vsicurl_start_webserver, vsicurl_test_redirect, vsicurl_test_clear_cache, vsicurl_test_retry, vsicurl_test_fallback_from_head_to_get, vsicurl_test_parse_html_filelist_apache, vsicurl_stop_webserver ] if __name__ == '__main__': if gdal.GetConfigOption('GDAL_RUN_SLOW_TESTS', '').upper() != 'NO': print('Enabling slow tests as GDAL_RUN_SLOW_TESTS is not defined') gdal.SetConfigOption('GDAL_RUN_SLOW_TESTS', 'YES') gdaltest.setup_run('vsicurl') gdaltest.run_tests(gdaltest_list) sys.exit(gdaltest.summarize())
def __init__(
    self,
    host="localhost",
    port=5432,
    db_name="postgis",
    user="******",
    password="******",
    views_included=1,
    dico_dataset=None,
    txt=None,
):
    """Uses gdal/ogr functions to extract basic informations about
    geographic file (handles shapefile or MapInfo tables) and store
    into the dictionaries.

    host = PostgreSQL server host name
    port = PostgreSQL server port
    db_name = database name
    user / password = connection credentials
    views_included = if truthy, list PostgreSQL views as layers too
    dico_dataset = dictionary for global informations (a fresh
                   OrderedDict is created when not provided)
    txt = dictionary of texts to display (fresh dict when not provided)
    """
    # Fix: the previous signature used mutable defaults
    # (dico_dataset=OrderedDict(), txt=dict()), which are created once
    # at definition time and silently shared by every instance that
    # relies on the default. Use None sentinels instead.
    if dico_dataset is None:
        dico_dataset = OrderedDict()
    if txt is None:
        txt = dict()

    # handling GDAL/OGR specific exceptions
    gdal.AllRegister()
    ogr.UseExceptions()
    gdal.UseExceptions()

    # Creating variables
    self.dico_dataset = dico_dataset
    self.txt = txt
    self.alert = 0
    if views_included:
        gdal.SetConfigOption(str("PG_LIST_ALL_TABLES"), str("YES"))
        logger.info("PostgreSQL views enabled.")
    else:
        gdal.SetConfigOption(str("PG_LIST_ALL_TABLES"), str("NO"))
        logger.info("PostgreSQL views disabled.")

    # connection infos
    self.host = host
    self.port = port
    self.db_name = db_name
    self.user = user
    self.password = password
    self.conn_settings = "PG: host={} port={} dbname={} user={} password={}".format(
        host, port, db_name, user, password)

    # testing connection
    self.conn = self.get_connection()
    if not self.conn:
        # Record the failure in the dataset dictionary and stop here;
        # sgbd info is only gathered on a working connection.
        self.alert += 1
        youtils.erratum(
            ctner=dico_dataset,
            mess_type=1,
            ds_lyr=self.conn_settings,
            mess="err_connection_failed",
        )
        dico_dataset["err_gdal"] = gdal_err.err_type, gdal_err.err_msg
        return None

    # sgbd info
    dico_dataset["sgbd_version"] = self.get_version()
    dico_dataset["sgbd_schemas"] = self.get_schemas()
def eedai_4():
    """Test EEDAI PNG pixel encoding: full-resolution and sub-sampled
    reads, band-level and dataset-level RasterIO, and cache behaviour.

    Returns 'success', 'fail' or 'skip' (legacy gdaltest convention).
    """
    if gdaltest.eedai_drv is None:
        return 'skip'

    # Fake asset with three byte-range bands on the same 60 m grid.
    gdal.FileFromMemBuffer(
        '/vsimem/ee/assets/image',
        json.dumps({
            'type': 'IMAGE',
            'bands': [{
                "id": "B1",
                "dataType": {
                    "precision": "INTEGER",
                    "range": {"max": 255}
                },
                "grid": {
                    "crsCode": "EPSG:32610",
                    "affineTransform": {
                        "translateX": 499980,
                        "translateY": 4200000,
                        "scaleX": 60,
                        "scaleY": -60
                    },
                    "dimensions": {"width": 1830, "height": 1831}
                }
            }, {
                "id": "B2",
                "dataType": {
                    "precision": "INTEGER",
                    "range": {"max": 255}
                },
                "grid": {
                    "crsCode": "EPSG:32610",
                    "affineTransform": {
                        "translateX": 499980,
                        "translateY": 4200000,
                        "scaleX": 60,
                        "scaleY": -60
                    },
                    "dimensions": {"width": 1830, "height": 1831}
                }
            }, {
                "id": "B3",
                "dataType": {
                    "precision": "INTEGER",
                    "range": {"max": 255}
                },
                "grid": {
                    "crsCode": "EPSG:32610",
                    "affineTransform": {
                        "translateX": 499980,
                        "translateY": 4200000,
                        "scaleX": 60,
                        "scaleY": -60
                    },
                    "dimensions": {"width": 1830, "height": 1831}
                }
            }]
        }))

    gdal.SetConfigOption('EEDA_BEARER', 'mybearer')
    gdal.SetConfigOption('EEDA_URL', '/vsimem/ee/')
    ds = gdal.Open('EEDAI:image')
    gdal.SetConfigOption('EEDA_URL', None)

    # Build a 256x256 3-band PNG with constant values 127/128/129 to
    # serve as the mocked server response.
    mem_ds = gdal.GetDriverByName('MEM').Create('', 256, 256, 3)
    mem_ds.GetRasterBand(1).Fill(127)
    mem_ds.GetRasterBand(2).Fill(128)
    mem_ds.GetRasterBand(3).Fill(129)
    gdal.GetDriverByName('PNG').CreateCopy('/vsimem/out.png', mem_ds)
    f = gdal.VSIFOpenL('/vsimem/out.png', 'rb')
    png_data = gdal.VSIFReadL(1, 1000000, f)
    gdal.VSIFCloseL(f)
    gdal.Unlink('/vsimem/out.png')

    # Mock response keyed on the exact full-resolution request payload.
    gdal.FileFromMemBuffer(
        '/vsimem/ee/assets:getPixels&CUSTOMREQUEST=POST&POSTFIELDS={ "path": "image", "encoding": "PNG", "bandIds": [ "B1", "B2", "B3" ], "grid": { "affineTransform": { "translateX": 499980.0, "translateY": 4200000.0, "scaleX": 60.0, "scaleY": -60.0, "shearX": 0.0, "shearY": 0.0 }, "dimensions": { "width": 256, "height": 256 } } }',
        png_data)
    got_data = ds.GetRasterBand(1).ReadRaster(0, 0, 1, 1)
    got_data = struct.unpack('B', got_data)[0]
    if got_data != 127:
        gdaltest.post_reason('fail')
        print(got_data)
        return 'fail'

    # Same with dataset RasterIO
    got_data = ds.ReadRaster(0, 0, 1, 1)
    got_data = struct.unpack('B' * 3, got_data)
    if got_data != (127, 128, 129):
        gdaltest.post_reason('fail')
        print(got_data)
        return 'fail'

    # Same after flushing cache
    ds.FlushCache()
    got_data = ds.ReadRaster(0, 0, 1, 1)
    got_data = struct.unpack('B' * 3, got_data)
    if got_data != (127, 128, 129):
        gdaltest.post_reason('fail')
        print(got_data)
        return 'fail'

    # Sub-sampled query
    gdal.FileFromMemBuffer(
        '/vsimem/ee/assets:getPixels&CUSTOMREQUEST=POST&POSTFIELDS={ "path": "image", "encoding": "PNG", "bandIds": [ "B1", "B2", "B3" ], "grid": { "affineTransform": { "translateX": 499980.0, "translateY": 4200000.0, "scaleX": 120.0, "scaleY": -120.06557377049181, "shearX": 0.0, "shearY": 0.0 }, "dimensions": { "width": 256, "height": 256 } } }',
        png_data)
    got_data = ds.GetRasterBand(1).ReadRaster(
        0, 0, 2, 2, buf_xsize=1, buf_ysize=1)
    got_data = struct.unpack('B', got_data)[0]
    if got_data != 127:
        gdaltest.post_reason('fail')
        print(got_data)
        return 'fail'

    # Same after flushing cache with dataset RasterIO
    ds.FlushCache()
    got_data = ds.ReadRaster(0, 0, 2, 2, buf_xsize=1, buf_ysize=1)
    got_data = struct.unpack('B' * 3, got_data)
    if got_data != (127, 128, 129):
        gdaltest.post_reason('fail')
        print(got_data)
        return 'fail'

    ds = None

    gdal.SetConfigOption('EEDA_BEARER', None)

    return 'success'
def test_vsigs_1():
    """Test /vsigs/ error paths with no real network: bad header file,
    missing credentials, and nonexistent objects.

    NOTE(review): this test mutates global config options
    (GS_SECRET_ACCESS_KEY, GS_ACCESS_KEY_ID) without restoring them —
    presumably later tests in the module rely on that; confirm before
    reordering tests.
    """
    if not gdaltest.built_against_curl():
        pytest.skip()

    # Invalid header filename
    gdal.ErrorReset()
    with gdaltest.config_option('GDAL_HTTP_HEADER_FILE', '/i_dont/exist.py'):
        with gdaltest.config_option('CPL_GCE_SKIP', 'YES'):
            with gdaltest.error_handler():
                f = open_for_read('/vsigs/foo/bar')
    if f is not None:
        gdal.VSIFCloseL(f)
        pytest.fail()
    last_err = gdal.GetLastErrorMsg()
    assert 'Cannot read' in last_err

    # Invalid content for header file
    with gdaltest.config_option('GDAL_HTTP_HEADER_FILE', 'vsigs.py'):
        with gdaltest.config_option('CPL_GCE_SKIP', 'YES'):
            f = open_for_read('/vsigs/foo/bar')
    if f is not None:
        gdal.VSIFCloseL(f)
        pytest.fail()

    # Missing GS_SECRET_ACCESS_KEY
    gdal.ErrorReset()
    with gdaltest.config_option('CPL_GCE_SKIP', 'YES'):
        with gdaltest.error_handler():
            f = open_for_read('/vsigs/foo/bar')
    assert f is None and gdal.VSIGetLastErrorMsg().find(
        'GS_SECRET_ACCESS_KEY') >= 0

    # Same check through the streaming filesystem.
    gdal.ErrorReset()
    with gdaltest.config_option('CPL_GCE_SKIP', 'YES'):
        with gdaltest.error_handler():
            f = open_for_read('/vsigs_streaming/foo/bar')
    assert f is None and gdal.VSIGetLastErrorMsg().find(
        'GS_SECRET_ACCESS_KEY') >= 0

    gdal.SetConfigOption('GS_SECRET_ACCESS_KEY', 'GS_SECRET_ACCESS_KEY')

    # Missing GS_ACCESS_KEY_ID
    gdal.ErrorReset()
    with gdaltest.error_handler():
        f = open_for_read('/vsigs/foo/bar')
    assert f is None and gdal.VSIGetLastErrorMsg().find(
        'GS_ACCESS_KEY_ID') >= 0

    gdal.SetConfigOption('GS_ACCESS_KEY_ID', 'GS_ACCESS_KEY_ID')

    # ERROR 1: The User Id you provided does not exist in our records.
    gdal.ErrorReset()
    with gdaltest.error_handler():
        f = open_for_read('/vsigs/foo/bar.baz')
    if f is not None or gdal.VSIGetLastErrorMsg() == '':
        if f is not None:
            gdal.VSIFCloseL(f)
        # AppVeyor CI is known to give flaky results here; tolerate it.
        if gdal.GetConfigOption('APPVEYOR') is not None:
            return
        pytest.fail(gdal.VSIGetLastErrorMsg())

    gdal.ErrorReset()
    with gdaltest.error_handler():
        f = open_for_read('/vsigs_streaming/foo/bar.baz')
    assert f is None and gdal.VSIGetLastErrorMsg() != ''
def test_vsigs_read_credentials_file_refresh_token():
    """Test /vsigs/ authentication via a .boto credentials file holding an
    OAuth2 refresh token: the refresh token must be exchanged for an
    access token, which is then sent as a Bearer header on object reads.
    Uses the local mock webserver.
    """
    if gdaltest.webserver_port == 0:
        pytest.skip()

    # Clear key-based auth so the credentials file path is exercised.
    gdal.SetConfigOption('GS_SECRET_ACCESS_KEY', '')
    gdal.SetConfigOption('GS_ACCESS_KEY_ID', '')
    gdal.SetConfigOption('CPL_GS_CREDENTIALS_FILE', '/vsimem/.boto')
    gdal.SetConfigOption(
        'GOA2_AUTH_URL_TOKEN',
        'http://localhost:%d/accounts.google.com/o/oauth2/token' %
        gdaltest.webserver_port)

    gdal.VSICurlClearCache()

    gdal.FileFromMemBuffer(
        '/vsimem/.boto', """
[Credentials]
gs_oauth2_refresh_token = REFRESH_TOKEN
[OAuth2]
client_id = CLIENT_ID
client_secret = CLIENT_SECRET
""")

    handler = webserver.SequentialHandler()

    def method(request):
        # Token endpoint: verify the exact refresh-token POST body.
        content = request.rfile.read(int(
            request.headers['Content-Length'])).decode('ascii')
        if content != 'refresh_token=REFRESH_TOKEN&client_id=CLIENT_ID&client_secret=CLIENT_SECRET&grant_type=refresh_token':
            sys.stderr.write('Bad POST content: %s\n' % content)
            request.send_response(403)
            return
        request.send_response(200)
        request.send_header('Content-type', 'text/plain')
        # NOTE(review): the trailing comma makes this non-strict JSON;
        # presumably GDAL's parser tolerates it — confirm if changing.
        content = """{
                "access_token" : "ACCESS_TOKEN",
                "token_type" : "Bearer",
                "expires_in" : 3600,
                }"""
        request.send_header('Content-Length', len(content))
        request.end_headers()
        request.wfile.write(content.encode('ascii'))

    handler.add('POST', '/accounts.google.com/o/oauth2/token',
                custom_method=method)

    def method(request):
        # Object endpoint: require the Bearer token from the exchange.
        if 'Authorization' not in request.headers:
            sys.stderr.write('Bad headers: %s\n' % str(request.headers))
            request.send_response(403)
            return
        expected_authorization = 'Bearer ACCESS_TOKEN'
        if request.headers['Authorization'] != expected_authorization:
            sys.stderr.write("Bad Authorization: '%s'\n" %
                             str(request.headers['Authorization']))
            request.send_response(403)
            return
        request.send_response(200)
        request.send_header('Content-type', 'text/plain')
        request.send_header('Content-Length', 3)
        request.end_headers()
        request.wfile.write("""foo""".encode('ascii'))

    handler.add('GET', '/gs_fake_bucket/resource', custom_method=method)
    with webserver.install_http_handler(handler):
        f = open_for_read('/vsigs/gs_fake_bucket/resource')
        assert f is not None
        data = gdal.VSIFReadL(1, 4, f).decode('ascii')
        gdal.VSIFCloseL(f)

    assert data == 'foo'

    # Cleanup of the config state and the in-memory credentials file.
    gdal.SetConfigOption('CPL_GS_CREDENTIALS_FILE', '')
    gdal.SetConfigOption('GOA2_AUTH_URL_TOKEN', None)
    gdal.Unlink('/vsimem/.boto')
def test_gdalwarp_lib_128():
    """Test gdal.Warp() cutline densification (GDALWARP_DENSIFY_CUTLINE)
    with an RPC+DEM transformer, where the inverse transform of the
    cutline is non-trivial.

    Returns 'success' or 'fail' (legacy gdaltest convention).
    """
    mem_ds = gdal.GetDriverByName('MEM').Create('', 1177, 4719)
    # Hard-coded RPC model attached as RPC metadata domain.
    rpc = [
        "HEIGHT_OFF=109",
        "LINE_NUM_COEFF=-0.001245683 -0.09427649 -1.006342 -1.954469e-05 0.001033926 2.020534e-08 -3.845472e-07 -0.002075817 0.0005520694 0 -4.642442e-06 -3.271793e-06 2.705977e-05 -7.634384e-07 -2.132832e-05 -3.248862e-05 -8.17894e-06 -3.678094e-07 2.002032e-06 3.693162e-08",
        "LONG_OFF=7.1477",
        "SAMP_DEN_COEFF=1 0.01415176 -0.003715018 -0.001205632 -0.0007738299 4.057763e-05 -1.649126e-05 0.0001453584 0.0001628194 -7.354731e-05 4.821444e-07 -4.927701e-06 -1.942371e-05 -2.817499e-06 1.946396e-06 3.04243e-06 2.362282e-07 -2.5371e-07 -1.36993e-07 1.132432e-07",
        "LINE_SCALE=2360",
        "SAMP_NUM_COEFF=0.04337163 1.775948 -0.87108 0.007425391 0.01783631 0.0004057179 -0.000184695 -0.04257537 -0.01127869 -1.531228e-06 1.017961e-05 0.000572344 -0.0002941 -0.0001301705 -0.0003289546 5.394918e-05 6.388447e-05 -4.038289e-06 -7.525785e-06 -5.431241e-07",
        "LONG_SCALE=0.8383",
        "SAMP_SCALE=593",
        "SAMP_OFF=589",
        "LAT_SCALE=1.4127",
        "LAT_OFF=33.8992",
        "LINE_OFF=2359",
        "LINE_DEN_COEFF=1 0.0007273139 -0.0006006867 -4.272095e-07 2.578717e-05 4.718479e-06 -2.116976e-06 -1.347805e-05 -2.209958e-05 8.131258e-06 -7.290143e-08 5.105109e-08 -7.353388e-07 0 2.131142e-06 9.697701e-08 1.237039e-08 7.153246e-08 6.758015e-08 5.811124e-08",
        "HEIGHT_SCALE=96.3"
    ]
    mem_ds.SetMetadata(rpc, "RPC")
    mem_ds.GetRasterBand(1).Fill(255)

    # Cutline polygon written as a temporary GeoJSON datasource.
    cutlineDSName = '/vsimem/test_gdalwarp_lib_128.json'
    cutline_ds = ogr.GetDriverByName('GeoJSON').CreateDataSource(cutlineDSName)
    cutline_lyr = cutline_ds.CreateLayer('cutline')
    f = ogr.Feature(cutline_lyr.GetLayerDefn())
    f.SetGeometry(
        ogr.CreateGeometryFromWkt(
            'POLYGON ((7.2151 32.51930,7.214316 32.58116,7.216043 32.59476,7.21666 32.5193,7.2151 32.51930))'
        ))
    cutline_lyr.CreateFeature(f)
    f = None
    cutline_lyr = None
    cutline_ds = None

    # Default is GDALWARP_DENSIFY_CUTLINE=YES
    ds = gdal.Warp(
        '', mem_ds, format='MEM', cutlineDSName=cutlineDSName,
        dstSRS='EPSG:4326',
        outputBounds=[7.2, 32.52, 7.217, 32.59],
        xRes=0.000226555, yRes=0.000226555,
        transformerOptions=['RPC_DEM=data/test_gdalwarp_lib_128_dem.tif'])
    cs = ds.GetRasterBand(1).Checksum()
    if cs != 4248:
        gdaltest.post_reason('bad checksum')
        print(cs)
        return 'fail'

    # Below steps depend on GEOS
    if not ogrtest.have_geos():
        gdal.Unlink(cutlineDSName)
        return 'success'

    # ONLY_IF_INVALID must produce the same result as YES here.
    gdal.SetConfigOption('GDALWARP_DENSIFY_CUTLINE', 'ONLY_IF_INVALID')
    ds = gdal.Warp(
        '', mem_ds, format='MEM', cutlineDSName=cutlineDSName,
        dstSRS='EPSG:4326',
        outputBounds=[7.2, 32.52, 7.217, 32.59],
        xRes=0.000226555, yRes=0.000226555,
        transformerOptions=['RPC_DEM=data/test_gdalwarp_lib_128_dem.tif'])
    gdal.SetConfigOption('GDALWARP_DENSIFY_CUTLINE', None)
    cs = ds.GetRasterBand(1).Checksum()
    if cs != 4248:
        gdaltest.post_reason('bad checksum')
        print(cs)
        return 'fail'

    # With densification disabled the reverse-transformed cutline is
    # invalid and the warp is expected to fail (return None).
    gdal.SetConfigOption('GDALWARP_DENSIFY_CUTLINE', 'NO')
    with gdaltest.error_handler():
        ds = gdal.Warp(
            '', mem_ds, format='MEM', cutlineDSName=cutlineDSName,
            dstSRS='EPSG:4326',
            outputBounds=[7.2, 32.52, 7.217, 32.59],
            xRes=0.000226555, yRes=0.000226555,
            transformerOptions=['RPC_DEM=data/test_gdalwarp_lib_128_dem.tif'])
    gdal.SetConfigOption('GDALWARP_DENSIFY_CUTLINE', None)
    if ds is not None:
        gdaltest.post_reason('expected none return')
        return 'fail'

    gdal.Unlink(cutlineDSName)

    return 'success'
def get_metadata_2_file(self, file_name_with_path: str):
    """Extract the metadata of the vector file self.__file_name_with_path__
    (shapefile / gdb / mdb, ...) and write it as JSON to file_name_with_path.

    Returns a CResult merge of Success/Failure plus a (Chinese) message;
    on any failure the JSON file still gets written with 'result' and
    'message' fields describing the error.
    """
    vector_ds = None
    json_vector = None
    # PROJ_LIB can alternatively be set through the environment, e.g.
    # os.environ['PROJ_LIB'] = <path to osgeo proj data>
    # Success marker: the constant is -1 (the metadata JSON historically
    # used 1, hence the abs() variant that was considered and dropped).
    result_success = self.Success
    gdal.SetConfigOption("GDAL_FILENAME_IS_UTF8", "YES")
    gdal.SetConfigOption("SHAPE_ENCODING", "GBK")
    # JSON object describing the whole vector dataset.
    json_vector = CJson()
    mdb_flag = False
    vector_ds = ogr.Open(self.__file_name_with_path__)
    if vector_ds is None:
        # Retry with a GBK-encoded path (mdb/Chinese-path fallback).
        vector_ds = ogr.Open(self.__file_name_with_path__.encode('gbk'), 0)
        mdb_flag = True
    if vector_ds is None:
        message = '文件[{0}]打开失败!'.format(self.__file_name_with_path__)
        json_vector.set_value_of_name('result', self.Failure)
        json_vector.set_value_of_name('message', message)
        # Create the output directory if missing, then persist the JSON.
        if CFile.check_and_create_directory(file_name_with_path):
            json_vector.to_file(file_name_with_path)
        return CResult.merge_result(
            CResult.Failure,
            '文件[{0}]打开失败!'.format(self.__file_name_with_path__))
    try:
        layer_count = vector_ds.GetLayerCount()
        if layer_count == 0:
            message = '文件[{0}]没有图层!'.format(self.__file_name_with_path__)
            json_vector.set_value_of_name('result', self.Failure)
            json_vector.set_value_of_name('message', message)
            # Create the output directory if missing.
            if CFile.check_and_create_directory(file_name_with_path):
                json_vector.to_file(file_name_with_path)
            return CResult.merge_result(
                CResult.Failure,
                '文件[{0}]没有图层!'.format(self.__file_name_with_path__))
        shp_lyr = vector_ds.GetLayer(0)
        if shp_lyr is None:
            message = '文件[{0}]读取图层失败!'.format(self.__file_name_with_path__)
            json_vector.set_value_of_name('result', self.Failure)
            json_vector.set_value_of_name('message', message)
            # Create the output directory if missing.
            if CFile.check_and_create_directory(file_name_with_path):
                json_vector.to_file(file_name_with_path)
            return CResult.merge_result(
                CResult.Failure,
                '文件[{0}]读取图层失败!'.format(self.__file_name_with_path__))
        driver = vector_ds.GetDriver()
        if driver is None:
            message = '文件[{0}]读取驱动失败!'.format(self.__file_name_with_path__)
            json_vector.set_value_of_name('result', self.Failure)
            json_vector.set_value_of_name('message', message)
            # Create the output directory if missing.
            if CFile.check_and_create_directory(file_name_with_path):
                json_vector.to_file(file_name_with_path)
            return CResult.merge_result(
                CResult.Failure,
                '文件[{0}]读取驱动失败!'.format(self.__file_name_with_path__))
        # 'datasource' child node: file path + driver description.
        json_datasource = CJson()
        json_datasource.set_value_of_name('name', self.__file_name_with_path__)
        json_datasource.set_value_of_name('description', driver.name)
        json_vector.set_value_of_name('datasource', json_datasource.json_obj)
        # Count only the valid layers (shp has a single layer; gdb/mdb
        # datasources may carry several).
        layer_count_real, layer_list = self.get_vector_layercount_and_layers(
            vector_ds)
        json_vector.set_value_of_name('layer_count', layer_count_real)
        json_vector.set_value_of_name('result', result_success)
        # 'layers' child node, one entry per valid layer.
        # NOTE(review): json_layers is never used below — dead variable.
        json_layers = CJson()
        if layer_count_real == 0:
            json_vector.set_value_of_name('layers', [])
        else:
            list_json_layers = []
            for layer_temp in layer_list:
                print('图层对象: {0}'.format(layer_temp))
                if mdb_flag:
                    # mdb layer names need a Chinese-encoding conversion.
                    layer_name = CUtils.conversion_chinese_code(
                        layer_temp.GetName())
                else:
                    layer_name = layer_temp.GetName()
                json_layer = CJson()
                list_json_layers.append(json_layer.json_obj)
                # 'name' / 'description' nodes
                json_layer.set_value_of_name("name", layer_name)
                json_layer.set_value_of_name("description", layer_name)
                # projection WKT node
                json_proj_wkt = self.get_projwkt_by_layer(layer_temp)
                json_layer.set_value_of_name("coordinate",
                                             json_proj_wkt.json_obj)
                # 'features' node (feature count)
                json_features = CJson()
                feature_count = layer_temp.GetFeatureCount()
                json_features.set_value_of_name("count", feature_count)
                json_layer.set_value_of_name("features",
                                             json_features.json_obj)
                # 'geometry' node
                json_geometry = self.get_geometry_by_vectorlayer(
                    layer_temp)
                json_layer.set_value_of_name("geometry",
                                             json_geometry.json_obj)
                # 'extent' node
                json_extent = self.get_extent_by_vectorlayer(
                    layer_temp, feature_count)
                json_layer.set_value_of_name("extent", json_extent.json_obj)
                # 'attributes' node
                json_attributes = self.get_attributes_by_vectorlayer(
                    layer_temp, mdb_flag)
                json_layer.set_value_of_name("attributes",
                                             json_attributes.json_obj)
                # 'wgs84' node (layer extent re-projected to WGS84)
                json_wgs84 = self.transform_to_wgs84(
                    layer_temp, feature_count)
                json_layer.set_value_of_name('wgs84', json_wgs84.json_obj)
            json_vector.set_value_of_name('layers', list_json_layers)
        # Create the output directory if missing, then persist the JSON.
        if CFile.check_and_create_directory(file_name_with_path):
            json_vector.to_file(file_name_with_path)
        CLogger().info('文件[{0}]元数据信息读取成功!'.format(
            self.__file_name_with_path__))
        return CResult.merge_result(
            CResult.Success,
            '文件[{0}]元数据信息读取成功!'.format(self.__file_name_with_path__))
    except Exception as error:
        CLogger().info('get_metadata_2_file解析错误:{0}'.format(error))
        message = 'get_metadata_2_file解析错误:文件:{0},错误信息为{1}'.format(
            self.__file_name_with_path__, error)
        json_vector.set_value_of_name('result', self.Failure)
        json_vector.set_value_of_name('message', message)
        # Create the output directory if missing.
        if CFile.check_and_create_directory(file_name_with_path):
            json_vector.to_file(file_name_with_path)
        return CResult.merge_result(
            CResult.Failure,
            '文件[{0}]读取异常!{1}'.format(self.__file_name_with_path__,
                                       error.__str__()))
    finally:
        # Always release the OGR datasource handle.
        vector_ds.Destroy()
        vector_ds = None
def ogr_pgdump_9(pg_use_copy='YES'):
    """Test truncation of string fields to their declared width, in both
    COPY mode (pg_use_copy='YES') and INSERT mode, including multi-byte
    UTF-8 characters that must not be cut mid-character.

    Returns 'success' or 'fail' (legacy gdaltest convention).
    """
    gdal.SetConfigOption('PG_USE_COPY', pg_use_copy)

    ds = ogr.GetDriverByName('PGDump').CreateDataSource(
        '/vsimem/ogr_pgdump_9.sql', options=['LINEFORMAT=LF'])
    # 'str' is limited to 5 characters; 'str2' is unbounded.
    lyr = ds.CreateLayer('test', geom_type=ogr.wkbNone)
    fld = ogr.FieldDefn('str', ogr.OFTString)
    fld.SetWidth(5)
    lyr.CreateField(fld)
    fld = ogr.FieldDefn('str2', ogr.OFTString)
    lyr.CreateField(fld)

    # Exactly at the width limit: kept as-is.
    feat = ogr.Feature(lyr.GetLayerDefn())
    feat.SetField('str', '01234')
    lyr.CreateFeature(feat)

    # One char over the limit: must be truncated to 'ABCDE'.
    feat = ogr.Feature(lyr.GetLayerDefn())
    feat.SetField('str', 'ABCDEF')
    lyr.CreateFeature(feat)

    # val4/5/6 are 4/5/6 e-acute characters. On Python 2 the u'' literals
    # must be hidden inside exec() so the file stays parseable; the names
    # are bound dynamically there.
    if sys.version_info >= (3, 0, 0):
        val4 = '\u00e9\u00e9\u00e9\u00e9'
        val5 = val4 + '\u00e9'
        val6 = val5 + '\u00e9'
    else:
        exec("val4 = u'\\u00e9\\u00e9\\u00e9\\u00e9'")
        exec("val5 = val4 + u'\\u00e9'")
        exec("val6 = val5 + u'\\u00e9'")

    # 6 multi-byte chars: truncated to 5 chars (val5), not 5 bytes.
    feat = ogr.Feature(lyr.GetLayerDefn())
    feat.SetField('str', val6)
    lyr.CreateFeature(feat)

    # ASCII prefix + 5 multi-byte chars: truncated to 'a' + val4.
    feat = ogr.Feature(lyr.GetLayerDefn())
    feat.SetField('str', 'a' + val5)
    lyr.CreateFeature(feat)

    gdal.SetConfigOption('PG_USE_COPY', None)

    ds = None

    f = gdal.VSIFOpenL('/vsimem/ogr_pgdump_9.sql', 'rb')
    sql = gdal.VSIFReadL(1, 10000, f).decode('utf8')
    gdal.VSIFCloseL(f)

    gdal.Unlink('/vsimem/ogr_pgdump_9.sql')

    # Field terminator differs between COPY data lines (tab) and INSERT
    # statements (closing single quote).
    if pg_use_copy == 'YES':
        eofield = '\t'
    else:
        eofield = "'"

    if sql.find("""01234%s""" % eofield) < 0 or \
       sql.find("""ABCDE%s""" % eofield) < 0 or \
       sql.find("""%s%s""" % (val5, eofield)) < 0 or \
       sql.find("""%s%s""" % ('a'+val4, eofield)) < 0:
        print(sql)
        return 'fail'

    return 'success'
def test_vsiaz_fake_write():
    """Test /vsiaz/ write paths against the mock webserver: BlockBlob
    creation (small files in a single PUT), error handling, overwrite of
    an incompatible blob type, and chunked AppendBlob uploads.
    """
    if gdaltest.webserver_port == 0:
        pytest.skip()

    gdal.VSICurlClearCache()

    # Test creation of BlockBlob
    f = gdal.VSIFOpenL('/vsiaz/test_copy/file.bin', 'wb')
    assert f is not None

    handler = webserver.SequentialHandler()

    def method(request):
        # Validate the SharedKey auth, Expect: 100-continue handshake,
        # declared length and blob type, then consume the 40000-byte body.
        h = request.headers
        if 'Authorization' not in h or \
           h['Authorization'] != 'SharedKey myaccount:AigkrY7q66WCrx3JRKBte56k7kxV2cxB/ZyGNubxk5I=' or \
           'Expect' not in h or h['Expect'] != '100-continue' or \
           'Content-Length' not in h or h['Content-Length'] != '40000' or \
           'x-ms-date' not in h or h['x-ms-date'] != 'my_timestamp' or \
           'x-ms-blob-type' not in h or h['x-ms-blob-type'] != 'BlockBlob':
            sys.stderr.write('Bad headers: %s\n' % str(h))
            request.send_response(403)
            return
        request.protocol_version = 'HTTP/1.1'
        request.wfile.write('HTTP/1.1 100 Continue\r\n\r\n'.encode('ascii'))
        content = request.rfile.read(40000).decode('ascii')
        if len(content) != 40000:
            sys.stderr.write('Bad headers: %s\n' % str(request.headers))
            request.send_response(403)
            request.send_header('Content-Length', 0)
            request.end_headers()
            return
        request.send_response(201)
        request.send_header('Content-Length', 0)
        request.end_headers()

    handler.add('PUT', '/azure/blob/myaccount/test_copy/file.bin',
                custom_method=method)
    with webserver.install_http_handler(handler):
        # Two writes below the chunk threshold must be coalesced into a
        # single 40000-byte PUT at close time.
        ret = gdal.VSIFWriteL('x' * 35000, 1, 35000, f)
        ret += gdal.VSIFWriteL('x' * 5000, 1, 5000, f)
        if ret != 40000:
            gdal.VSIFCloseL(f)
            pytest.fail(ret)
        gdal.VSIFCloseL(f)

    # Simulate illegal read
    f = gdal.VSIFOpenL('/vsiaz/test_copy/file.bin', 'wb')
    assert f is not None
    with gdaltest.error_handler():
        ret = gdal.VSIFReadL(1, 1, f)
    assert not ret
    gdal.VSIFCloseL(f)

    # Simulate illegal seek
    f = gdal.VSIFOpenL('/vsiaz/test_copy/file.bin', 'wb')
    assert f is not None
    with gdaltest.error_handler():
        ret = gdal.VSIFSeekL(f, 1, 0)
    assert ret != 0
    gdal.VSIFCloseL(f)

    # Simulate failure when putting BlockBlob
    f = gdal.VSIFOpenL('/vsiaz/test_copy/file.bin', 'wb')
    assert f is not None

    handler = webserver.SequentialHandler()

    def method(request):
        request.protocol_version = 'HTTP/1.1'
        request.send_response(403)
        request.send_header('Content-Length', 0)
        request.end_headers()

    handler.add('PUT', '/azure/blob/myaccount/test_copy/file.bin',
                custom_method=method)

    # Zero-offset seeks (and seeks to the current position / end while
    # nothing is flushed) are the only legal seeks on a write handle.
    if gdal.VSIFSeekL(f, 0, 0) != 0:
        gdal.VSIFCloseL(f)
        pytest.fail()

    gdal.VSIFWriteL('x' * 35000, 1, 35000, f)

    if gdal.VSIFTellL(f) != 35000:
        gdal.VSIFCloseL(f)
        pytest.fail()

    if gdal.VSIFSeekL(f, 35000, 0) != 0:
        gdal.VSIFCloseL(f)
        pytest.fail()

    if gdal.VSIFSeekL(f, 0, 1) != 0:
        gdal.VSIFCloseL(f)
        pytest.fail()
    if gdal.VSIFSeekL(f, 0, 2) != 0:
        gdal.VSIFCloseL(f)
        pytest.fail()

    if gdal.VSIFEofL(f) != 0:
        gdal.VSIFCloseL(f)
        pytest.fail()

    # The 403 from the server must surface as a non-zero close result.
    with webserver.install_http_handler(handler):
        with gdaltest.error_handler():
            ret = gdal.VSIFCloseL(f)
        if ret == 0:
            gdal.VSIFCloseL(f)
            pytest.fail(ret)

    # Simulate creation of BlockBlob over an existing blob of incompatible
    # type: 409 Conflict, then DELETE, then retry PUT.
    f = gdal.VSIFOpenL('/vsiaz/test_copy/file.bin', 'wb')
    assert f is not None

    handler = webserver.SequentialHandler()
    handler.add('PUT', '/azure/blob/myaccount/test_copy/file.bin', 409)
    handler.add('DELETE', '/azure/blob/myaccount/test_copy/file.bin', 202)
    handler.add('PUT', '/azure/blob/myaccount/test_copy/file.bin', 201)
    with webserver.install_http_handler(handler):
        gdal.VSIFCloseL(f)

    # Test creation of AppendBlob (files larger than the chunk size are
    # created as AppendBlob and uploaded in chunk-sized appendblock PUTs).
    gdal.SetConfigOption('VSIAZ_CHUNK_SIZE_BYTES', '10')
    f = gdal.VSIFOpenL('/vsiaz/test_copy/file.bin', 'wb')
    gdal.SetConfigOption('VSIAZ_CHUNK_SIZE_BYTES', None)
    assert f is not None

    handler = webserver.SequentialHandler()

    def method(request):
        # Creation PUT: empty body, x-ms-blob-type AppendBlob.
        h = request.headers
        if 'Authorization' not in h or \
           h['Authorization'] != 'SharedKey myaccount:KimVui3ptY9D5ftLlsI7CNOgK36CNAEzsXqcuHskdEY=' or \
           'Content-Length' not in h or h['Content-Length'] != '0' or \
           'x-ms-date' not in h or h['x-ms-date'] != 'my_timestamp' or \
           'x-ms-blob-type' not in h or h['x-ms-blob-type'] != 'AppendBlob':
            sys.stderr.write('Bad headers: %s\n' % str(h))
            request.send_response(403)
            return
        request.protocol_version = 'HTTP/1.1'
        request.send_response(201)
        request.send_header('Content-Length', 0)
        request.end_headers()

    handler.add('PUT', '/azure/blob/myaccount/test_copy/file.bin',
                custom_method=method)

    def method(request):
        # First appendblock: a full 10-byte chunk '0123456789'.
        h = request.headers
        if 'Content-Length' not in h or h['Content-Length'] != '10' or \
           'x-ms-date' not in h or h['x-ms-date'] != 'my_timestamp' or \
           'x-ms-blob-type' not in h or h['x-ms-blob-type'] != 'AppendBlob':
            sys.stderr.write('Bad headers: %s\n' % str(h))
            request.send_response(403)
            return
        request.protocol_version = 'HTTP/1.1'
        content = request.rfile.read(10).decode('ascii')
        if content != '0123456789':
            sys.stderr.write('Bad headers: %s\n' % str(request.headers))
            request.send_response(403)
            request.send_header('Content-Length', 0)
            request.end_headers()
            return
        request.send_response(201)
        request.send_header('Content-Length', 0)
        request.end_headers()

    handler.add('PUT',
                '/azure/blob/myaccount/test_copy/file.bin?comp=appendblock',
                custom_method=method)

    def method(request):
        # Second appendblock: the 6-byte remainder 'abcdef'.
        h = request.headers
        if 'Content-Length' not in h or h['Content-Length'] != '6' or \
           'x-ms-date' not in h or h['x-ms-date'] != 'my_timestamp' or \
           'x-ms-blob-type' not in h or h['x-ms-blob-type'] != 'AppendBlob':
            sys.stderr.write('Bad headers: %s\n' % str(h))
            request.send_response(403)
            return
        request.protocol_version = 'HTTP/1.1'
        content = request.rfile.read(6).decode('ascii')
        if content != 'abcdef':
            sys.stderr.write('Bad headers: %s\n' % str(request.headers))
            request.send_response(403)
            request.send_header('Content-Length', 0)
            request.end_headers()
            return
        request.send_response(201)
        request.send_header('Content-Length', 0)
        request.end_headers()

    handler.add('PUT',
                '/azure/blob/myaccount/test_copy/file.bin?comp=appendblock',
                custom_method=method)

    with webserver.install_http_handler(handler):
        ret = gdal.VSIFWriteL('0123456789abcdef', 1, 16, f)
        if ret != 16:
            gdal.VSIFCloseL(f)
            pytest.fail(ret)
        gdal.VSIFCloseL(f)

    # Test failed creation of AppendBlob
    gdal.SetConfigOption('VSIAZ_CHUNK_SIZE_BYTES', '10')
    f = gdal.VSIFOpenL('/vsiaz/test_copy/file.bin', 'wb')
    gdal.SetConfigOption('VSIAZ_CHUNK_SIZE_BYTES', None)
    assert f is not None

    handler = webserver.SequentialHandler()

    def method(request):
        request.protocol_version = 'HTTP/1.1'
        request.send_response(403)
        request.send_header('Content-Length', 0)
        request.end_headers()

    handler.add('PUT', '/azure/blob/myaccount/test_copy/file.bin',
                custom_method=method)

    with webserver.install_http_handler(handler):
        with gdaltest.error_handler():
            ret = gdal.VSIFWriteL('0123456789abcdef', 1, 16, f)
        if ret != 0:
            gdal.VSIFCloseL(f)
            pytest.fail(ret)
        gdal.VSIFCloseL(f)

    # Test failed writing of a block of an AppendBlob
    gdal.SetConfigOption('VSIAZ_CHUNK_SIZE_BYTES', '10')
    f = gdal.VSIFOpenL('/vsiaz/test_copy/file.bin', 'wb')
    gdal.SetConfigOption('VSIAZ_CHUNK_SIZE_BYTES', None)
    assert f is not None

    handler = webserver.SequentialHandler()
    handler.add('PUT', '/azure/blob/myaccount/test_copy/file.bin', 201)
    handler.add('PUT',
                '/azure/blob/myaccount/test_copy/file.bin?comp=appendblock',
                403)
    with webserver.install_http_handler(handler):
        with gdaltest.error_handler():
            ret = gdal.VSIFWriteL('0123456789abcdef', 1, 16, f)
        if ret != 0:
            gdal.VSIFCloseL(f)
            pytest.fail(ret)
        gdal.VSIFCloseL(f)
def ogr_pgdump_8():
    """Test the FID layer creation option (FID=myfid).

    Checks that a user-named FID column interacts correctly with
    CreateFeature() in COPY mode: explicit FID through the field,
    unset FID (serial assignment), and a conflicting FID/field pair.
    Old-style gdaltest test: returns 'success' or 'fail'.
    """

    ds = ogr.GetDriverByName('PGDump').CreateDataSource(
        '/vsimem/ogr_pgdump_8.sql', options=['LINEFORMAT=LF'])

    lyr = ds.CreateLayer('test', geom_type=ogr.wkbNone, options=['FID=myfid'])

    lyr.CreateField(ogr.FieldDefn('str', ogr.OFTString))

    # Declaring the FID column with a non-integer type must be refused.
    gdal.PushErrorHandler()
    ret = lyr.CreateField(ogr.FieldDefn('myfid', ogr.OFTString))
    gdal.PopErrorHandler()
    if ret == 0:
        gdaltest.post_reason('fail')
        return 'fail'

    # Declaring it as integer is accepted: it aliases the FID column.
    ret = lyr.CreateField(ogr.FieldDefn('myfid', ogr.OFTInteger))
    if ret != 0:
        gdaltest.post_reason('fail')
        return 'fail'

    lyr.CreateField(ogr.FieldDefn('str2', ogr.OFTString))

    # Feature with an explicit FID passed through the 'myfid' field.
    feat = ogr.Feature(lyr.GetLayerDefn())
    feat.SetField('str', 'first string')
    feat.SetField('myfid', 10)
    feat.SetField('str2', 'second string')
    gdal.SetConfigOption('PG_USE_COPY', 'YES')
    ret = lyr.CreateFeature(feat)
    gdal.SetConfigOption('PG_USE_COPY', None)
    if ret != 0:
        gdaltest.post_reason('fail')
        return 'fail'
    if feat.GetFID() != 10:
        gdaltest.post_reason('fail')
        return 'fail'

    # Feature with no FID set: a FID must be assigned and be reflected
    # in both GetFID() and the 'myfid' field.
    feat = ogr.Feature(lyr.GetLayerDefn())
    feat.SetField('str2', 'second string')
    gdal.SetConfigOption('PG_USE_COPY', 'YES')
    ret = lyr.CreateFeature(feat)
    gdal.SetConfigOption('PG_USE_COPY', None)
    if ret != 0:
        gdaltest.post_reason('fail')
        return 'fail'
    if feat.GetFID() < 0:
        gdaltest.post_reason('fail')
        feat.DumpReadable()
        return 'fail'
    if feat.GetField('myfid') != feat.GetFID():
        gdaltest.post_reason('fail')
        feat.DumpReadable()
        return 'fail'

    # feat.SetField('str', 'foo')
    # ret = lyr.SetFeature(feat)
    # if ret != 0:
    #     gdaltest.post_reason('fail')
    #     return 'fail'

    # Conflicting FID (SetFID) vs 'myfid' field value must be refused.
    feat = ogr.Feature(lyr.GetLayerDefn())
    feat.SetFID(1)
    feat.SetField('myfid', 10)
    gdal.PushErrorHandler()
    gdal.SetConfigOption('PG_USE_COPY', 'YES')
    ret = lyr.CreateFeature(feat)
    gdal.SetConfigOption('PG_USE_COPY', None)
    gdal.PopErrorHandler()
    if ret == 0:
        gdaltest.post_reason('fail')
        return 'fail'

    # gdal.PushErrorHandler()
    # ret = lyr.SetFeature(feat)
    # gdal.PopErrorHandler()
    # if ret == 0:
    #     gdaltest.post_reason('fail')
    #     return 'fail'

    # feat.UnsetField('myfid')
    # gdal.PushErrorHandler()
    # ret = lyr.SetFeature(feat)
    # gdal.PopErrorHandler()
    # if ret == 0:
    #     gdaltest.post_reason('fail')
    #     return 'fail'

    feat = ogr.Feature(lyr.GetLayerDefn())
    feat.SetField('str', 'first string')
    feat.SetField('myfid', 12)
    feat.SetField('str2', 'second string')
    gdal.SetConfigOption('PG_USE_COPY', 'YES')
    ret = lyr.CreateFeature(feat)
    gdal.SetConfigOption('PG_USE_COPY', None)
    if ret != 0:
        gdaltest.post_reason('fail')
        return 'fail'
    if feat.GetFID() != 12:
        gdaltest.post_reason('fail')
        return 'fail'

    ds = None

    # Read back the emitted SQL dump and verify its content.
    f = gdal.VSIFOpenL('/vsimem/ogr_pgdump_8.sql', 'rb')
    sql = gdal.VSIFReadL(1, 10000, f).decode('ascii')
    gdal.VSIFCloseL(f)

    gdal.Unlink('/vsimem/ogr_pgdump_8.sql')

    # 'myfid' must be the SERIAL primary key (and must NOT also be added
    # as a regular column); the COPY rows carry the expected FID values.
    if sql.find("""CREATE TABLE "public"."test" ( "myfid" SERIAL, CONSTRAINT "test_pk" PRIMARY KEY ("myfid") )""") < 0 or \
       sql.find("""ALTER TABLE "public"."test" ADD COLUMN "myfid" """) >= 0 or \
       sql.find("""10\tfirst string\tsecond string""") == -1 or \
       sql.find("""INSERT INTO "public"."test" ("str2") VALUES ('second string');""") == -1 or \
       sql.find("""12\tfirst string\tsecond string""") == -1:
        print(sql)
        return 'fail'

    return 'success'
def ogr_pgdump_6():
    """Test field DEFAULT clauses with CreateFeature() in COPY mode.

    Creates fields with literal and expression defaults, then writes
    features so the driver transitions COPY -> INSERT -> COPY, and
    checks the emitted SQL.  Old-style gdaltest test: returns
    'success' or 'fail'.
    """

    ds = ogr.GetDriverByName('PGDump').CreateDataSource(
        '/vsimem/ogr_pgdump_6.sql', options=['LINEFORMAT=LF'])

    lyr = ds.CreateLayer('test', geom_type=ogr.wkbNone)

    # String default with an embedded quote ('a''b' -> a'b).
    field_defn = ogr.FieldDefn('field_string', ogr.OFTString)
    field_defn.SetDefault("'a''b'")
    lyr.CreateField(field_defn)

    field_defn = ogr.FieldDefn('field_int', ogr.OFTInteger)
    field_defn.SetDefault('123')
    lyr.CreateField(field_defn)

    field_defn = ogr.FieldDefn('field_real', ogr.OFTReal)
    field_defn.SetDefault('1.23')
    lyr.CreateField(field_defn)

    # Control field without a default.
    field_defn = ogr.FieldDefn('field_nodefault', ogr.OFTInteger)
    lyr.CreateField(field_defn)

    # Expression defaults (passed through verbatim) and datetime literal.
    field_defn = ogr.FieldDefn('field_datetime', ogr.OFTDateTime)
    field_defn.SetDefault("CURRENT_TIMESTAMP")
    lyr.CreateField(field_defn)

    field_defn = ogr.FieldDefn('field_datetime2', ogr.OFTDateTime)
    field_defn.SetDefault("'2015/06/30 12:34:56'")
    lyr.CreateField(field_defn)

    field_defn = ogr.FieldDefn('field_date', ogr.OFTDate)
    field_defn.SetDefault("CURRENT_DATE")
    lyr.CreateField(field_defn)

    field_defn = ogr.FieldDefn('field_time', ogr.OFTTime)
    field_defn.SetDefault("CURRENT_TIME")
    lyr.CreateField(field_defn)

    gdal.SetConfigOption('PG_USE_COPY', 'YES')

    # Fully-populated feature: goes through COPY.
    f = ogr.Feature(lyr.GetLayerDefn())
    f.SetField('field_string', 'a')
    f.SetField('field_int', 456)
    f.SetField('field_real', 4.56)
    f.SetField('field_datetime', '2015/06/30 12:34:56')
    f.SetField('field_datetime2', '2015/06/30 12:34:56')
    f.SetField('field_date', '2015/06/30')
    f.SetField('field_time', '12:34:56')
    lyr.CreateFeature(f)
    f = None

    # Transition from COPY to INSERT
    f = ogr.Feature(lyr.GetLayerDefn())
    lyr.CreateFeature(f)
    f = None

    # Transition from INSERT to COPY
    f = ogr.Feature(lyr.GetLayerDefn())
    f.SetField('field_string', 'b')
    f.SetField('field_int', 456)
    f.SetField('field_real', 4.56)
    f.SetField('field_datetime', '2015/06/30 12:34:56')
    f.SetField('field_datetime2', '2015/06/30 12:34:56')
    f.SetField('field_date', '2015/06/30')
    f.SetField('field_time', '12:34:56')
    lyr.CreateFeature(f)
    f = None

    gdal.SetConfigOption('PG_USE_COPY', None)

    ds = None

    # Read back the dump and check DDL defaults and COPY rows.
    f = gdal.VSIFOpenL('/vsimem/ogr_pgdump_6.sql', 'rb')
    sql = gdal.VSIFReadL(1, 10000, f).decode('ascii')
    gdal.VSIFCloseL(f)

    gdal.Unlink('/vsimem/ogr_pgdump_6.sql')

    if sql.find("""a\t456\t4.56\t\\N\t2015/06/30 12:34:56\t2015/06/30 12:34:56\t2015/06/30\t12:34:56""") < 0 or \
       sql.find("""ALTER TABLE "public"."test" ADD COLUMN "field_string" VARCHAR DEFAULT 'a''b';""") == -1 or \
       sql.find("""ALTER TABLE "public"."test" ADD COLUMN "field_int" INTEGER DEFAULT 123;""") == -1 or \
       sql.find("""ALTER TABLE "public"."test" ADD COLUMN "field_real" FLOAT8 DEFAULT 1.23;""") == -1 or \
       sql.find("""ALTER TABLE "public"."test" ADD COLUMN "field_datetime" timestamp with time zone DEFAULT CURRENT_TIMESTAMP;""") == -1 or \
       sql.find("""ALTER TABLE "public"."test" ADD COLUMN "field_datetime2" timestamp with time zone DEFAULT '2015/06/30 12:34:56+00'::timestamp with time zone;""") == -1 or \
       sql.find("""ALTER TABLE "public"."test" ADD COLUMN "field_date" date DEFAULT CURRENT_DATE;""") == -1 or \
       sql.find("""ALTER TABLE "public"."test" ADD COLUMN "field_time" time DEFAULT CURRENT_TIME;""") == -1 or \
       sql.find("""b\t456\t4.56\t\\N\t2015/06/30 12:34:56\t2015/06/30 12:34:56\t2015/06/30\t12:34:56""") < 0:
        print(sql)
        return 'fail'

    return 'success'
def ogr_pgdump_3():
    """Test creating a geometry-less (wkbNone) layer in COPY mode.

    Exercises two corner cases from ticket #4040:
    - a feature carrying a geometry even though the layer is wkbNone,
    - a string field explicitly set to '' (empty, but not unset).

    Fix of this revision: the cleanup 'os.remove' guard catches
    OSError instead of a bare 'except:' (which would also swallow
    KeyboardInterrupt/SystemExit).

    Old-style gdaltest test: returns 'success' or 'fail'.
    """
    try:
        os.remove('tmp/tpoly.sql')
    except OSError:
        # Leftover from a previous run may not exist: that is fine.
        pass

    gdal.SetConfigOption('PG_USE_COPY', 'YES')

    ds = ogr.GetDriverByName('PGDump').CreateDataSource(
        'tmp/tpoly.sql', options=['LINEFORMAT=LF'])

    ######################################################
    # Create Layer
    lyr = ds.CreateLayer('tpoly', geom_type=ogr.wkbNone,
                         options=['SCHEMA=another_schema'])

    ######################################################
    # Setup Schema
    ogrtest.quick_create_layer_def(lyr,
                                   [('EMPTYCHAR', ogr.OFTString),
                                    ('AREA', ogr.OFTReal),
                                    ('EAS_ID', ogr.OFTInteger),
                                    ('PRFEDEA', ogr.OFTString),
                                    ('SHORTNAME', ogr.OFTString, 8)])

    ######################################################
    # Copy in poly.shp
    dst_feat = ogr.Feature(feature_def=lyr.GetLayerDefn())

    shp_ds = ogr.Open('data/poly.shp')
    shp_lyr = shp_ds.GetLayer(0)
    feat = shp_lyr.GetNextFeature()
    gdaltest.poly_feat = []

    i = 0
    while feat is not None:
        gdaltest.poly_feat.append(feat)
        dst_feat.SetFrom(feat)
        if i == 0:
            # Be perverse and test the case where a feature has a geometry
            # even if it's a wkbNone layer ! (#4040)
            dst_feat.SetGeometry(ogr.CreateGeometryFromWkt('POINT(0 1)'))
        elif i == 1:
            # Field with 0 character (not empty!) (#4040)
            dst_feat.SetField(0, '')
        i = i + 1
        lyr.CreateFeature(dst_feat)
        feat = shp_lyr.GetNextFeature()

    dst_feat.Destroy()
    ds.Destroy()

    gdal.SetConfigOption('PG_USE_COPY', 'NO')

    f = open('tmp/tpoly.sql')
    sql = f.read()
    f.close()

    # No geometry column must be created for a wkbNone layer (no
    # AddGeometryColumn, no spatial index, no geometry_columns cleanup),
    # and the COPY rows must show the NULL (\N) vs empty-string difference.
    if sql.find("""DROP TABLE IF EXISTS "another_schema"."tpoly" CASCADE;""") == -1 or \
       sql.find("""DELETE FROM geometry_columns""") != -1 or \
       sql.find("""BEGIN;""") == -1 or \
       sql.find("""CREATE TABLE "another_schema"."tpoly" ( "ogc_fid" SERIAL, CONSTRAINT "tpoly_pk" PRIMARY KEY ("ogc_fid") );""") == -1 or \
       sql.find("""SELECT AddGeometryColumn""") != -1 or \
       sql.find("""CREATE INDEX "tpoly_wkb_geometry_geom_idx""") != -1 or \
       sql.find("""ALTER TABLE "another_schema"."tpoly" ADD COLUMN "area" FLOAT8;""") == -1 or \
       sql.find("""ALTER TABLE "another_schema"."tpoly" ADD COLUMN "eas_id" INTEGER;""") == -1 or \
       sql.find("""ALTER TABLE "another_schema"."tpoly" ADD COLUMN "prfedea" VARCHAR;""") == -1 or \
       sql.find("""ALTER TABLE "another_schema"."tpoly" ADD COLUMN "shortname" VARCHAR(8);""") == -1 or \
       sql.find("""COPY "another_schema"."tpoly" ("emptychar", "area", "eas_id", "prfedea", "shortname") FROM STDIN;""") == -1 or \
       sql.find("\\N\t215229.266\t168\t35043411\t\\N") == -1 or \
       sql.find("\t5268.813\t170\t35043413\t\\N") == -1 or \
       sql.find("""\\.""") == -1 or \
       sql.find("""COMMIT;""") == -1:
        print(sql)
        return 'fail'

    return 'success'
def test_vsigs_read_credentials_refresh_token_default_gdal_app():
    """Test OAuth2 refresh-token authentication for /vsigs/ using the
    default GDAL client id/secret, against the local fake webserver."""

    if gdaltest.webserver_port == 0:
        pytest.skip()

    gdal.SetConfigOption('GS_SECRET_ACCESS_KEY', '')
    gdal.SetConfigOption('GS_ACCESS_KEY_ID', '')
    gdal.SetConfigOption(
        'GOA2_AUTH_URL_TOKEN',
        'http://localhost:%d/accounts.google.com/o/oauth2/token' % gdaltest.webserver_port)
    gdal.SetConfigOption('GS_OAUTH2_REFRESH_TOKEN', 'REFRESH_TOKEN')

    # Signed URLs cannot be generated with bearer-token authentication.
    with gdaltest.error_handler():
        assert gdal.GetSignedURL('/vsigs/foo/bar') is None

    gdal.VSICurlClearCache()

    handler = webserver.SequentialHandler()

    def method(request):
        # Fake OAuth2 token endpoint: validate the POSTed refresh-token
        # form (default GDAL client id/secret) and return an access token.
        content = request.rfile.read(int(request.headers['Content-Length'])).decode('ascii')
        if content != 'refresh_token=REFRESH_TOKEN&client_id=265656308688.apps.googleusercontent.com&client_secret=0IbTUDOYzaL6vnIdWTuQnvLz&grant_type=refresh_token':
            sys.stderr.write('Bad POST content: %s\n' % content)
            request.send_response(403)
            return

        request.send_response(200)
        request.send_header('Content-type', 'text/plain')
        content = """{
                "access_token" : "ACCESS_TOKEN",
                "token_type" : "Bearer",
                "expires_in" : 3600,
                }"""
        request.send_header('Content-Length', len(content))
        request.end_headers()
        request.wfile.write(content.encode('ascii'))

    handler.add('POST', '/accounts.google.com/o/oauth2/token', custom_method=method)

    def method(request):
        # Fake GS endpoint: require the Bearer token issued above.
        if 'Authorization' not in request.headers:
            sys.stderr.write('Bad headers: %s\n' % str(request.headers))
            request.send_response(403)
            return
        expected_authorization = 'Bearer ACCESS_TOKEN'
        if request.headers['Authorization'] != expected_authorization:
            sys.stderr.write("Bad Authorization: '%s'\n" % str(request.headers['Authorization']))
            request.send_response(403)
            return

        request.send_response(200)
        request.send_header('Content-type', 'text/plain')
        request.send_header('Content-Length', 3)
        request.end_headers()
        request.wfile.write("""foo""".encode('ascii'))

    handler.add('GET', '/gs_fake_bucket/resource', custom_method=method)
    with webserver.install_http_handler(handler):
        f = open_for_read('/vsigs/gs_fake_bucket/resource')
        assert f is not None
        data = gdal.VSIFReadL(1, 4, f).decode('ascii')
        gdal.VSIFCloseL(f)

    assert data == 'foo'

    # Restore the config options touched by this test.
    gdal.SetConfigOption('GOA2_AUTH_URL_TOKEN', None)
    gdal.SetConfigOption('GS_OAUTH2_REFRESH_TOKEN', '')
def vsifile_5():
    """Test seeks and reads through the VSI cache (VSI_CACHE=YES) with
    several VSI_CACHE_SIZE settings against a known reference file.

    Fix of this revision: the read handle is now always closed and the
    VSI_CACHE/VSI_CACHE_SIZE config options always restored via
    try/finally — the previous version leaked the handle on every
    failing check and duplicated the config resets.

    Old-style gdaltest test: returns 'success' or 'fail'.
    """
    # Build a 1.25 MB reference file of fixed-width hex tokens.
    fp = gdal.VSIFOpenL('tmp/vsifile_5.bin', 'wb')
    ref_data = ''.join(['%08X' % i for i in range(5 * 32768)])
    gdal.VSIFWriteL(ref_data, 1, len(ref_data), fp)
    gdal.VSIFCloseL(fp)

    gdal.SetConfigOption('VSI_CACHE', 'YES')
    try:
        for i in range(3):
            # Exercise a degenerate cache, a small cache and the default.
            if i == 0:
                gdal.SetConfigOption('VSI_CACHE_SIZE', '0')
            elif i == 1:
                gdal.SetConfigOption('VSI_CACHE_SIZE', '65536')
            else:
                gdal.SetConfigOption('VSI_CACHE_SIZE', None)

            fp = gdal.VSIFOpenL('tmp/vsifile_5.bin', 'rb')
            try:
                # Absolute, relative and end-relative seeks must all
                # report the expected position.
                gdal.VSIFSeekL(fp, 50000, 0)
                if gdal.VSIFTellL(fp) != 50000:
                    gdaltest.post_reason('fail')
                    return 'fail'
                gdal.VSIFSeekL(fp, 50000, 1)
                if gdal.VSIFTellL(fp) != 100000:
                    gdaltest.post_reason('fail')
                    return 'fail'
                gdal.VSIFSeekL(fp, 0, 2)
                if gdal.VSIFTellL(fp) != 5 * 32768 * 8:
                    gdaltest.post_reason('fail')
                    return 'fail'
                gdal.VSIFReadL(1, 1, fp)

                # Reads spanning multiple cache blocks must match the
                # reference content exactly.
                gdal.VSIFSeekL(fp, 0, 0)
                data = gdal.VSIFReadL(1, 3 * 32768, fp)
                if data.decode('ascii') != ref_data[0:3 * 32768]:
                    gdaltest.post_reason('fail')
                    return 'fail'
                gdal.VSIFSeekL(fp, 16384, 0)
                data = gdal.VSIFReadL(1, 5 * 32768, fp)
                if data.decode('ascii') != ref_data[16384:16384 + 5 * 32768]:
                    gdaltest.post_reason('fail')
                    return 'fail'
                # Over-long read past EOF: only the remaining
                # 1130496 bytes are returned.
                data = gdal.VSIFReadL(1, 50 * 32768, fp)
                if data[0:1130496].decode('ascii') != ref_data[16384 + 5 * 32768:]:
                    gdaltest.post_reason('fail')
                    return 'fail'
            finally:
                gdal.VSIFCloseL(fp)
    finally:
        gdal.SetConfigOption('VSI_CACHE_SIZE', None)
        gdal.SetConfigOption('VSI_CACHE', None)

    gdal.Unlink('tmp/vsifile_5.bin')

    return 'success'
def test_vsigs_read_credentials_oauth2_service_account():
    """Test OAuth2 service-account (JWT-bearer) authentication for /vsigs/.

    Runs twice: first with the private key passed inline through
    GS_OAUTH2_PRIVATE_KEY, then through GS_OAUTH2_PRIVATE_KEY_FILE.
    Requires the local fake webserver.
    """

    if gdaltest.webserver_port == 0:
        pytest.skip()

    gdal.SetConfigOption('GS_SECRET_ACCESS_KEY', '')
    gdal.SetConfigOption('GS_ACCESS_KEY_ID', '')

    # Generated with 'openssl genrsa -out rsa-openssl.pem 1024' and
    # 'openssl pkcs8 -nocrypt -in rsa-openssl.pem -inform PEM -topk8 -outform PEM -out rsa-openssl.pkcs8.pem'
    # DO NOT USE in production !!!!
    key = """-----BEGIN PRIVATE KEY-----
MIICeAIBADANBgkqhkiG9w0BAQEFAASCAmIwggJeAgEAAoGBAOlwJQLLDG1HeLrk
VNcFR5Qptto/rJE5emRuy0YmkVINT4uHb1be7OOo44C2Ev8QPVtNHHS2XwCY5gTm
i2RfIBLv+VDMoVQPqqE0LHb0WeqGmM5V1tHbmVnIkCcKMn3HpK30grccuBc472LQ
DVkkGqIiGu0qLAQ89JP/r0LWWySRAgMBAAECgYAWjsS00WRBByAOh1P/dz4kfidy
TabiXbiLDf3MqJtwX2Lpa8wBjAc+NKrPXEjXpv0W3ou6Z4kkqKHJpXGg4GRb4N5I
2FA+7T1lA0FCXa7dT2jvgJLgpBepJu5b//tqFqORb4A4gMZw0CiPN3sUsWsSw5Hd
DrRXwp6sarzG77kvZQJBAPgysAmmXIIp9j1hrFSkctk4GPkOzZ3bxKt2Nl4GFrb+
bpKSon6OIhP1edrxTz1SMD1k5FiAAVUrMDKSarbh5osCQQDwxq4Tvf/HiYz79JBg
Wz5D51ySkbg01dOVgFW3eaYAdB6ta/o4vpHhnbrfl6VO9oUb3QR4hcrruwnDHsw3
4mDTAkEA9FPZjbZSTOSH/cbgAXbdhE4/7zWOXj7Q7UVyob52r+/p46osAk9i5qj5
Kvnv2lrFGDrwutpP9YqNaMtP9/aLnwJBALLWf9n+GAv3qRZD0zEe1KLPKD1dqvrj
j+LNjd1Xp+tSVK7vMs4PDoAMDg+hrZF3HetSQM3cYpqxNFEPgRRJOy0CQQDQlZHI
yzpSgEiyx8O3EK1iTidvnLXbtWabvjZFfIE/0OhfBmN225MtKG3YLV2HoUvpajLq
gwE6fxOLyJDxuWRf
-----END PRIVATE KEY-----
"""

    for i in range(2):

        # GO2A_AUD / GOA2_NOW pin the JWT 'aud' and 'iat' claims so the
        # signed assertion checked below is deterministic.
        gdal.SetConfigOption(
            'GO2A_AUD',
            'http://localhost:%d/oauth2/v4/token' % gdaltest.webserver_port)
        gdal.SetConfigOption('GOA2_NOW', '123456')

        if i == 0:
            gdal.SetConfigOption('GS_OAUTH2_PRIVATE_KEY', key)
        else:
            gdal.FileFromMemBuffer('/vsimem/pkey', key)
            gdal.SetConfigOption('GS_OAUTH2_PRIVATE_KEY_FILE', '/vsimem/pkey')

        gdal.SetConfigOption('GS_OAUTH2_CLIENT_EMAIL', 'CLIENT_EMAIL')

        gdal.VSICurlClearCache()

        handler = webserver.SequentialHandler()

        def method(request):
            # Fake token endpoint: the expected assertion differs only by
            # the webserver port (8080 vs 8081) baked into the 'aud' claim.
            content = request.rfile.read(int(request.headers['Content-Length'])).decode('ascii')
            content_8080 = 'grant_type=urn%3Aietf%3Aparams%3Aoauth%3Agrant-type%3Ajwt-bearer&assertion=eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiAiQ0xJRU5UX0VNQUlMIiwgInNjb3BlIjogImh0dHBzOi8vd3d3Lmdvb2dsZWFwaXMuY29tL2F1dGgvZGV2c3RvcmFnZS5yZWFkX3dyaXRlIiwgImF1ZCI6ICJodHRwOi8vbG9jYWxob3N0OjgwODAvb2F1dGgyL3Y0L3Rva2VuIiwgImlhdCI6IDEyMzQ1NiwgImV4cCI6IDEyNzA1Nn0%3D.DAhqWtBgKpObxZ%2BGiXqwF%2Fa4SS%2FNWQRhLCI7DYZCuOTuf2w7dL8j4CdpiwwzQg1diIus7dyViRfzpsFmuZKAXwL%2B84iBoVVqnJJZ4TgwH49NdfMAnc4Rgm%2Bo2a2nEcMjX%2FbQ3jRY%2B9WNVl96hzULGvLrVeyego2f06wivqmvxHA%3D'
            content_8081 = 'grant_type=urn%3Aietf%3Aparams%3Aoauth%3Agrant-type%3Ajwt-bearer&assertion=eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiAiQ0xJRU5UX0VNQUlMIiwgInNjb3BlIjogImh0dHBzOi8vd3d3Lmdvb2dsZWFwaXMuY29tL2F1dGgvZGV2c3RvcmFnZS5yZWFkX3dyaXRlIiwgImF1ZCI6ICJodHRwOi8vbG9jYWxob3N0OjgwODEvb2F1dGgyL3Y0L3Rva2VuIiwgImlhdCI6IDEyMzQ1NiwgImV4cCI6IDEyNzA1Nn0%3D.0abOEg4%2FRApWTSeAs6YTHaNzdwOgZLm8DTMO2MKlOA%2Fiagyb4cBJxDpkD5gECPvi7qhkg7LsyFuj0a%2BK48Bsuj%2FgLHOU4MpB0dHwYnDO2UXzH%2FUPdgFCVak1P1V%2ByiDA%2B%2Ft4aDI5fD9qefKQiu3wsMDHzP71MNLzayrjqaqKKS4%3D'
            if content not in [content_8080, content_8081]:
                sys.stderr.write('Bad POST content: %s\n' % content)
                request.send_response(403)
                return

            request.send_response(200)
            request.send_header('Content-type', 'text/plain')
            content = """{
                "access_token" : "ACCESS_TOKEN",
                "token_type" : "Bearer",
                "expires_in" : 3600,
                }"""
            request.send_header('Content-Length', len(content))
            request.end_headers()
            request.wfile.write(content.encode('ascii'))

        handler.add('POST', '/oauth2/v4/token', custom_method=method)

        def method(request):
            # Fake GS endpoint: require the Bearer token issued above.
            if 'Authorization' not in request.headers:
                sys.stderr.write('Bad headers: %s\n' % str(request.headers))
                request.send_response(403)
                return
            expected_authorization = 'Bearer ACCESS_TOKEN'
            if request.headers['Authorization'] != expected_authorization:
                sys.stderr.write("Bad Authorization: '%s'\n" % str(request.headers['Authorization']))
                request.send_response(403)
                return

            request.send_response(200)
            request.send_header('Content-type', 'text/plain')
            request.send_header('Content-Length', 3)
            request.end_headers()
            request.wfile.write("""foo""".encode('ascii'))

        handler.add('GET', '/gs_fake_bucket/resource', custom_method=method)

        try:
            with webserver.install_http_handler(handler):
                f = open_for_read('/vsigs/gs_fake_bucket/resource')
                assert f is not None
                data = gdal.VSIFReadL(1, 4, f).decode('ascii')
                gdal.VSIFCloseL(f)
        except:
            # Builds without RSA signing support cannot run this test.
            if gdal.GetLastErrorMsg().find('CPLRSASHA256Sign() not implemented') >= 0:
                pytest.skip()
        finally:
            # NOTE(review): 'GO2A_NOW' below looks like a typo for the
            # 'GOA2_NOW' option set above (which is thus never reset) —
            # confirm against other vsigs tests before changing.
            gdal.SetConfigOption('GO2A_AUD', None)
            gdal.SetConfigOption('GO2A_NOW', None)
            gdal.SetConfigOption('GS_OAUTH2_PRIVATE_KEY', '')
            gdal.SetConfigOption('GS_OAUTH2_PRIVATE_KEY_FILE', '')
            gdal.SetConfigOption('GS_OAUTH2_CLIENT_EMAIL', '')

        assert data == 'foo'

    gdal.Unlink('/vsimem/pkey')
def jpeg_19():
    """Test the JPEG_WRITE_MASK_BIT_ORDER=MSB config option.

    For several raster sizes, builds a GTiff with a per-dataset mask
    split at column iX, exports it to JPEG with both LSB (default) and
    MSB mask bit order, and checks that (a) the two files differ except
    in the case where both encodings coincide, and (b) reading either
    file back yields the original mask.

    Fix of this revision: 'struct.pack('B' * 1, v)' simplified to the
    equivalent 'struct.pack('B', v)'.

    Old-style gdaltest test: returns 'success' or 'fail'.
    """
    for width, height, iX in [(32, 32, 12), (25, 25, 8), (24, 25, 8)]:
        src_ds = gdal.GetDriverByName('GTiff').Create(
            '/vsimem/jpeg_19.tif', width, height, 1)
        src_ds.CreateMaskBand(gdal.GMF_PER_DATASET)
        # Left iX columns fully valid (255), the rest masked out (0);
        # the 1x1 buffer is replicated over the target window.
        src_ds.GetRasterBand(1).GetMaskBand().WriteRaster(
            0, 0, iX, height, struct.pack('B', 255), 1, 1)
        src_ds.GetRasterBand(1).GetMaskBand().WriteRaster(
            iX, 0, width - iX, height, struct.pack('B', 0), 1, 1)
        tiff_mask_data = src_ds.GetRasterBand(1).GetMaskBand().ReadRaster(
            0, 0, width, height)

        # Generate a JPEG file with a (default) LSB bit mask order
        out_ds = gdal.GetDriverByName('JPEG').CreateCopy(
            '/vsimem/jpeg_19.jpg', src_ds)
        out_ds = None

        # Generate a JPEG file with a MSB bit mask order
        gdal.SetConfigOption('JPEG_WRITE_MASK_BIT_ORDER', 'MSB')
        out_ds = gdal.GetDriverByName('JPEG').CreateCopy(
            '/vsimem/jpeg_19_msb.jpg', src_ds)
        del out_ds
        gdal.SetConfigOption('JPEG_WRITE_MASK_BIT_ORDER', None)

        src_ds = None

        # Check that the files are indeed different (except in the
        # (24, 25, 8) case where LSB and MSB encodings coincide).
        statBuf = gdal.VSIStatL('/vsimem/jpeg_19.jpg')
        f = gdal.VSIFOpenL('/vsimem/jpeg_19.jpg', 'rb')
        data1 = gdal.VSIFReadL(1, statBuf.size, f)
        gdal.VSIFCloseL(f)

        statBuf = gdal.VSIStatL('/vsimem/jpeg_19_msb.jpg')
        f = gdal.VSIFOpenL('/vsimem/jpeg_19_msb.jpg', 'rb')
        data2 = gdal.VSIFReadL(1, statBuf.size, f)
        gdal.VSIFCloseL(f)

        if (width, height, iX) == (24, 25, 8):
            if data1 != data2:
                gdaltest.post_reason('fail')
                return 'fail'
        else:
            if data1 == data2:
                gdaltest.post_reason('fail')
                return 'fail'

        # Check the file with the LSB bit mask order
        ds = gdal.Open('/vsimem/jpeg_19.jpg')
        jpg_mask_data = ds.GetRasterBand(1).GetMaskBand().ReadRaster(
            0, 0, width, height)
        ds = None
        if tiff_mask_data != jpg_mask_data:
            gdaltest.post_reason('fail')
            return 'fail'

        # Check the file with the MSB bit mask order
        ds = gdal.Open('/vsimem/jpeg_19_msb.jpg')
        jpg_mask_data = ds.GetRasterBand(1).GetMaskBand().ReadRaster(
            0, 0, width, height)
        ds = None
        if tiff_mask_data != jpg_mask_data:
            gdaltest.post_reason('fail')
            return 'fail'

    gdal.GetDriverByName('GTiff').Delete('/vsimem/jpeg_19.tif')
    gdal.GetDriverByName('JPEG').Delete('/vsimem/jpeg_19.jpg')
    gdal.GetDriverByName('JPEG').Delete('/vsimem/jpeg_19_msb.jpg')

    return 'success'
def test_vsigs_read_credentials_oauth2_service_account_json_file():
    """Test service-account authentication for /vsigs/ configured through
    a GOOGLE_APPLICATION_CREDENTIALS JSON file, including GetSignedURL().
    Requires the local fake webserver."""

    if gdaltest.webserver_port == 0:
        pytest.skip()

    gdal.SetConfigOption('GS_SECRET_ACCESS_KEY', '')
    gdal.SetConfigOption('GS_ACCESS_KEY_ID', '')

    # Same throwaway RSA key as the inline service-account test, but
    # delivered through a credentials JSON file.
    gdal.FileFromMemBuffer('/vsimem/service_account.json', """{
  "private_key": "-----BEGIN PRIVATE KEY-----\nMIICeAIBADANBgkqhkiG9w0BAQEFAASCAmIwggJeAgEAAoGBAOlwJQLLDG1HeLrk\nVNcFR5Qptto/rJE5emRuy0YmkVINT4uHb1be7OOo44C2Ev8QPVtNHHS2XwCY5gTm\ni2RfIBLv+VDMoVQPqqE0LHb0WeqGmM5V1tHbmVnIkCcKMn3HpK30grccuBc472LQ\nDVkkGqIiGu0qLAQ89JP/r0LWWySRAgMBAAECgYAWjsS00WRBByAOh1P/dz4kfidy\nTabiXbiLDf3MqJtwX2Lpa8wBjAc+NKrPXEjXpv0W3ou6Z4kkqKHJpXGg4GRb4N5I\n2FA+7T1lA0FCXa7dT2jvgJLgpBepJu5b//tqFqORb4A4gMZw0CiPN3sUsWsSw5Hd\nDrRXwp6sarzG77kvZQJBAPgysAmmXIIp9j1hrFSkctk4GPkOzZ3bxKt2Nl4GFrb+\nbpKSon6OIhP1edrxTz1SMD1k5FiAAVUrMDKSarbh5osCQQDwxq4Tvf/HiYz79JBg\nWz5D51ySkbg01dOVgFW3eaYAdB6ta/o4vpHhnbrfl6VO9oUb3QR4hcrruwnDHsw3\n4mDTAkEA9FPZjbZSTOSH/cbgAXbdhE4/7zWOXj7Q7UVyob52r+/p46osAk9i5qj5\nKvnv2lrFGDrwutpP9YqNaMtP9/aLnwJBALLWf9n+GAv3qRZD0zEe1KLPKD1dqvrj\nj+LNjd1Xp+tSVK7vMs4PDoAMDg+hrZF3HetSQM3cYpqxNFEPgRRJOy0CQQDQlZHI\nyzpSgEiyx8O3EK1iTidvnLXbtWabvjZFfIE/0OhfBmN225MtKG3YLV2HoUvpajLq\ngwE6fxOLyJDxuWRf\n-----END PRIVATE KEY-----\n",
  "client_email": "CLIENT_EMAIL"
}""")

    gdal.SetConfigOption('GOOGLE_APPLICATION_CREDENTIALS', '/vsimem/service_account.json')

    # GO2A_AUD / GOA2_NOW pin the JWT 'aud' and 'iat' claims so the
    # signed assertion checked below is deterministic.
    gdal.SetConfigOption(
        'GO2A_AUD',
        'http://localhost:%d/oauth2/v4/token' % gdaltest.webserver_port)
    gdal.SetConfigOption('GOA2_NOW', '123456')

    gdal.VSICurlClearCache()

    handler = webserver.SequentialHandler()

    def method(request):
        # Fake token endpoint: the expected assertion differs only by the
        # webserver port (8080 vs 8081) baked into the 'aud' claim.
        content = request.rfile.read(int(request.headers['Content-Length'])).decode('ascii')
        content_8080 = 'grant_type=urn%3Aietf%3Aparams%3Aoauth%3Agrant-type%3Ajwt-bearer&assertion=eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiAiQ0xJRU5UX0VNQUlMIiwgInNjb3BlIjogImh0dHBzOi8vd3d3Lmdvb2dsZWFwaXMuY29tL2F1dGgvZGV2c3RvcmFnZS5yZWFkX3dyaXRlIiwgImF1ZCI6ICJodHRwOi8vbG9jYWxob3N0OjgwODAvb2F1dGgyL3Y0L3Rva2VuIiwgImlhdCI6IDEyMzQ1NiwgImV4cCI6IDEyNzA1Nn0%3D.DAhqWtBgKpObxZ%2BGiXqwF%2Fa4SS%2FNWQRhLCI7DYZCuOTuf2w7dL8j4CdpiwwzQg1diIus7dyViRfzpsFmuZKAXwL%2B84iBoVVqnJJZ4TgwH49NdfMAnc4Rgm%2Bo2a2nEcMjX%2FbQ3jRY%2B9WNVl96hzULGvLrVeyego2f06wivqmvxHA%3D'
        content_8081 = 'grant_type=urn%3Aietf%3Aparams%3Aoauth%3Agrant-type%3Ajwt-bearer&assertion=eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiAiQ0xJRU5UX0VNQUlMIiwgInNjb3BlIjogImh0dHBzOi8vd3d3Lmdvb2dsZWFwaXMuY29tL2F1dGgvZGV2c3RvcmFnZS5yZWFkX3dyaXRlIiwgImF1ZCI6ICJodHRwOi8vbG9jYWxob3N0OjgwODEvb2F1dGgyL3Y0L3Rva2VuIiwgImlhdCI6IDEyMzQ1NiwgImV4cCI6IDEyNzA1Nn0%3D.0abOEg4%2FRApWTSeAs6YTHaNzdwOgZLm8DTMO2MKlOA%2Fiagyb4cBJxDpkD5gECPvi7qhkg7LsyFuj0a%2BK48Bsuj%2FgLHOU4MpB0dHwYnDO2UXzH%2FUPdgFCVak1P1V%2ByiDA%2B%2Ft4aDI5fD9qefKQiu3wsMDHzP71MNLzayrjqaqKKS4%3D'
        if content not in [content_8080, content_8081]:
            sys.stderr.write('Bad POST content: %s\n' % content)
            request.send_response(403)
            return

        request.send_response(200)
        request.send_header('Content-type', 'text/plain')
        content = """{
                "access_token" : "ACCESS_TOKEN",
                "token_type" : "Bearer",
                "expires_in" : 3600,
                }"""
        request.send_header('Content-Length', len(content))
        request.end_headers()
        request.wfile.write(content.encode('ascii'))

    handler.add('POST', '/oauth2/v4/token', custom_method=method)

    def method(request):
        # Fake GS endpoint: require the Bearer token issued above.
        if 'Authorization' not in request.headers:
            sys.stderr.write('Bad headers: %s\n' % str(request.headers))
            request.send_response(403)
            return
        expected_authorization = 'Bearer ACCESS_TOKEN'
        if request.headers['Authorization'] != expected_authorization:
            sys.stderr.write("Bad Authorization: '%s'\n" % str(request.headers['Authorization']))
            request.send_response(403)
            return

        request.send_response(200)
        request.send_header('Content-type', 'text/plain')
        request.send_header('Content-Length', 3)
        request.end_headers()
        request.wfile.write("""foo""".encode('ascii'))

    handler.add('GET', '/gs_fake_bucket/resource', custom_method=method)

    try:
        with webserver.install_http_handler(handler):
            f = open_for_read('/vsigs/gs_fake_bucket/resource')
            if f is None:
                gdal.Unlink('/vsimem/service_account.json')
                pytest.fail()
            data = gdal.VSIFReadL(1, 4, f).decode('ascii')
            gdal.VSIFCloseL(f)

            # With a service account, signed URLs can be generated; the
            # signature only depends on the (pinned) parameters, so it is
            # deterministic up to the webserver port.
            signed_url = gdal.GetSignedURL('/vsigs/gs_fake_bucket/resource',
                                           ['START_DATE=20180212T123456Z'])
            if signed_url not in (
                'http://127.0.0.1:8080/gs_fake_bucket/resource?Expires=1518442496&GoogleAccessId=CLIENT_EMAIL&Signature=b19I62KdqV51DpWGxhxGXLGJIA8MHvSJofwOygoeQuIxkM6PmmQFvJYTNWRt9zUVTUoVC0UHVB7ee5Z35NqDC8K4i0quu1hb8Js2B4h0W6OAupvyF3nSQ5D0OJmiSbomGMq0Ehyro5cqJ%2FU%2Fd8oAaKrGKVQScKfXoFrSJBbWkNs%3D',
                'http://127.0.0.1:8081/gs_fake_bucket/resource?Expires=1518442496&GoogleAccessId=CLIENT_EMAIL&Signature=b19I62KdqV51DpWGxhxGXLGJIA8MHvSJofwOygoeQuIxkM6PmmQFvJYTNWRt9zUVTUoVC0UHVB7ee5Z35NqDC8K4i0quu1hb8Js2B4h0W6OAupvyF3nSQ5D0OJmiSbomGMq0Ehyro5cqJ%2FU%2Fd8oAaKrGKVQScKfXoFrSJBbWkNs%3D'
            ):
                gdal.Unlink('/vsimem/service_account.json')
                pytest.fail(signed_url)
    except:
        # Builds without RSA signing support cannot run this test.
        if gdal.GetLastErrorMsg().find('CPLRSASHA256Sign() not implemented') >= 0:
            pytest.skip()
    finally:
        # NOTE(review): 'GO2A_NOW' below looks like a typo for the
        # 'GOA2_NOW' option set above (which is thus never reset) —
        # confirm against other vsigs tests before changing.
        gdal.SetConfigOption('GO2A_AUD', None)
        gdal.SetConfigOption('GO2A_NOW', None)
        gdal.SetConfigOption('GOOGLE_APPLICATION_CREDENTIALS', '')
        gdal.Unlink('/vsimem/service_account.json')

    assert data == 'foo'
def test_hfa_update_existing_aux_overviews():
    """Test regenerating .aux (RRD) overviews that already exist.

    Builds RRD overviews on a BMP dataset, regenerates them (once,
    twice in a row, and together with an extra overview level), and
    checks that the existing overview checksums are preserved.

    Fix of this revision: the USE_RRD config option is restored through
    a single try/finally instead of being duplicated before every
    failure exit (previously a failure path could be added that forgot
    the reset); failing checks now use plain asserts.
    """
    gdal.SetConfigOption('USE_RRD', 'YES')
    try:
        ds = gdal.GetDriverByName('BMP').Create(
            'tmp/hfa_update_existing_aux_overviews.bmp', 100, 100, 1)
        ds.GetRasterBand(1).Fill(255)
        ds = None

        # Create overviews
        ds = gdal.Open('tmp/hfa_update_existing_aux_overviews.bmp')
        ds.BuildOverviews('NEAR', overviewlist=[2, 4])
        ds = None

        # Save overviews checksum
        ds = gdal.Open('tmp/hfa_update_existing_aux_overviews.bmp')
        cs_ovr0 = ds.GetRasterBand(1).GetOverview(0).Checksum()
        cs_ovr1 = ds.GetRasterBand(1).GetOverview(1).Checksum()
        # and regenerate them
        ds.BuildOverviews('NEAR', overviewlist=[2, 4])
        ds = None

        # Check overviews checksum after regeneration
        ds = gdal.Open('tmp/hfa_update_existing_aux_overviews.bmp')
        assert ds.GetRasterBand(1).GetOverview(0).Checksum() == cs_ovr0
        assert ds.GetRasterBand(1).GetOverview(1).Checksum() == cs_ovr1

        # and regenerate them twice in a row
        ds.BuildOverviews('NEAR', overviewlist=[2, 4])
        ds.BuildOverviews('NEAR', overviewlist=[2, 4])
        ds = None

        ds = gdal.Open('tmp/hfa_update_existing_aux_overviews.bmp')
        assert ds.GetRasterBand(1).GetOverview(0).Checksum() == cs_ovr0
        assert ds.GetRasterBand(1).GetOverview(1).Checksum() == cs_ovr1

        # and regenerate them with an extra overview level
        ds.BuildOverviews('NEAR', overviewlist=[8])
        ds = None

        # Existing levels must be untouched by the added level.
        ds = gdal.Open('tmp/hfa_update_existing_aux_overviews.bmp')
        assert ds.GetRasterBand(1).GetOverview(0).Checksum() == cs_ovr0
        assert ds.GetRasterBand(1).GetOverview(1).Checksum() == cs_ovr1
        ds = None

        gdal.GetDriverByName('BMP').Delete(
            'tmp/hfa_update_existing_aux_overviews.bmp')
    finally:
        gdal.SetConfigOption('USE_RRD', None)
def wms_8():
    """Test the WMS driver local tile cache.

    Reads tiles from a remote TMS server into a fresh cache, checks the
    expected cache files exist, verifies reads are served from the cache
    (by poisoning it with fake all-zero tiles), and finally checks that
    GDAL_DEFAULT_WMS_CACHE_PATH is honoured when the XML has an empty
    <Cache/> element.

    Fix of this revision: the two bare 'except:' clauses (cache removal
    and cache-file stat) now catch OSError only.

    Old-style gdaltest test: returns 'success', 'skip' or 'fail'.
    """
    if gdaltest.wms_drv is None:
        return 'skip'

    if gdaltest.metacarta_tms is not True:
        return 'skip'

    tms = """<GDAL_WMS>
    <Service name="TMS">
        <ServerUrl>http://tilecache.osgeo.org/wms-c/Basic.py</ServerUrl>
        <Layer>basic</Layer>
        <Format>png</Format>
    </Service>
    <DataWindow>
        <UpperLeftX>-180.0</UpperLeftX>
        <UpperLeftY>90.0</UpperLeftY>
        <LowerRightX>180.0</LowerRightX>
        <LowerRightY>-90.0</LowerRightY>
        <TileLevel>19</TileLevel>
        <TileCountX>2</TileCountX>
        <TileCountY>1</TileCountY>
    </DataWindow>
    <Projection>EPSG:4326</Projection>
    <BlockSizeX>256</BlockSizeX>
    <BlockSizeY>256</BlockSizeY>
    <BandsCount>3</BandsCount>
    <Cache><Path>./tmp/gdalwmscache</Path></Cache>
</GDAL_WMS>"""

    # Start from an empty cache.
    try:
        shutil.rmtree('tmp/gdalwmscache')
    except OSError:
        pass

    ds = gdal.Open(tms)
    if ds is None:
        gdaltest.post_reason('open failed.')
        return 'fail'

    gdal.ErrorReset()
    data = ds.GetRasterBand(1).GetOverview(18).ReadRaster(0, 0, 512, 256)
    if gdal.GetLastErrorMsg() != '':
        # Only skip when the server is actually unreachable.
        if gdaltest.gdalurlopen('http://tilecache.osgeo.org/wms-c/Basic.py/1.0.0/basic/0/0/0.png') is None:
            return 'skip'

    ds = None

    expected_files = [
        'tmp/gdalwmscache/d/b/dbbfe17f22c9d54f2c45ec7dc5042bc8',
        'tmp/gdalwmscache/5/4/548f0e98b56a8c104cfe2df9f7ef8685'
    ]
    for expected_file in expected_files:
        try:
            os.stat(expected_file)
        except OSError:
            gdaltest.post_reason('%s should exist' % expected_file)
            return 'fail'

    # Now, we should read from the cache
    ds = gdal.Open(tms)
    cached_data = ds.GetRasterBand(1).GetOverview(18).ReadRaster(
        0, 0, 512, 256)
    ds = None

    if data != cached_data:
        gdaltest.post_reason('data != cached_data')
        return 'fail'

    # Replace the cache with fake data
    for expected_file in expected_files:
        ds = gdal.GetDriverByName('GTiff').Create(expected_file, 256, 256, 4)
        ds.GetRasterBand(1).Fill(0)
        ds.GetRasterBand(2).Fill(0)
        ds.GetRasterBand(3).Fill(0)
        ds.GetRasterBand(4).Fill(255)
        ds = None

    # Read again from the cache, and check that it is actually used
    ds = gdal.Open(tms)
    cs = ds.GetRasterBand(1).GetOverview(18).Checksum()
    ds = None
    if cs != 0:
        gdaltest.post_reason('cs != 0')
        return 'fail'

    # Test with GDAL_DEFAULT_WMS_CACHE_PATH
    tms_nocache = """<GDAL_WMS>
    <Service name="TMS">
        <ServerUrl>http://tilecache.osgeo.org/wms-c/Basic.py</ServerUrl>
        <Layer>basic</Layer>
        <Format>png</Format>
    </Service>
    <DataWindow>
        <UpperLeftX>-180.0</UpperLeftX>
        <UpperLeftY>90.0</UpperLeftY>
        <LowerRightX>180.0</LowerRightX>
        <LowerRightY>-90.0</LowerRightY>
        <TileLevel>19</TileLevel>
        <TileCountX>2</TileCountX>
        <TileCountY>1</TileCountY>
    </DataWindow>
    <Projection>EPSG:4326</Projection>
    <BlockSizeX>256</BlockSizeX>
    <BlockSizeY>256</BlockSizeY>
    <BandsCount>3</BandsCount>
    <Cache/> <!-- this is needed for GDAL_DEFAULT_WMS_CACHE_PATH to be triggered -->
</GDAL_WMS>"""

    # Now, we should read from the cache
    gdal.SetConfigOption("GDAL_DEFAULT_WMS_CACHE_PATH", "./tmp/gdalwmscache")
    ds = gdal.Open(tms_nocache)
    cs = ds.GetRasterBand(1).GetOverview(18).Checksum()
    ds = None
    gdal.SetConfigOption("GDAL_DEFAULT_WMS_CACHE_PATH", None)
    if cs != 0:
        gdaltest.post_reason('cs != 0')
        return 'fail'

    return 'success'
def vsicurl_streaming_1():
    # Exercise /vsicurl_streaming/ sequential-read semantics against a public
    # USGS DEM file: tell/read/seek at the start, a short forward seek, a
    # seek-to-end (size check), and re-reads that must match earlier data.
    # Returns 'success'/'fail'/'skip' (old gdaltest convention).
    try:
        drv = gdal.GetDriverByName( 'HTTP' )
    except:
        drv = None
    if drv is None:
        return 'skip'

    gdal.SetConfigOption('GDAL_HTTP_CONNECTTIMEOUT', '5')
    fp = gdal.VSIFOpenL('/vsicurl_streaming/http://download.osgeo.org/gdal/data/usgsdem/cded/114p01_0100_deme.dem', 'rb')
    gdal.SetConfigOption('GDAL_HTTP_CONNECTTIMEOUT', None)
    if fp is None:
        # Distinguish a server outage (skip) from a driver failure (fail).
        if gdaltest.gdalurlopen('http://download.osgeo.org/gdal/data/usgsdem/cded/114p01_0100_deme.dem', timeout = 4) is None:
            print('cannot open URL')
            return 'skip'
        gdaltest.post_reason('fail')
        return 'fail'

    # Fresh handle must be positioned at offset 0.
    if gdal.VSIFTellL(fp) != 0:
        gdaltest.post_reason('fail')
        gdal.VSIFCloseL(fp)
        return 'fail'
    # First 50 bytes of the USGS DEM A-record header.
    data = gdal.VSIFReadL(1, 50, fp)
    if data.decode('ascii') != '                              114p01DEMe   Base Ma':
        gdaltest.post_reason('fail')
        gdal.VSIFCloseL(fp)
        return 'fail'
    if gdal.VSIFTellL(fp) != 50:
        gdaltest.post_reason('fail')
        gdal.VSIFCloseL(fp)
        return 'fail'

    # Rewind and re-read: streaming handle must replay the same bytes.
    gdal.VSIFSeekL(fp, 0, 0)
    if gdal.VSIFTellL(fp) != 0:
        gdaltest.post_reason('fail')
        gdal.VSIFCloseL(fp)
        return 'fail'
    data = gdal.VSIFReadL(1, 50, fp)
    if data.decode('ascii') != '                              114p01DEMe   Base Ma':
        gdaltest.post_reason('fail')
        gdal.VSIFCloseL(fp)
        return 'fail'
    if gdal.VSIFTellL(fp) != 50:
        gdaltest.post_reason('fail')
        gdal.VSIFCloseL(fp)
        return 'fail'

    time.sleep(0.5)

    # Short forward seek within the already-streamed window.
    gdal.VSIFSeekL(fp, 2001, 0)
    data_2001 = gdal.VSIFReadL(1, 20, fp)
    if data_2001.decode('ascii') != '7-32767-32767-32767-':
        print(data_2001)
        gdaltest.post_reason('fail')
        gdal.VSIFCloseL(fp)
        return 'fail'
    if gdal.VSIFTellL(fp) != 2001 + 20:
        gdaltest.post_reason('fail')
        gdal.VSIFCloseL(fp)
        return 'fail'

    # Seek to end-of-file: tell() reports the full file size, and a read
    # there returns nothing.
    gdal.VSIFSeekL(fp, 0, 2)
    if gdal.VSIFTellL(fp) != 9839616:
        gdaltest.post_reason('fail')
        gdal.VSIFCloseL(fp)
        return 'fail'
    nRet = len(gdal.VSIFReadL(1, 10, fp))
    if nRet != 0:
        gdaltest.post_reason('fail')
        gdal.VSIFCloseL(fp)
        return 'fail'

    # Backward seek after EOF: must reproduce the earlier 20 bytes exactly.
    gdal.VSIFSeekL(fp, 2001, 0)
    data_2001_2 = gdal.VSIFReadL(1, 20, fp)
    if gdal.VSIFTellL(fp) != 2001 + 20:
        gdaltest.post_reason('fail')
        gdal.VSIFCloseL(fp)
        return 'fail'
    if data_2001 != data_2001_2:
        gdaltest.post_reason('fail')
        gdal.VSIFCloseL(fp)
        return 'fail'

    # Seek far beyond the initial streaming window.
    gdal.VSIFSeekL(fp, 1024 * 1024 + 100, 0)
    data = gdal.VSIFReadL(1, 20, fp)
    if data.decode('ascii') != '67-32767-32767-32767':
        print(data)
        gdaltest.post_reason('fail')
        gdal.VSIFCloseL(fp)
        return 'fail'
    if gdal.VSIFTellL(fp) != 1024 * 1024 + 100 + 20:
        gdaltest.post_reason('fail')
        gdal.VSIFCloseL(fp)
        return 'fail'

    gdal.VSIFCloseL(fp)

    return 'success'
def eedai_gce_credentials():
    # Test the EEDAI driver authenticating through GCE instance-metadata
    # credentials: a local webserver plays the metadata endpoint and serves
    # a bearer token; opening the dataset must then succeed.
    # Returns 'success'/'fail'/'skip' (old gdaltest convention).
    if gdaltest.eedai_drv is None:
        return 'skip'
    # GCE metadata credential handling is only exercised on these platforms.
    if sys.platform not in ('linux', 'linux2', 'win32'):
        return 'skip'

    gdaltest.webserver_process = None
    gdaltest.webserver_port = 0

    if not gdaltest.built_against_curl():
        return 'skip'

    (gdaltest.webserver_process, gdaltest.webserver_port) = webserver.launch(
        handler=webserver.DispatcherHttpHandler)
    if gdaltest.webserver_port == 0:
        return 'skip'

    # Point the GCE token fetch at our fake metadata server.
    gdal.SetConfigOption(
        'CPL_GCE_CREDENTIALS_URL',
        'http://localhost:%d/computeMetadata/v1/instance/service-accounts/default/token'
        % gdaltest.webserver_port)
    # Disable hypervisor related check to test if we are really on EC2
    gdal.SetConfigOption('CPL_GCE_CHECK_LOCAL_FILES', 'NO')

    gdal.VSICurlClearCache()

    # NOTE(review): this handler is never registered below — the token
    # request uses the canned response added to the SequentialHandler and
    # the image itself is read from /vsimem. Looks like a copy/paste
    # leftover from the /vsigs tests; confirm before removing.
    def method(request):
        if 'Authorization' not in request.headers:
            sys.stderr.write('Bad headers: %s\n' % str(request.headers))
            request.send_response(403)
            return
        expected_authorization = 'Bearer ACCESS_TOKEN'
        if request.headers['Authorization'] != expected_authorization:
            sys.stderr.write("Bad Authorization: '%s'\n" %
                             str(request.headers['Authorization']))
            request.send_response(403)
            return

        request.send_response(200)
        request.send_header('Content-type', 'text/plain')
        request.send_header('Content-Length', 3)
        request.end_headers()
        request.wfile.write("""foo""".encode('ascii'))

    handler = webserver.SequentialHandler()
    handler.add(
        'GET', '/computeMetadata/v1/instance/service-accounts/default/token',
        200, {}, """{
        "access_token" : "ACCESS_TOKEN",
        "token_type" : "Bearer",
        "expires_in" : 3600,
        }""")

    with webserver.install_http_handler(handler):
        gdal.SetConfigOption('EEDA_URL', '/vsimem/ee/')
        ds = gdal.Open('EEDAI:image')
        gdal.SetConfigOption('EEDA_URL', None)

    gdal.SetConfigOption('CPL_GCE_CREDENTIALS_URL', None)
    gdal.SetConfigOption('CPL_GCE_CHECK_LOCAL_FILES', None)

    webserver.server_stop(gdaltest.webserver_process, gdaltest.webserver_port)

    if ds is None:
        return 'fail'

    return 'success'
def test_vsigs_read_credentials_gce_expiration():
    # Test that /vsigs re-fetches GCE instance-metadata credentials when the
    # token has expired: the fake metadata server returns tokens with
    # expires_in == 0, so two token requests must be observed before the
    # authenticated file read succeeds.
    if gdaltest.webserver_port == 0:
        pytest.skip()

    if sys.platform not in ('linux', 'linux2', 'win32'):
        pytest.skip()

    # Make sure no other GS credential source can short-circuit GCE auth.
    gdal.SetConfigOption('CPL_GS_CREDENTIALS_FILE', '')
    gdal.SetConfigOption('GS_SECRET_ACCESS_KEY', '')
    gdal.SetConfigOption('GS_ACCESS_KEY_ID', '')
    gdal.SetConfigOption(
        'CPL_GCE_CREDENTIALS_URL',
        'http://localhost:%d/computeMetadata/v1/instance/service-accounts/default/token'
        % gdaltest.webserver_port)
    # Disable hypervisor related check to test if we are really on EC2
    gdal.SetConfigOption('CPL_GCE_CHECK_LOCAL_FILES', 'NO')

    gdal.VSICurlClearCache()

    # Serves the actual resource; rejects the request unless the bearer
    # token obtained from the metadata endpoint is presented.
    def method(request):
        if 'Authorization' not in request.headers:
            sys.stderr.write('Bad headers: %s\n' % str(request.headers))
            request.send_response(403)
            return
        expected_authorization = 'Bearer ACCESS_TOKEN'
        if request.headers['Authorization'] != expected_authorization:
            sys.stderr.write("Bad Authorization: '%s'\n" %
                             str(request.headers['Authorization']))
            request.send_response(403)
            return

        request.send_response(200)
        request.send_header('Content-type', 'text/plain')
        request.send_header('Content-Length', 3)
        request.end_headers()
        request.wfile.write("""foo""".encode('ascii'))

    handler = webserver.SequentialHandler()
    # First time is used when trying to establish if GCE authentication is available
    handler.add(
        'GET', '/computeMetadata/v1/instance/service-accounts/default/token',
        200, {}, """{
        "access_token" : "ACCESS_TOKEN",
        "token_type" : "Bearer",
        "expires_in" : 0,
        }""")
    # Second time is needed because of the access to the file
    handler.add(
        'GET', '/computeMetadata/v1/instance/service-accounts/default/token',
        200, {}, """{
        "access_token" : "ACCESS_TOKEN",
        "token_type" : "Bearer",
        "expires_in" : 0,
        }""")
    handler.add('GET', '/gs_fake_bucket/resource', custom_method=method)
    with webserver.install_http_handler(handler):
        f = open_for_read('/vsigs/gs_fake_bucket/resource')
        assert f is not None
        data = gdal.VSIFReadL(1, 4, f).decode('ascii')
        gdal.VSIFCloseL(f)

    assert data == 'foo'

    gdal.SetConfigOption('CPL_GCE_CREDENTIALS_URL', '')
    gdal.SetConfigOption('CPL_GCE_CHECK_LOCAL_FILES', None)
def eedai_geotiff():
    # Test reading an EEDAI image through the GEO_TIFF pixel encoding: the
    # expected getPixels POST request is pre-registered as a /vsimem file
    # whose content is a GTiff blob, and the driver must return its pixels.
    # Returns 'success'/'fail'/'skip' (old gdaltest convention).
    if gdaltest.eedai_drv is None:
        return 'skip'

    # Fake asset metadata: one 16-bit integer band on a UTM zone 10N grid.
    gdal.FileFromMemBuffer(
        '/vsimem/ee/assets/image',
        json.dumps({
            'type': 'IMAGE',
            'bands': [{
                "id": "B1",
                "dataType": {
                    "precision": "INTEGER",
                    "range": {
                        "max": 65535
                    }
                },
                "grid": {
                    "crsCode": "EPSG:32610",
                    "affineTransform": {
                        "translateX": 499980,
                        "translateY": 4200000,
                        "scaleX": 60,
                        "scaleY": -60
                    },
                    "dimensions": {
                        "width": 1830,
                        "height": 1831
                    }
                }
            }]
        }))
    gdal.SetConfigOption('EEDA_BEARER', 'mybearer')
    gdal.SetConfigOption('EEDA_URL', '/vsimem/ee/')
    ds = gdal.Open('EEDAI:image')
    gdal.SetConfigOption('EEDA_URL', None)

    # Build the GTiff payload the canned response will return:
    # a 256x256 UInt16 raster filled with 12345.
    mem_ds = gdal.GetDriverByName('MEM').Create('', 256, 256, 1,
                                                gdal.GDT_UInt16)
    mem_ds.GetRasterBand(1).Fill(12345)
    gdal.GetDriverByName('GTiff').CreateCopy('/vsimem/out.tif', mem_ds)
    f = gdal.VSIFOpenL('/vsimem/out.tif', 'rb')
    data = gdal.VSIFReadL(1, 1000000, f)
    gdal.VSIFCloseL(f)
    gdal.Unlink('/vsimem/out.tif')

    # Register the canned response keyed on the exact getPixels POST body
    # the driver is expected to emit (do not reformat this string).
    gdal.FileFromMemBuffer(
        '/vsimem/ee/assets:getPixels&CUSTOMREQUEST=POST&POSTFIELDS={ "path": "image", "encoding": "GEO_TIFF", "bandIds": [ "B1" ], "grid": { "affineTransform": { "translateX": 499980.0, "translateY": 4200000.0, "scaleX": 60.0, "scaleY": -60.0, "shearX": 0.0, "shearY": 0.0 }, "dimensions": { "width": 256, "height": 256 } } }',
        data)
    got_data = ds.GetRasterBand(1).ReadRaster(0, 0, 1, 1)
    got_data = struct.unpack('H', got_data)[0]
    if got_data != 12345:
        gdaltest.post_reason('fail')
        print(got_data)
        return 'fail'

    ds = None
    gdal.SetConfigOption('EEDA_BEARER', None)

    return 'success'
def test_vsigs_cleanup():
    """Restore every GS-related config option saved in gdaltest.gs_vars."""
    for name, saved_value in gdaltest.gs_vars.items():
        gdal.SetConfigOption(name, saved_value)
def calculate(self, original_grid, original_school, tyson_polygon, temp_txt, new_grid, new_school, temp_result, end_result, n, field_school_name, field_school_zrs, field_grid_rks):
    """Iteratively assign population grid cells to schools.

    Each iteration builds a Voronoi (Thiessen/"tyson") tessellation of the
    remaining schools, picks the cell with the smallest X/Y sort order,
    accumulates nearby grid cells until their population (field_grid_rks)
    covers that school's capacity (field_school_zrs), writes the shrunken
    school/grid shapefiles back out, and repeats.  The last two schools are
    handled by plain nearest-distance assignment ("REST" section).  Results
    are written to temp_result and end_result shapefiles.

    Parameters (all file paths are ESRI Shapefile paths):
      original_grid/original_school -- input grid and school layers
      tyson_polygon                 -- scratch output of get_voronoi()
      temp_txt                      -- scratch text path for get_voronoi()
      new_grid/new_school           -- per-iteration remainder layers
      temp_result/end_result        -- intermediate / final outputs
      n                             -- iteration control; the loop runs
                                       int(n) - 2 times (presumably the
                                       number of schools — TODO confirm)
      field_school_name/field_school_zrs/field_grid_rks
                                    -- attribute field names (zrs looks like
                                       school capacity, rks like grid
                                       population — confirm with data owner)

    NOTE(review): several chained pandas assignments on slices (select_1,
    select_2, rest_xx, rest_wg, wg_df1) will raise SettingWithCopyWarning
    and rely on them actually mutating copies — behavior is subtle here.
    """
    global type_dict, driver, ref3
    # OGR geometry-type lookup used when writing result shapefiles.
    type_dict = {
        1: ogr.wkbPoint,
        2: ogr.wkbLineString,
        3: ogr.wkbPolygon,
        4: ogr.wkbMultiPoint,
        5: ogr.wkbMultiLineString,
        6: ogr.wkbMultiPolygon,
        7: ogr.wkbGeometryCollection
    }
    driver = ogr.GetDriverByName('ESRI Shapefile')
    gdal.SetConfigOption("SHAPE_ENCODING", "")
    result = []   # accumulated assigned-grid rows across iterations
    col = []      # column names of the accumulated rows
    for i in range(int(n) - 2):
        print(i)
        if i == 0:
            # First iteration reads the original layers.
            mask = original_grid
            filename = original_school
            out_put = tyson_polygon
            path = temp_txt
            get_voronoi(mask, filename, out_put, path)
            filename_v1 = tyson_polygon
            dataSource1 = driver.Open(filename_v1, 0)
            if dataSource1 is None:
                print('File cannot be opened!')
            layer1 = dataSource1.GetLayer(0)
            type1 = layer1.GetGeomType()
            ref1 = layer1.GetSpatialRef()
            ts_df = self.get_fields(layer1)
            filename_v2 = original_school
            dataSource2 = driver.Open(filename_v2, 0)
            # NOTE(review): checks dataSource1 again; almost certainly meant
            # dataSource2 — confirm and fix.
            if dataSource1 is None:
                print('File cannot be opened!')
            layer2 = dataSource2.GetLayer(0)
            type2 = layer2.GetGeomType()
            ref2 = layer2.GetSpatialRef()
            xx_df = self.get_fields1(layer2)
            # Inflate capacity by 29% (magic factor — confirm its origin).
            xx_df[field_school_zrs] = xx_df[field_school_zrs].astype(float)
            xx_df[field_school_zrs] = xx_df[field_school_zrs] * 1.29
            # Pick the Voronoi cell with the smallest (X, Y) as current target.
            ts_df.sort_values(by=['X', 'Y'], ascending=(True, True),
                              inplace=True)
            ts_df['sort'] = [i for i in range(len(ts_df))]
            min_wkt = ts_df['WKT'].values[0]
            min_value = 0
            min_name = ''
            col_xxdf = list(xx_df.columns)
            name_index = col_xxdf.index(field_school_name)
            value_index = col_xxdf.index(field_school_zrs)
            xx_values = xx_df.values.tolist()
            #print(xx_values)
            # Find the school located in (or touching) the target cell.
            for row in range(len(xx_values)):
                if ogr.CreateGeometryFromWkt(min_wkt).Contains(
                        ogr.CreateGeometryFromWkt(xx_values[row][-1])
                ) or ogr.CreateGeometryFromWkt(min_wkt).Intersects(
                        ogr.CreateGeometryFromWkt(xx_values[row][-1])):
                    min_value = float(xx_values[row][value_index])
                    min_name = xx_values[row][name_index]
                    break
            filename_v3 = original_grid
            dataSource3 = driver.Open(filename_v3, 0)
            if dataSource3 is None:
                print('File cannot be opened!')
            layer3 = dataSource3.GetLayer(0)
            type3 = layer3.GetGeomType()
            ref3 = layer3.GetSpatialRef()
            wg_df = self.get_fields1(layer3)
            wg_dfcopy = wg_df.copy()
            # Normalize population field to float (blank/NaN -> 0).
            wg_df[field_grid_rks] = wg_df[field_grid_rks].fillna(0)
            wg_df[field_grid_rks] = wg_df[field_grid_rks].replace('', 0)
            wg_df[field_grid_rks] = wg_df[field_grid_rks].astype(float)
            wg_values = wg_df.values.tolist()
            # Flag grid cells intersecting the target Voronoi cell.
            flag = []
            for row in range(len(wg_values)):
                if ogr.CreateGeometryFromWkt(min_wkt).Intersects(
                        ogr.CreateGeometryFromWkt(wg_values[row][-1])):
                    flag.append(1)
                else:
                    flag.append(0)
            wg_df['flag'] = flag
            select_1 = wg_df[wg_df['flag'] > 0]
            print('min_value RKS')
            print(min_value)
            print(select_1[field_grid_rks].sum())
            print(select_1.columns)
            print(min_value)
            print(type(select_1[field_grid_rks].sum()))
            select_1.sort_values(by=['Y', 'X'], ascending=(True, True),
                                 inplace=True)
            select_1_values = select_1.values.tolist()
            select_1_col = list(select_1.columns)
            select_1_vindex = select_1_col.index(field_grid_rks)
            s = 0.0
            print(select_1.columns)
            # Greedily take flagged cells until ~90% of capacity is reached.
            flag_xx = []
            for row in range(len(select_1_values)):
                if s <= int(min_value - min_value * 0.1):
                    flag_xx.append(min_name)
                    s = s + select_1_values[row][select_1_vindex]
                else:
                    flag_xx.append(0)
            select_1['flag_xx'] = flag_xx
            select_2 = select_1[select_1['flag_xx'] == min_name]
            # Move the WKT column to the end (get_shp presumably expects
            # geometry last — confirm).
            tem_wkt = select_2['WKT']
            del select_2['WKT']
            select_2['WKT'] = tem_wkt
            col = list(select_2.columns)
            list1 = select_2.values.tolist()
            for a in list1:
                result.append(a)
            # Remove the satisfied school and the assigned cells, then write
            # the remainders for the next iteration.
            rest_xx = xx_df[~xx_df[field_school_name].isin([min_name])]
            tem_wkt = rest_xx['WKT']
            del rest_xx['WKT']
            rest_xx['WKT'] = tem_wkt
            del rest_xx['X']
            del rest_xx['Y']
            self.get_shp(rest_xx, new_school, type2, ref3)
            rest_wg = wg_df[~wg_df['WKT'].isin(select_2['WKT'].values.
                                               tolist())]
            tem_wkt = rest_wg['WKT']
            del rest_wg['WKT']
            rest_wg['WKT'] = tem_wkt
            del rest_wg['X']
            del rest_wg['Y']
            self.get_shp(rest_wg, new_grid, type3, ref3)
            print(min_name)
            dataSource1.Destroy()
            dataSource2.Destroy()
            dataSource3.Destroy()
        else:
            # Subsequent iterations read the remainder layers written above.
            mask = new_grid
            filename = new_school
            out_put = tyson_polygon
            # path = temp_txt
            # NOTE(review): 'path' is reused from the i == 0 iteration (the
            # reassignment above is commented out) — works only because the
            # loop always starts at i == 0.
            get_voronoi(mask, filename, out_put, path)
            filename_v1 = tyson_polygon
            dataSource1 = driver.Open(filename_v1, 0)
            if dataSource1 is None:
                print('File cannot be opened!')
            layer1 = dataSource1.GetLayer(0)
            type1 = layer1.GetGeomType()
            ref1 = layer1.GetSpatialRef()
            ts_df = self.get_fields(layer1)
            filename_v2 = new_school
            dataSource2 = driver.Open(filename_v2, 0)
            # NOTE(review): same copy/paste bug as above — checks dataSource1
            # where dataSource2 was just opened.
            if dataSource1 is None:
                print('File cannot be opened!')
            layer2 = dataSource2.GetLayer(0)
            type2 = layer2.GetGeomType()
            ref2 = layer2.GetSpatialRef()
            xx_df = self.get_fields1(layer2)
            xx_df[field_school_zrs] = xx_df[field_school_zrs].astype(float)
            # xx_df['ZRS'] = xx_df['ZRS'] * 1.2
            ts_df.sort_values(by=['X', 'Y'], ascending=(True, True),
                              inplace=True)
            ts_df['sort'] = [i for i in range(len(ts_df))]
            min_wkt = ts_df['WKT'].values[0]
            min_value = 0
            min_name = ''
            col_xxdf = list(xx_df.columns)
            name_index = col_xxdf.index(field_school_name)
            value_index = col_xxdf.index(field_school_zrs)
            xx_values = xx_df.values.tolist()
            print(xx_values)
            for row in range(len(xx_values)):
                if ogr.CreateGeometryFromWkt(min_wkt).Contains(
                        ogr.CreateGeometryFromWkt(xx_values[row][-1])
                ) or ogr.CreateGeometryFromWkt(min_wkt).Intersects(
                        ogr.CreateGeometryFromWkt(xx_values[row][-1])):
                    min_value = xx_values[row][value_index]
                    min_name = xx_values[row][name_index]
                    break
            filename_v3 = new_grid
            dataSource3 = driver.Open(filename_v3, 0)
            if dataSource3 is None:
                print('File cannot be opened!')
            layer3 = dataSource3.GetLayer(0)
            type3 = layer3.GetGeomType()
            ref3 = layer3.GetSpatialRef()
            wg_df = self.get_fields1(layer3)
            wg_dfcopy = wg_df.copy()
            wg_df[field_grid_rks] = wg_df[field_grid_rks].fillna(0)
            wg_df[field_grid_rks] = wg_df[field_grid_rks].replace('', 0)
            wg_df[field_grid_rks] = wg_df[field_grid_rks].astype(float)
            wg_values = wg_df.values.tolist()
            flag = []
            sum_RKS = 0.0
            for row in range(len(wg_values)):
                if ogr.CreateGeometryFromWkt(min_wkt).Intersects(
                        ogr.CreateGeometryFromWkt(wg_values[row][-1])):
                    flag.append(1)
                else:
                    flag.append(0)
            wg_df['flag'] = flag
            select_1 = wg_df[wg_df['flag'] > 0]
            sum_RKS = select_1[field_grid_rks].sum()
            # If the intersecting cells can't cover ~90% of capacity, grow
            # the target cell by buffering until they do.
            boder = 0.001
            while sum_RKS < min_value - min_value * 0.1:
                print('while')
                print(min_value, sum_RKS)
                flag1 = []
                wg_df['flag'] = None
                min_wkt = ogr.CreateGeometryFromWkt(min_wkt).Buffer(
                    boder).ExportToWkt()
                for row in range(len(wg_values)):
                    if ogr.CreateGeometryFromWkt(min_wkt).Intersects(
                            ogr.CreateGeometryFromWkt(wg_values[row][-1])):
                        flag1.append(1)
                    else:
                        flag1.append(0)
                wg_df['flag'] = flag1
                select_1 = wg_df[wg_df['flag'] > 0]
                sum_RKS = select_1[field_grid_rks].sum()
                boder = boder + 0.0001
            print('min_value RKS')
            print(min_value)
            print(select_1[field_grid_rks].sum())
            print(select_1.columns)
            print(min_value)
            print(type(select_1[field_grid_rks].sum()))
            select_1.sort_values(by=['Y', 'X'], ascending=(True, True),
                                 inplace=True)
            select_1_values = select_1.values.tolist()
            select_1_col = list(select_1.columns)
            select_1_vindex = select_1_col.index(field_grid_rks)
            s = 0.0
            print(select_1.columns)
            flag_xx = []
            print('min_value')
            print(min_value)
            print(type(min_value))
            # NOTE(review): threshold here is min_value (full capacity),
            # unlike the 90% threshold used in the i == 0 branch — confirm
            # the asymmetry is intended.
            for row in range(len(select_1_values)):
                if s <= min_value:
                    flag_xx.append(min_name)
                    s = s + select_1_values[row][select_1_vindex]
                else:
                    flag_xx.append(0)
            select_1['flag_xx'] = flag_xx
            select_2 = select_1[select_1['flag_xx'] == min_name]
            tem_wkt = select_2['WKT']
            del select_2['WKT']
            select_2['WKT'] = tem_wkt
            list1 = select_2.values.tolist()
            for a in list1:
                result.append(a)
            rest_xx = xx_df[~xx_df[field_school_name].isin([min_name])]
            tem_wkt = rest_xx['WKT']
            del rest_xx['WKT']
            rest_xx['WKT'] = tem_wkt
            del rest_xx['X']
            del rest_xx['Y']
            dataSource2.Destroy()
            self.get_shp(rest_xx, new_school, type2, ref3)
            rest_wg = wg_df[~wg_df['WKT'].isin(select_2['WKT'].values.
                                               tolist())]
            tem_wkt = rest_wg['WKT']
            del rest_wg['WKT']
            rest_wg['WKT'] = tem_wkt
            del rest_wg['X']
            del rest_wg['Y']
            dataSource3.Destroy()
            self.get_shp(rest_wg, new_grid, type3, ref3)
            dataSource1.Destroy()
            print(min_name)
    # NOTE(review): the first assignment is dead — immediately overwritten.
    result_end1 = pd.DataFrame(None, columns=col)
    result_end1 = pd.DataFrame(result, columns=col)
    self.get_shp(result_end1, temp_result, type_dict[1], ref3)
    result_end = pd.DataFrame(result, columns=col)
    del result_end['flag']
    # REST
    # The two remaining schools are handled by nearest-distance assignment
    # instead of Voronoi iteration.
    filename_v4 = new_school
    dataSource4 = driver.Open(filename_v4, 0)
    if dataSource4 is None:
        print('File cannot be opened!')
    layer4 = dataSource4.GetLayer(0)
    type4 = layer4.GetGeomType()
    ref4 = layer4.GetSpatialRef()
    xx_df = self.get_fields1(layer4)
    xx_df[field_school_zrs] = xx_df[field_school_zrs].astype(float)
    xx_df.sort_values(by=['X', 'Y'], ascending=(True, True), inplace=True)
    filename_v5 = new_grid
    dataSource5 = driver.Open(filename_v5, 0)
    if dataSource5 is None:
        print('File cannot be opened!')
    layer5 = dataSource5.GetLayer(0)
    type5 = layer5.GetGeomType()
    ref5 = layer5.GetSpatialRef()
    wg_df = self.get_fields1(layer5)
    wg_df[field_grid_rks] = wg_df[field_grid_rks].fillna(0)
    wg_df[field_grid_rks] = wg_df[field_grid_rks].replace('', 0)
    wg_df[field_grid_rks] = wg_df[field_grid_rks].astype(float)
    xx_xy = xx_df[['X', 'Y', field_school_name,
                   field_school_zrs]].values.tolist()
    all_list = []
    # ONE
    # Assign cells to the first remaining school by increasing distance
    # until its capacity is reached.
    dis = []
    WG_XY = wg_df[['X', 'Y', field_grid_rks]].values.tolist()
    for loc in WG_XY:
        dis1 = self.getDistance(loc[1], loc[0], xx_xy[0][1], xx_xy[0][0])
        dis.append(dis1)
    wg_df['dis'] = dis
    wg_df.sort_values(by=['dis'], ascending=True, inplace=True)
    wg_dis = wg_df['dis'].values.tolist()
    WG_XY = wg_df[['X', 'Y', field_grid_rks]].values.tolist()
    flag1 = []
    sum_RKS = 0
    for row in range(len(wg_dis)):
        if sum_RKS <= xx_xy[0][3]:
            sum_RKS = sum_RKS + WG_XY[row][2]
            flag1.append(xx_xy[0][2])
        else:
            flag1.append(0)
    wg_df['flag_xx'] = flag1
    select_11 = wg_df[wg_df['flag_xx'] == xx_xy[0][2]]
    wg_df1 = wg_df[~wg_df['flag_xx'].isin(select_11['flag_xx'].values.
                                          tolist())]
    # TWO
    # Remaining cells go to the second school; overflow is tagged 'rest_net'.
    dis2 = []
    WG_XY1 = wg_df1[['X', 'Y', field_grid_rks]].values.tolist()
    for loc in WG_XY1:
        dis1 = self.getDistance(loc[1], loc[0], xx_xy[1][1], xx_xy[1][0])
        dis2.append(dis1)
    wg_df1['dis'] = dis2
    wg_df1.sort_values(by=['dis'], ascending=True, inplace=True)
    wg_dis1 = wg_df1['dis'].values.tolist()
    flag2 = []
    sum_RKS2 = 0
    WG_XY2 = wg_df1[['X', 'Y', field_grid_rks]].values.tolist()
    print(WG_XY2)
    for row in range(len(wg_dis1)):
        if sum_RKS2 <= xx_xy[1][3]:
            sum_RKS2 = sum_RKS2 + WG_XY2[row][2]
            flag2.append(xx_xy[1][2])
        else:
            flag2.append('rest_net')
    wg_df1['flag_xx'] = flag2
    select_22 = wg_df1
    for a in select_11.values.tolist():
        all_list.append(a)
    for b in select_22.values.tolist():
        all_list.append(b)
    result1 = pd.DataFrame(all_list, columns=list(select_11.columns))
    del result1['dis']
    del result1['flag']
    tem_wkt = result1['WKT']
    del result1['WKT']
    result1['WKT'] = tem_wkt
    # Merge Voronoi-iteration results with the REST assignments and recompute
    # centroid coordinates for the output layer.
    all_result = pd.concat([result_end, result1], axis=0)
    X = [
        ogr.CreateGeometryFromWkt(g).Centroid().GetX()
        for g in all_result['WKT'].values.tolist()
    ]
    Y = [
        ogr.CreateGeometryFromWkt(g).Centroid().GetY()
        for g in all_result['WKT'].values.tolist()
    ]
    all_result['X'] = X
    all_result['Y'] = Y
    self.get_shp(all_result, end_result, type_dict[1], ref3)
    end = datetime.datetime.now()
    # NOTE(review): 'start' is not defined in this method — it must come
    # from module/global scope; confirm it is set before calculate() runs.
    print("start_time:" + start.strftime("%Y-%m-%d %H:%M:%S"))
    print("end_time:" + end.strftime("%Y-%m-%d %H:%M:%S"))
def test_vsigs_2():
    # Test various /vsigs read paths against the local fake GS server:
    # GDAL_HTTP_HEADER_FILE injection, GetSignedURL() determinism,
    # authenticated reads through /vsigs and /vsigs_streaming, and
    # VSIStatL() size probing (range-GET and HEAD variants).
    if gdaltest.webserver_port == 0:
        pytest.skip()

    # header file
    gdal.FileFromMemBuffer('/vsimem/my_headers.txt', 'foo: bar')

    handler = webserver.SequentialHandler()
    handler.add('GET',
                '/gs_fake_bucket_http_header_file/resource',
                200, {'Content-type': 'text/plain'},
                'Y',
                expected_headers={'foo': 'bar'})
    with webserver.install_http_handler(handler):
        with gdaltest.config_option('GDAL_HTTP_HEADER_FILE',
                                    '/vsimem/my_headers.txt'):
            f = open_for_read(
                '/vsigs/gs_fake_bucket_http_header_file/resource')
            assert f is not None
            data = gdal.VSIFReadL(1, 1, f)
            gdal.VSIFCloseL(f)
            assert len(data) == 1
    gdal.Unlink('/vsimem/my_headers.txt')

    gdal.SetConfigOption('GS_SECRET_ACCESS_KEY', 'GS_SECRET_ACCESS_KEY')
    gdal.SetConfigOption('GS_ACCESS_KEY_ID', 'GS_ACCESS_KEY_ID')
    gdal.SetConfigOption('CPL_GS_TIMESTAMP', 'my_timestamp')

    # With fixed keys/timestamp the signed URL is deterministic; the two
    # accepted values only differ by the webserver port.
    signed_url = gdal.GetSignedURL('/vsigs/gs_fake_bucket/resource',
                                   ['START_DATE=20180212T123456Z'])
    assert (signed_url in (
        'http://127.0.0.1:8080/gs_fake_bucket/resource?Expires=1518442496&GoogleAccessId=GS_ACCESS_KEY_ID&Signature=xTphUyMqtKA6UmAX3PEr5VL3EOg%3D',
        'http://127.0.0.1:8081/gs_fake_bucket/resource?Expires=1518442496&GoogleAccessId=GS_ACCESS_KEY_ID&Signature=xTphUyMqtKA6UmAX3PEr5VL3EOg%3D'
    ))

    handler = webserver.SequentialHandler()
    handler.add('GET',
                '/gs_fake_bucket/resource',
                200, {'Content-type': 'text/plain'},
                'foo',
                expected_headers={
                    'Authorization':
                    'GOOG1 GS_ACCESS_KEY_ID:8tndu9//BfmN+Kg4AFLdUMZMBDQ='
                })
    with webserver.install_http_handler(handler):
        f = open_for_read('/vsigs/gs_fake_bucket/resource')
        assert f is not None
        data = gdal.VSIFReadL(1, 4, f).decode('ascii')
        gdal.VSIFCloseL(f)

    assert data == 'foo'

    handler = webserver.SequentialHandler()
    handler.add('GET',
                '/gs_fake_bucket/resource',
                200, {'Content-type': 'text/plain'},
                'foo',
                expected_headers={
                    'Authorization':
                    'GOOG1 GS_ACCESS_KEY_ID:8tndu9//BfmN+Kg4AFLdUMZMBDQ='
                })
    with webserver.install_http_handler(handler):
        f = open_for_read('/vsigs_streaming/gs_fake_bucket/resource')
        assert f is not None
        data = gdal.VSIFReadL(1, 4, f).decode('ascii')
        gdal.VSIFCloseL(f)

    assert data == 'foo'

    # /vsigs stats via a 1-byte range GET; size comes from Content-Range.
    handler = webserver.SequentialHandler()
    handler.add('GET', '/gs_fake_bucket/resource2.bin', 206,
                {'Content-Range': 'bytes 0-0/1000000'}, 'x')
    with webserver.install_http_handler(handler):
        stat_res = gdal.VSIStatL('/vsigs/gs_fake_bucket/resource2.bin')
        if stat_res is None or stat_res.size != 1000000:
            if stat_res is not None:
                print(stat_res.size)
            else:
                print(stat_res)
            pytest.fail()

    # /vsigs_streaming stats via HEAD; size comes from Content-Length.
    handler = webserver.SequentialHandler()
    handler.add('HEAD', '/gs_fake_bucket/resource2.bin', 200,
                {'Content-Length': 1000000})
    with webserver.install_http_handler(handler):
        stat_res = gdal.VSIStatL(
            '/vsigs_streaming/gs_fake_bucket/resource2.bin')
        if stat_res is None or stat_res.size != 1000000:
            if stat_res is not None:
                print(stat_res.size)
            else:
                print(stat_res)
            pytest.fail()
def main():
    """Estimate pi by Monte Carlo sampling spread over worker processes.

    Splits the total number of dart throws evenly across ``nbr_parallel``
    worker processes, sums the per-worker counts of points falling inside
    the quarter unit circle, and prints the resulting pi estimate and the
    elapsed wall-clock time.

    Returns:
        None (results are printed).
    """
    # Use integer sample counts: 1e8 / 8 yields the float 12500000.0, and a
    # float count passed to a range()-based sampling loop raises TypeError
    # on Python 3. 10 ** 8 with floor division keeps everything integral.
    nbr_samples_in_total = 10 ** 8
    nbr_parallel = 8
    pool = Pool(processes=nbr_parallel)
    nbr_samples_per_worker = nbr_samples_in_total // nbr_parallel
    print("Making {}".format(nbr_samples_per_worker))
    nbr_trials_per_process = [nbr_samples_per_worker] * nbr_parallel
    t1 = time.time()
    nbr_in_unit = pool.map(estimate_nbr_points_in_quarter_circle,
                           nbr_trials_per_process)
    # Release worker processes deterministically instead of relying on GC.
    pool.close()
    pool.join()
    # Area ratio of quarter circle to unit square is pi/4.
    pi_estimate = sum(nbr_in_unit) * 4 / nbr_samples_in_total
    print("estimate pi {} ".format(pi_estimate))
    print("Delta:{}".format(time.time() - t1))
    return None


if __name__ == '__main__':
    # Allow non-ASCII (e.g. Chinese) file paths.
    gdal.SetConfigOption("GDAL_FILENAME_IS_UTF8", "YES")
    # Allow non-ASCII attribute field values (GBK-encoded shapefiles).
    gdal.SetConfigOption("SHAPE_ENCODING", "GBK")
    # Register all OGR drivers.
    ogr.RegisterAll()
    # Register all GDAL drivers.
    gdal.AllRegister()

    start_time = time.time()
    main()
    end_time = time.time()
    print("time: %.4f secs." % (end_time - start_time))
print('List of GDAL Autotest modules') for test in all_test_list: print('* ' + test) sys.exit(0) elif arg == '-run_as_external': run_as_external = True elif arg == '-h' or arg[0] == '-': print('Usage: ' + sys.argv[0] + ' [OPTION]') print('\t<tests> - list of test modules to run, run all if none specified') print('\t-l - list available test modules') print('\t-h - print this usage message') print('\t-run_as_external - run each test script in a dedicated Python instance') sys.exit(0) else: test_list.append( arg ) if len(test_list) == 0: test_list = all_test_list # we set ECW to not resolve projection and datum strings to get 3.x behavior. gdal.SetConfigOption("ECW_DO_NOT_RESOLVE_DATUM_PROJECTION", "YES") gdaltest.setup_run( 'gdalautotest_all' ) gdaltest.run_all( test_list, run_as_external = run_as_external ) errors = gdaltest.summarize() sys.exit( errors )