def ogr_vrt_9():
    """Verify that an attribute filter set on a VRT layer is honoured and
    forwarded to the underlying source layer opened via OpenShared."""
    if gdaltest.vrt_ds is None:
        return 'skip'

    layer = gdaltest.vrt_ds.GetLayerByName('test3')
    layer.SetAttributeFilter('other = "Second"')
    layer.ResetReading()

    feature = layer.GetNextFeature()
    if feature.GetField('other') != 'Second':
        gdaltest.post_reason('attribute filter did not work.')
        return 'fail'
    feature.Destroy()

    # OpenShared should hand back the same underlying datasource the VRT
    # layer uses, so its feature count must reflect the filter set above.
    shared_ds = ogr.OpenShared('data/flat.dbf')
    flat_layer = shared_ds.GetLayerByName('flat')
    flat_layer.ResetReading()
    if flat_layer.GetFeatureCount() != 1:
        print(flat_layer.GetFeatureCount())
        gdaltest.post_reason('attribute filter not passed to sublayer.')
        return 'fail'

    layer.SetAttributeFilter(None)

    shared_ds.Release()
    shared_ds = None

    return 'success'
def ogr_index_2():
    """Create the 'join_t' DBF table used by the index tests, populate it
    with 20 records, then re-open it shared in update mode.

    Returns 'success' or 'fail' per the gdaltest test-function convention.
    """
    drv = ogr.GetDriverByName('ESRI Shapefile')
    gdaltest.s_ds = drv.CreateDataSource('join_t.dbf')
    gdaltest.s_lyr = gdaltest.s_ds.CreateLayer('join_t', geom_type=ogr.wkbNone)

    ogrtest.quick_create_layer_def(gdaltest.s_lyr,
                                   [('SKEY', ogr.OFTInteger),
                                    ('VALUE', ogr.OFTString, 16)])

    for i in range(20):
        ogrtest.quick_create_feature(gdaltest.s_lyr,
                                     [i, 'Value ' + str(i)], None)

    if gdaltest.s_lyr.GetFeatureCount() != 20:
        gdaltest.post_reason('FeatureCount wrong')
        # BUG FIX: was 'failure', which is not a recognized gdaltest result
        # string; the framework (and the rest of this file) expects 'fail'.
        return 'fail'

    # Close and re-open shared/updatable so later index tests can modify it.
    gdaltest.s_ds.Release()
    gdaltest.s_lyr = None
    gdaltest.s_ds = None

    gdaltest.s_ds = ogr.OpenShared('join_t.dbf', update=1)
    gdaltest.s_lyr = gdaltest.s_ds.GetLayerByName('join_t')

    return 'success'
def grid_generation(shp_studyarea, pixel_width, pixel_height):
    """Build the interpolation (kriging) grid covering a study-area shapefile.

    :param shp_studyarea: path to the shapefile defining the output extent
    :param pixel_width: grid cell width, in the layer's map units
    :param pixel_height: grid cell height, in the layer's map units
    :return: (gridx, gridy) — 1-D numpy arrays of grid x and y coordinates
    """
    ds = ogr.OpenShared(shp_studyarea)
    layer = ds.GetLayer()
    # GetExtent() returns (minx, maxx, miny, maxy) in the layer's CRS —
    # unpack directly instead of indexing the tuple element by element.
    minx, maxx, miny, maxy = layer.GetExtent()
    # Regular grid spanning the full extent.  Note: np.arange excludes the
    # stop value, so maxx/maxy themselves may not be sampled when the span
    # is not an exact multiple of the pixel size.
    gridx = np.arange(minx, maxx, pixel_width)
    gridy = np.arange(miny, maxy, pixel_height)
    return gridx, gridy
def walkall(walkloc, dsfileout, layfileout):
    # Walk every directory/file yielded by *walkloc* (an os.walk-style
    # iterable of (topdir, dirs, files) triples) and write pipe-delimited
    # inventory lines for OGR datasources (dsfileout) and their layers
    # (layfileout).
    # The following counters will be used for generating
    # unique datasource and layer ids (i.e. primary keys)
    dscounter = 0
    layercounter = 0
    # Check if the user wants to comment the first/header line in the output
    # If so, use the character given in the 3rd argument
    if len(sys.argv) > 3:
        headercmt = sys.argv[3]
    else:
        headercmt = ''
    # Open up the output files
    dsfileout.write(''.join([headercmt, 'dsid|datasource|format|layercount\n']))
    layfileout.write(''.join([
        headercmt,
        'layerid|dsid|datasource|format|layernumber|layername|featurecount|extent\n'
    ]))
    # Start walking through all the folders in specified path
    for walkdirs in walkloc:
        topdir = walkdirs[0]
        dirs = walkdirs[1]
        files = walkdirs[2]
        for walkdir in dirs:
            currentpath = os.path.join(topdir, walkdir)
            if (checkds(currentpath)):
                ds = ogr.OpenShared(currentpath)
                dscounter += 1
                try:
                    dsdetails, dslcount = getdsdetails(currentpath, ds)
                except:
                    # NOTE(review): bare except — if getdsdetails fails,
                    # dsdetails/dslcount keep the PREVIOUS directory's values
                    # (or are unbound on the first iteration, raising
                    # NameError below). Confirm whether a `continue` was
                    # intended here.
                    print "** unable to get dataset details for", currentpath
                dsfileout.write('|'.join(
                    [str(dscounter), str(dsdetails), str(dslcount)]))
                dsfileout.write('\n')
                for laynum in range(dslcount):
                    layercounter += 1
                    layfileout.write('|'.join([
                        str(layercounter),
                        str(dscounter),
                        str(getlayerdetails(currentpath, laynum, ds))
                    ]))
                    layfileout.write('\n')
        for walkfile in files:
            # NOTE(review): `currentpath`, `laynum` and `ds` here are stale
            # leftovers from the directory loop above (and unbound if it never
            # ran). `walkfile` comes from `topdir`'s file list, so the join
            # presumably should be os.path.join(topdir, walkfile) with a fresh
            # OpenShared and its own dscounter increment — verify intent.
            currentfile = os.path.join(currentpath, walkfile)
            if (checkds(currentfile)):
                layercounter += 1
                layfileout.write('|'.join([
                    str(layercounter),
                    str(dscounter),
                    str(getlayerdetails(currentfile, laynum, ds))
                ]))
                layfileout.write('\n')
def ogr_refcount_1():
    """Open two distinct datasources in shared mode and verify the global
    open-datasource count and each datasource's reference count.

    Returns 'success', 'fail' or 'skip' per the gdaltest convention.
    """
    if ogr.GetOpenDSCount() != 0:
        gdaltest.post_reason('Initial Open DS count is not zero!')
        # BUG FIX: was 'failed', which is not a recognized gdaltest result
        # string; the framework (and the rest of this file) expects 'fail'.
        return 'fail'

    gdaltest.ds_1 = ogr.OpenShared('data/idlink.dbf')
    gdaltest.ds_2 = ogr.OpenShared('data/poly.shp')

    if ogr.GetOpenDSCount() != 2:
        gdaltest.post_reason('Open DS count not 2 after shared opens.')
        return 'fail'

    if gdaltest.ds_1.GetRefCount() != 1 or gdaltest.ds_2.GetRefCount() != 1:
        gdaltest.post_reason('Reference count not 1 on one of datasources.')
        return 'fail'

    return 'success'
def ogr_index_6():
    """Re-open join_t and confirm an attribute filter on the VALUE column
    returns exactly the record with SKEY 5."""
    gdaltest.s_ds.Release()
    gdaltest.s_ds = ogr.OpenShared('join_t.dbf', update=1)
    gdaltest.s_lyr = gdaltest.s_ds.GetLayerByName('join_t')

    gdaltest.s_lyr.SetAttributeFilter('VALUE="Value 5"')

    expected_keys = [5]
    matched = ogrtest.check_features_against_list(gdaltest.s_lyr, 'SKEY',
                                                  expected_keys)
    return 'success' if matched else 'fail'
def utm_grid_into_es(utm_shp, zone):
    """Read every feature of a UTM grid shapefile, reproject each cell to
    WGS84, and index both geometries plus metadata into Elasticsearch."""
    grid_ds = ogr.OpenShared(utm_shp)
    if grid_ds is None:
        print("shapefile not exists:", utm_shp)
        return

    grid_layer = grid_ds.GetLayerByIndex(0)

    wgs_proj4 = "+proj=longlat +datum=WGS84 +no_defs "
    wgs_sr = osr.SpatialReference()
    wgs_sr.SetFromUserInput(wgs_proj4)

    utm_sr = grid_layer.GetSpatialRef()
    utm_proj4 = utm_sr.ExportToProj4()
    to_wgs = osr.CoordinateTransformation(utm_sr, wgs_sr)

    for grid_feat in grid_layer:
        feat_json = grid_feat.ExportToJson(as_object=True)
        grid_id = feat_json['properties']['GridID']

        utm_geom = grid_feat.geometry()
        wgs_geom = utm_geom.Clone()
        wgs_geom.Transform(to_wgs)

        # Explicitly close each polygon's outer ring by repeating the first
        # vertex at the end of the coordinate list.
        utm_geom_json = json.loads(utm_geom.ExportToJson())
        utm_geom_json["coordinates"][0].append(
            utm_geom_json["coordinates"][0][0])
        wgs_geom_json = json.loads(wgs_geom.ExportToJson())
        wgs_geom_json["coordinates"][0].append(
            wgs_geom_json["coordinates"][0][0])

        doc = {
            "zone": zone,
            'wgs_crs': wgs_proj4,
            'utm_crs': utm_proj4,
            'gridid': grid_id,
            "wgs_geometry": wgs_geom_json,
            "utm_geometry": utm_geom_json,
        }
        print(doc)
        es.index(index=index_name, doc_type="grids", body=doc)
def ogr_index_1():
    """Create the MapInfo point table 'index_p' used by the index tests,
    then re-open it read-only."""
    import gdal

    # Best-effort cleanup of leftovers from a previous run; missing files are
    # expected, so both errors and exceptions are silenced deliberately.
    gdal.PushErrorHandler('CPLQuietErrorHandler')
    try:
        ogr.GetDriverByName('MapInfo File').DeleteDataSource('index_p.mif')
    except:
        pass
    try:
        ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('join_t.dbf')
    except:
        pass
    gdal.PopErrorHandler()

    drv = ogr.GetDriverByName('MapInfo File')
    gdaltest.p_ds = drv.CreateDataSource('index_p.mif')
    gdaltest.p_lyr = gdaltest.p_ds.CreateLayer('index_p')
    ogrtest.quick_create_layer_def(gdaltest.p_lyr, [('PKEY', ogr.OFTInteger)])

    for pkey in (5, 10, 9, 4, 3, 1):
        ogrtest.quick_create_feature(gdaltest.p_lyr, [pkey], None)

    # It turns out mapinfo format doesn't allow GetFeatureCount() calls while
    # writing ... it just blows an assert!
    # if gdaltest.p_lyr.GetFeatureCount() != 7:
    #    gdaltest.post_reason( 'FeatureCount wrong' )
    #    return 'failure'

    # Close and reopen, since it seems the .mif driver does not allow reading
    # from a newly created (updatable) file.
    gdaltest.p_ds.Destroy()
    gdaltest.p_ds = ogr.OpenShared('index_p.mif', update=0)
    gdaltest.p_lyr = gdaltest.p_ds.GetLayerByName('index_p')

    return 'success'
def ogr_refcount_2():
    """Re-open an already-shared datasource and verify the shared instance
    is reused (refcount rises to 2 instead of a new datasource opening).

    Returns 'success' or 'fail' per the gdaltest convention.
    """
    ds_3 = ogr.OpenShared('data/idlink.dbf')

    if ogr.GetOpenDSCount() != 2:
        gdaltest.post_reason('Open DS count not 2 after third open.')
        # BUG FIX: was 'failed', which is not a recognized gdaltest result
        # string; the framework (and the rest of this file) expects 'fail'.
        return 'fail'

    # This test only works with the old bindings.
    try:
        if ds_3._o != gdaltest.ds_1._o:
            gdaltest.post_reason('We did not get the expected pointer.')
            return 'fail'
    except:
        # Newer bindings have no _o attribute; skip the pointer comparison.
        pass

    if ds_3.GetRefCount() != 2:
        gdaltest.post_reason('Refcount not 2 after reopened.')
        return 'fail'

    # Keep a reference so the shared datasource stays open for later tests.
    gdaltest.ds_3 = ds_3

    return 'success'
def ogr_index_9():
    """Drop both join_t indexes, verify the index files disappear, then
    re-create them one at a time and check the .idm XML lists each column."""
    gdaltest.s_ds.ExecuteSQL('DROP INDEX ON join_t USING value')
    gdaltest.s_ds.ExecuteSQL('DROP INDEX ON join_t USING skey')

    gdaltest.s_lyr.SetAttributeFilter('SKEY = 5')

    # Attribute queries must still work without any index.
    if not ogrtest.check_features_against_list(gdaltest.s_lyr, 'VALUE',
                                               ['Value 5']):
        return 'fail'

    gdaltest.s_ds.Release()

    # After dataset closing, check that the index files do not exist after
    # dropping the index
    for index_file in ['join_t.idm', 'join_t.ind']:
        try:
            os.stat(index_file)
        except:
            continue
        gdaltest.post_reason("%s shouldn't exist" % index_file)
        return 'fail'

    # Re-create an index
    gdaltest.s_ds = ogr.OpenShared('join_t.dbf', update=1)
    gdaltest.s_ds.ExecuteSQL('CREATE INDEX ON join_t USING value')
    gdaltest.s_ds.Release()

    for index_file in ['join_t.idm', 'join_t.ind']:
        try:
            os.stat(index_file)
        except:
            gdaltest.post_reason("%s should exist" % index_file)
            return 'fail'

    with open('join_t.idm', 'rt') as idm:
        xml = idm.read()

    if xml.find('VALUE') == -1:
        gdaltest.post_reason('VALUE column is not indexed (1)')
        print(xml)
        return 'fail'

    # Close the dataset and re-open
    gdaltest.s_ds = ogr.OpenShared('join_t.dbf', update=1)
    # At this point the .ind was opened in read-only. Now it
    # will be re-opened in read-write mode
    gdaltest.s_ds.ExecuteSQL('CREATE INDEX ON join_t USING skey')
    gdaltest.s_ds.Release()

    with open('join_t.idm', 'rt') as idm:
        xml = idm.read()

    if xml.find('VALUE') == -1:
        gdaltest.post_reason('VALUE column is not indexed (2)')
        print(xml)
        return 'fail'

    if xml.find('SKEY') == -1:
        gdaltest.post_reason('SKEY column is not indexed (2)')
        print(xml)
        return 'fail'

    return 'success'
def checkds(filepath):
    """Return True if *filepath* can be opened as an OGR datasource.

    BUG FIX: with the modern SWIG bindings (exceptions disabled),
    ogr.OpenShared returns None on failure rather than raising, so the
    original version reported True for unopenable paths. Check the result
    for None in addition to catching ogr.OGRError (old bindings).
    """
    try:
        ds = ogr.OpenShared(filepath)
    except ogr.OGRError:
        return False
    return ds is not None
# out_bytes = pickle.dumps(ret) # return out_bytes, "bytes" if __name__ == '__main__': es_host = "10.0.138.156" es_port = 9200 ltq = LandsatTilesQuery(es_host, es_port) start_time = "20010117" end_time = "20150101" # test1: temporal_polygon query of landsat tiles and corresponding data value import ogr wgs_grids = "/mnt/win/L45grids/wgs_grid_50.shp" ds = ogr.OpenShared(wgs_grids) layer = ds.GetLayerByIndex(0) feat = layer.GetFeature(476) wgs_geometry = feat.GetGeometryRef() res = ltq.query_by_geom(wgs_geometry, start_time, end_time) tile_list = ltq.tile_list_from_es_res(res) data = ltq.read_by_geom(wgs_geometry, tile_list[1]) # test2: temporal_point query of landsat tiles and corresponding data value x = 115.514253 y = 23.056427 res = ltq.query_by_point(x, y, start_time, end_time) tile_list = ltq.tile_list_from_es_res(res) print(tile_list[1])