def main(input_filename, output_filename): print "Processing: %s - Ctrl-Z to cancel" % input_filename merc = GlobalMercator() # open the shapefile ds = ogr.Open(input_filename) if ds is None: print "Open failed.\n" sys.exit(1) lyr = ds.GetLayerByIndex(0) lyr.ResetReading() feat_defn = lyr.GetLayerDefn() field_defns = [ feat_defn.GetFieldDefn(i) for i in range(feat_defn.GetFieldCount()) ] # look up the index of the field we're interested in for i, defn in enumerate(field_defns): if defn.GetName() == "POP10": pop_field = i # set up the output file # if it already exists, ask for confirmation to delete and remake it if os.path.isfile(output_filename): if not confirm(" Database %s exists, overwrite?" % output_filename, False): return False else: os.system("rm %s" % output_filename) # if file removal failed, the file may be locked: # ask for confirmation to unlock it if os.path.isfile(output_filename): if not confirm(" Attempt to unlock database %s?" % output_filename, False): return False else: unlock(output_filename) # if it's still there, there's a problem, bail if os.path.isfile(output_filename): print "Trouble - exiting." 
sys.exit() else: print "Success - continuing:" conn = sqlite3.connect(output_filename) c = conn.cursor() c.execute( "create table if not exists people (x real, y real, quadkey text)") n_features = len(lyr) for j, feat in enumerate(lyr): if j % 1000 == 0: conn.commit() if j % 10000 == 0: print " %s/%s (%0.2f%%)" % (j + 1, n_features, 100 * ((j + 1) / float(n_features))) else: sys.stdout.write(".") sys.stdout.flush() pop = feat.GetField(pop_field) geom = feat.GetGeometryRef() if geom is None: continue bbox = get_bbox(geom) if not bbox: continue ll, bb, rr, tt = bbox # generate a sample within the geometry for every person for i in range(pop): while True: samplepoint = make_ogr_point(uniform(ll, rr), uniform(bb, tt)) if geom.Intersects(samplepoint): break x, y = merc.LatLonToMeters(samplepoint.GetY(), samplepoint.GetX()) tx, ty = merc.MetersToTile(x, y, 21) quadkey = merc.QuadTree(tx, ty, 21) c.execute("insert into people values (?,?,?)", (x, y, quadkey)) conn.commit() print "Finished processing %s" % output_filename
def ogr_gft_ogr2ogr_spatial():
    """Test round-tripping a spatial table through ogr2ogr with the GFT driver.

    Creates a CSV with a WKT geometry column, pushes it to a Fusion Table via
    ogr2ogr, copies that table to a second one, then verifies both copies
    expose the expected geometry column, field count, geometry type and
    contents.  Returns 'skip', 'fail' or 'success' (gdaltest convention).
    """
    if ogrtest.gft_drv is None:
        return 'skip'
    if not ogrtest.gft_can_write:
        return 'skip'

    import test_cli_utilities
    if test_cli_utilities.get_ogr2ogr_path() is None:
        return 'skip'

    layer_name = 'geometry_table_%d' % ogrtest.gft_rand_val
    copied_layer_name = 'copied_geometry_table_%d' % ogrtest.gft_rand_val

    f = open('tmp/geometry_table.csv', 'wt')
    f.write('foo,bar,WKT\n')
    f.write('"baz",2,"POINT (0 1)"\n')
    f.write('"baz2",4,"POINT (2 3)"\n')
    f.write('"baz\'3",6,"POINT (4 5)"\n')
    f.close()

    f = open('tmp/geometry_table.csvt', 'wt')
    f.write('String,Integer,String\n')
    f.close()

    # Create a first table
    gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() +
                         ' -f GFT "GFT:auth=' + ogrtest.gft_auth_key +
                         '" tmp/geometry_table.csv -nln ' + layer_name +
                         ' -select foo,bar -overwrite')

    # Test round-tripping
    gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() +
                         ' -f GFT "GFT:auth=' + ogrtest.gft_auth_key +
                         '" "GFT:auth=' + ogrtest.gft_auth_key + '" ' +
                         layer_name + ' -nln ' + copied_layer_name +
                         ' -overwrite')

    os.unlink('tmp/geometry_table.csv')
    os.unlink('tmp/geometry_table.csvt')

    ds = ogr.Open('GFT:auth=%s' % ogrtest.gft_auth_key, update = 1)

    def _drop_tables():
        # De-duplicated cleanup: drop both remote tables so a failed run
        # leaves no litter behind.  (The original repeated these two
        # ExecuteSQL calls in every failure branch.)
        ds.ExecuteSQL('DELLAYER:' + layer_name)
        ds.ExecuteSQL('DELLAYER:' + copied_layer_name)

    for name in [layer_name, copied_layer_name]:
        lyr = ds.GetLayerByName(name)

        if lyr.GetGeometryColumn() != 'geometry':
            gdaltest.post_reason('layer %s: did not get expected geometry column' % name)
            _drop_tables()
            return 'fail'

        if lyr.GetLayerDefn().GetFieldCount() != 3:
            gdaltest.post_reason('layer %s: did not get expected field count' % name)
            _drop_tables()
            return 'fail'

        if lyr.GetGeomType() != ogr.wkbUnknown:
            gdaltest.post_reason('layer %s: did not get expected layer geometry type' % name)
            _drop_tables()
            return 'fail'

        if lyr.GetFeatureCount() != 3:
            gdaltest.post_reason('layer %s: did not get expected feature count' % name)
            _drop_tables()
            return 'fail'

        feat = lyr.GetNextFeature()

        if feat.GetGeometryRef().ExportToWkt() != "POINT (0 1)":
            gdaltest.post_reason('layer %s: did not get expected geometry' % name)
            _drop_tables()
            return 'fail'

        if feat.GetFieldAsInteger('bar') != 2:
            gdaltest.post_reason('layer %s: did not get expected field value' % name)
            _drop_tables()
            return 'fail'

    _drop_tables()
    ds = None
    return 'success'
def ogr_htf_1():
    """Basic read test for the Hydrographic Transfer Format (HTF) driver.

    Opens data/test.htf and checks the 'polygon' layer (two geometries and
    the IDENTIFIER attribute) and the 'sounding' layer (feature count, first
    geometry and the 'other3' attribute).  Returns 'fail' or 'success'
    (gdaltest convention).
    """
    ds = ogr.Open('data/test.htf')
    if ds is None:
        gdaltest.post_reason('cannot open dataset')
        return 'fail'

    lyr = ds.GetLayer(0)
    if lyr.GetName() != 'polygon':
        gdaltest.post_reason('layer 0 is not polygon')
        return 'fail'

    lyr = ds.GetLayerByName('polygon')
    if lyr is None:
        gdaltest.post_reason('cannot find layer polygon')
        return 'fail'

    feat = lyr.GetNextFeature()
    geom = feat.GetGeometryRef()
    if ogrtest.check_feature_geometry(feat, 'POLYGON ((320830 7678810,350840 7658030,308130 7595560,278310 7616820,320830 7678810))', max_error = 0.0000001) != 0:
        gdaltest.post_reason('did not get expected first geom')
        print(geom.ExportToWkt())
        return 'fail'

    feat = lyr.GetNextFeature()
    geom = feat.GetGeometryRef()
    if ogrtest.check_feature_geometry(feat, 'POLYGON ((320830 7678810,350840 7658030,308130 7595560,278310 7616820,320830 7678810),(0 0,0 1,1 1,0 0))', max_error = 0.0000001) != 0:
        # BUG FIX: this branch previously reported 'first geom' (copy-paste),
        # making failures here indistinguishable from the check above.
        gdaltest.post_reason('did not get expected second geom')
        print(geom.ExportToWkt())
        return 'fail'

    if feat.GetField('IDENTIFIER') != 2:
        gdaltest.post_reason('did not get expected identifier')
        print(feat.GetField('IDENTIFIER'))
        return 'fail'

    lyr = ds.GetLayerByName('sounding')
    if lyr is None:
        gdaltest.post_reason('cannot find layer sounding')
        return 'fail'

    if lyr.GetFeatureCount() != 2:
        gdaltest.post_reason('did not get expected feature count')
        return 'fail'

    feat = lyr.GetNextFeature()
    geom = feat.GetGeometryRef()
    if ogrtest.check_feature_geometry(feat, 'POINT (278670 7616330)', max_error = 0.0000001) != 0:
        gdaltest.post_reason('did not get expected first geom')
        print(geom.ExportToWkt())
        return 'fail'

    if feat.GetField('other3') != 'other3':
        gdaltest.post_reason('did not get expected other3 val')
        print(feat.GetField('other3'))
        return 'fail'

    return 'success'
def __init__(self, inputdata):
    """The constructor for the GDALData class.

    Opens *inputdata* with GDAL (rasters) or OGR (vectors) based on the file
    extension and fills in driver, resolution, spatial reference, EPSG code
    and a WGS84 bounding box.  On any georeferencing problem, the spatial
    attributes fall back to 'undefined' / a zero bounding box.

    Arguments:
        inputdata (string): The path of the file to be opened.
    """
    supportedRasterData = ['.tif', '.TIF', '.img']
    supportedVectorData = ['.shp', '.SHP']
    self.fileextension = os.path.splitext(inputdata)[1]

    if self.fileextension in supportedRasterData:
        log.debug('Opening raster file')
        try:
            self.data = gdal.Open(inputdata)
        except Exception:  # narrowed from a bare except
            log.error('Failed to open raster file')
            raise
        # BUG FIX: gdal.Open signals failure by returning None (it does not
        # raise unless GDAL exceptions are enabled), so the try above could
        # never catch an unreadable file; fail fast with a clear error
        # instead of an AttributeError on the next line.
        if self.data is None:
            log.error('Failed to open raster file')
            raise IOError('GDAL could not open %s' % inputdata)
        self.driver = self.data.GetDriver().LongName
        self.geotransform = self.data.GetGeoTransform()
        # pixel width; assumes square, north-up pixels -- TODO confirm
        self.resolution = self.geotransform[1]
        # In case no srs is defined fill all values with 'undefined'
        try:
            self.spatialRef = osr.SpatialReference(
                wkt=self.data.GetProjection())
            self.EPSG = functions.EPSGfromWKT(self.spatialRef)
            self.BBOX = self.getBoundingBox()
        except Exception:
            log.debug('No EPSG or BBOX could be extracted')
            self.spatialRef = 'undefined'
            self.EPSG = 'undefined'
            self.BBOX = ['0', '0', '0', '0']

    if self.fileextension in supportedVectorData:
        log.debug('Opening vector file')
        try:
            self.data = ogr.Open(inputdata)
        except Exception:  # narrowed from a bare except
            log.error('Failed to open vector file')
            raise
        # BUG FIX: ogr.Open also returns None on failure -- see above.
        if self.data is None:
            log.error('Failed to open vector file')
            raise IOError('OGR could not open %s' % inputdata)
        # NOTE(review): driver name is hard-coded; only .shp/.SHP reach this
        # branch so it matches today, but self.data.GetDriver().GetName()
        # would be more robust if more vector formats are ever added.
        self.driver = 'ESRI Shapefile'
        self.layer = self.data.GetLayer()
        self.resolution = None
        # In case no srs is defined fill all values with 'undefined'
        try:
            self.spatialRef = self.layer.GetSpatialRef()
            self.EPSG = functions.EPSGfromWKT(self.spatialRef)
            # Get bounding box from vector: OGR GetExtent() returns
            # (minX, maxX, minY, maxY); reorder to [minX, minY, maxX, maxY]
            self.BBOX = [0, 0, 0, 0]
            self.BBOX[0], self.BBOX[2], self.BBOX[1], self.BBOX[
                3] = self.layer.GetExtent()
            if self.EPSG != '4326':
                self.BBOX = functions.BBOXtoWGS84(self.BBOX, self.EPSG)
            # Convert to string
            self.BBOX[0] = str(self.BBOX[0])
            self.BBOX[1] = str(self.BBOX[1])
            self.BBOX[2] = str(self.BBOX[2])
            self.BBOX[3] = str(self.BBOX[3])
        except Exception:
            log.debug('No EPSG or BBOX could be extracted')
            self.EPSG = 'undefined'
            self.BBOX = ['0', '0', '0', '0']
if tmpdir is not ".": tmp_name = tempfile.mkstemp(dir=tmpdir, prefix="hires_") outfilehires = tmp_name[1] else: tmp_name = tempfile.mkstemp(dir=tmpdir, prefix="hires_") outfilehires = tmp_name[1] print "gdal_density starting with options: " + str(argv) print "tmpfile hires: " + tmp_name[1] if not os.path.exists(poly_ds): return # Open shape to check if it is not empty ds = ogr.Open(poly_ds) lyr = ds.GetLayer(0) #lyr = 1 hirescellsize = cellsize / rsamp pcroutfilehires = outfilehires + ".map" pcroutfile = outfile + ".map" width = (extent[2] - extent[0]) / cellsize height = (extent[3] - extent[1]) / cellsize width = int(math.ceil(width)) height = int(math.ceil(height)) if lyr.GetFeatureCount() > 0: if burninmetres: exestr = gdal_rasterize + " -tr " + str(hirescellsize) + " " + str(
# Export a shapefile's attribute table, plus each feature's geometry encoded
# as KML, to a CSV file (one row per feature, extra 'kmlgeometry' column).
import ogr, csv, sys

shpfile = r'C:\Users\UserUNIR1\Desktop\FINAL_WIT\ImagenesSentinel2\imagenes\20180802T105621_N0206_ndvi_final.shp'  #sys.argv[1]
csvfile = r'C:\Users\UserUNIR1\Desktop\FINAL_WIT\ImagenesSentinel2\imagenes\prueba.csv'  #sys.argv[2]

#Open files
csvfile = open(csvfile, 'wb')  # 'wb': the Python 2 csv module wants binary mode
ds = ogr.Open(shpfile)
lyr = ds.GetLayer()

#Get field names
dfn = lyr.GetLayerDefn()
nfields = dfn.GetFieldCount()
fields = []
for i in range(nfields):
    fields.append(dfn.GetFieldDefn(i).GetName())
fields.append('kmlgeometry')

csvwriter = csv.DictWriter(csvfile, fields)
try:
    csvwriter.writeheader()  #python 2.7+
except AttributeError:
    # BUG FIX: narrowed from a bare except.  Only the missing-writeheader
    # case (DictWriter before Python 2.7) should fall back to a manual
    # header; a bare except also hid real I/O errors.
    csvfile.write(','.join(fields) + '\n')

# Write attributes and kml out to csv
for feat in lyr:
    attributes = feat.items()
    geom = feat.GetGeometryRef()
    # Robustness: a feature without geometry would crash ExportToKML()
    if geom is None:
        continue
    attributes['kmlgeometry'] = geom.ExportToKML()
    csvwriter.writerow(attributes)

#clean up
del csvwriter, lyr, ds
csvfile.close()
def ogr_osm_1(filename='data/test.pbf'):
    """Read back data/test.pbf (or an .osm/converted variant) and verify it.

    Checks the points, lines, multipolygons, multilinestrings and
    other_relations layers against known contents, then exercises the
    OSM-driver-specific GetBytesRead() SQL layer.  The 'tmp/ogr_osm_3'
    branches cover a converted dataset whose multi-part geometries have been
    collapsed to single-part equivalents.  Returns 'skip', 'fail' or
    'success' (gdaltest convention).
    """
    try:
        ogrtest.osm_drv = ogr.GetDriverByName('OSM')
    except:
        ogrtest.osm_drv = None
    if ogrtest.osm_drv is None:
        return 'skip'

    ds = ogr.Open(filename)
    if ds is None:
        if filename == 'data/test.osm':
            ogrtest.osm_drv_parse_osm = False
            # XML .osm input needs the Expat parser; a build without it is a
            # configuration issue, not a test failure.
            if gdal.GetLastErrorMsg().find(
                    'OSM XML detected, but Expat parser not available') == 0:
                return 'skip'
        gdaltest.post_reason('fail')
        return 'fail'
    else:
        if filename == 'data/test.osm':
            ogrtest.osm_drv_parse_osm = True

    # Test points
    lyr = ds.GetLayer('points')
    if lyr.GetGeomType() != ogr.wkbPoint:
        gdaltest.post_reason('fail')
        return 'fail'

    sr = lyr.GetSpatialRef()
    # Accept either the GDAL or the ESRI spelling of the WGS84 GEOGCS
    if sr.ExportToWkt().find('GEOGCS["WGS 84",DATUM["WGS_1984",') != 0 and \
       sr.ExportToWkt().find('GEOGCS["GCS_WGS_1984",DATUM["WGS_1984"') != 0:
        gdaltest.post_reason('fail')
        print(sr.ExportToWkt())
        return 'fail'

    if filename == 'data/test.osm':
        if lyr.GetExtent() != (2.0, 3.0, 49.0, 50.0):
            gdaltest.post_reason('fail')
            print(lyr.GetExtent())
            return 'fail'

    feat = lyr.GetNextFeature()
    if feat.GetFieldAsString('osm_id') != '3':
        gdaltest.post_reason('fail')
        feat.DumpReadable()
        return 'fail'

    if ogrtest.check_feature_geometry(
            feat, ogr.CreateGeometryFromWkt('POINT (3.0 49.5)')) != 0:
        gdaltest.post_reason('fail')
        feat.DumpReadable()
        return 'fail'

    # the points layer must be exhausted after its single feature
    feat = lyr.GetNextFeature()
    if feat is not None:
        gdaltest.post_reason('fail')
        feat.DumpReadable()
        return 'fail'

    # Test lines
    lyr = ds.GetLayer('lines')
    if lyr.GetGeomType() != ogr.wkbLineString:
        gdaltest.post_reason('fail')
        return 'fail'

    feat = lyr.GetNextFeature()
    if feat.GetFieldAsString('osm_id') != '1':
        gdaltest.post_reason('fail')
        feat.DumpReadable()
        return 'fail'

    if ogrtest.check_feature_geometry(
            feat, ogr.CreateGeometryFromWkt('LINESTRING (2 49,3 50)')) != 0:
        gdaltest.post_reason('fail')
        feat.DumpReadable()
        return 'fail'

    feat = lyr.GetNextFeature()
    if feat.GetFieldAsString('osm_id') != '6':
        gdaltest.post_reason('fail')
        feat.DumpReadable()
        return 'fail'

    if ogrtest.check_feature_geometry(
            feat,
            ogr.CreateGeometryFromWkt(
                'LINESTRING (2 49,3 49,3 50,2 50,2 49)')) != 0:
        gdaltest.post_reason('fail')
        feat.DumpReadable()
        return 'fail'

    feat = lyr.GetNextFeature()
    if feat is not None:
        gdaltest.post_reason('fail')
        feat.DumpReadable()
        return 'fail'

    # Test multipolygons
    lyr = ds.GetLayer('multipolygons')
    if filename == 'tmp/ogr_osm_3':
        if lyr.GetGeomType() != ogr.wkbPolygon:
            gdaltest.post_reason('fail')
            return 'fail'
    else:
        if lyr.GetGeomType() != ogr.wkbMultiPolygon:
            gdaltest.post_reason('fail')
            return 'fail'

    feat = lyr.GetNextFeature()
    if feat.GetFieldAsString('osm_id') != '1':
        gdaltest.post_reason('fail')
        feat.DumpReadable()
        return 'fail'

    if feat.GetFieldAsString('natural') != 'forest':
        gdaltest.post_reason('fail')
        feat.DumpReadable()
        return 'fail'

    if filename == 'tmp/ogr_osm_3':
        if ogrtest.check_feature_geometry(
                feat,
                ogr.CreateGeometryFromWkt(
                    'POLYGON ((2 49,2 50,3 50,3 49,2 49),(2.1 49.1,2.2 49.1,2.2 49.2,2.1 49.2,2.1 49.1))'
                )) != 0:
            gdaltest.post_reason('fail')
            feat.DumpReadable()
            return 'fail'
    else:
        if ogrtest.check_feature_geometry(
                feat,
                ogr.CreateGeometryFromWkt(
                    'MULTIPOLYGON (((2 49,3 49,3 50,2 50,2 49),(2.1 49.1,2.2 49.1,2.2 49.2,2.1 49.2,2.1 49.1)))'
                )) != 0:
            gdaltest.post_reason('fail')
            feat.DumpReadable()
            return 'fail'

    feat = lyr.GetNextFeature()
    if feat.GetFieldAsString('osm_id') != '5':
        gdaltest.post_reason('fail')
        feat.DumpReadable()
        return 'fail'

    if feat.GetFieldAsString('natural') != 'wood':
        gdaltest.post_reason('fail')
        feat.DumpReadable()
        return 'fail'

    # third feature is a closed standalone way, exposed via osm_way_id
    feat = lyr.GetNextFeature()
    if feat.GetFieldAsString('osm_way_id') != '8':
        gdaltest.post_reason('fail')
        feat.DumpReadable()
        return 'fail'

    if feat.GetFieldAsString('name') != 'standalone_polygon':
        gdaltest.post_reason('fail')
        feat.DumpReadable()
        return 'fail'

    feat = lyr.GetNextFeature()
    if feat is not None:
        gdaltest.post_reason('fail')
        feat.DumpReadable()
        return 'fail'

    # Test multilinestrings
    lyr = ds.GetLayer('multilinestrings')
    if filename == 'tmp/ogr_osm_3':
        if lyr.GetGeomType() != ogr.wkbLineString:
            gdaltest.post_reason('fail')
            return 'fail'
    else:
        if lyr.GetGeomType() != ogr.wkbMultiLineString:
            gdaltest.post_reason('fail')
            return 'fail'

    feat = lyr.GetNextFeature()
    if feat.GetFieldAsString('osm_id') != '3':
        gdaltest.post_reason('fail')
        feat.DumpReadable()
        return 'fail'

    if filename == 'tmp/ogr_osm_3':
        if ogrtest.check_feature_geometry(
                feat,
                ogr.CreateGeometryFromWkt('LINESTRING (2 49,3 50)')) != 0:
            gdaltest.post_reason('fail')
            feat.DumpReadable()
            return 'fail'
    else:
        if ogrtest.check_feature_geometry(
                feat,
                ogr.CreateGeometryFromWkt(
                    'MULTILINESTRING ((2 49,3 50))')) != 0:
            gdaltest.post_reason('fail')
            feat.DumpReadable()
            return 'fail'

    feat = lyr.GetNextFeature()
    if feat is not None:
        gdaltest.post_reason('fail')
        feat.DumpReadable()
        return 'fail'

    # Test other_relations (not present in the converted dataset)
    lyr = ds.GetLayer('other_relations')
    if filename == 'tmp/ogr_osm_3':
        if lyr is not None:
            gdaltest.post_reason('fail')
            return 'fail'
    else:
        if lyr.GetGeomType() != ogr.wkbGeometryCollection:
            gdaltest.post_reason('fail')
            return 'fail'

        feat = lyr.GetNextFeature()
        if feat.GetFieldAsString('osm_id') != '4':
            gdaltest.post_reason('fail')
            feat.DumpReadable()
            return 'fail'

        if ogrtest.check_feature_geometry(
                feat,
                ogr.CreateGeometryFromWkt(
                    'GEOMETRYCOLLECTION (POINT (2 49),LINESTRING (2 49,3 50))')
                ) != 0:
            gdaltest.post_reason('fail')
            feat.DumpReadable()
            return 'fail'

        feat = lyr.GetNextFeature()
        if feat is not None:
            gdaltest.post_reason('fail')
            feat.DumpReadable()
            return 'fail'

    # Exercise the driver-specific GetBytesRead() pseudo-SQL layer:
    # it must return exactly one feature, be resettable, and expose a defn.
    if ds.GetDriver().GetName() == 'OSM':
        sql_lyr = ds.ExecuteSQL("GetBytesRead()")
        if sql_lyr is None:
            gdaltest.post_reason('fail')
            return 'fail'
        feat = sql_lyr.GetNextFeature()
        if feat is None:
            gdaltest.post_reason('fail')
            return 'fail'
        feat = sql_lyr.GetNextFeature()
        if feat is not None:
            gdaltest.post_reason('fail')
            return 'fail'
        sql_lyr.ResetReading()
        feat = sql_lyr.GetNextFeature()
        if feat is None:
            gdaltest.post_reason('fail')
            return 'fail'
        sql_lyr.GetLayerDefn()
        sql_lyr.TestCapability("foo")
        ds.ReleaseResultSet(sql_lyr)

    ds = None

    return 'success'
def ogr_pgeo_1(tested_driver = 'PGeo', other_driver = 'MDB'):
    """Open the PGeoTest sample .mdb and check basic layer/feature contents.

    Temporarily deregisters *other_driver* so that *tested_driver* handles
    the .mdb file, downloads and extracts the test data if needed, then
    verifies layer count, the first feature's attributes and geometry, and
    the feature count.  Returns 'skip', 'fail' or 'success' (gdaltest
    convention).
    """
    ogrtest.pgeo_ds = None

    try:
        ogrtest.other_driver = ogr.GetDriverByName(other_driver)
    except Exception:  # narrowed from a bare except
        ogrtest.other_driver = None
    if ogrtest.other_driver is not None:
        print('Unregistering %s driver' % ogrtest.other_driver.GetName())
        ogrtest.other_driver.Deregister()
        if other_driver == 'PGeo':
            # Re-register Geomedia at the end, *after* MDB: the
            # Deregister()/Register() pair moves the driver to the back of
            # the driver list so MDB gets first crack at .mdb files.
            geomedia_driver = ogr.GetDriverByName('Geomedia')
            if geomedia_driver is not None:
                geomedia_driver.Deregister()
                geomedia_driver.Register()

    try:
        drv = ogr.GetDriverByName(tested_driver)
    except Exception:  # narrowed from a bare except
        drv = None
    if drv is None:
        return 'skip'

    if not gdaltest.download_file('http://download.osgeo.org/gdal/data/pgeo/PGeoTest.zip', 'PGeoTest.zip'):
        return 'skip'

    try:
        os.stat('tmp/cache/Autodesk Test.mdb')
    except OSError:
        # Archive not yet extracted: unzip and re-check; any problem on this
        # best-effort path is a skip, not a failure.
        try:
            gdaltest.unzip('tmp/cache', 'tmp/cache/PGeoTest.zip')
            try:
                os.stat('tmp/cache/Autodesk Test.mdb')
            except OSError:
                return 'skip'
        except Exception:
            return 'skip'

    ogrtest.pgeo_ds = ogr.Open('tmp/cache/Autodesk Test.mdb')
    if ogrtest.pgeo_ds is None:
        gdaltest.post_reason('could not open DB. Driver probably misconfigured')
        return 'skip'

    if ogrtest.pgeo_ds.GetLayerCount() != 3:
        gdaltest.post_reason('did not get expected layer count')
        return 'fail'

    lyr = ogrtest.pgeo_ds.GetLayer(0)
    feat = lyr.GetNextFeature()
    if feat.GetField('OBJECTID') != 1 or \
       feat.GetField('IDNUM') != 9424 or \
       feat.GetField('OWNER') != 'City':
        gdaltest.post_reason('did not get expected attributes')
        feat.DumpReadable()
        return 'fail'

    if ogrtest.check_feature_geometry(feat, 'LINESTRING (1910941.703951031 445833.57942859828 0,1910947.927691862 445786.43811868131 0)', max_error = 0.0000001) != 0:
        gdaltest.post_reason('did not get expected geometry')
        feat.DumpReadable()
        return 'fail'

    feat_count = lyr.GetFeatureCount()
    if feat_count != 9418:
        gdaltest.post_reason('did not get expected feature count')
        print(feat_count)
        return 'fail'

    return 'success'
def createDetectionData(shapefile, railwayshapefile, imglist, outputfolder, size, test=False):
    """Build training/validation (or test) crops for railway-erosion detection.

    For each image in *imglist*, collects crop-center reference points along
    the railway line and (in training mode) rasterizes the erosion polygons
    that lie close to the railway into a per-image ground-truth mask, then
    delegates crop generation to createData().

    Arguments (assumed from usage -- confirm with callers):
        shapefile:        erosion-polygon shapefile (ignored when test=True)
        railwayshapefile: railway-line shapefile
        imglist:          iterable of raster image paths
        outputfolder:     destination directory for crops and masks
        size:             crop size forwarded to createData()
        test (bool):      when True, only reference points and masks for
                          prediction are produced (no train/val split)
    """
    #print "\n\n ------------- Creating points of interest ------------------- \n\n"
    imgs = imglist
    rlwds = ogr.Open(railwayshapefile)
    rlwlayer = rlwds.GetLayer(0)
    rlwref = rlwlayer.GetSpatialRef()

    ds = None
    layer = None
    shpref = None
    closetorlw = None
    imgs_files = {}

    if not test:
        ds = ogr.Open(shapefile)
        layer = ds.GetLayer(0)
        shpref = layer.GetSpatialRef()
        rlwtransform = osr.CoordinateTransformation(rlwref, shpref)

        # Flag the polygons that are closer than 5.1 m to the railway shapefile
        closetorlw = [False] * len(layer)
        for fid, f in enumerate(layer):
            geometry = f.GetGeometryRef()
            d = float('inf')
            rlwlayer.ResetReading()
            for feature in rlwlayer:
                railway = feature.GetGeometryRef()
                railway.Transform(rlwtransform)
                d = min(d, geometry.Distance(railway))
                if geometry.Distance(railway) <= 5.1:
                    closetorlw[fid] = True
                    break

    for num, file in enumerate(imgs):
        points_img = []
        references = set()
        imgs_files[num] = file

        img = gdal.Open(file)
        geot = img.GetGeoTransform()
        xAxis = img.RasterXSize  # Max columns
        yAxis = img.RasterYSize  # Max rows

        # For coordinate transformations on the shapefiles
        imgref = osr.SpatialReference(wkt=img.GetProjectionRef())
        if not test:
            transform = osr.CoordinateTransformation(shpref, imgref)
        rlwtransform = osr.CoordinateTransformation(rlwref, imgref)

        # Image extent // used to check whether polygons lie over this image
        ext = utils.GetExtentGeometry(geot, xAxis, yAxis)
        ext.FlattenTo2D()

        # Ground truth // matrix and files
        groundTruth = np.zeros((yAxis, xAxis), dtype='bool8')
        gtname = os.path.splitext(os.path.split(file)[-1])
        if not os.path.isdir(os.path.join(outputfolder, "GroundTruths")):
            os.mkdir(os.path.join(outputfolder, "GroundTruths"))
        gtfile = os.path.join(outputfolder, "GroundTruths",
                              "mask_" + gtname[0] + ".png")

        # Walk the railway-line features and add the center points that will
        # seed the crops
        # NOTE(review): `Intersect` is the deprecated OGR spelling of
        # `Intersects` -- confirm the GDAL version in use still provides it.
        rlwlayer.ResetReading()
        for feature in rlwlayer:
            railway = feature.GetGeometryRef()
            railway.Transform(rlwtransform)
            if ext.Intersect(railway):
                intersection = ext.Intersection(railway)
                addPoints(references, intersection, geot)

        # Add the erosion polygons to the groundTruth (fine-tuning/training only)
        if not test:
            layer.ResetReading()
            for fid, feature in enumerate(layer):
                #print(feature.items())
                if closetorlw[fid]:
                    geometry = feature.GetGeometryRef()
                    geometry.Transform(transform)
                    if ext.Intersect(geometry):
                        intersection = geometry.Intersection(ext)
                        FillImage(groundTruth, intersection, geot)

        refs = list(references)
        train_ref = []
        val_ref = []
        if not test:
            val_ref, train_ref = create_sets(np.asarray(refs))
            createData(file, groundTruth,
                       os.path.join(outputfolder, 'Validation'), val_ref,
                       geot, size)
            createData(file, groundTruth,
                       os.path.join(outputfolder, 'Train'), train_ref,
                       geot, size)
        else:
            # Create the crops for both the image and the groundTruth, and
            # save the ground-truth file
            createData(file, groundTruth, outputfolder, references, geot, size)
            scipy.misc.imsave(gtfile, groundTruth * 255)
            # For test runs, save the auxiliary arrays needed to reassemble
            # the full-image prediction
            # NOTE(review): these two files are rewritten on every loop
            # iteration, so only the last image's references survive --
            # confirm this is intentional.
            np.save(os.path.join(outputfolder, "referencepoints.npy"),
                    np.asarray(refs))
            np.save(os.path.join(outputfolder, "referenceimgspaths.npy"),
                    np.asarray([file]))
def gen_zonalstats(zones_json, raster):
    """Yield each GeoJSON feature with zonal statistics added to its properties.

    For every Polygon/MultiPolygon feature in *zones_json*, rasterizes the
    zone layer onto an in-memory mask aligned with *raster*'s grid, masks
    band 1 of the raster over the feature's bounding window, and attaches
    count/sum/mean/median/min/max/stddev to the feature's 'properties'
    before yielding the feature.

    Arguments:
        zones_json: GeoJSON FeatureCollection as a dict or a JSON string
        raster:     raster reference accepted by get_dataset()

    Raises:
        GaiaException: for geometries other than Polygon/MultiPolygon.
    """
    # Open data
    raster = get_dataset(raster)
    shp = None
    if type(zones_json) is str:
        shp = ogr.Open(zones_json)
        zones_json = json.loads(zones_json)
    else:
        shp = ogr.Open(json.dumps(zones_json))
    lyr = shp.GetLayer()

    # Get raster georeference info
    transform = raster.GetGeoTransform()
    xOrigin = transform[0]
    yOrigin = transform[3]
    pixelWidth = transform[1]
    pixelHeight = transform[5]

    # Reproject vector geometry to same projection as raster
    sourceSR = lyr.GetSpatialRef()
    targetSR = osr.SpatialReference()
    targetSR.ImportFromWkt(raster.GetProjectionRef())
    coordTrans = osr.CoordinateTransformation(sourceSR, targetSR)

    for feature in zones_json['features']:
        geom = ogr.CreateGeometryFromJson(json.dumps(feature['geometry']))
        geom.Transform(coordTrans)

        # Collect all ring vertices to derive the feature's bounding box
        if (geom.GetGeometryName() == 'MULTIPOLYGON'):
            count = 0
            pointsX = []
            pointsY = []
            for polygon in geom:
                geomInner = geom.GetGeometryRef(count)
                ring = geomInner.GetGeometryRef(0)
                numpoints = ring.GetPointCount()
                for p in range(numpoints):
                    lon, lat, z = ring.GetPoint(p)
                    pointsX.append(lon)
                    pointsY.append(lat)
                count += 1
        elif (geom.GetGeometryName() == 'POLYGON'):
            ring = geom.GetGeometryRef(0)
            numpoints = ring.GetPointCount()
            pointsX = []
            pointsY = []
            for p in range(numpoints):
                lon, lat, z = ring.GetPoint(p)
                pointsX.append(lon)
                pointsY.append(lat)
        else:
            raise GaiaException(
                "ERROR: Geometry needs to be either Polygon or Multipolygon")

        xmin = min(pointsX)
        xmax = max(pointsX)
        ymin = min(pointsY)
        ymax = max(pointsY)

        # Specify offset and rows and columns to read
        # NOTE(review): the y computations reuse pixelWidth; this is only
        # correct for square pixels (|pixelHeight| == pixelWidth) -- confirm.
        xoff = int((xmin - xOrigin) / pixelWidth)
        yoff = int((yOrigin - ymax) / pixelWidth)
        xcount = int((xmax - xmin) / pixelWidth) + 1
        ycount = int((ymax - ymin) / pixelWidth) + 1

        # Create memory target raster
        target_ds = gdal.GetDriverByName('MEM').Create('', xcount, ycount, 1,
                                                       gdal.GDT_Byte)
        target_ds.SetGeoTransform((
            xmin,
            pixelWidth,
            0,
            ymax,
            0,
            pixelHeight,
        ))

        # Create for target raster the same projection as for the value raster
        raster_srs = osr.SpatialReference()
        raster_srs.ImportFromWkt(raster.GetProjectionRef())
        target_ds.SetProjection(raster_srs.ExportToWkt())

        # Rasterize zone polygon to raster
        # NOTE(review): this burns the whole layer `lyr`, not just the
        # current feature; features are effectively isolated only by the
        # bounding window -- confirm overlapping zones are not a concern.
        gdal.RasterizeLayer(target_ds, [1], lyr, burn_values=[1])

        # Read raster as arrays
        # BUG FIX: numpy.float was removed in NumPy 1.24; the builtin float
        # is the type it aliased.
        banddataraster = raster.GetRasterBand(1)
        dataraster = banddataraster.ReadAsArray(xoff, yoff, xcount,
                                                ycount).astype(float)
        bandmask = target_ds.GetRasterBand(1)
        datamask = bandmask.ReadAsArray(0, 0, xcount,
                                        ycount).astype(float)

        # Mask zone of raster
        zoneraster = numpy.ma.masked_array(dataraster,
                                           numpy.logical_not(datamask))

        properties = feature['properties']
        properties['count'] = zoneraster.count()
        properties['sum'] = zoneraster.sum()
        properties['mean'] = zoneraster.mean()
        properties['median'] = numpy.median(zoneraster)
        properties['min'] = zoneraster.min()
        properties['max'] = zoneraster.max()
        properties['stddev'] = zoneraster.std()
        yield (feature)
def zonal_stats(feat, input_zone_polygon, raster_paths):
    """Compute zonal statistics of one vector feature over a raster.

    Rasterizes the zone layer onto an in-memory mask aligned with the raster
    grid, masks band 1 of the raster over the feature's bounding window, and
    returns per-zone statistics.

    Arguments:
        feat:               OGR feature whose geometry defines the zone
        input_zone_polygon: path to the zone polygon datasource
        raster_paths:       path to the raster

    Returns:
        Tuple of single-element lists:
        (zonal_min, zonal_max, zonal_average, zonal_var, zonal_std).

    Exits the process if the geometry is not Polygon/MultiPolygon.
    """
    zonal_min = []
    zonal_max = []
    zonal_average = []
    zonal_var = []
    zonal_std = []

    raster = gdal.Open(raster_paths)
    shp = ogr.Open(input_zone_polygon)
    lyr = shp.GetLayer()

    transform = raster.GetGeoTransform()
    xOrigin = transform[0]
    yOrigin = transform[3]
    pixelWidth = transform[1]
    pixelHeight = transform[5]

    # Reproject the feature geometry into the raster's SRS
    sourceSR = lyr.GetSpatialRef()
    targetSR = osr.SpatialReference()
    targetSR.ImportFromWkt(raster.GetProjectionRef())
    coordTrans = osr.CoordinateTransformation(sourceSR, targetSR)
    # feat = lyr.GetNextFeature()
    geom = feat.GetGeometryRef()
    geom.Transform(coordTrans)

    # Collect all ring vertices to derive the feature's bounding box
    geom = feat.GetGeometryRef()
    if (geom.GetGeometryName() == "MULTIPOLYGON"):
        count = 0
        pointsX = []
        pointsY = []
        for polygon in geom:
            geomInner = geom.GetGeometryRef(count)
            ring = geomInner.GetGeometryRef(0)
            numpoints = ring.GetPointCount()
            for p in range(numpoints):
                lon, lat, z = ring.GetPoint(p)
                pointsX.append(lon)
                pointsY.append(lat)
            count += 1
    elif (geom.GetGeometryName() == 'POLYGON'):
        ring = geom.GetGeometryRef(0)
        numpoints = ring.GetPointCount()
        pointsX = []
        pointsY = []
        for p in range(numpoints):
            lon, lat, z = ring.GetPoint(p)
            pointsX.append(lon)
            pointsY.append(lat)
    else:
        sys.exit("ERROR: Geometry needs to be either Polygon or Multipolygon")

    xmin = min(pointsX)
    xmax = max(pointsX)
    ymin = min(pointsY)
    ymax = max(pointsY)

    # Specify offset and rows and columns to read
    # NOTE(review): the y computations reuse pixelWidth; correct only for
    # square pixels (|pixelHeight| == pixelWidth) -- confirm.
    xoff = int((xmin - xOrigin) / pixelWidth)
    yoff = int((yOrigin - ymax) / pixelWidth)
    xcount = int((xmax - xmin) / pixelWidth) + 1
    ycount = int((ymax - ymin) / pixelWidth) + 1

    # Create memory target raster
    target_ds = gdal.GetDriverByName("MEM").Create('', xcount, ycount, 1,
                                                   gdal.GDT_Byte)
    target_ds.SetGeoTransform((
        xmin,
        pixelWidth,
        0,
        ymax,
        0,
        pixelHeight,
    ))

    # Create for target raster the same projection as for the value raster
    raster_srs = osr.SpatialReference()
    raster_srs.ImportFromWkt(raster.GetProjectionRef())
    target_ds.SetProjection(raster_srs.ExportToWkt())

    # Rasterize zone polygon to raster
    # NOTE(review): this burns the whole layer `lyr`, not just `feat`;
    # isolation relies on the bounding window -- confirm overlapping zones
    # are not a concern.
    gdal.RasterizeLayer(target_ds, [1], lyr, burn_values=[1])

    # BUG FIX: numpy.float was removed in NumPy 1.24; the builtin float is
    # the type it aliased.
    banddataraster = raster.GetRasterBand(1)
    dataraster = banddataraster.ReadAsArray(xoff, yoff, xcount,
                                            ycount).astype(float)
    bandmask = target_ds.GetRasterBand(1)
    datamask = bandmask.ReadAsArray(0, 0, xcount, ycount).astype(float)

    # Mask zone of raster
    zoneraster = numpy.ma.masked_array(dataraster,
                                       numpy.logical_not(datamask))
    #print(zoneraster)

    # Calculate statistics of zonal raster
    zonal_min.append(numpy.min(zoneraster))
    zonal_max.append(numpy.max(zoneraster))
    zonal_average.append(numpy.mean(zoneraster))
    zonal_var.append(numpy.var(zoneraster))
    zonal_std.append(numpy.std(zoneraster))
    #print(zonal_min, zonal_max, zonal_average, zonal_var, zonal_std)

    return zonal_min, zonal_max, zonal_average, zonal_var, zonal_std
def main():
    """Collect ASP DEM deliverables from a source directory or shapefile index.

    Parses command-line options, validates the source, and builds the list
    of DEM paths (``overlaps``) either by walking a directory tree or by
    reading DEM_NAME plus FILEPATH/WIN_PATH attributes from a shapefile.
    NOTE(review): the visible portion only collects ``overlaps``; the
    move/copy step presumably follows outside this excerpt -- confirm.
    """
    #### Set Up Arguments
    parser = argparse.ArgumentParser(
        description="move/copy ASP deliverable files")

    #### Positional Arguments
    parser.add_argument('src', help="source dir/shp of ASP dems")
    parser.add_argument('dstdir', help="destination directory")

    #### Optional Arguments
    parser.add_argument('-m', '--move', action='store_true', default=False,
                        help='move files instead of copy')
    parser.add_argument('--dryrun', action='store_true', default=False,
                        help='print action but do not alter files\n')
    parser.add_argument('--exclude-drg', action='store_true', default=False,
                        help='exclude DRG/Ortho')
    parser.add_argument(
        '--dems-only', action='store_true', default=False, help=
        'copy DEMs only - overrides --exclude and --include options, except --include-fltr'
    )
    parser.add_argument('--no-dirs', action='store_true', default=False,
                        help='do not make pairname subdirs for overlaps\n')
    parser.add_argument(
        '--tar-only', action='store_true', default=False,
        help='copy only tar archive, overrides --exclude and --include options'
    )
    parser.add_argument('--exclude-err', action='store_true', default=False,
                        help='ASP: exclude intersectionErr raster')
    parser.add_argument('--include-pc', action='store_true', default=False,
                        help='ASP: include point cloud')
    parser.add_argument('--include-fltr', action='store_true', default=False,
                        help='ASP: include non-interpolated DEM')
    parser.add_argument('--include-logs', action='store_true', default=False,
                        help='ASP: include stereo logs')

    #### Parse Arguments
    args = parser.parse_args()
    src = os.path.abspath(args.src)

    if args.dems_only and args.tar_only:
        # NOTE(review): "not not compatible" is a typo in this user-facing
        # message ("not compatible"); left untouched in this doc-only pass.
        parser.error(
            "options --tar-only and --dems-only are not not compatible")

    #### Validate args
    if os.path.isdir(src):
        srctype = 'dir'
    elif os.path.isfile(src) and src.endswith(".shp"):
        srctype = "shp"
    else:
        parser.error("Src is not a valid directory or shapefile: %s" % src)

    print "Collecting DEMs from source..."

    #### ID all dems, pairname is dirname
    overlaps = []
    if srctype == 'dir':
        # accept both ASP ('-DEM.tif') and SETSM ('_dem.tif') naming, but
        # skip the non-interpolated 'fltr' variants
        for root, dirs, files in os.walk(src):
            for f in files:
                if (f.endswith(('-DEM.tif', '_dem.tif')) and not 'fltr' in f):
                    overlaps.append(os.path.join(root, f))

    elif srctype == 'shp':
        #### open shp
        # candidate attribute fields that may hold the DEM location
        flds = ("FILEPATH", "WIN_PATH")
        dem_fld = "DEM_NAME"
        ds = ogr.Open(src)
        if ds is not None:
            lyr = ds.GetLayerByName(
                os.path.splitext(os.path.basename(src))[0])
            lyr.ResetReading()
            src_srs = lyr.GetSpatialRef()
            featDefn = lyr.GetLayerDefn()

            for feat in lyr:
                path = None
                paths = []
                # Python 2 exception syntax throughout this block.
                try:
                    i = feat.GetFieldIndex(dem_fld)
                    dem_name = feat.GetField(i)
                except ValueError, e:
                    # NOTE(review): on this path dem_name stays unbound (or
                    # stale from the previous feature) and the next check
                    # may misbehave -- confirm.
                    print "Cannot locate DEM name field (%s)" % (dem_fld)

                if not dem_name:
                    print "Cannot locate DEM name field (%s)" % (dem_fld)
                else:
                    # each candidate field may hold either the DEM's full
                    # path or its containing directory; try both readings
                    for fld in flds:
                        try:
                            i = feat.GetFieldIndex(fld)
                            attrib = feat.GetField(i)
                        except ValueError, e:
                            print "Cannot locate candidate field (%s) in source feature class" % (
                                fld)
                        else:
                            if attrib:
                                attrib_path = os.path.join(attrib, dem_name)
                                paths.append(attrib)
                                paths.append(attrib_path)
                                if os.path.isfile(attrib):
                                    path = attrib
                                elif os.path.isfile(attrib_path):
                                    path = attrib_path

                    if path:
                        print path
                        overlaps.append(path)
                    else:
                        if len(paths) > 0:
                            print "Cannot locate path for DEM in any of the following locations: \n%s" % (
                                '\n\t'.join(paths))
                        else:
                            print "Cannot get valid values from candidate fields (%s) in source feature class" % (
                                ', '.join(flds))

            ds = None
def main():
    # Render an OSM extract to a PNG: waterways and (multi)polygons are drawn
    # in primary_color on a white canvas, in an equal-area projection.
    # Relies on module-level globals: custom_extend, width, primary_color.
    if len(sys.argv) > 1:
        infile = sys.argv[1]
        outfile = sys.argv[2]
    else:
        infile = 'input.osm'
        outfile = 'output.png'
    # Interleaved reading is how the OSM driver streams large files.
    gdal.SetConfigOption('OGR_INTERLEAVED_READING', 'YES')
    osm = ogr.Open(infile)
    nLayerCount = osm.GetLayerCount()
    thereIsDataInLayer = True
    sourceRef = osm.GetLayer(0).GetSpatialRef()
    targetRef = osr.SpatialReference()
    # 102003: USA Contiguous Albers Equal Area Conic (ESRI code).
    targetRef.ImportFromEPSG(102003)
    transform = osr.CoordinateTransformation(sourceRef, targetRef)
    Xmin, Xmax, Ymin, Ymax = osm.GetLayer(0).GetExtent()
    print("orig extents:", Xmin, Ymin, Xmax, Ymax)
    if custom_extend:
        Xmin, Ymin, Xmax, Ymax = custom_extend
        print("custom extents:", Xmin, Ymin, Xmax, Ymax)
    # Project the two bounding-box corners into the target SRS.
    minPoint = ogr.Geometry(ogr.wkbLineString)
    minPoint.AddPoint(Xmin, Ymin)
    minPoint.Transform(transform)
    maxPoint = ogr.Geometry(ogr.wkbLineString)
    maxPoint.AddPoint(Xmax, Ymax)
    maxPoint.Transform(transform)
    Xmin, Ymin, Xmax, Ymax = (minPoint.GetX(), minPoint.GetY(),
                              maxPoint.GetX(), maxPoint.GetY())
    print("target extents:", Xmin, Ymin, Xmax, Ymax)
    pixel_size = 100  # meter per pixel
    # Overridden immediately: derive pixel size from requested image width.
    pixel_size = abs(Xmax - Xmin) / width
    target_Width = int(abs(Xmax - Xmin) / pixel_size)
    target_Height = int(abs(Ymax - Ymin) / pixel_size)
    print("width: {0:.2f} km".format(abs(Xmax - Xmin) / 1000))
    print("height: {0:.2f} km".format(abs(Ymax - Ymin) / 1000))
    print("target width: {} px ".format(target_Width))
    print("target height: {} px".format(target_Height))
    image = Image.new('RGBA', (target_Width, target_Height), "white")
    draw = ImageDraw.Draw(image)
    counter = 0

    def linestring(points, color="black", width=1):
        # Draw a polyline segment by segment onto the shared canvas.
        for i in range(1, len(points)):
            draw.line([points[i - 1], points[i]], fill=color, width=width)

    # Interleaved reading protocol: sweep all layers repeatedly until one
    # full pass yields no feature from any layer.
    while thereIsDataInLayer:
        thereIsDataInLayer = False
        for iLayer in range(nLayerCount):
            lyr = osm.GetLayer(iLayer)
            feature = lyr.GetNextFeature()
            while (feature is not None):
                thereIsDataInLayer = True
                geom = feature.GetGeometryRef()
                counter += 1
                print("\r ... processing feature {} ... ".format(counter),
                      end="")
                # we ignore points
                if geom.GetGeometryName() == "POINT":
                    pass
                # Linestring are for ways
                elif geom.GetGeometryName() == "LINESTRING":
                    geom.Transform(transform)
                    # World coords -> pixel coords (y axis flipped).
                    points = list(
                        map(
                            lambda p: ((p[0] - Xmin) / pixel_size,
                                       (Ymax - p[1]) / pixel_size),
                            geom.GetPoints()))
                    # this is for rivers and streams
                    # NOTE(review): GetFieldIndex returns -1 when the field is
                    # absent, which is truthy (and index 0 is falsy) -- confirm
                    # this test does what is intended.
                    if feature.GetFieldIndex("waterway"):
                        if feature.GetField(
                                feature.GetFieldIndex("waterway")) == "river":
                            linestring(points, primary_color, 1)
                        elif feature.GetField(feature.GetFieldIndex(
                                "waterway")) == "riverbank":
                            linestring(points, primary_color, 3)
                        elif feature.GetField(
                                feature.GetFieldIndex("waterway")) == "canal":
                            linestring(points, primary_color, 1)
                        elif feature.GetField(
                                feature.GetFieldIndex("waterway")) == "stream":
                            pass
                    # this if for roads
                    #if feature.GetFieldIndex("highway"):
                    #    if feature.GetField(feature.GetFieldIndex("highway")) == "motorway":
                    #        linestring(points, primary_color, 4)
                    #    elif feature.GetField(feature.GetFieldIndex("highway")) == "primary":
                    #        linestring(points, primary_color, 3)
                    #    elif feature.GetField(feature.GetFieldIndex("highway")) == "secondary":
                    #        linestring(points, primary_color, 2)
                    #    elif feature.GetField(feature.GetFieldIndex("highway")):
                    #        linestring(points, primary_color, 1)
                    #if feature.GetFieldIndex("railway"):
                    #    linestring(points, primary_color, 1)
                    #
                    #if feature.GetFieldIndex("pipepline"):
                    #    linestring(points, primary_color, 1)
                # Polygons? Haven't seen them yet.
                elif geom.GetGeometryName() == "POLYGON":
                    pass
                # multipolygons are for lakes and stuff
                elif geom.GetGeometryName() == "MULTIPOLYGON":
                    geom.Transform(transform)
                    for i in range(0, geom.GetGeometryCount()):
                        g = geom.GetGeometryRef(i)
                        for j in range(0, g.GetGeometryCount()):
                            ring = g.GetGeometryRef(j)
                            if ring.GetPoints():
                                points = list(
                                    map(
                                        lambda p: ((p[0] - Xmin) / pixel_size,
                                                   (Ymax - p[1]) / pixel_size),
                                        ring.GetPoints()))
                                draw.polygon(points, fill=primary_color,
                                             outline=primary_color)
                                linestring(points, primary_color, 2)
                            else:
                                print("MULTIPOLYGON inner",
                                      ring.GetGeometryCount(),
                                      ring.GetGeometryName())
                # Everything else is not necessary. I guess these are relations.
                # We just draw everthing over, so it makes important ways stronger
                elif geom.GetGeometryName() == "MULTILINESTRING":
                    geom.Transform(transform)
                    for j in range(0, geom.GetGeometryCount()):
                        ring = geom.GetGeometryRef(j)
                        if ring.GetPoints():
                            points = list(
                                map(
                                    lambda p: ((p[0] - Xmin) / pixel_size,
                                               (Ymax - p[1]) / pixel_size),
                                    ring.GetPoints()))
                            if len(points) > 1:
                                linestring(points, primary_color, 2)
                        else:
                            print("MULTILINESTRINGr inner",
                                  ring.GetGeometryCount(),
                                  ring.GetGeometryName())
                elif geom.GetGeometryName() == "GEOMETRYCOLLECTION":
                    geom.Transform(transform)
                    for j in range(0, geom.GetGeometryCount()):
                        ring = geom.GetGeometryRef(j)
                        if ring.GetGeometryName() == "POINT":
                            #ignore points again
                            pass
                        elif ring.GetGeometryName() == "LINESTRING":
                            if ring.GetPoints():
                                points = list(
                                    map(
                                        lambda p: ((p[0] - Xmin) / pixel_size,
                                                   (Ymax - p[1]) / pixel_size),
                                        ring.GetPoints()))
                                if len(points) > 1:
                                    linestring(points, primary_color, 2)
                            else:
                                print("MULTILINESTRINGr inner",
                                      ring.GetGeometryCount(),
                                      ring.GetGeometryName())
                        elif ring.GetGeometryName() == "POLYGON":
                            for i in range(0, geom.GetGeometryCount()):
                                g = geom.GetGeometryRef(i)
                                for j in range(0, g.GetGeometryCount()):
                                    ring = g.GetGeometryRef(j)
                                    if ring.GetPoints():
                                        points = list(
                                            map(
                                                lambda p:
                                                ((p[0] - Xmin) / pixel_size,
                                                 (Ymax - p[1]) / pixel_size),
                                                ring.GetPoints()))
                                        draw.polygon(points,
                                                     fill=primary_color,
                                                     outline=primary_color)
                                        linestring(points, primary_color, 3)
                                    else:
                                        print("MULTIPOLYGON inner",
                                              ring.GetGeometryCount(),
                                              ring.GetGeometryName())
                        else:
                            #something else? what am I?
                            print(ring.GetGeometryName())
                else:
                    pass
                    print("ohter geom", geom)
                #The destroy method is necessary for interleaved reading
                feature.Destroy()
                feature = lyr.GetNextFeature()
                # save intermediate results for debugging
                if counter % 1000000 == 0:
                    image.save(outfile)
    # destroy and save
    print("")
    osm.Destroy()
    image.save(outfile)
def open_File():
    """Prompt the user for a path and open it as an OGR data source.

    Returns:
        The ogr DataSource, or None when OGR cannot open the path.
        ogr.Open returns None instead of raising, so a warning is printed
        here to make the failure visible instead of silently propagating
        None to callers.
    """
    filePath = str(input("file path"))
    print ("Entered File Path is %s" % filePath)
    datasource = ogr.Open(filePath)
    if datasource is None:
        # Surface the failure; previously a bad path returned None silently.
        print ("Could not open %s" % filePath)
    return datasource
def iterate_features(layer):
    """Yield every feature of *layer* by 0-based index.

    BUGFIX: the original iterated range(GetFeatureCount() - 1), which
    silently skipped the last feature (feature indices run 0..count-1).
    """
    for feature_index in range(layer.GetFeatureCount()):
        yield layer.GetFeature(feature_index)


def iterate_fields(feature):
    """Yield the ogr.FieldDefn of every field on *feature*.

    BUGFIX: the original iterated range(GetFieldCount() - 1), which
    silently dropped the last field from the copied schema.
    """
    feature_def = feature.GetDefnRef()
    for field_index in range(feature.GetFieldCount()):
        yield feature_def.GetFieldDefn(field_index)


if __name__ == "__main__":
    # Copy the schema and (partially, as far as this chunk shows) the
    # features of a broken shapefile into a fresh "remarks" polygon layer.
    # bad_shapefile_path / new_shapefile_path are defined elsewhere.
    bad_shapefile = ogr.Open(bad_shapefile_path)
    old_layer = bad_shapefile.GetLayer()
    srs = old_layer.GetSpatialRef().Clone()
    driver = ogr.GetDriverByName("ESRI Shapefile")
    new_shapefile = driver.CreateDataSource(new_shapefile_path)
    new_layer = new_shapefile.CreateLayer("remarks", srs,
                                          geom_type=ogr.wkbPolygon)
    for field in iterate_fields(old_layer.GetFeature(0)):
        new_layer.CreateField(field)
    for old_feature in iterate_features(old_layer):
        geo = old_feature.GetGeometryRef()
        new_feature = old_feature.Clone()
def HandleTile(t, src, dstdir, csvpath, args, exclude_list):
    """Select imagery contributing to mosaic tile *t* and write job files.

    Finds footprint records in *src* intersecting the tile geometry, filters
    and (optionally) quality-sorts them, then writes the "orig"/"ortho" file
    lists (plus a CSV of scenes still on tape) into *dstdir*, and optionally
    a shapefile of contributing image geometries.

    Args:
        t: tile object exposing name, epsg, geom.
        src: footprint dataset specifier (resolved via utils.get_source_names).
        dstdir: output directory for job files.
        csvpath: path whose basename seeds the already-done check filenames.
        args: parsed CLI namespace (overwrite, require_pan, nosort, build_shp,
            mosaic, stretch, min_contribution_area, ...).
        exclude_list: scene_ids to exclude.

    Fixes vs. original:
      * IK01 pan-name derivation chained (previously only "bgrn" took effect).
      * ttxt/mtxt file handles are now closed (ttxt was removed while open).
    """
    otxtpath = os.path.join(
        dstdir, "{}_{}_orig.txt".format(os.path.basename(csvpath)[:-4], t.name))
    mtxtpath = os.path.join(
        dstdir, "{}_{}_ortho.txt".format(os.path.basename(csvpath)[:-4], t.name))

    if os.path.isfile(otxtpath) and os.path.isfile(mtxtpath) and args.overwrite is False:
        logger.info("Tile %s processing files already exist", t.name)
    else:
        logger.info("Tile %s", t.name)

        t_srs = utils.osr_srs_preserve_axis_order(osr.SpatialReference())
        t_srs.ImportFromEPSG(t.epsg)

        #### Open mfp
        dsp, lyrn = utils.get_source_names(src)
        ds = ogr.Open(dsp)
        if ds is None:
            logger.error("Open failed")
        else:
            lyr = ds.GetLayerByName(lyrn)
            if not lyr:
                raise RuntimeError("Layer {} does not exist in dataset {}".format(lyrn, dsp))
            else:
                s_srs = lyr.GetSpatialRef()
                #logger.debug(str(s_srs))
                #logger.debug(str(t.geom))

                # Filter in the source SRS so the spatial filter matches the
                # footprint layer's coordinates.
                tile_geom_in_s_srs = t.geom.Clone()
                if not t_srs.IsSame(s_srs):
                    ict = osr.CoordinateTransformation(t_srs, s_srs)
                    ct = osr.CoordinateTransformation(s_srs, t_srs)
                    tile_geom_in_s_srs.Transform(ict)

                # if the geometry crosses meridian, split it into multipolygon
                # (else this breaks SetSpatialFilter)
                if utils.doesCross180(tile_geom_in_s_srs):
                    logger.debug("tile_geom_in_s_srs crosses 180 meridian; splitting to multiple polygons...")
                    tile_geom_in_s_srs = utils.getWrappedGeometry(tile_geom_in_s_srs)

                lyr.ResetReading()
                lyr.SetSpatialFilter(tile_geom_in_s_srs)
                feat = lyr.GetNextFeature()

                imginfo_list1 = []
                while feat:
                    iinfo = mosaic.ImageInfo(feat, "RECORD", srs=s_srs)

                    if iinfo.geom is not None and iinfo.geom.GetGeometryType() in (ogr.wkbPolygon, ogr.wkbMultiPolygon):
                        if not t_srs.IsSame(s_srs):
                            iinfo.geom.Transform(ct)
                            ## fix self-intersection errors caused by reprojecting over 180
                            temp = iinfo.geom.Buffer(0.1)  # assumes a projected coordinate system with meters or feet as units
                            iinfo.geom = temp

                        if iinfo.geom.Intersects(t.geom):
                            if iinfo.scene_id in exclude_list:
                                logger.debug("Scene in exclude list, excluding: %s", iinfo.srcfp)

                            elif not os.path.isfile(iinfo.srcfp) and iinfo.status != "tape":
                                #logger.info("iinfo.status != tape: {0}".format(iinfo.status != "tape"))
                                logger.warning("Scene path is invalid, excluding %s (path = %s) (status = %s)",
                                               iinfo.scene_id, iinfo.srcfp, iinfo.status)

                            elif args.require_pan:
                                # Keep only multispectral scenes that have a
                                # panchromatic sibling on disk.
                                srcfp = iinfo.srcfp
                                srcdir, mul_name = os.path.split(srcfp)
                                if iinfo.sensor in ["WV02", "WV03", "QB02"]:
                                    pan_name = mul_name.replace("-M", "-P")
                                elif iinfo.sensor == "GE01":
                                    if "_5V" in mul_name:
                                        # 5V-era GE01 names: glob for the pan product.
                                        pan_name_base = srcfp[:-24].replace("M0", "P0")
                                        candidates = glob.glob(pan_name_base + "*")
                                        candidates2 = [f for f in candidates
                                                       if f.endswith(('.ntf', '.NTF', '.tif', '.TIF'))]
                                        if len(candidates2) == 0:
                                            pan_name = ''
                                        elif len(candidates2) == 1:
                                            pan_name = os.path.basename(candidates2[0])
                                        else:
                                            pan_name = ''
                                            logger.error('%i panchromatic images match the multispectral image name '
                                                         '%s', len(candidates2), mul_name)
                                    else:
                                        pan_name = mul_name.replace("-M", "-P")
                                elif iinfo.sensor == "IK01":
                                    # BUGFIX: replacements were applied to
                                    # mul_name independently, so only the last
                                    # ("bgrn") ever took effect; chain them so
                                    # any band token maps to "pan".
                                    pan_name = (mul_name.replace("blu", "pan")
                                                        .replace("msi", "pan")
                                                        .replace("bgrn", "pan"))
                                # NOTE(review): a sensor not listed above leaves
                                # pan_name unbound (NameError) -- confirm all
                                # sensors are filtered upstream.
                                pan_srcfp = os.path.join(srcdir, pan_name)
                                if not os.path.isfile(pan_srcfp):
                                    logger.debug("Image does not have a panchromatic component, excluding: %s", iinfo.srcfp)
                                else:
                                    logger.debug("Intersect %s, %s: %s", iinfo.scene_id, iinfo.srcfp, str(iinfo.geom))
                                    imginfo_list1.append(iinfo)

                            else:
                                logger.debug("Intersect %s, %s: %s", iinfo.scene_id, iinfo.srcfp, str(iinfo.geom))
                                imginfo_list1.append(iinfo)

                    feat = lyr.GetNextFeature()

                ds = None
                logger.info("Number of intersects in tile %s: %i", t.name, len(imginfo_list1))

                if len(imginfo_list1) > 0:
                    #### Get mosaic parameters
                    logger.debug("Getting mosaic parameters")
                    params = mosaic.getMosaicParameters(imginfo_list1[0], args)

                    #### Remove images that do not match ref
                    logger.debug("Setting image pattern filter")
                    imginfo_list2 = mosaic.filterMatchingImages(imginfo_list1, params)
                    logger.info("Number of images matching filter: %i", len(imginfo_list2))

                    if args.nosort is False:
                        #### Sort by quality
                        logger.debug("Sorting images by quality")
                        imginfo_list3 = []
                        for iinfo in imginfo_list2:
                            iinfo.getScore(params)
                            if iinfo.score > 0:
                                imginfo_list3.append(iinfo)
                        # sort so highest score is last
                        imginfo_list3.sort(key=lambda x: x.score)
                    else:
                        imginfo_list3 = list(imginfo_list2)

                    #### Overlay geoms and remove non-contributors
                    logger.debug("Overlaying images to determine contributors")
                    contribs = mosaic.determine_contributors(imginfo_list3, t.geom, args.min_contribution_area)
                    logger.info("Number of contributing images: %i", len(contribs))

                    if len(contribs) > 0:
                        if args.build_shp:
                            #######################################################
                            #### Create Shp
                            shp = os.path.join(dstdir, "{}_{}_imagery.shp".format(args.mosaic, t.name))
                            logger.debug("Creating shapefile of geoms: %s", shp)
                            fields = [("IMAGENAME", ogr.OFTString, 100),
                                      ("SCORE", ogr.OFTReal, 0)]
                            OGR_DRIVER = "ESRI Shapefile"
                            ogrDriver = ogr.GetDriverByName(OGR_DRIVER)
                            if ogrDriver is None:
                                logger.debug("OGR: Driver %s is not available", OGR_DRIVER)
                                sys.exit(-1)
                            if os.path.isfile(shp):
                                ogrDriver.DeleteDataSource(shp)
                            vds = ogrDriver.CreateDataSource(shp)
                            if vds is None:
                                logger.debug("Could not create shp")
                                sys.exit(-1)
                            shpd, shpn = os.path.split(shp)
                            shpbn, shpe = os.path.splitext(shpn)
                            lyr = vds.CreateLayer(shpbn, t_srs, ogr.wkbPolygon)
                            if lyr is None:
                                logger.debug("ERROR: Failed to create layer: %s", shpbn)
                                sys.exit(-1)
                            for fld, fdef, flen in fields:
                                field_defn = ogr.FieldDefn(fld, fdef)
                                if fdef == ogr.OFTString:
                                    field_defn.SetWidth(flen)
                                if lyr.CreateField(field_defn) != 0:
                                    logger.debug("ERROR: Failed to create field: %s", fld)
                            for iinfo, geom in contribs:
                                logger.debug("Image: %s", iinfo.srcfn)
                                feat = ogr.Feature(lyr.GetLayerDefn())
                                feat.SetField("IMAGENAME", iinfo.srcfn)
                                feat.SetField("SCORE", iinfo.score)
                                feat.SetGeometry(geom)
                                if lyr.CreateFeature(feat) != 0:
                                    logger.debug("ERROR: Could not create feature for image %s", iinfo.srcfn)
                                else:
                                    logger.debug("Created feature for image: %s", iinfo.srcfn)
                                feat.Destroy()

                        #### Write textfiles
                        if not os.path.isdir(dstdir):
                            os.makedirs(dstdir)
                        otxtpath = os.path.join(dstdir, "{}_{}_orig.txt".format(args.mosaic, t.name))
                        otxtpath_ontape = os.path.join(dstdir, "{}_{}_orig_ontape.csv".format(args.mosaic, t.name))
                        mtxtpath = os.path.join(dstdir, "{}_{}_ortho.txt".format(args.mosaic, t.name))
                        rn_fromtape_basedir = os.path.join(dstdir, "renamed_fromtape")
                        rn_fromtape_path = os.path.join(rn_fromtape_basedir, t.name)
                        otxt = open(otxtpath, 'w')
                        ttxt = open(otxtpath_ontape, 'w')
                        mtxt = open(mtxtpath, 'w')
                        # write header
                        ttxt.write("{0},{1},{2}\n".format("SCENE_ID", "S_FILEPATH", "STATUS"))
                        tape_ct = 0
                        for iinfo, geom in contribs:
                            if not os.path.isfile(iinfo.srcfp) and iinfo.status != "tape":
                                logger.warning("Image does not exist: %s", iinfo.srcfp)
                            if iinfo.status == "tape":
                                # Scene must be pulled from tape first; point the
                                # orig list at the post-pull rename location.
                                tape_ct += 1
                                ttxt.write("{0},{1},{2}\n".format(iinfo.scene_id, iinfo.srcfp, iinfo.status))
                                # get srcfp with file extension
                                srcfp_file = os.path.basename(iinfo.srcfp)
                                otxt.write("{}\n".format(os.path.join(rn_fromtape_path, srcfp_file)))
                            else:
                                otxt.write("{}\n".format(iinfo.srcfp))
                            m_fn = "{0}_u08{1}{2}.tif".format(
                                os.path.splitext(iinfo.srcfn)[0], args.stretch, t.epsg)
                            mtxt.write(os.path.join(dstdir, 'ortho', t.name, m_fn) + "\n")
                        # BUGFIX: close all three handles. Previously only otxt
                        # was closed; the ontape CSV was os.remove()d while still
                        # open (fails on Windows) and mtxt was never flushed.
                        otxt.close()
                        ttxt.close()
                        mtxt.close()
                        if tape_ct == 0:
                            logger.debug("No files need to be pulled from tape.")
                            os.remove(otxtpath_ontape)
                        else:
                            # make output dirs from tape
                            if not os.path.isdir(rn_fromtape_basedir):
                                os.mkdir(rn_fromtape_basedir)
                            if not os.path.isdir(rn_fromtape_path):
                                os.mkdir(rn_fromtape_path)
                            tape_tmp = os.path.join(dstdir, "{0}_{1}_tmp".format(args.mosaic, t.name))
                            if not os.path.isdir(tape_tmp):
                                os.mkdir(tape_tmp)
                            logger.warning("{0} scenes are not accessible, as they are on tape. Please use ir.py to pull "
                                           "scenes using file '{1}'. They must be put in directory '{2}', as file '{3}' "
                                           "contains hard-coded paths to said files (necessary to perform "
                                           "orthorectification). Please set a --tmp path (use '{4}').\n"
                                           "Note that if some (or all) scenes have already been pulled from tape, ir.py "
                                           "will not pull them again.\n".
                                           format(tape_ct, otxtpath_ontape, rn_fromtape_path, otxtpath, tape_tmp))
                            tape_log = "{0}_{1}_ir_log_{2}.log".format(args.mosaic, t.name,
                                                                      datetime.today().strftime("%Y%m%d%H%M%S"))
                            root_pgclib_path = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
                                                            "pgclib", "")
                            logger.info("Suggested ir.py command:\n\n"
                                        ""
                                        "python {}ir.py -i {} -o {} --tmp {} -tm link 2>&1 | tee {}"
                                        .format(root_pgclib_path, otxtpath_ontape, rn_fromtape_path, tape_tmp,
                                                os.path.join(dstdir, tape_log)))
def zonal_stats(feat, input_zone_polygon, input_value_raster):
    """Compute statistics of *input_value_raster* inside one zone feature.

    Args:
        feat: ogr.Feature whose geometry defines the zone.
        input_zone_polygon: path to the zone polygon dataset (its layer is
            rasterized to build the mask).
        input_value_raster: path to the value raster.

    Returns:
        (average, mean, median, std, var) over the masked pixels, or
        (0, 0, 0, 0, 0) on any failure (best-effort contract kept from the
        original -- callers rely on this never raising).

    BUGFIX: numpy.float was deprecated in NumPy 1.20 and removed in 1.24;
    the .astype() calls now use numpy.float64 explicitly.
    """
    try:
        # Open data
        raster = gdal.Open(input_value_raster)
        shp = ogr.Open(input_zone_polygon)
        lyr = shp.GetLayer()

        # Get raster georeference info
        transform = raster.GetGeoTransform()
        xOrigin = transform[0]
        yOrigin = transform[3]
        pixelWidth = transform[1]
        pixelHeight = transform[5]

        # Reproject vector geometry to same projection as raster
        # sourceSR = osr.SpatialReference()
        # sourceSR.ImportFromEPSG(4326)
        sourceSR = lyr.GetSpatialRef()
        targetSR = osr.SpatialReference()
        targetSR.ImportFromWkt(raster.GetProjectionRef())
        coordTrans = osr.CoordinateTransformation(sourceSR, targetSR)
        geom = feat.GetGeometryRef()
        geom.Transform(coordTrans)

        # Collect vertex coordinates to derive the zone bounding box.
        # (Only the outer ring of each polygon is considered.)
        if geom.GetGeometryName() == 'MULTIPOLYGON':
            count = 0
            pointsX = []
            pointsY = []
            for polygon in geom:
                geomInner = geom.GetGeometryRef(count)
                ring = geomInner.GetGeometryRef(0)
                numpoints = ring.GetPointCount()
                for p in range(numpoints):
                    lon, lat, z = ring.GetPoint(p)
                    pointsX.append(lon)
                    pointsY.append(lat)
                count += 1
        elif geom.GetGeometryName() == 'POLYGON':
            ring = geom.GetGeometryRef(0)
            numpoints = ring.GetPointCount()
            pointsX = []
            pointsY = []
            for p in range(numpoints):
                lon, lat, z = ring.GetPoint(p)
                pointsX.append(lon)
                pointsY.append(lat)
        else:
            sys.exit('ERROR')

        xmin = min(pointsX)
        xmax = max(pointsX)
        ymin = min(pointsY)
        ymax = max(pointsY)

        # Specify offset and rows and columns to read.
        # NOTE(review): pixelWidth is used for the y direction too, i.e.
        # square pixels are assumed -- confirm against abs(pixelHeight).
        xoff = int((xmin - xOrigin) / pixelWidth)
        yoff = int((yOrigin - ymax) / pixelWidth)
        xcount = int((xmax - xmin) / pixelWidth) + 1
        ycount = int((ymax - ymin) / pixelWidth) + 1

        # Create memory target raster covering just the zone bounding box.
        target_ds = gdal.GetDriverByName('MEM').Create(
            '', xcount, ycount, 1, gdal.GDT_Byte)
        target_ds.SetGeoTransform((
            xmin,
            pixelWidth,
            0,
            ymax,
            0,
            pixelHeight,
        ))

        # Create for target raster the same projection as for the value raster
        raster_srs = osr.SpatialReference()
        raster_srs.ImportFromWkt(raster.GetProjectionRef())
        target_ds.SetProjection(raster_srs.ExportToWkt())

        # Rasterize zone polygon to raster (burn 1 inside the zone)
        gdal.RasterizeLayer(target_ds, [1], lyr, burn_values=[1])

        # Read raster as arrays
        banddataraster = raster.GetRasterBand(1)
        dataraster = banddataraster.ReadAsArray(
            xoff, yoff, xcount, ycount).astype(numpy.float64)

        bandmask = target_ds.GetRasterBand(1)
        datamask = bandmask.ReadAsArray(
            0, 0, xcount, ycount).astype(numpy.float64)

        # Mask zone of raster (pixels outside the burned zone are masked out)
        zoneraster = numpy.ma.masked_array(dataraster,
                                           numpy.logical_not(datamask))

        # Calculate statistics of zonal raster
        return numpy.average(zoneraster), numpy.mean(
            zoneraster), numpy.median(zoneraster), numpy.std(
                zoneraster), numpy.var(zoneraster)
    except Exception:
        # Best-effort: any failure yields zeroed statistics.
        return 0, 0, 0, 0, 0
def main(ingpxfile, outlineshapefile, inspeclib, outspeclib, secdiff):
    # Split a GPX track into consecutive-point segments and, per segment,
    # average the ASD field spectra acquired between the two endpoint
    # timestamps (spectrum clocks shifted by secdiff seconds). Writes a
    # line shapefile of segments plus mean/stddev spectral libraries
    # restricted to the 400-700 nm band range.
    img = spectral.envi.open(
        os.path.splitext(inspeclib)[0] + ".hdr", inspeclib)
    specs = img.spectra
    specnames = img.names
    # Band indices nearest 400 nm and 700 nm (visible range kept in outputs).
    b400 = np.argmin(abs(np.asarray(img.bands.centers) - 400))
    b700 = np.argmin(abs(np.asarray(img.bands.centers) - 700))

    ## infile = "GPSTrack_D5T4.gpx"
    ## outfile = "GPSTrack_D5T4_utm6_with_segments.shp"

    ## Read in time data for each ASD file.
    ## they are stored in a pre-made list with the ASD file name and the date/Time in each row
    ## csvtablefile = "/Carnegie/DGE/caodata/Scratch/dknapp/ASD/Spectroscopy/list_spec_time.txt"

    specrows = []

    ## with open(csvtablefile, 'r') as csvfile:
    ##     csvreader = csv.reader(csvfile)
    ##     for row in csvreader:
    ##         specrows.append(row)

    # Spectrum names encode "<asdfile> <timestamp>"; split into columns.
    for row in specnames:
        vals = row.split()
        specrows.append(vals)

    # Glob-style pattern of ASD files for this transect (keyed on the last
    # 4 characters of the GPX basename).
    transroot = os.path.splitext(ingpxfile)[0][-4:] + "*.asd.ref"

    ## Create a list with the date/times as datetime objects.
    spectimedates = []
    for row in specrows:
        ## uggh. The format for timezone offset (%z) does not include a colon (:),
        ## so we have to skip that colon character.
        temp = row[1][0:22] + row[1][23:]
        trydate = datetime.datetime.strptime(temp, "%Y-%m-%dT%H:%M:%S%z")
        spectimedates.append(trydate)

    ## Create output spatial reference for Moorea (UTM Zone 6 South)
    spatialReference = osr.SpatialReference()
    spatialReference.ImportFromEPSG(32706)

    ## Create output data file
    drv = ogr.GetDriverByName("ESRI Shapefile")
    outDS = drv.CreateDataSource(outlineshapefile)
    ## outlayer = outDS.CreateLayer('moorea', spatialReference, ogr.wkbLineString)
    outlayer = outDS.CreateLayer('moorea', spatialReference, ogr.wkbLineString)
    outlayerDefn = outlayer.GetLayerDefn()
    tnameDefn = ogr.FieldDefn('specname', ogr.OFTString)
    time1pntDefn = ogr.FieldDefn('starttime', ogr.OFTString)
    time2pntDefn = ogr.FieldDefn('endtime', ogr.OFTString)
    outlayer.CreateField(tnameDefn)
    outlayer.CreateField(time1pntDefn)
    outlayer.CreateField(time2pntDefn)

    ## Get input data layer (track_points)
    inDS = ogr.Open(ingpxfile)
    lyr = inDS.GetLayerByName('track_points')
    lyrdefn = lyr.GetLayerDefn()
    numpnts = lyr.GetFeatureCount()
    fldcnt = lyrdefn.GetFieldCount()

    projutm6s = pyproj.Proj("+init=EPSG:32706")

    pntutm = []
    times = []
    azimuths = []
    lyr.ResetReading()

    ## create utc and french polynesia timezone objects
    utc = datetime.timezone.utc
    fptz = datetime.timezone(datetime.timedelta(hours=-10))

    # Collect each track point as a (UTM coordinate, UTC timestamp) pair.
    for k in range(0, numpnts):
        feat = lyr.GetFeature(k)
        mytime = feat.GetFieldAsDateTime('time')
        print(mytime)
        mydatetime = datetime.datetime(mytime[0], mytime[1], mytime[2], mytime[3], \
            mytime[4], int(mytime[5]), tzinfo=utc)
        geom = feat.GetGeometryRef()
        lon = geom.GetX()
        lat = geom.GetY()
        temputm = projutm6s(lon, lat)
        pntutm.append(temputm)
        times.append(mydatetime)

    # One row per segment (consecutive point pair); visible bands only.
    segspecmeans = np.zeros((len(pntutm) - 1, b700 - b400 + 1), dtype=np.float32)
    segspecsdevs = np.zeros((len(pntutm) - 1, b700 - b400 + 1), dtype=np.float32)
    meanspeclib = copy.deepcopy(img)
    sdevspeclib = copy.deepcopy(img)
    segspecmeannames = []
    segspecsdevnames = []
    keep = []  # per-segment flag: True when >=1 spectrum fell in the segment

    for j in np.arange(0, (len(pntutm) - 1)):
        pnt1 = pntutm[j]
        pnt2 = pntutm[j + 1]
        diffx = pnt2[0] - pnt1[0]
        diffy = pnt2[1] - pnt1[1]
        # atan2(dx, dy) gives bearing from north; normalize to [0, 360).
        initial_azimuth = math.degrees(math.atan2(diffx, diffy))
        azimuth = (initial_azimuth + 360) % 360
        azimuths.append(azimuth)
        time1 = times[j]
        time2 = times[j + 1]
        segtimediff = (time2 - time1).total_seconds()
        segdist = math.sqrt(math.pow(diffx, 2) + math.pow(diffy, 2))

        ## Find the spectra that are between these 2 points
        myregex = fnmatch.translate(transroot)
        asdobj = re.compile(myregex)
        speclist = []
        specnamelist = []
        for i, asdrow in enumerate(specrows):
            gotit = asdobj.match(asdrow[0])
            if gotit is not None:
                spectime = spectimedates[i] + datetime.timedelta(
                    seconds=secdiff)
                # is it between these 2 segment points?
                if (spectime > time1) and (spectime < time2):
                    speclist.append(specs[i, :])
                    specnamelist.append(asdrow[0])
                    ## propo = ((spectime-time1).total_seconds())/float(segtimediff)
                    ## azrad = azimuth * (math.pi/180.0)
                    ## xlen = math.sin(azrad) * (propo * segdist)
                    ## ylen = math.cos(azrad) * (propo * segdist)
                    ## xnewpnt = pnt1[0] + xlen
                    ## ynewpnt = pnt1[1] + ylen

        numsegspecs = len(speclist)
        if (numsegspecs == 0):
            # No spectra in this segment: mark it dropped and skip feature.
            keep.append(False)
            continue
        else:
            keep.append(True)

        # Stack matched spectra and reduce to per-band mean and stddev.
        temparr = np.zeros((numsegspecs, specs.shape[1]), dtype=np.float32)
        for col in range(numsegspecs):
            temparr[col, :] = speclist[col]
        meanspec = np.mean(temparr, axis=0)
        sdevspec = np.std(temparr, axis=0)
        segspecmeans[j, :] = meanspec[b400:(b700 + 1)]
        segspecsdevs[j, :] = sdevspec[b400:(b700 + 1)]
        segspecmeannames.append(("Mean_Seg%03d_N%03d_" % (j, numsegspecs))
                                + os.path.splitext(ingpxfile)[0][-4:])
        segspecsdevnames.append(("SD_Seg%03d_N%03d_" % (j, numsegspecs))
                                + os.path.splitext(ingpxfile)[0][-4:])

        # Write the segment as a 2-point line feature with name/time fields.
        feature = ogr.Feature(outlayerDefn)
        line = ogr.Geometry(ogr.wkbLineString)
        line.AddPoint_2D(pnt1[0], pnt1[1])
        line.AddPoint_2D(pnt2[0], pnt2[1])
        feature.SetGeometry(line)
        feature.SetFID(j)
        feature.SetField('specname',
                         ("Mean_Seg%03d_N%03d_" % (j, numsegspecs))
                         + os.path.splitext(ingpxfile)[0][-4:])
        timestr1 = time1.astimezone(utc).strftime("%Y-%m-%dT%H:%M:%SZ")
        timestr2 = time2.astimezone(utc).strftime("%Y-%m-%dT%H:%M:%SZ")
        feature.SetField('starttime', timestr1)
        feature.SetField('endtime', timestr2)
        outlayer.CreateFeature(feature)

    # Release datasets, then save libraries keeping only non-empty segments.
    inDS, outDS = None, None
    meanspeclib.spectra = segspecmeans[keep, :]
    meanspeclib.names = segspecmeannames
    meanspeclib.bands.centers = img.bands.centers[b400:(b700 + 1)]
    sdevspeclib.spectra = segspecsdevs[keep, :]
    sdevspeclib.names = segspecsdevnames
    sdevspeclib.bands.centers = img.bands.centers[b400:(b700 + 1)]
    meanspeclib.save(outspeclib + "_mean", description="Mean spectra by Segment")
    sdevspeclib.save(outspeclib + "_sdev",
                     description="Standard Deviation spectra by Segment")
#Read in unlassified raster data, xx, yy, gt = read_raster(in_raster) #Bin data in to catagories data = cat_data(data) CreateRaster(xx, yy, data, gt, oName) data, xx, yy, gt = read_raster(oName) in_shp = r"C:\workspace\Merged_SS\window_analysis\shapefiles\tex_seg_800_3class.shp" stats = zonal_stats(in_shp, oName, categorical=True, nodata=-99) ds = ogr.Open(in_shp) lyr = ds.GetLayer(0) a = [] b = [] for row in lyr: geom = row.GetGeometryRef() a.append(row.substrate) b.append(geom.GetArea()) lyr.ResetReading() del ds df = pd.DataFrame(stats) df['substrate'] = a t = df
# col_name_lst = ['ID', 'RAF2018'] file = open(out_pth, 'w+') for i in range(len(col_name_lst)): if i < len(col_name_lst) - 1: file.write(str(col_name_lst[i]) + ",") elif i == len(col_name_lst) - 1: file.write(str(col_name_lst[i]) + "\n") file.close() # for tile in tiles_lst: # def workFunc(tile): for tile in tiles_lst: ras = gdal.Open(r'{0}\{1}\{2}'.format(dir, tile, file_name)) shp = ogr.Open( r"O:\Student_Data\CJaenicke\00_MA\data\vector\random_sample\stratrand_sample_4km.shp" ) lyr = shp.GetLayer() gt = ras.GetGeoTransform() x_min = gt[0] x_max = gt[0] + gt[1] * ras.RasterXSize y_max = gt[3] y_min = gt[3] + gt[5] * ras.RasterYSize lyr.SetSpatialFilterRect(x_min, y_min, x_max, y_max) feat_count = lyr.GetFeatureCount() if feat_count > 0:
def ogr_osm_10():
    # Robustness tests for the OGR OSM driver: non-existent, empty, and
    # invalid .osm/.pbf inputs must fail cleanly, and a billion-laughs XML
    # must be rejected. Old gdaltest-style test returning
    # 'success'/'fail'/'skip'.
    if ogrtest.osm_drv is None:
        return 'skip'

    # Non existing file
    ds = ogr.Open('/nonexisting/foo.osm')
    if ds is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    # Empty .osm file
    f = gdal.VSIFOpenL('/vsimem/foo.osm', 'wb')
    gdal.VSIFCloseL(f)
    ds = ogr.Open('/vsimem/foo.osm')
    if ds is not None:
        gdaltest.post_reason('fail')
        return 'fail'
    gdal.Unlink('/vsimem/foo.osm')

    # Empty .pbf file
    f = gdal.VSIFOpenL('/vsimem/foo.pbf', 'wb')
    gdal.VSIFCloseL(f)
    ds = ogr.Open('/vsimem/foo.pbf')
    if ds is not None:
        gdaltest.post_reason('fail')
        return 'fail'
    gdal.Unlink('/vsimem/foo.pbf')

    if ogrtest.osm_drv_parse_osm:
        # Invalid .osm file: open succeeds, but reading a feature must set a
        # CPL error (checked via GetLastErrorMsg under a quiet handler).
        f = gdal.VSIFOpenL('/vsimem/foo.osm', 'wb')
        data = "<osm>"
        gdal.VSIFWriteL(data, 1, len(data), f)
        gdal.VSIFCloseL(f)
        ds = ogr.Open('/vsimem/foo.osm')
        lyr = ds.GetLayer(0)
        gdal.ErrorReset()
        gdal.PushErrorHandler('CPLQuietErrorHandler')
        feat = lyr.GetNextFeature()
        gdal.PopErrorHandler()
        if gdal.GetLastErrorMsg() == '':
            gdaltest.post_reason('fail')
            return 'fail'
        ds = None
        gdal.Unlink('/vsimem/foo.osm')

    # Invalid .pbf file: same expectation as the invalid .osm case.
    f = gdal.VSIFOpenL('/vsimem/foo.pbf', 'wb')
    data = "OSMHeader\n"
    gdal.VSIFWriteL(data, 1, len(data), f)
    gdal.VSIFCloseL(f)
    ds = ogr.Open('/vsimem/foo.pbf')
    lyr = ds.GetLayer(0)
    gdal.ErrorReset()
    gdal.PushErrorHandler('CPLQuietErrorHandler')
    feat = lyr.GetNextFeature()
    gdal.PopErrorHandler()
    if gdal.GetLastErrorMsg() == '':
        gdaltest.post_reason('fail')
        return 'fail'
    ds = None
    gdal.Unlink('/vsimem/foo.pbf')

    # Test million laugh pattern (entity-expansion bomb must be rejected
    # at open time).
    if ogrtest.osm_drv_parse_osm:
        gdal.PushErrorHandler('CPLQuietErrorHandler')
        ds = ogr.Open('data/billionlaugh.osm')
        gdal.PopErrorHandler()
        if ds is not None:
            gdaltest.post_reason('fail')
            return 'fail'

    return 'success'
def ogr_gmt_5():
    # Round-trip test for GMT multipolygon support: write two MULTIPOLYGON
    # features with integer IDs, reopen the file, and verify layer geometry
    # type, both geometries, and both attribute values. Old gdaltest-style
    # test returning 'success'/'fail'.

    #######################################################
    # Create gmtory Layer
    gmt_drv = ogr.GetDriverByName('GMT')
    gdaltest.gmt_ds = gmt_drv.CreateDataSource('tmp/mpoly.gmt')
    gdaltest.gmt_lyr = gdaltest.gmt_ds.CreateLayer('mpoly')

    #######################################################
    # Setup Schema
    ogrtest.quick_create_layer_def(gdaltest.gmt_lyr, [('ID', ogr.OFTInteger)])

    #######################################################
    # Write a first multipolygon
    dst_feat = ogr.Feature(feature_def=gdaltest.gmt_lyr.GetLayerDefn())
    dst_feat.SetGeometryDirectly(
        ogr.CreateGeometryFromWkt(
            'MULTIPOLYGON(((0 0,0 10,10 10,0 10,0 0),(3 3,4 4, 3 4,3 3)),((12 0,14 0,12 3,12 0)))'
        ))
    dst_feat.SetField('ID', 15)
    gdaltest.gmt_lyr.CreateFeature(dst_feat)

    dst_feat = ogr.Feature(feature_def=gdaltest.gmt_lyr.GetLayerDefn())
    dst_feat.SetGeometryDirectly(
        ogr.CreateGeometryFromWkt('MULTIPOLYGON(((30 20,40 20,30 30,30 20)))'))
    dst_feat.SetField('ID', 16)
    gdaltest.gmt_lyr.CreateFeature(dst_feat)

    # Release layer and destroy the datasource so the file is flushed to
    # disk before reopening.
    gdaltest.gmt_lyr = None
    gdaltest.gmt_ds.Destroy()
    gdaltest.gmt_ds = None

    # Reopen.
    ds = ogr.Open('tmp/mpoly.gmt')
    lyr = ds.GetLayer(0)

    if lyr.GetLayerDefn().GetGeomType() != ogr.wkbMultiPolygon:
        gdaltest.post_reason('did not get expected multipolygon type.')
        return 'fail'

    # check_feature_geometry returns non-zero (truthy) on mismatch.
    feat = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(
            feat,
            'MULTIPOLYGON(((0 0,0 10,10 10,0 10,0 0),(3 3,4 4, 3 4,3 3)),((12 0,14 0,12 3,12 0)))'
    ):
        return 'fail'
    if feat.GetField('ID') != 15:
        gdaltest.post_reason('got wrong id, first feature')
        return 'fail'

    feat = lyr.GetNextFeature()
    if ogrtest.check_feature_geometry(
            feat, 'MULTIPOLYGON(((30 20,40 20,30 30,30 20)))'):
        return 'fail'
    if feat.GetField('ID') != 16:
        gdaltest.post_reason('got wrong ID, second feature')
        return 'fail'

    # The layer must be exhausted after exactly two features.
    feat = lyr.GetNextFeature()
    if feat is not None:
        gdaltest.post_reason('did not get null feature when expected.')
        return 'fail'

    return 'success'
def _prepare_classif(self) :
    # Build (or reload from cache) the classification training data:
    # ground-truth label/ID arrays rasterized from self._gt, plus flattened
    # per-pixel sample matrices (EMP_99, spectral, and their concatenation).
    # All products are cached as .npy files under <outPath>/DATA.
    if not os.path.isdir(os.path.join(self._outPath,"DATA")):
        os.makedirs(os.path.join(self._outPath,"DATA"))

    if not os.path.isfile(os.path.join(self._outPath,"DATA","gt_labels.npy")) and not os.path.isfile(os.path.join(self._outPath,"DATA","gt_id.npy")) :
        # Gather gap-filled rasters: spectral bands first, then indices.
        lstSpectral = [os.path.join(self._inPath,"GAPF",File) for File in os.listdir(os.path.join(self._inPath,"GAPF")) if File.endswith("GAPF.tif")]
        lstSpectral.sort()
        lstSpectral.extend([os.path.join(self._inPath,"INDICES",File) for File in os.listdir(os.path.join(self._inPath,"INDICES")) if File.endswith("GAPF.tif")])

        # Use the first raster as the geometry/projection/size reference.
        ds = gdal.Open(lstSpectral[0])
        self._geoT = ds.GetGeoTransform()
        self._proj = ds.GetProjection()
        self._xsize = ds.RasterXSize
        self._ysize = ds.RasterYSize
        ds = None

        # Rasterize the ground-truth layer twice: class code (Byte) and
        # sample ID (UInt16), on the reference grid.
        mem_drv = gdal.GetDriverByName("MEM")
        gt_shp = ogr.Open(self._gt)
        gt_layer = gt_shp.GetLayer()
        dest1 = mem_drv.Create('', self._xsize, self._ysize, 1, gdal.GDT_Byte)
        dest1.SetGeoTransform(self._geoT)
        dest1.SetProjection(self._proj)
        gdal.RasterizeLayer(dest1, [1], gt_layer, options=["ATTRIBUTE=Code"])
        gt_rst = dest1.GetRasterBand(1).ReadAsArray()
        dest2 = mem_drv.Create('', self._xsize, self._ysize, 1, gdal.GDT_UInt16)
        dest2.SetGeoTransform(self._geoT)
        dest2.SetProjection(self._proj)
        gdal.RasterizeLayer(dest2, [1], gt_layer, options=["ATTRIBUTE=ID"])
        ID_rst = dest2.GetRasterBand(1).ReadAsArray()
        gt_shp = None
        gt_layer = None
        dest1 = None
        dest2 = None
        mem_drv = None

        # Keep only rasterized (non-zero code) pixels as training samples.
        self._gt_indices = np.nonzero(gt_rst)
        self._gt_labels = gt_rst[self._gt_indices]
        np.save(os.path.join(self._outPath,"DATA","gt_labels.npy"),self._gt_labels)
        self._gt_ID = ID_rst[self._gt_indices]
        np.save(os.path.join(self._outPath,"DATA","gt_id.npy"),self._gt_ID)
    else:
        self._gt_labels = np.load(os.path.join(self._outPath,"DATA","gt_labels.npy"))
        self._gt_ID = np.load(os.path.join(self._outPath,"DATA","gt_id.npy"))

    # EMP
    # Stack every band of every EMP_99 raster as one column per band,
    # sampled at the ground-truth pixel positions.
    self._emp99_data = os.path.join(self._outPath,"DATA","emp99_data.npy")
    if not os.path.isfile(self._emp99_data) :
        # NOTE(review): self._gt_indices is only set in the cache-miss branch
        # above; reaching here with a warm label cache but no emp99 cache
        # would raise AttributeError -- confirm callers never hit that state.
        lstEMP99 = glob.glob(os.path.join(self._inPath,"EMP_99")+os.sep+"*EMP.tif")
        lstEMP99.sort()
        emp99_samples = None
        for File in lstEMP99 :
            with rasterio.open(File) as ds :
                for j in range(ds.count) :
                    if emp99_samples is None :
                        emp99_samples = ds.read(j+1)[self._gt_indices]
                    else :
                        emp99_samples = np.column_stack((emp99_samples,ds.read(j+1)[self._gt_indices]))
        np.save(self._emp99_data,emp99_samples)

    # Spectral data
    self._spectral_data = os.path.join(self._outPath,"DATA","spectral_data.npy")
    if not os.path.isfile(self._spectral_data) :
        # NOTE(review): lstSpectral is likewise only defined in the cache-miss
        # branch above -- same warm-cache caveat as for the EMP block.
        spectral_samples = None
        for File in lstSpectral :
            with rasterio.open(File) as ds :
                for j in range(ds.count):
                    if spectral_samples is None :
                        spectral_samples = ds.read(j+1)[self._gt_indices]
                    else :
                        spectral_samples = np.column_stack((spectral_samples,ds.read(j+1)[self._gt_indices]))
        np.save(self._spectral_data,spectral_samples)

    # EMP99 + Spectral data
    # Concatenate the two cached matrices column-wise and cache the result.
    self._total99_data = os.path.join(self._outPath,"DATA","total99_data.npy")
    if not os.path.isfile(self._total99_data) :
        emp99_samples = np.load(self._emp99_data)
        spectral_samples = np.load(self._spectral_data)
        np.save(self._total99_data,np.column_stack((emp99_samples,spectral_samples)))
        emp99_samples = None
        spectral_samples = None
def landsat_query(conf, geojsonfile, start_date, end_date, cloud=50):
    """Search the USGS LANDSAT_8_C1 dataset over a geojson footprint.

    Queries the USGS inventory API for every scene that touches the
    bounding box of ``geojsonfile`` between ``start_date`` and
    ``end_date``, inclusive.

    Parameters
    ----------
    conf : dict
        Must hold ['landsat']['user'] and ['landsat']['pass'] values with
        your USGS credentials.
    geojsonfile : str
        Path to the geojson file defining the area of interest.
    start_date : str
        Query start in "yyyymmdd" format; longer strings are truncated.
    end_date : str
        Query end in "yyyymmdd" format; longer strings are truncated.
    cloud : float
        Maximum cloud cover to return.

    Returns
    -------
    list of dict or None
        One dictionary per scene as returned by the USGS API, or None when
        the USGS login fails.  See
        https://earthexplorer.usgs.gov/inventory/documentation/datamodel#Scene
    """
    # Bounding box of the first feature.  OGR's GetEnvelope() yields
    # (min lon, max lon, min lat, max lat).
    footprint = ogr.Open(geojsonfile)
    feature = footprint.GetLayer(0).GetFeature(0)
    geometry = feature.GetGeometryRef()
    lon_min, lon_max, lat_min, lat_max = geometry.GetEnvelope()
    # Release OGR handles before any network work.
    geometry = None
    feature = None
    footprint = None

    # yyyymmdd -> yyyy-mm-dd, the format the API expects.
    start_date = "{}-{}-{}".format(start_date[0:4], start_date[4:6], start_date[6:8])
    end_date = "{}-{}-{}".format(end_date[0:4], end_date[4:6], end_date[6:8])

    session = requests.Session()
    api_root = "https://earthexplorer.usgs.gov/inventory/json/v/1.4.1/"
    session_key = get_landsat_api_key(conf, session)
    if not session_key:
        log.error("Login to USGS failed.")
        return None

    form_headers = {"Content-Type": "application/x-www-form-urlencoded; charset=UTF-8"}
    data_request = {
        "apiKey": session_key,
        "datasetName": "LANDSAT_8_C1",
        "spatialFilter": {
            "filterType": "mbr",
            "lowerLeft": {
                "latitude": np.round(lat_min, 4),
                "longitude": np.round(lon_min, 4)
            },
            "upperRight": {
                "latitude": np.round(lat_max, 4),
                "longitude": np.round(lon_max, 4),
            },
        },
        "temporalFilter": {
            "startDate": start_date,
            "endDate": end_date
        },
        "maxCloudCover": cloud
    }

    log.info("Sending Landsat query:\n{}".format(data_request))
    search_request = Request(
        "GET",
        url=api_root + "search",
        params={"jsonRequest": json.dumps(data_request)},
        headers=form_headers
    )
    prepared = session.prepare_request(search_request)
    # usgs why dont you like real url encoding -_-
    prepared.url = prepared.url.replace("+", "").replace("%27", "%22")
    response = session.send(prepared)
    products = response.json()["data"]["results"]
    log.info("Retrieved {} product(s)".format(len(products)))

    log.info("Logging out of USGS")
    session.get(
        url=api_root + "logout",
        params={"jsonRequest": json.dumps({"apiKey": session_key})},
        headers=form_headers
    )
    return products
def ogr_gft_write():
    """Autotest: exercise write support of the OGR Google Fusion Tables driver.

    Creates a randomly named table, inserts and updates a feature, reads it
    back, verifies the field and geometry round-trips, deletes the feature,
    then drops the table.  Returns 'skip', 'fail' or 'success' per the
    gdaltest convention, and records writability in ogrtest.gft_can_write
    for later tests.
    """
    # Driver or auth key unavailable -> nothing to test.
    if ogrtest.gft_drv is None:
        return 'skip'
    if ogrtest.gft_auth_key is None:
        ogrtest.gft_can_write = False
        return 'skip'
    # Open the service in update mode.
    ds = ogr.Open('GFT:auth=%s' % ogrtest.gft_auth_key, update = 1)
    if ds is None:
        ogrtest.gft_can_write = False
        return 'skip'
    ogrtest.gft_can_write = True

    # Random table name so concurrent test runs do not collide; the value
    # is stashed on ogrtest for follow-up tests.
    import random
    ogrtest.gft_rand_val = random.randint(0,2147000000)
    table_name = "test_%d" % ogrtest.gft_rand_val

    lyr = ds.CreateLayer(table_name)
    lyr.CreateField(ogr.FieldDefn('strcol', ogr.OFTString))
    lyr.CreateField(ogr.FieldDefn('numcol', ogr.OFTReal))

    # Insert one feature carrying a polygon with an outer ring and a hole.
    feat = ogr.Feature(lyr.GetLayerDefn())
    feat.SetField('strcol', 'foo')
    feat.SetField('numcol', '3.45')
    expected_wkt = "POLYGON ((0 0,0 1,1 1,1 0),(0.25 0.25,0.25 0.75,0.75 0.75,0.75 0.25))"
    geom = ogr.CreateGeometryFromWkt(expected_wkt)
    feat.SetGeometry(geom)
    if lyr.CreateFeature(feat) != 0:
        gdaltest.post_reason('CreateFeature() failed')
        return 'fail'
    fid = feat.GetFID()

    # Update the same feature in place.
    feat.SetField('strcol', 'bar')
    if lyr.SetFeature(feat) != 0:
        gdaltest.post_reason('SetFeature() failed')
        return 'fail'

    # Read back via sequential read ...
    lyr.ResetReading()
    feat = lyr.GetNextFeature()
    if feat.GetFieldAsString('strcol') != 'bar':
        gdaltest.post_reason('GetNextFeature() did not get expected feature')
        feat.DumpReadable()
        return 'fail'
    # ... and via random (by-FID) read.
    feat = lyr.GetFeature(fid)
    if feat.GetFieldAsString('strcol') != 'bar':
        gdaltest.post_reason('GetFeature() did not get expected feature')
        feat.DumpReadable()
        return 'fail'

    # The geometry must round-trip exactly.
    got_wkt = feat.GetGeometryRef().ExportToWkt()
    if got_wkt != expected_wkt:
        gdaltest.post_reason('did not get expected geometry')
        print(got_wkt)
        return 'fail'

    if lyr.GetFeatureCount() != 1:
        gdaltest.post_reason('GetFeatureCount() did not returned expected value')
        return 'fail'

    if lyr.DeleteFeature(feat.GetFID()) != 0:
        gdaltest.post_reason('DeleteFeature() failed')
        return 'fail'

    # Drop the temporary table (GFT driver-specific pseudo-SQL).
    ds.ExecuteSQL('DELLAYER:%s' % table_name)
    ds = None
    return 'success'
# ================================== LOAD REQUIRED LIBRARIES ========================================= # import time import ogr import pandas as pd import numpy as np # ======================================== SET TIME COUNT ============================================ # starttime = time.strftime("%a, %d %b %Y %H:%M:%S" , time.localtime()) print("--------------------------------------------------------") print("Starting process, time: " + starttime) print("") # =================================== DATA PATHS AND DIRECTORIES====================================== # dir = '/Users/Vince/Documents/Uni MSc/Msc 7 Geoprocessing with Python/' country_data = dir + 'Assignment07_data/SouthAmerica/gadm_SouthAmerica.shp' pa_data = dir + 'Assignment07_data/WDPA_May2019_shapefile_polygons.shp' s_america = ogr.Open(country_data) pa_glob = ogr.Open(pa_data) lyrc = s_america.GetLayer() lyrc_names = [field.name for field in lyrc.schema] lyr_pa = pa_glob.GetLayer() lyrpa_names = [field.name for field in lyr_pa.schema] # 'terrestrial (Marine = 0), Status: designated, Category: IUCN_CAT all; STATUS_YR; REP_AREA; NAME' #create data frame for results summary = pd.DataFrame(columns={'Country ID':[], 'Country Name':[], 'PA Category':[], '# PAs':[],
def clip2shp(inBuffFile, zone, inMGRS, outGridFile): inBuff = ogr.Open(inBuffFile, 0) if inBuff is None: print "Could not open file {0}. Exit.".format(inBuffFile) layerBuff = inBuff.GetLayer() inGrid = ogr.Open(inMGRS, 0) if inGrid is None: print "Could not open file {0}. Exit.".format(inMGRS) layerGrid = inGrid.GetLayer() if os.path.exists(outGridFile): esriDriver.DeleteDataSource(outGridFile) gridDataSource = esriDriver.CreateDataSource(outGridFile) if gridDataSource is None: print "Could not create file {0}".format(outGridFile) # Get the input spatial reference from the Grid file inSpatRef = osr.SpatialReference() inSpatRef.ImportFromEPSG(4269) # Set the output file from WGS84 Geo to WGS84 UTM outSpatRef = osr.SpatialReference() outSpatRef.ImportFromEPSG(getEPSG(zone)),1 # Transform the coordinates coordTrans = osr.CoordinateTransformation(inSpatRef,outSpatRef) # Create a layer and new shapefile layerName = inMGRS.split('\\')[-2] lyr = gridDataSource.CreateLayer(layerName, outSpatRef, ogr.wkbPolygon) # Copy fields from MGRS Grid layer layerGridDef = layerGrid.GetLayerDefn() for i in range(layerGridDef.GetFieldCount()): lyr.CreateField(layerGridDef.GetFieldDefn(i)) # Test the buffer and grid intersection and add feature/fields to output for i in range(layerBuff.GetFeatureCount()): feature1 = layerBuff.GetFeature(i) geometry1 = feature1.GetGeometryRef() #envelope1 = geometry1.GetEnvelope() for i in range(layerGrid.GetFeatureCount()): feature2 = layerGrid.GetFeature(i) geometry2 = feature2.GetGeometryRef() # Create fields for MGRS grid attEasting = feature2.GetField("EASTING") attNorthing = feature2.GetField("NORTHING") attkmSQ_ID = feature2.GetField("kmSQ_ID") attGZD = feature2.GetField("GZD") attShape_Leng = feature2.GetField("Shape_Leng") attMGRS = feature2.GetField("MGRS") attMGRS_10km = feature2.GetField("MGRS_10km") attShape_Le_1 = feature2.GetField("Shape_Le_1") attShape_Area = feature2.GetField("Shape_Area") if geometry2.Intersects(geometry1): #intersection 
= geometry1.Intersection(geometry2) dstfeature = ogr.Feature(lyr.GetLayerDefn()) geometry2.Transform(coordTrans) dstfeature.SetGeometry(geometry2) #print "Get geometry information {0}".format(dstfeature.GetGeometryRef()) # Create fields for MGRS grid dstfeature.SetField("EASTING", attEasting) dstfeature.SetField("NORTHING", attNorthing) dstfeature.SetField("kmSQ_ID", attkmSQ_ID) dstfeature.SetField("GZD", attGZD) dstfeature.SetField("Shape_Leng", attShape_Leng) dstfeature.SetField("MGRS", attMGRS) dstfeature.SetField("MGRS_10km", attMGRS_10km) dstfeature.SetField("Shape_Le_1", attShape_Le_1) dstfeature.SetField("Shape_Area", attShape_Area) # if dstfeature.IsFieldSetAndNotNull("MGRS") is True: # print "Field is set and not null" # print "Get field information {0}".format(dstfeature.GetFieldAsString("MGRS")) # else: # print "The field is empty" lyr.CreateFeature(dstfeature) dstfeature.Destroy()
def ogr_osm_1(filename = 'data/test.pbf'):
    """Autotest: read a small OSM file and verify every derived layer.

    Checks layer geometry types, spatial reference, feature attributes and
    geometries for the points/lines/polygons/multipolygons/
    multilinestrings/other_relations layers produced by the OSM driver.
    Expectations fork on the input name: 'data/test.osm' (XML variant) and
    'tmp/ogr_osm_3' (converted dataset with simplified geometry types).
    Returns 'skip', 'fail' or 'success' in the gdaltest convention.
    """
    # OSM driver may be absent from this GDAL build.
    try:
        drv = ogr.GetDriverByName('OSM')
    except:
        drv = None
    if drv is None:
        return 'skip'
    ds = ogr.Open(filename)
    if ds is None:
        # The XML flavour needs Expat; treat its absence as a skip.
        if filename == 'data/test.osm' and gdal.GetLastErrorMsg().find('OSM XML detected, but Expat parser not available') == 0:
            return 'skip'
        gdaltest.post_reason('fail')
        return 'fail'

    # Test points
    lyr = ds.GetLayer('points')
    if lyr.GetGeomType() != ogr.wkbPoint:
        gdaltest.post_reason('fail')
        return 'fail'
    # Accept both WKT spellings of WGS84.
    sr = lyr.GetSpatialRef()
    if sr.ExportToWkt().find('GEOGCS["WGS 84",DATUM["WGS_1984",') != 0 and \
       sr.ExportToWkt().find('GEOGCS["GCS_WGS_1984",DATUM["WGS_1984"') != 0 :
        gdaltest.post_reason('fail')
        print(sr.ExportToWkt())
        return 'fail'
    if filename == 'data/test.osm':
        if lyr.GetExtent() != (2.0, 3.0, 49.0, 50.0):
            gdaltest.post_reason('fail')
            print(lyr.GetExtent())
            return 'fail'
    feat = lyr.GetNextFeature()
    if feat.GetFieldAsString('osm_id') != '3':
        gdaltest.post_reason('fail')
        feat.DumpReadable()
        return 'fail'
    if ogrtest.check_feature_geometry(feat, ogr.CreateGeometryFromWkt('POINT (3.0 49.5)')) != 0:
        gdaltest.post_reason('fail')
        feat.DumpReadable()
        return 'fail'
    # Exactly one point feature expected.
    feat = lyr.GetNextFeature()
    if feat is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    # Test lines
    lyr = ds.GetLayer('lines')
    if lyr.GetGeomType() != ogr.wkbLineString:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = lyr.GetNextFeature()
    if feat.GetFieldAsString('osm_id') != '1':
        gdaltest.post_reason('fail')
        feat.DumpReadable()
        return 'fail'
    if ogrtest.check_feature_geometry(feat, ogr.CreateGeometryFromWkt('LINESTRING (2 49,3 50)')) != 0:
        gdaltest.post_reason('fail')
        feat.DumpReadable()
        return 'fail'
    feat = lyr.GetNextFeature()
    if feat.GetFieldAsString('osm_id') != '6':
        gdaltest.post_reason('fail')
        feat.DumpReadable()
        return 'fail'
    if ogrtest.check_feature_geometry(feat, ogr.CreateGeometryFromWkt('LINESTRING (2 49,3 49,3 50,2 50,2 49)')) != 0:
        gdaltest.post_reason('fail')
        feat.DumpReadable()
        return 'fail'
    # Exactly two line features expected.
    feat = lyr.GetNextFeature()
    if feat is not None:
        gdaltest.post_reason('fail')
        return 'fail'

    # Test polygons
    lyr = ds.GetLayer('polygons')
    if lyr.GetGeomType() != ogr.wkbPolygon:
        gdaltest.post_reason('fail')
        return 'fail'
    feat = lyr.GetNextFeature()
    if feat.GetFieldAsString('osm_id') != '2':
        gdaltest.post_reason('fail')
        feat.DumpReadable()
        return 'fail'
    # Ring winding differs between the converted and direct datasets.
    if filename == 'tmp/ogr_osm_3':
        if ogrtest.check_feature_geometry(feat, ogr.CreateGeometryFromWkt('POLYGON ((2 49,2 50,3 50,3 49,2 49))')) != 0 :
            gdaltest.post_reason('fail')
            feat.DumpReadable()
            return 'fail'
    else:
        if ogrtest.check_feature_geometry(feat, ogr.CreateGeometryFromWkt('POLYGON ((2 49,3 49,3 50,2 50,2 49))')) != 0 :
            gdaltest.post_reason('fail')
            feat.DumpReadable()
            return 'fail'

    # Test multipolygons
    lyr = ds.GetLayer('multipolygons')
    # The converted dataset simplifies multi-part types to single-part.
    if filename == 'tmp/ogr_osm_3':
        if lyr.GetGeomType() != ogr.wkbPolygon:
            gdaltest.post_reason('fail')
            return 'fail'
    else:
        if lyr.GetGeomType() != ogr.wkbMultiPolygon:
            gdaltest.post_reason('fail')
            return 'fail'
    feat = lyr.GetNextFeature()
    if feat.GetFieldAsString('osm_id') != '1':
        gdaltest.post_reason('fail')
        feat.DumpReadable()
        return 'fail'
    if filename == 'tmp/ogr_osm_3':
        if ogrtest.check_feature_geometry(feat, ogr.CreateGeometryFromWkt('POLYGON ((2 49,2 50,3 50,3 49,2 49),(2.1 49.1,2.2 49.1,2.2 49.2,2.1 49.2,2.1 49.1))')) != 0:
            gdaltest.post_reason('fail')
            feat.DumpReadable()
            return 'fail'
    else:
        if ogrtest.check_feature_geometry(feat, ogr.CreateGeometryFromWkt('MULTIPOLYGON (((2 49,3 49,3 50,2 50,2 49),(2.1 49.1,2.2 49.1,2.2 49.2,2.1 49.2,2.1 49.1)))')) != 0:
            gdaltest.post_reason('fail')
            feat.DumpReadable()
            return 'fail'

    # Test multilinestrings
    lyr = ds.GetLayer('multilinestrings')
    if filename == 'tmp/ogr_osm_3':
        if lyr.GetGeomType() != ogr.wkbLineString:
            gdaltest.post_reason('fail')
            return 'fail'
    else:
        if lyr.GetGeomType() != ogr.wkbMultiLineString:
            gdaltest.post_reason('fail')
            return 'fail'
    feat = lyr.GetNextFeature()
    if feat.GetFieldAsString('osm_id') != '3':
        gdaltest.post_reason('fail')
        feat.DumpReadable()
        return 'fail'
    if filename == 'tmp/ogr_osm_3':
        if ogrtest.check_feature_geometry(feat, ogr.CreateGeometryFromWkt('LINESTRING (2 49,3 50)')) != 0:
            gdaltest.post_reason('fail')
            feat.DumpReadable()
            return 'fail'
    else:
        if ogrtest.check_feature_geometry(feat, ogr.CreateGeometryFromWkt('MULTILINESTRING ((2 49,3 50))')) != 0:
            gdaltest.post_reason('fail')
            feat.DumpReadable()
            return 'fail'

    # Test other_relations
    lyr = ds.GetLayer('other_relations')
    # The converted dataset has no other_relations layer at all.
    if filename == 'tmp/ogr_osm_3':
        if lyr is not None:
            gdaltest.post_reason('fail')
            return 'fail'
    else:
        if lyr.GetGeomType() != ogr.wkbGeometryCollection:
            gdaltest.post_reason('fail')
            return 'fail'
        feat = lyr.GetNextFeature()
        if feat.GetFieldAsString('osm_id') != '4':
            gdaltest.post_reason('fail')
            feat.DumpReadable()
            return 'fail'
        if ogrtest.check_feature_geometry(feat, ogr.CreateGeometryFromWkt('GEOMETRYCOLLECTION (POINT (2 49),LINESTRING (2 49,3 50))')) != 0:
            gdaltest.post_reason('fail')
            feat.DumpReadable()
            return 'fail'
    ds = None
    return 'success'
def analyse_existing_networks(NETWORK_NAME, conn, db, parameters, noioa, use_db, use_csv, logfilepath, nx_location):
    '''Run interdependency analysis over pre-built network(s).

    Each network listed in NETWORK_NAME is loaded either from a PostGIS
    database (use_db True, via nx_pgnet) or from node/edge csv text files,
    and analysed noioa times through interdependency_analysis.main().
    Returns an int error code when a helper reports one; otherwise None.
    '''
    import ogr, sys, tools
    # NOTE(review): sys.path.append() requires a path argument -- as
    # written this raises TypeError on every call.  Presumably
    # nx_location was meant to be appended; confirm with the author.
    sys.path.append()
    import nx_pgnet
    import networkx as nx
    import interdependency_analysis as ia
    #unpack variables
    metrics, failure, handling_variables, fileName, a_to_b_edges, write_step_to_db, write_results_table, db_parameters, store_n_e_atts, length = parameters
    # failure_type() returns an int error code, or the failure-type string.
    var = failure_type(failure)
    if type(var) == int:
        return var
    else:
        failuretype = var
    #if performing analysis on one network only
    if failure['stand_alone'] == True:
        count = 0
        #loop through the listed networks
        for nets in NETWORK_NAME:
            # Theoretic-network databases carry a per-network numeric suffix.
            if db == 'theoretic_networks_tree' or db == 'theoretic_networks_hc' or db == 'theoretic_networks_hr' or db == 'theoretic_networks_ahr' or db == 'theoretic_networks_ba' or db == 'theoretic_networks_ws' or db == 'theoretic_networks_gnm' or db == 'theoretic_networks_er':
                nets = str(nets) + '_' + str(count)
                count += 1
            iterations = 0
            #while noioa (the number of simulations to perform) is greater than the number performed
            while iterations < noioa:
                #if the network is to be got from the database
                if use_db == True:
                    #connect to the database and get the network
                    conn = ogr.Open(conn)
                    G = nx_pgnet.read(conn).pgnet(nets)
                #the network must come from a csv
                else:
                    #get the text file
                    # maybe replace file_path with fileName1
                    filepath = str(fileName) + '%s/%s.txt' % (db, nets)
                    var = tools.get_nodes_edges_csv(filepath)
                    if type(var) == int:
                        return var
                    else:
                        nodelist, edgelist = var
                    #build the network from the lists returned from the function
                    G = nx.Graph()
                    G.add_nodes_from(nodelist)
                    G.add_edges_from(edgelist)
                #set the name of the results text file
                fileName = str(fileName) + '%s/%s%s.txt' % (db, nets, failuretype)
                #package the parameters together
                # NOTE(review): this chained assignment re-unpacks the
                # original `parameters` tuple into the local names,
                # immediately overwriting the fileName set just above --
                # looks unintentional; verify which fileName ia.main()
                # is supposed to see.
                parameters = metrics, failure, handling_variables, fileName, a_to_b_edges, write_step_to_db, write_results_table, db_parameters, store_n_e_atts, length = parameters
                #need a value for network B (G2)
                G2 = None
                #perform the analysis
                ia.main(G, G2, parameters, logfilepath)
                iterations += 1
    #if dependency or interdependency
    elif failure['stand_alone'] == False:
        if use_db == True:
            # NOTE(review): `nets` is unbound on this branch (it is only
            # set in the stand-alone loop above), so this would raise
            # NameError before reaching the explicit GeneralError below.
            conn = ogr.Open(conn)
            G = nx_pgnet.read(conn).pgnet(nets)
            raise error_classes.GeneralError(
                'Error. This function does not work as yet.')
        elif use_db == False:
            #get both networks from csv
            filepath = str(fileName) + '%s/%s.txt' % (db, NETWORK_NAME[0])
            var = tools.get_nodes_edges_csv(filepath)
            if type(var) == int:
                return var
            else:
                nodelist, edgelist = var
            G1 = nx.Graph()
            G1.add_nodes_from(nodelist)
            G1.add_edges_from(edgelist)
            filepath = str(fileName) + '%s/%s.txt' % (db, NETWORK_NAME[1])
            var = tools.get_nodes_edges_csv(filepath)
            if type(var) == int:
                return var
            else:
                nodelist, edgelist = var
            G2 = nx.Graph()
            G2.add_nodes_from(nodelist)
            G2.add_edges_from(edgelist)
            ia.main(G1, G2, parameters, logfilepath)
    else:
        raise error_classes.GeneralError(
            'Error. The STAND_ALONE variable must have a boolean value')
import matplotlib.pyplot as plt import ogr ds = ogr.Open(r'D:\Wordpress\Amritdai\04printmdb\Nilbarahi.mdb') lyr = ds.GetLayer("Parcel") #make list of all the parcelno this database have all_parcel = list() for parcel in lyr: all_parcel.append(parcel.GetField("PARCELNO")) #take parcel no as input and if parcel no do not exist in database prompt for #next parcel no inparcelno = 0 def inputparcel(): inparcelno1 = int(raw_input("enter parcel no")) #print inparcelno1 if inparcelno1 in all_parcel: global inparcelno inparcelno = inparcelno1 else: print "this parcel no doesnt exist in this dbase" inputparcel() inputparcel() #get the fid of corresponding parcel no fid = 0 for parcel in lyr: parcelno1 = parcel.GetField("PARCELNO") #fid1 = parcel.GetField("OBJECTID") if parcelno1 == inparcelno: