def _make_multi(type_, maker, coords):
    """Build an OGR multi-geometry of *type_* from *coords*.

    Each element of *coords* is converted to a sub-geometry by *maker*;
    ownership of every sub-geometry is transferred to the container via
    AddGeometryDirectly, and the assembled container is returned.
    """
    multi = ogr.Geometry(type_)
    for part in map(maker, coords):
        multi.AddGeometryDirectly(part)
    return multi
# Save extent to a new Shapefile outShapefile = "tmp/bbox.shp" outDriver = ogr.GetDriverByName("ESRI Shapefile") # Remove output shapefile if it already exists if os.path.exists(outShapefile): outDriver.DeleteDataSource(outShapefile) # Create the output shapefile outDataSource = outDriver.CreateDataSource(outShapefile) outLayer = outDataSource.CreateLayer("bbox", geom_type=ogr.wkbPolygon) featureDefn = outLayer.GetLayerDefn() feature = ogr.Feature(featureDefn) ring = ogr.Geometry(ogr.wkbLinearRing) ring.AddPoint(-180, 85) ring.AddPoint(180, 85) ring.AddPoint(180, -85) ring.AddPoint(-180, -85) ring.AddPoint(-180, 85) poly = ogr.Geometry(ogr.wkbPolygon) poly.AddGeometry(ring) feature.SetGeometry(poly) outLayer.CreateFeature(feature) feature = None # Save and close DataSource outDataSource = None
def main(argv=None):
    """ogrinfo-style entry point: list or report on the layers of a data source.

    Parses command line options, opens the data source, and either executes
    a -sql statement or reports on each (or the selected) layer(s) via
    ReportOnLayer.  Returns 0 on success, 1 on failure, or the result of
    Usage() on bad arguments.
    """
    global bReadOnly
    global bVerbose
    global bSummaryOnly
    global nFetchFID
    global papszOptions

    pszWHERE = None
    pszDataSource = None
    papszLayers = None
    poSpatialFilter = None
    nRepeatCount = 1
    bAllLayers = False
    pszSQLStatement = None
    pszDialect = None
    options = {}
    pszGeomField = None

    if argv is None:
        argv = sys.argv
    argv = ogr.GeneralCmdLineProcessor(argv)

    # --------------------------------------------------------------------
    # Processing command line arguments.
    # --------------------------------------------------------------------
    if argv is None:
        return 1

    nArgc = len(argv)
    iArg = 1
    while iArg < nArgc:
        if EQUAL(argv[iArg], "--utility_version"):
            print("%s is running against GDAL %s" %
                  (argv[0], gdal.VersionInfo("RELEASE_NAME")))
            return 0
        elif EQUAL(argv[iArg], "-ro"):
            bReadOnly = True
        elif EQUAL(argv[iArg], "-q") or EQUAL(argv[iArg], "-quiet"):
            bVerbose = False
        elif EQUAL(argv[iArg], "-fid") and iArg < nArgc - 1:
            iArg = iArg + 1
            nFetchFID = int(argv[iArg])
        elif EQUAL(argv[iArg], "-spat") and iArg + 4 < nArgc:
            # Build a rectangular spatial filter from xmin ymin xmax ymax.
            oRing = ogr.Geometry(ogr.wkbLinearRing)
            oRing.AddPoint(float(argv[iArg + 1]), float(argv[iArg + 2]))
            oRing.AddPoint(float(argv[iArg + 1]), float(argv[iArg + 4]))
            oRing.AddPoint(float(argv[iArg + 3]), float(argv[iArg + 4]))
            oRing.AddPoint(float(argv[iArg + 3]), float(argv[iArg + 2]))
            oRing.AddPoint(float(argv[iArg + 1]), float(argv[iArg + 2]))
            poSpatialFilter = ogr.Geometry(ogr.wkbPolygon)
            poSpatialFilter.AddGeometry(oRing)
            iArg = iArg + 4
        elif EQUAL(argv[iArg], "-geomfield") and iArg < nArgc - 1:
            iArg = iArg + 1
            pszGeomField = argv[iArg]
        elif EQUAL(argv[iArg], "-where") and iArg < nArgc - 1:
            iArg = iArg + 1
            pszWHERE = argv[iArg]
        elif EQUAL(argv[iArg], "-sql") and iArg < nArgc - 1:
            iArg = iArg + 1
            pszSQLStatement = argv[iArg]
        elif EQUAL(argv[iArg], "-dialect") and iArg < nArgc - 1:
            iArg = iArg + 1
            pszDialect = argv[iArg]
        elif EQUAL(argv[iArg], "-rc") and iArg < nArgc - 1:
            iArg = iArg + 1
            nRepeatCount = int(argv[iArg])
        elif EQUAL(argv[iArg], "-al"):
            bAllLayers = True
        elif EQUAL(argv[iArg], "-so") or EQUAL(argv[iArg], "-summary"):
            bSummaryOnly = True
        elif len(argv[iArg]) > 8 and EQUAL(argv[iArg][0:8], "-fields="):
            # BUGFIX: "-fields=" is 8 characters, so the value starts at
            # index 8; the previous [7:] slice left a leading '=' in the
            # option value (compare the correct "-geom="/[6:] case below).
            options['DISPLAY_FIELDS'] = argv[iArg][8:]
        elif len(argv[iArg]) > 6 and EQUAL(argv[iArg][0:6], "-geom="):
            options['DISPLAY_GEOMETRY'] = argv[iArg][6:]
        elif argv[iArg][0] == '-':
            return Usage()
        elif pszDataSource is None:
            pszDataSource = argv[iArg]
        else:
            # Any further bare arguments are layer names; naming layers
            # explicitly cancels the implicit "all layers" mode.
            if papszLayers is None:
                papszLayers = []
            papszLayers.append(argv[iArg])
            bAllLayers = False
        iArg = iArg + 1

    if pszDataSource is None:
        return Usage()

    # --------------------------------------------------------------------
    # Open data source.
    # --------------------------------------------------------------------
    poDS = None
    poDriver = None
    poDS = ogr.Open(pszDataSource, not bReadOnly)
    if poDS is None and not bReadOnly:
        # Retry read-only if the update open failed.
        poDS = ogr.Open(pszDataSource, False)
        if poDS is not None and bVerbose:
            print("Had to open data source read-only.")
            bReadOnly = True

    # --------------------------------------------------------------------
    # Report failure.
    # --------------------------------------------------------------------
    if poDS is None:
        print("FAILURE:\n"
              "Unable to open datasource `%s' with the following drivers." % pszDataSource)
        for iDriver in range(ogr.GetDriverCount()):
            print(" -> %s" % ogr.GetDriver(iDriver).GetName())
        return 1

    poDriver = poDS.GetDriver()

    # --------------------------------------------------------------------
    # Some information messages.
    # --------------------------------------------------------------------
    if bVerbose:
        print("INFO: Open of `%s'\n"
              " using driver `%s' successful." % (pszDataSource, poDriver.GetName()))

    poDS_Name = poDS.GetName()
    # Python 2 only: normalize a unicode user path against a str internal name.
    if str(type(pszDataSource)) == "<type 'unicode'>" and str(
            type(poDS_Name)) == "<type 'str'>":
        poDS_Name = poDS_Name.decode("utf8")
    if bVerbose and pszDataSource != poDS_Name:
        print("INFO: Internal data source name `%s'\n"
              " different from user name `%s'." % (poDS_Name, pszDataSource))

    # --------------------------------------------------------------------
    # Special case for -sql clause.  No source layers required.
    # --------------------------------------------------------------------
    if pszSQLStatement is not None:
        poResultSet = None
        nRepeatCount = 0  # skip layer reporting.
        if papszLayers is not None:
            print("layer names ignored in combination with -sql.")
        if pszGeomField is None:
            poResultSet = poDS.ExecuteSQL(pszSQLStatement, poSpatialFilter,
                                          pszDialect)
        else:
            poResultSet = poDS.ExecuteSQL(pszSQLStatement, None, pszDialect)
        if poResultSet is not None:
            if pszWHERE is not None:
                if poResultSet.SetAttributeFilter(pszWHERE) != 0:
                    print("FAILURE: SetAttributeFilter(%s) failed." % pszWHERE)
                    return 1
            if pszGeomField is not None:
                ReportOnLayer(poResultSet, None, pszGeomField,
                              poSpatialFilter, options)
            else:
                ReportOnLayer(poResultSet, None, None, None, options)
            poDS.ReleaseResultSet(poResultSet)

    # gdal.Debug( "OGR", "GetLayerCount() = %d\n", poDS.GetLayerCount() )

    for iRepeat in range(nRepeatCount):
        if papszLayers is None:
            # ----------------------------------------------------------------
            # Process each data source layer.
            # ----------------------------------------------------------------
            for iLayer in range(poDS.GetLayerCount()):
                poLayer = poDS.GetLayer(iLayer)
                if poLayer is None:
                    print("FAILURE: Couldn't fetch advertised layer %d!" % iLayer)
                    return 1
                if not bAllLayers:
                    # Summary line: index, name and geometry type(s).
                    line = "%d: %s" % (iLayer + 1,
                                       poLayer.GetLayerDefn().GetName())
                    nGeomFieldCount = poLayer.GetLayerDefn().GetGeomFieldCount()
                    if nGeomFieldCount > 1:
                        line = line + " ("
                        for iGeom in range(nGeomFieldCount):
                            if iGeom > 0:
                                line = line + ", "
                            poGFldDefn = poLayer.GetLayerDefn().GetGeomFieldDefn(iGeom)
                            line = line + "%s" % ogr.GeometryTypeToName(
                                poGFldDefn.GetType())
                        line = line + ")"
                    if poLayer.GetLayerDefn().GetGeomType() != ogr.wkbUnknown:
                        line = line + " (%s)" % ogr.GeometryTypeToName(
                            poLayer.GetLayerDefn().GetGeomType())
                    print(line)
                else:
                    if iRepeat != 0:
                        poLayer.ResetReading()
                    ReportOnLayer(poLayer, pszWHERE, pszGeomField,
                                  poSpatialFilter, options)
        else:
            # ----------------------------------------------------------------
            # Process specified data source layers.
            # ----------------------------------------------------------------
            for papszIter in papszLayers:
                poLayer = poDS.GetLayerByName(papszIter)
                if poLayer is None:
                    print("FAILURE: Couldn't fetch requested layer %s!" % papszIter)
                    return 1
                if iRepeat != 0:
                    poLayer.ResetReading()
                ReportOnLayer(poLayer, pszWHERE, pszGeomField,
                              poSpatialFilter, options)

    # --------------------------------------------------------------------
    # Close down.
    # --------------------------------------------------------------------
    poDS.Destroy()
    return 0
def ogr_wasp_roughness_from_polygon_field():
    """WASP driver test: roughness change lines computed from polygon field.

    Creates six unit triangles around the origin with roughness values 0..5,
    then reads the generated map file back and checks the six expected
    left/right roughness pairs.  Returns 'success', 'fail' or 'skip'.
    """
    if ogr_wasp_create_ds() != 'success':
        return 'skip'

    # Without GEOS, layer creation is expected to fail: silence the error.
    if not ogrtest.have_geos():
        gdal.PushErrorHandler('CPLQuietErrorHandler')
    layer = gdaltest.wasp_ds.CreateLayer('mylayer',
                                         options=['WASP_FIELDS=roughness'],
                                         geom_type=ogr.wkbPolygon)
    if not ogrtest.have_geos():
        gdal.PopErrorHandler()

    if layer is None:  # fixed: identity comparison with None, not ==
        if ogrtest.have_geos():
            gdaltest.post_reason('unable to create layer')
            return 'fail'
        else:
            # Expected failure without GEOS support.
            return 'success'

    layer.CreateField(ogr.FieldDefn('roughness', ogr.OFTReal))
    layer.CreateField(ogr.FieldDefn('dummy', ogr.OFTString))

    # Six triangles fanning around the origin, one per sixth of the circle.
    for i in range(6):
        feat = ogr.Feature(layer.GetLayerDefn())
        feat.SetField(0, float(i))
        ring = ogr.Geometry(type=ogr.wkbLinearRing)
        ring.AddPoint(0, 0)
        ring.AddPoint(round(math.cos(i * math.pi / 3), 6),
                      round(math.sin(i * math.pi / 3), 6))
        ring.AddPoint(round(math.cos((i + 1) * math.pi / 3), 6),
                      round(math.sin((i + 1) * math.pi / 3), 6))
        ring.AddPoint(0, 0)
        poly = ogr.Geometry(type=ogr.wkbPolygon)
        poly.AddGeometry(ring)
        feat.SetGeometry(poly)
        if layer.CreateFeature(feat) != 0:
            gdaltest.post_reason('unable to create feature')
            return 'fail'

    # Drop references so the data source is flushed to disk before reading.
    del gdaltest.wasp_ds
    del layer

    # fixed: the file handle was previously never closed (resource leak).
    with open('tmp.map') as f:
        # Skip the 4 header lines.
        for i in range(4):
            f.readline()
        i = 0
        j = 0
        res = set()
        for line in f:
            # Boundary records are on every other line.
            if not i % 2:
                l, r, n = line.split()
                if int(n) != 2:
                    gdaltest.post_reason(
                        'number of points should be 2 and is %d' % int(n))
                    return 'fail'
                # Normalize the (left, right) pair so order doesn't matter.
                if float(r) > float(l):
                    res.add((float(l), float(r)))
                else:
                    res.add((float(r), float(l)))
                j += 1
            i += 1

    if j != 6:
        gdaltest.post_reason('there should be 6 boundaries and there are %d' % j)
        return 'fail'
    if res != set([(0, 1), (0, 5), (1, 2), (2, 3), (3, 4), (4, 5)]):
        print(res)
        gdaltest.post_reason('wrong values in boundaries')
        return 'fail'
    return 'success'
def makeArea(i, xsig, ysig, nbpix):
    """Compute the upslope contributing area for a seed point.

    Marks the pixel at (xsig, ysig) (EPSG:2154 coordinates, reprojected to
    the DEM's CRS) in a study raster, optionally dilates it to an
    nbpix x nbpix window, runs the SAGA upslope-area processing, and counts
    the resulting non-zero pixels.

    Returns the upslope area as an int: pixel count * pixel area / 1e6
    (presumably km² if the CRS units are metres — TODO confirm).
    """
    dem = gdal.Open(
        '/home/julien/sources/area_calculation/MNT25m_93_buech.tif')
    band1 = dem.GetRasterBand(1)
    data1 = BandReadAsArray(band1)
    demnodata = band1.GetNoDataValue()  # NOTE(review): read but unused below
    #ulx, xres, xskew, uly, yskew, yres = dem.GetGeoTransform()
    # Extract target reference from the tiff file
    target = osr.SpatialReference(wkt=dem.GetProjection())
    # Source coordinates are Lambert-93 (EPSG:2154).
    source = osr.SpatialReference()
    source.ImportFromEPSG(2154)
    transform = osr.CoordinateTransformation(source, target)
    # Reproject the seed point into the DEM's CRS, then to pixel coords.
    point = ogr.Geometry(ogr.wkbPoint)
    point.AddPoint(xsig, ysig)
    #point.AddPoint(916121, 6390656)
    point.Transform(transform)
    x, y = world_to_pixel(dem.GetGeoTransform(), point.GetX(), point.GetY())
    #print('PLOP %s'%i)
    #print('%s %s'%(xsig,ysig))
    #print('%s %s'%(x,y))
    # Zero array with data1's shape (both np.where branches are 0);
    # effectively np.zeros_like(data1).
    studyData = np.where(data1 == 0, 0, 0)
    # Mark the single seed pixel.
    studyData[y, x] = 1
    #studyData[0, 0] = 1
    #print(studyData[y, x+1])
    #print(dem.RasterXSize, dem.RasterYSize)
    if nbpix == 1:
        studyData3 = studyData
    else:
        # Dilate the seed to an nbpix x nbpix neighbourhood via convolution,
        # then re-binarize.
        studyData2 = signal.convolve2d(studyData, np.ones((nbpix, nbpix)),
                                       mode='same')
        studyData3 = np.where(studyData2 > 0, 1, 0)
    # Write the study mask as a GTiff with the DEM's georeferencing.
    driver = gdal.GetDriverByName('GTiff')
    studyOut = driver.Create(
        '/home/julien/sources/area_calculation/study%s.tif' % i,
        dem.RasterXSize, dem.RasterYSize, 1, band1.DataType)
    CopyDatasetInfo(dem, studyOut)
    bandOut = studyOut.GetRasterBand(1)
    bandOut.SetNoDataValue(0)
    BandWriteArray(bandOut, studyData3)
    # Explicitly release GDAL objects (band before dataset) so the file is
    # flushed to disk before the external SAGA processing reads it.
    del bandOut
    del studyOut
    studyOut = None
    bandOut = None
    del band1
    del dem
    band1 = None
    dem = None
    del studyData
    #time.sleep(3)
    upslopeAreaSagaProcessing(
        '/home/julien/sources/area_calculation/filled.tif',
        '/home/julien/sources/area_calculation/study%s.tif' % i,
        '/home/julien/sources/area_calculation/upslope%s.sdat' % i)
    # get area: count wet pixels in the SAGA upslope output
    upslope = gdal.Open(
        '/home/julien/sources/area_calculation/upslope%s.sdat' % i)
    band1 = upslope.GetRasterBand(1)
    data1 = BandReadAsArray(band1)
    upslopenodata = band1.GetNoDataValue()  # NOTE(review): read but unused
    ulx, xres, xskew, uly, yskew, yres = upslope.GetGeoTransform()
    #print('xres %s yres %s'%(xres, yres))
    nb = np.count_nonzero(data1)
    # Area = pixel count * pixel area, scaled by 1e6 (m² -> km² assumed).
    pixelArea = abs(xres) * abs(yres)
    area = int(nb * pixelArea / 1000000)
    del band1
    del upslope
    band1 = None
    upslope = None
    return area
def GridFloodingAreas(mydb_path_user, PathFiles, DamID, UseEnergyHead): NotErr = bool('True') errMsg = 'OK' MatriceRisultati = [] # --------------------------------- PathFiles = os.path.realpath(PathFiles) mydb_path_user = os.path.realpath(mydb_path_user) # polygon floodable area AreaInondabile = PathFiles + os.sep + 'AreaInondabile_tot.shp' # Polygon Areas1: first component of the floodable area # ------------------------------------------------------ # area on the right and left of the evaluated river axis # based on the width in the right and left obtained # from model propagation calculation # one-dimensional AreaInondabile_1 = PathFiles + os.sep + 'AreaInondabile_1.shp' if not os.path.exists(PathFiles): errMsg = "There is no data for the dam num =%s \nEffettuare prima il calcolo delle sezioni a valle !" % ( DamID) NotErr = bool() return NotErr, errMsg else: os.chdir(PathFiles) log_file = open('log.txt', 'w') timenow_our = datetime.now().strftime('%y-%m-%d %H:%M') log_file.write('Start %s\n' % timenow_our) log_file.close() # trace intermediate sections representative of the section CrossMedie = PathFiles + os.sep + 'CrossSecMean.shp' if not os.path.exists(CrossMedie): errMsg = "Missing CrossSecMean.shp for the dam num =%s \nPerform the calculation of the downstream sections first!" % ( DamID) NotErr = bool() return NotErr, errMsg StreamDH = PathFiles + os.sep + 'StreamDHFilled.tif' if not os.path.exists(StreamDH): errMsg = "Missing for the dam num =%s il StreamDH\nCarry out first ModificaDH !" % ( DamID) NotErr = bool() return NotErr, errMsg # poligoni tratti CrossSecPoly = PathFiles + os.sep + 'CrossSecPoly.shp' if not os.path.exists(CrossSecPoly): errMsg = "Missing CrossSecPoly.shp for the dam num =%s \nPerform the calculation of the downstream sections first!" 
% ( DamID) NotErr = bool() return NotErr, errMsg # poligoni tratti divisi in destra e sinistra CrossSecPoly_2 = PathFiles + os.sep + 'CrossSecPoly_2.shp' if not os.path.exists(CrossSecPoly_2): errMsg = "Missing CrossSecPoly_2.shp for the dam num =%s \nPerform the calculation of the downstream sections first !" % ( DamID) NotErr = bool() return NotErr, errMsg # i due poligoni sinistra e destra idraulica PolySxDx = PathFiles + os.sep + 'PolySxDx.shp' if not os.path.exists(PolySxDx): errMsg = "Missing PolySxDx.shp for the dam num =%s \nPerform the calculation of the downstream sections first !" % ( DamID) NotErr = bool() return NotErr, errMsg FileMatricePixDestra = PathFiles + os.sep + 'MatricePixDestra.csv' if not os.path.exists(FileMatricePixDestra): errMsg = "Missing for the dam num =%s MatricePixDestra.csv\nPerform CreaCurveAreaAltezza first !" % ( DamID) NotErr = bool() return NotErr, errMsg # ======================================= # Reading the characteristics of the GRID # ======================================= gdal.AllRegister() indataset = gdal.Open(StreamDH, GA_ReadOnly) if indataset is None: errMsg = 'Could not open file %s' % StreamDH NotErr = bool() return NotErr, errMsg geotransform = indataset.GetGeoTransform() originX = geotransform[0] originY = geotransform[3] pixelWidth = geotransform[1] pixelHeight = geotransform[5] cols = indataset.RasterXSize rows = indataset.RasterYSize bands = indataset.RasterCount iBand = 1 inband = indataset.GetRasterBand(iBand) inNoData = inband.GetNoDataValue() prj = indataset.GetProjectionRef() spatialRef = osr.SpatialReference() try: spatialRef.ImportFromWkt(prj) except: pass inband = None indataset = None # -------------------- # reading from the database # -------------------- conn = sqlite3.connect(mydb_path_user, detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES) cur = conn.cursor() # reading the data for the calculation of flooding widths NomeTabella = 'Q_H_max' sql = 'SELECT ' sql += ' PixDist' sql += 
', Progr_fiume' sql += ', Qmax' sql += ', Hmax' sql += ', Bmax' sql += ', Vmax' sql += ', Time' sql += ' FROM %s' % NomeTabella sql += ' WHERE DamID=%d' % (DamID) sql += ' ORDER BY PixDist;' cur.execute(sql) MatriceDati = cur.fetchall() if len(MatriceDati) == 0: errMsg = "Missing for the dam num =%s data Q_H_max\nCarry out first Calculation of propagation !" % ( DamID) NotErr = bool() return NotErr, errMsg ListaTratti = [] Progr_fiume = [] Qmax = [] Hmax = [] Bmax = [] Vmax = [] Time = [] for row in MatriceDati: ListaTratti.append(int(row[0])) Progr_fiume.append(float(row[1])) Qmax.append(float(row[2])) Hmax.append(float(row[3])) Bmax.append(float(row[4])) Vmax.append(float(row[5])) Time.append(float(row[6])) # array Progr_fiume_array = numpy.array(Progr_fiume, dtype=numpy.float) Qmax_array = numpy.array(Qmax, dtype=numpy.float) Hmax_array = numpy.array(Hmax, dtype=numpy.float) Bmax_array = numpy.array(Bmax, dtype=numpy.float) Vmax_array = numpy.array(Vmax, dtype=numpy.float) Time_array = numpy.array(Time, dtype=numpy.float) # finding the maximum depth value Hmax_tot = Hmax_array.max() # reading of the curves necessary to evaluate the shift to the right of the flood area fin = open(FileMatricePixDestra, 'r') reader = csv.reader(fin, delimiter=';') try: # python 2.7 headers = reader.next() except: # python 3.4 headers = reader.__next__() nn = len(headers) hvals = [] Vettoreh = [] for i in range(1, nn): hvals.append(headers[i]) pp = headers[i].split('=') Vettoreh.append(float(pp[1])) # water depth array H_Array = numpy.array(Vettoreh, dtype=numpy.float) # dictionary of section numbers dic_PixDist = {} # matrix of the quantities MatricePix = [] ii = -1 for row in reader: ii += 1 dic_PixDist[int(row[0])] = ii MatricePix.append(row[1:]) fin.close() # matrix of the percentage of area on the right bank of the river for each height MatriceArray = numpy.array(MatricePix, dtype=numpy.float) NomeTabellaMatrice = 'MatriceAexp' sql = 'SELECT ' sql += ' PixDist' sql += ', 
progr_lungo_fiume' sql += ', distanza_fiume' sql += ', distanza_linea_retta' sql += ', pend' sql += ', ka' sql += ', ma' sql += ', kq' sql += ', mq' sql += ', kcel' sql += ', mcel' sql += ' FROM %s' % NomeTabellaMatrice sql += ' WHERE DamID=%d' % (DamID) sql += ' ORDER BY PixDist;' cur.execute(sql) MatriceDati = cur.fetchall() if len(MatriceDati) == 0: errMsg = "Missing for the dam num =%s data MatriceAexp\nCarry out first calculation of geometric quantitiese !" % ( DamID) NotErr = bool() return NotErr, errMsg # coefficient matrix: ka;ma;kq;mq;kcel;mcel for each section dic_MatriceCoeff = {} # list of section numbers ListaNumSez = [] for row in MatriceDati: tratto_cur = int(row[0]) ListaNumSez.append(tratto_cur) dic_MatriceCoeff[tratto_cur] = row[3:] # Close communication with the database cur.close() conn.close() # reading of the sections # --------------------- nomecampoAltezza = 'hmax' driver = ogr.GetDriverByName('ESRI Shapefile') ds = driver.Open(CrossMedie, 1) if ds is None: errMsg = 'Could not open ' + CrossMedie NotErr = bool() return NotErr, errMsg layer = ds.GetLayer() feat = layer.GetNextFeature() Spatialref = layer.GetSpatialRef() Spatialref.AutoIdentifyEPSG() SourceEPSG = int(Spatialref.GetAuthorityCode(None)) # list of points in left and right ListaPtSx = [] ListaPtDx = [] # dictionary of flood limit distances in left and right dic_DistSx = {} dic_DistDx = {} DV_sez = {} Time_min_sez = {} while feat: NumSez = feat.GetField('id') if NumSez == 0: NumSez = ListaNumSez[0] # midpoint distance dist1 = feat.GetField('dist1') # progressive along the river path progr = feat.GetField('progr') linea = feat.GetGeometryRef() Length = linea.Length() pt1 = linea.GetPoint(0) pt2 = linea.GetPoint(1) Qsez = numpy.interp(progr, Progr_fiume_array, Qmax_array) Hsez = numpy.interp(progr, Progr_fiume_array, Hmax_array) Bsez = numpy.interp(progr, Progr_fiume_array, Bmax_array) Vsez = numpy.interp(progr, Progr_fiume_array, Vmax_array) Timesez = numpy.interp(progr, 
Progr_fiume_array, Time_array) # check if use energy elevation if UseEnergyHead: # instead of the depth of water use energy elevation hcinetica = Vsez**2 / 2.0 / 9.81 Htot = Hsez + hcinetica else: Htot = Hsez # load the dictionary DV_sez[NumSez] = Qsez / Bsez Time_min_sez[NumSez] = int(Timesez / 60.0) feat.SetField(nomecampoAltezza, Htot) layer.SetFeature(feat) # reading the widths of the wet area on the right and left # .......................................................... try: MatriceCoeff = dic_MatriceCoeff[NumSez] except: pass ka = float(MatriceCoeff[2]) ma = float(MatriceCoeff[3]) mb = ma - 1.0 # wet width for water level Bsez_tot = ka * ma * math.pow(Htot, mb) PercDx = numpy.interp(Htot, H_Array, MatriceArray[dic_PixDist[NumSez]]) Bdx = Bsez_tot * PercDx Bsx = Bsez_tot - Bdx dic_DistSx[NumSez] = Bsx dic_DistDx[NumSez] = Bdx PercAscSx = (dist1 - Bsx) / Length PercAscDx = (dist1 + Bdx) / Length Pt_Sx = PuntoIntermedio(pt1, pt2, PercAscSx) Pt_Dx = PuntoIntermedio(pt1, pt2, PercAscDx) ListaPtSx.append(Pt_Sx) ListaPtDx.append(Pt_Dx) feat = layer.GetNextFeature() ds.Destroy() log_file = open('log.txt', 'a') log_file.write('End scrittura hmax\n') log_file.close() # making the polygon based on the river path # ...................................................... try: # creating/connecting the db conn = db.connect(mydb_path_user) except: conn = sqlite3.connect(mydb_path_user, detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES) # import extention conn.enable_load_extension(True) conn.execute('SELECT load_extension("mod_spatialite")') # creating a Cursor cur = conn.cursor() NomeTabellaLinee = 'Downstreampath' sql = 'SELECT TotalLength,ST_AsText(geom) FROM %s WHERE DamID=%d' % ( NomeTabellaLinee, DamID) cur.execute(sql) ChkDiga = cur.fetchone() if ChkDiga == None: errMsg = "Nella tabella= %s non ci sono dati per la diga num =%s \nEffettuare prima il calcolo della linea a valle !" 
% ( NomeTabellaLinee, DamID) NotErr = bool() return NotErr, errMsg else: wkt_line = ChkDiga[1] TotalLength = ChkDiga[0] StreamLine = ogr.CreateGeometryFromWkt(wkt_line) StreamLine.FlattenTo2D() dic_StreamTratti = {} inDS1 = driver.Open(CrossSecPoly, 0) if inDS1 is None: errMsg = 'Could not open ' + CrossSecPoly NotErr = bool() return NotErr, errMsg InlayerCurve = inDS1.GetLayer() num_tratti = InlayerCurve.GetFeatureCount() feat = InlayerCurve.GetNextFeature() dic_NumTratto = {} ii = -1 while feat: NumSez = feat.GetField('id') ii += 1 dic_NumTratto[ii] = NumSez poly = feat.GetGeometryRef() line_curr = poly.Intersection(StreamLine) if line_curr != None: dic_StreamTratti[NumSez] = line_curr.ExportToWkt() else: txt = 'No intersection cross-sec num=%d' % NumSez print(txt) feat = InlayerCurve.GetNextFeature() inDS1.Destroy() # Close communication with the database cur.close() conn.close() ds = driver.Open(PolySxDx, 0) if ds is None: errMsg = 'Could not open ' + PolySxDx NotErr = bool() return NotErr, errMsg layer = ds.GetLayer() filtro = "lato = %d" % 0 layer.SetAttributeFilter(filtro) feat = layer.GetNextFeature() PoligonoSx = feat.GetGeometryRef() PoligonoSx_wkt = PoligonoSx.ExportToWkt() layer.SetAttributeFilter(None) layer.ResetReading() filtro = "lato = %d" % 1 layer.SetAttributeFilter(filtro) feat = layer.GetNextFeature() PoligonoDx = feat.GetGeometryRef() PoligonoDx_wkt = PoligonoDx.ExportToWkt() ds.Destroy() # initializing the polygon of the floodable area PoligonoAree1 = ogr.Geometry(ogr.wkbPolygon) PolySx = ogr.CreateGeometryFromWkt(PoligonoSx_wkt) PolyDx = ogr.CreateGeometryFromWkt(PoligonoDx_wkt) dist_min_pixel = pixelWidth for i in range(num_tratti): ii = dic_NumTratto[i] linea_curr_wkt = dic_StreamTratti[ii] linea_curr = ogr.CreateGeometryFromWkt(linea_curr_wkt) for lato in range(2): # check left if lato == 0: if dic_DistSx[ii] > dist_min_pixel: polytratto = linea_curr.Buffer(dic_DistSx[ii]) else: polytratto = linea_curr.Buffer(dist_min_pixel) NewGeom = 
polytratto.Intersection(PolySx) if NewGeom != None: PoligonoAree1 = PoligonoAree1.Union(NewGeom) polytratto.Destroy() NewGeom.Destroy() else: PoligonoAree1 = PoligonoAree1.Union(polytratto) # check right elif lato == 1: if dic_DistDx[ii] > dist_min_pixel: polytratto = linea_curr.Buffer(dic_DistDx[ii]) else: polytratto = linea_curr.Buffer(dist_min_pixel) NewGeom = polytratto.Intersection(PolyDx) if NewGeom != None: PoligonoAree1 = PoligonoAree1.Union(NewGeom) polytratto.Destroy() NewGeom.Destroy() else: PoligonoAree1 = PoligonoAree1.Union(polytratto) log_file = open('log.txt', 'a') log_file.write('End PoligonoAree1\n') log_file.close() # making a shapefile with the PolygonAree1 # ---------------------------------------- shpnew_1 = AreaInondabile_1 if os.path.exists(shpnew_1): driver.DeleteDataSource(shpnew_1) outDS_1 = driver.CreateDataSource(shpnew_1) if outDS_1 is None: errMsg = 'Could not create file %s' % shpnew_1 NotErr = bool() return NotErr, errMsg outLayer_1 = outDS_1.CreateLayer('AreaInondabile_1', Spatialref, geom_type=ogr.wkbMultiPolygon) fieldDefn2 = ogr.FieldDefn('id', ogr.OFTInteger) outLayer_1.CreateField(fieldDefn2) featureDefn_1 = outLayer_1.GetLayerDefn() feature = ogr.Feature(featureDefn_1) feature.SetField('id', 1) feature.SetGeometry(PoligonoAree1) outLayer_1.CreateFeature(feature) outDS_1.Destroy() # making the polygon # 2 based on the digital terrain model # --------------------------------------------------------------------- if not os.path.exists(StreamDH): errMsg = 'File StreamDHFilled %s does not exist' % os.path.realpath( StreamDH) NotErr = bool() return NotErr, errMsg infile = StreamDH indatasetElev = gdal.Open(infile, GA_ReadOnly) if indatasetElev is None: errMsg = 'Could not open ' + infile NotErr = bool() return NotErr, errMsg prj = indatasetElev.GetProjectionRef() geotransform = indatasetElev.GetGeoTransform() originXElev = geotransform[0] originYElev = geotransform[3] pixelWidthElev = geotransform[1] pixelHeightElev = 
geotransform[5] colsElev = indatasetElev.RasterXSize rowsElev = indatasetElev.RasterYSize bandsElev = indatasetElev.RasterCount iBand = 1 inbandElev = indatasetElev.GetRasterBand(iBand) inNoDataElev = inbandElev.GetNoDataValue() # reading the entire file at once DH = inbandElev.ReadAsArray(0, 0, colsElev, rowsElev).astype(numpy.float32) mask_Nodata = DH == inNoDataElev inDS1 = driver.Open(CrossMedie, 0) if inDS1 is None: errMsg = 'Could not open ' + CrossMedie NotErr = bool() return NotErr, errMsg InlayerCurve = inDS1.GetLayer() spatialRef_sez = InlayerCurve.GetSpatialRef() feat_defn = InlayerCurve.GetLayerDefn() NumFields = feat_defn.GetFieldCount() # creates a grid with depth to cross sections GridSez = numpy.zeros((rowsElev, colsElev), numpy.float32) format = 'MEM' type = GDT_Float32 driver2 = gdal.GetDriverByName(format) driver2.Register() gt = indatasetElev.GetGeoTransform() ds = driver2.Create('GridSez', indatasetElev.RasterXSize, indatasetElev.RasterYSize, 1, type) if gt is not None and gt != (0.0, 1.0, 0.0, 0.0, 0.0, 1.0): ds.SetGeoTransform(gt) if prj is not None and len(prj) > 0: ds.SetProjection(prj) else: prj = spatialRef.ExportToWkt() ds.SetProjection(prj) iBand = 1 testo = "ATTRIBUTE=%s" % (nomecampoAltezza) # Rasterize outband = ds.GetRasterBand(iBand) outband.WriteArray(GridSez, 0, 0) CampoValore = [testo] err = gdal.RasterizeLayer(ds, [iBand], InlayerCurve, burn_values=[0], options=CampoValore) if err != 0: raise Exception("error rasterizing layer: %s" % err) # Reading WL GridSezWL = outband.ReadAsArray().astype(numpy.float32) ds = None # INTERPOLATE Water Level Grid # ---------------------------- #size of grid xmin = originXElev xmax = xmin + colsElev * pixelWidthElev ymax = originYElev ymin = originYElev + rowsElev * pixelHeightElev nx = int((xmax - xmin + 1) / pixelWidthElev) ny = int(-(ymax - ymin + 1) / pixelHeightElev) # Generate a regular grid to interpolate the data. 
xi = numpy.linspace(xmin, xmax, nx) yi = numpy.linspace(ymin, ymax, ny) xi, yi = numpy.meshgrid(xi, yi) # Reading x,y,z mask = GridSezWL > 0 x = xi[mask] y = yi[mask] z = GridSezWL[mask] # Otherwise, try Method 2 - Interpolate using scipy interpolate griddata WLArray = il.griddata( (x, y), z, (xi, yi), method='linear' ) #(may use 'nearest', 'linear' or 'cubic' - although constant problems w linear) checkMask = numpy.isnan(WLArray) nnan = checkMask.sum() Nodata = -9999 if nnan > 0: WLArray = numpy.choose(checkMask, (WLArray, Nodata)) # WaterDepth calculation by difference between water and ground level Wdepth = WLArray - DH # filtering of isolated points and internal empty points Wdepth = signal.medfilt2d(Wdepth, kernel_size=7) # eliminates negative values maskWd = Wdepth <= 0.0 Wdepth = numpy.choose(maskWd, (Wdepth, Nodata)) # eliminate external anomalous values due to the filtering algorithm maskWd = Wdepth > 9999 Wdepth = numpy.choose(maskWd, (Wdepth, Nodata)) # adds the nodata of the terrain model Wdepth = numpy.choose(mask_Nodata, (Wdepth, Nodata)) # output file FileDEM_out = PathFiles + os.sep + 'Hmax.tif' format = 'GTiff' driver = gdal.GetDriverByName(format) type = GDT_Float32 gt = indatasetElev.GetGeoTransform() ds = driver.Create(FileDEM_out, indatasetElev.RasterXSize, indatasetElev.RasterYSize, 1, type) if gt is not None and gt != (0.0, 1.0, 0.0, 0.0, 0.0, 1.0): ds.SetGeoTransform(gt) # sets the reference system equal to the depth map of water: if it lacks sets the default if prj is not None and len(prj) > 0: ds.SetProjection(prj) else: prj = spatialRef.ExportToWkt() ds.SetProjection(prj) # writing raster iBand = 1 outband = ds.GetRasterBand(iBand) outband.WriteArray(Wdepth, 0, 0) outband.FlushCache() outband.SetNoDataValue(Nodata) outband.GetStatistics(0, 1) outband = None ds = None inDS1.Destroy() log_file = open('log.txt', 'a') log_file.write('End Hmax.tif\n') log_file.close() # ---------------------------- # Rasterize PoligonoAree1 # 
------------------------ PoligonoAree1_Raster = PathFiles + os.sep + 'PoligonoAree1.tif' orig_data_source = ogr.Open(shpnew_1) source_ds = ogr.GetDriverByName("Memory").CopyDataSource( orig_data_source, "") source_layer = source_ds.GetLayer() format = 'Gtiff' type = GDT_Int16 driver3 = gdal.GetDriverByName(format) driver3.Register() dsRaster = driver3.Create(PoligonoAree1_Raster, cols, rows, 1, type) gt1 = geotransform if gt1 is not None and gt1 != (0.0, 1.0, 0.0, 0.0, 0.0, 1.0): dsRaster.SetGeoTransform(gt1) if prj is not None and len(prj) > 0: dsRaster.SetProjection(prj) else: prj = spatialRef.ExportToWkt() dsRaster.SetProjection(prj) # Rasterize iBand = 1 outband = dsRaster.GetRasterBand(iBand) outNodata = -9999 ClassTratti = numpy.zeros((rows, cols)).astype(numpy.int) outband.WriteArray(ClassTratti, 0, 0) # Rasterize err = gdal.RasterizeLayer(dsRaster, [1], source_layer, burn_values=[0], options=["ATTRIBUTE=id"]) if err != 0: raise Exception("error rasterizing layer: %s" % err) # Reading from the raster of the matrix with value 1 in a flooded area MatriceDatiArea1 = outband.ReadAsArray(0, 0, cols, rows) # eliminates any points with H greater than Hmax DH_MatriceDatiArea1 = DH * MatriceDatiArea1 mask_greatHmax = DH_MatriceDatiArea1 > Hmax_tot nnn = mask_greatHmax.sum() MatriceDatiArea1 = numpy.choose(mask_greatHmax, (MatriceDatiArea1, 0)) # writing Nodata mask_Nodata = MatriceDatiArea1 == 0 MatriceDati = numpy.choose(mask_Nodata, (MatriceDatiArea1, outNodata)) outband.WriteArray(MatriceDati, 0, 0) outband.FlushCache() outband.SetNoDataValue(outNodata) outband.GetStatistics(0, 1) outband = None dsRaster = None orig_data_source.Destroy() # ---------------------------- # name of the output file with 1 in the wet cells FileDEM_out_1 = PathFiles + os.sep + 'HH.tif' format = 'GTiff' driver = gdal.GetDriverByName(format) type = GDT_Int16 gt = indatasetElev.GetGeoTransform() ds = driver.Create(FileDEM_out_1, indatasetElev.RasterXSize, indatasetElev.RasterYSize, 1, type) 
if gt is not None and gt != (0.0, 1.0, 0.0, 0.0, 0.0, 1.0): ds.SetGeoTransform(gt) # sets the reference system equal to the depth map of water: if it lacks sets the default if prj is not None and len(prj) > 0: ds.SetProjection(prj) else: prj = spatialRef.ExportToWkt() ds.SetProjection(prj) # writing raster iBand = 1 outband = ds.GetRasterBand(iBand) WW = Wdepth > 0 # adding polygon areas 1 # --------------------------- mask_Data1 = MatriceDatiArea1 == 1 # saving in the raster WW = numpy.choose(mask_Data1, (WW, 1)) outband.WriteArray(WW, 0, 0) outband.FlushCache() outband.SetNoDataValue(Nodata) outband.GetStatistics(0, 1) outband = None ds = None log_file = open('log.txt', 'a') log_file.write('End HH.tif\n') log_file.close() # Raster to vector # ------------------------- # this allows GDAL to throw Python Exceptions gdal.UseExceptions() log_file = open('log.txt', 'a') log_file.write('End gdal.UseExceptions()\n') log_file.close() fileName = FileDEM_out_1 src_ds = gdal.Open(fileName) if src_ds is None: errMsg = 'Could not open ' + fileName NotErr = bool() return NotErr, errMsg srcband = src_ds.GetRasterBand(1) srs = osr.SpatialReference() srs.ImportFromWkt(src_ds.GetProjection()) log_file = open('log.txt', 'a') log_file.write('End srs.ImportFromWkt(src_ds.GetProjection()\n') log_file.close() dst_layername = "PolyFtr" drv = ogr.GetDriverByName("ESRI Shapefile") dst_filename = PathFiles + os.sep + dst_layername + ".shp" if os.path.exists(dst_filename): drv.DeleteDataSource(dst_filename) dst_ds = drv.CreateDataSource(dst_filename) dst_layer = dst_ds.CreateLayer(dst_layername, srs=srs) newField = ogr.FieldDefn('id', ogr.OFTInteger) dst_layer.CreateField(newField) log_file = open('log.txt', 'a') log_file.write('End dst_layer.CreateField(newField)\n') log_file.close() # con bandmask gdal.Polygonize(srcband, srcband, dst_layer, 0, [], callback=None) log_file = open('log.txt', 'a') log_file.write('End Polygonize\n') log_file.close() src_ds = None dst_ds.Destroy() # deleting 
the temporary grid os.remove(fileName) log_file = open('log.txt', 'a') log_file.write('End remove HH.tif\n') log_file.close() # performing the union of the polygons # ---------------------------------- in_layername = PathFiles + os.sep + "PolyFtr.shp" shpdriver = ogr.GetDriverByName('ESRI Shapefile') inDS1 = shpdriver.Open(in_layername, 0) if inDS1 is None: errMsg = 'Could not open ' + in_layername NotErr = bool() return NotErr, errMsg InlayerCurve = inDS1.GetLayer() feat = InlayerCurve.GetNextFeature() poly_tot = ogr.Geometry(ogr.wkbMultiPolygon) while feat: poly = feat.GetGeometryRef() # aggiungo geometria poligonale poly_tot.AddGeometry(poly) feat = InlayerCurve.GetNextFeature() inDS1.Destroy() log_file = open('log.txt', 'a') log_file.write('End PolyFtr.shp\n') log_file.close() # creating the final flood area # ----------------------------- # saving in the geodatabase # --------------------------------------- try: # creating/connecting the db conn = db.connect(mydb_path_user) except: conn = sqlite3.connect(mydb_path_user, detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES) # import extention conn.enable_load_extension(True) conn.execute('SELECT load_extension("mod_spatialite")') cur = conn.cursor() TargetTabella = 'FloodExtent' sql = "SELECT srid FROM geometry_columns WHERE f_table_name='%s'" % ( TargetTabella.lower()) cur.execute(sql) record = cur.fetchone() if record != None: OriginEPSG = record[0] else: OriginEPSG = 32632 sql = 'SELECT PKUID,id FROM %s WHERE DamID=%d' % (TargetTabella, DamID) cur.execute(sql) ListaTratti = cur.fetchall() if len(ListaTratti) > 0: # delete previous data sql = 'DELETE FROM %s WHERE DamID=%d' % (TargetTabella, DamID) cur.execute(sql) conn.commit() inDS1 = shpdriver.Open(CrossSecPoly, 0) if inDS1 is None: errMsg = 'Could not open ' + CrossSecPoly NotErr = bool() return NotErr, errMsg InlayerCurve = inDS1.GetLayer() feat = InlayerCurve.GetNextFeature() while feat: NumSez = feat.GetField('id') poly = feat.GetGeometryRef() 
FloodSeverityString = FloodSeverity(DV_sez[NumSez]) Factor = ConseqFactot(FloodSeverityString, Time_min_sez[NumSez]) # making the intersection to get the polygon poly_curr = poly.Intersection(poly_tot) if poly_curr != None: sql = 'INSERT INTO %s (DamID,id,DV,FloodSeverity,WarningTimeMin,FatalityRate,geom) VALUES (%d' % ( TargetTabella, DamID) sql += ',%d' % NumSez sql += ',%.2f' % DV_sez[NumSez] sql += ',"%s"' % FloodSeverityString sql += ',%d' % Time_min_sez[NumSez] sql += ',%.3f' % Factor poly_curr.FlattenTo2D() # check if MULTIPOLYGON TipoGeom = poly_curr.GetGeometryName() if TipoGeom == 'POLYGON': multipolygon = ogr.Geometry(ogr.wkbMultiPolygon) multipolygon.AddGeometry(poly_curr) wkt2 = multipolygon.ExportToWkt() elif poly_curr.GetGeometryName() == 'MULTIPOLYGON': wkt2 = poly_curr.ExportToWkt() poly2 = ogr.CreateGeometryFromWkt(wkt2) GeomWKT = "GeomFromText('%s',%d)" % (wkt2, OriginEPSG) sql += ',%s' % GeomWKT sql += ');' cur.execute(sql) else: log_file = open('log.txt', 'a') log_file.write('Err tratto n=%d\n' % NumSez) log_file.close() feat = InlayerCurve.GetNextFeature() inDS1.Destroy() log_file = open('log.txt', 'a') log_file.write('End routine\n') log_file.close() conn.commit() # Close communication with the database cur.close() conn.close() return NotErr, errMsg
def ogr_wasp_elevation_from_linestring_z_toler():
    """Test WASP-driver elevation output from LineString25D features with a
    WASP_TOLERANCE simplification option.

    Writes 10 three-point lines at constant height i into 'tmp.map', then
    parses the file back: with GEOS available the tolerance should have
    simplified each line to 2 points; without GEOS the 3 points remain.

    Returns 'success', 'fail' or 'skip' (gdaltest test protocol).
    """
    if ogr_wasp_create_ds() != 'success':
        return 'skip'

    ref = osr.SpatialReference()
    ref.ImportFromProj4(
        '+proj=lcc +lat_1=46.8 +lat_0=46.8 +lon_0=0 +k_0=0.99987742 +x_0=600000 +y_0=2200000 +a=6378249.2 +b=6356514.999978254 +pm=2.337229167 +units=m +no_defs'
    )

    # Without GEOS the driver emits a warning that the tolerance cannot be
    # applied; silence that expected warning only.
    if not ogrtest.have_geos():
        gdal.PushErrorHandler('CPLQuietErrorHandler')
    layer = gdaltest.wasp_ds.CreateLayer('mylayer',
                                         ref,
                                         options=['WASP_TOLERANCE=.1'],
                                         geom_type=ogr.wkbLineString25D)
    if not ogrtest.have_geos():
        gdal.PopErrorHandler()

    # fix: was `layer == None`; identity check is the correct idiom
    if layer is None:
        gdaltest.post_reason('unable to create layer')
        return 'fail'

    dfn = ogr.FeatureDefn()
    for i in range(10):
        feat = ogr.Feature(dfn)
        line = ogr.Geometry(type=ogr.wkbLineString25D)
        line.AddPoint(i, 0, i)
        line.AddPoint(i, 0.5, i)
        line.AddPoint(i, 1, i)
        feat.SetGeometry(line)
        if layer.CreateFeature(feat) != 0:
            gdaltest.post_reason('unable to create feature')
            return 'fail'

    # Drop references so the datasource is flushed/closed before reading.
    del gdaltest.wasp_ds
    del layer

    # fix: the file handle was previously never closed
    with open('tmp.map') as f:
        # skip the 4 header lines of the map file
        for i in range(4):
            f.readline()

        i = 0
        j = 0
        for line in f:
            # every other line holds "<height> <nb_points>"
            if not i % 2:
                [h, n] = line.split()
                if int(n) != 2:
                    if ogrtest.have_geos():
                        # typo fix in message: "sould" -> "should"
                        gdaltest.post_reason(
                            'number of points should be 2 and is %s' % n)
                        return 'fail'
                    elif int(n) != 3:
                        gdaltest.post_reason(
                            'number of points should be 3 and is %s' % n)
                        return 'fail'
                if float(h) != j:
                    gdaltest.post_reason('altitude should be %d and is %s'
                                         % (j, h))
                    return 'fail'
                j += 1
            i += 1

    if j != 10:
        gdaltest.post_reason('nb of feature should be 10 and is %d' % j)
        return 'fail'
    return 'success'
def export_profiles_metadata(
        self, project_name, output_folder,
        ogr_format=GdalAux.ogr_formats['ESRI Shapefile']):
    """Export one point feature per stored profile to an OGR data source.

    :param project_name: base name of the output file.
    :param output_folder: folder passed through ``self.export_folder``.
    :param ogr_format: OGR output format (default: ESRI Shapefile).
    :return: True on success, None if the data source could not be created.
    :raises RuntimeError: when no profiles are found, a geometry cannot be
        set, or a feature cannot be created.
    """
    GdalAux()
    output = os.path.join(self.export_folder(output_folder=output_folder),
                          project_name)

    # create the data source
    try:
        ds = GdalAux.create_ogr_data_source(ogr_format=ogr_format,
                                            output_path=output)
        lyr = self._create_ogr_lyr_and_fields(ds)
    except RuntimeError as e:
        logger.error("%s" % e)
        return

    rows = self.db.list_profiles()
    # covers both None and empty list (previously two separate checks)
    if not rows:
        raise RuntimeError("Unable to retrieve profiles. Empty database?")

    # optional fields: (row index, field name) -- set only when truthy,
    # matching the original per-field `if row[i]:` guards
    optional_fields = (
        (6, 'agency'), (7, 'survey'), (8, 'vessel'), (9, 'sn'),
        (12, 'comments'), (13, 'press_uom'), (14, 'depth_uom'),
        (15, 'ss_uom'), (16, 'temp_uom'), (17, 'cond_uom'),
        (18, 'sal_uom'), (19, 'ss_at_mind'), (20, 'min_depth'),
        (21, 'max_depth'), (22, 'max_raw_d'),
    )

    for row in rows:
        ft = ogr.Feature(lyr.GetLayerDefn())
        ft.SetField('pk', int(row[0]))
        ft.SetField('datetime', row[1].isoformat())
        ft.SetField('sensor', Dicts.first_match(Dicts.sensor_types, row[3]))
        ft.SetField('probe', Dicts.first_match(Dicts.probe_types, row[4]))
        ft.SetField('path', row[5])
        ft.SetField('proc_time', row[10].isoformat())
        ft.SetField('proc_info', row[11])
        for idx, field_name in optional_fields:
            if row[idx]:
                ft.SetField(field_name, row[idx])

        pt = ogr.Geometry(ogr.wkbPoint)
        lat = row[2].y
        lon = row[2].x
        if lon > 180.0:  # Go back to negative longitude
            lon -= 360.0
        pt.SetPoint_2D(0, lon, lat)
        ft.SetField('POINT_X', lon)
        ft.SetField('POINT_Y', lat)

        try:
            ft.SetGeometry(pt)
        except Exception as e:
            # fix: RuntimeError was instantiated but never raised, so
            # geometry failures were silently ignored
            raise RuntimeError("%s > pt: %s, %s" % (e, lon, lat))

        if lyr.CreateFeature(ft) != 0:
            raise RuntimeError("Unable to create feature")
        ft.Destroy()

    ds = None
    return True
# Build a test MULTIPOLYGON from two hard-coded rings (coordinates around
# Sierra Leone / west Africa), presumably for display on a folium web map
# further below -- TODO confirm against the rest of this script.
import folium
from osgeo import ogr
import webbrowser

multipolygon = ogr.Geometry(ogr.wkbMultiPolygon)

# Create test polygon (first ring of the multipolygon)
ring1 = ogr.Geometry(ogr.wkbLinearRing)
ring1.AddPoint(-10.92, 8.84)
ring1.AddPoint(-10.67, 8.16)
ring1.AddPoint(-10.99, 8.1)
ring1.AddPoint(-11.02, 8.82)
#ring.AddPoint(1218405.0658121984, 721108.1805541387)
#ring.AddPoint(1179091.1646903288, 712782.8838459781)
poly = ogr.Geometry(ogr.wkbPolygon)
poly.AddGeometry(ring1)
multipolygon.AddGeometry(poly)

# Second ring/polygon; note `poly` is rebound -- the first polygon is
# already stored inside `multipolygon`.
ring2 = ogr.Geometry(ogr.wkbLinearRing)
ring2.AddPoint(-11.24, 8.88)
ring2.AddPoint(-11.38, 9.47)
ring2.AddPoint(-11.04, 9.5)
ring2.AddPoint(-11.07, 8.9)
poly = ogr.Geometry(ogr.wkbPolygon)
poly.AddGeometry(ring2)
multipolygon.AddGeometry(poly)

#geojson = poly.ExportToJson()
def test_gdal_rasterize_1():
    """Rasterize polygon and linestring features from a MapInfo layer into a
    3-band GTiff via the external gdal_rasterize utility and verify the
    band-2 checksum.

    Returns 'success', 'fail' or 'skip' (gdaltest-style test protocol).
    """
    if test_cli_utilities.get_gdal_rasterize_path() is None:
        return 'skip'

    # Setup working spatial reference
    #sr_wkt = 'LOCAL_CS["arbitrary"]'
    #sr = osr.SpatialReference( sr_wkt )
    sr = osr.SpatialReference()
    sr.ImportFromEPSG(32631)
    sr_wkt = sr.ExportToWkt()

    # Create a raster to rasterize into.
    target_ds = gdal.GetDriverByName('GTiff').Create('tmp/rast1.tif', 100,
                                                     100, 3, gdal.GDT_Byte)
    target_ds.SetGeoTransform((1000, 1, 0, 1100, 0, -1))
    target_ds.SetProjection(sr_wkt)

    # Close TIF file (dropping the reference flushes it to disk)
    target_ds = None

    # Create a layer to rasterize from.
    rast_ogr_ds = \
        ogr.GetDriverByName('MapInfo File').CreateDataSource('tmp/rast1.tab')
    rast_lyr = rast_ogr_ds.CreateLayer('rast1', srs=sr)

    rast_lyr.GetLayerDefn()
    field_defn = ogr.FieldDefn('foo')
    rast_lyr.CreateField(field_defn)

    # Add a polygon.
    wkt_geom = 'POLYGON((1020 1030,1020 1045,1050 1045,1050 1030,1020 1030))'
    feat = ogr.Feature(rast_lyr.GetLayerDefn())
    feat.SetGeometryDirectly(ogr.Geometry(wkt=wkt_geom))
    rast_lyr.CreateFeature(feat)

    # Add feature without geometry to test fix for #3310
    feat = ogr.Feature(rast_lyr.GetLayerDefn())
    rast_lyr.CreateFeature(feat)

    # Add a linestring.
    wkt_geom = 'LINESTRING(1000 1000, 1100 1050)'
    feat = ogr.Feature(rast_lyr.GetLayerDefn())
    feat.SetGeometryDirectly(ogr.Geometry(wkt=wkt_geom))
    rast_lyr.CreateFeature(feat)

    # Close file
    rast_ogr_ds.Destroy()

    # Run the algorithm (burn different values into bands 3/2/1).
    (out, err) = gdaltest.runexternal_out_and_err(
        test_cli_utilities.get_gdal_rasterize_path() +
        ' -b 3 -b 2 -b 1 -burn 200 -burn 220 -burn 240 -l rast1 tmp/rast1.tab tmp/rast1.tif')
    if not (err is None or err == ''):
        gdaltest.post_reason('got error/warning')
        print(err)
        return 'fail'

    # Check results: checksum recorded from a known-good run.
    target_ds = gdal.Open('tmp/rast1.tif')
    expected = 6452
    checksum = target_ds.GetRasterBand(2).Checksum()
    if checksum != expected:
        print(checksum)
        gdaltest.post_reason('Did not get expected image checksum')

        return 'fail'

    target_ds = None

    return 'success'
try: A = ensure_numeric(A, numpy.float) except Exception, e: msg = ('Array (%s) could not be converted to numeric array. ' 'I got type %s. Error message: %s' % (A, str(type(A)), e)) raise Exception(msg) msg = 'Array must be a 2d array of vertices. I got %s' % (str(A.shape)) verify(len(A.shape) == 2, msg) msg = 'A array must have two columns. I got %s' % (str(A.shape[0])) verify(A.shape[1] == 2, msg) N = A.shape[0] # Number of vertices line = ogr.Geometry(geometry_type) for i in range(N): line.AddPoint(A[i, 0], A[i, 1]) return line def rings_equal(x, y, rtol=1.0e-6, atol=1.0e-8): """Compares to linear rings as numpy arrays Args * x, y: Nx2 numpy arrays Returns: * True if x == y or x' == y (up to the specified tolerance)
def main():
    """Convert CHM vtu/pvd output to per-variable GeoTIFFs (optionally an
    aggregated netCDF archive), driven by a user configuration module given
    on the command line.

    Usage: main.py <config.py> [input_path]

    Fixes applied in review:
    - `print parameters` was Python 2 syntax (SyntaxError under Python 3).
    - str + int concatenation in the ulimit message raised TypeError.
    - appending to `variables` when it was None raised AttributeError.
    - the "Parameter ... not present" message printed the wrong variable.
    """
    gdal.UseExceptions()  # Enable errors

    ##### load user configurable paramters here #######
    # Check user defined configuraiton file
    if len(sys.argv) == 1:
        print('ERROR: main.py requires one argument [configuration file] (i.e. python main.py vtu2geo_config.py)')
        return

    # Get name of configuration file/module
    configfile = sys.argv[1]

    # Load in configuration file as module
    X = imp.load_source('', configfile)

    # if a 2nd command line argument is present, it is the input_path so use
    # that, otherwise try to use the one from the passed script
    input_path = ''
    if len(sys.argv) == 3:  # we have a 2nd CLI arg
        input_path = sys.argv[2]
        if hasattr(X, 'input_path'):
            print('Warning: Overwriting script defined input path with CL path')
    elif hasattr(X, 'input_path'):
        input_path = X.input_path
    else:
        print('ERROR: No input path. A pvd or vtu file must be specified.')
        exit(-1)

    if os.path.isdir(input_path):
        print('ERROR: Either a pvd or vtu file must be specified.')
        exit(-1)

    variables = X.variables

    parameters = []
    if hasattr(X, 'parameters'):
        parameters = X.parameters

    # Check if we want to constrain output to an example geotif
    constrain_flag = False
    if hasattr(X, 'constrain_tif_file'):
        constrain_tif_file = X.constrain_tif_file
        var_resample_method = X.var_resample_method
        param_resample_method = X.param_resample_method
        constrain_flag = True

    output_path = input_path[:input_path.rfind('/') + 1]
    if hasattr(X, 'output_path'):
        output_path = X.output_path

    pixel_size = 10
    if hasattr(X, 'pixel_size'):
        pixel_size = X.pixel_size
    else:
        print('Default pixel size of 10 mx10 m will be used.')

    user_define_extent = False
    if hasattr(X, 'user_define_extent'):
        user_define_extent = X.user_define_extent

    # Produces a lat/long regular grid in CF netCDF format instead of TIFs
    nc_archive = False
    if hasattr(X, 'nc_archive'):
        nc_archive = X.nc_archive

    # user defined output EPSG to use instead of the proj4 from the vtu
    out_EPSG = None
    if hasattr(X, 'out_EPSG'):
        out_EPSG = X.out_EPSG

    if parameters is not None and nc_archive:
        print('Parameters are ignored when writing the nc archive.')
        parameters = []

    all_touched = True
    if hasattr(X, 'all_touched'):
        all_touched = X.all_touched

    #####
    reader = vtk.vtkXMLUnstructuredGridReader()

    # see if we were given a single vtu file or a pvd xml file
    filename, file_extension = os.path.splitext(input_path)
    is_pvd = False
    pvd = [input_path]  # if not given a pvd file, make this iterable
    timesteps = None
    if file_extension == '.pvd':
        print('Detected pvd file, processing all linked vtu files')
        is_pvd = True
        parse = ET.parse(input_path)
        pvd = parse.findall(".//*[@file]")
        timesteps = parse.findall(".//*[@timestep]")

    # Get info for constrained output extent/resolution if selected
    if constrain_flag:
        ex_ds = gdal.Open(constrain_tif_file, GA_ReadOnly)
        gt = ex_ds.GetGeoTransform()
        pixel_width = np.abs(gt[1])
        pixel_height = np.abs(gt[5])
        # Take extent from user input
        if user_define_extent:
            o_xmin = X.o_xmin
            o_xmax = X.o_xmax
            o_ymin = X.o_ymin
            o_ymax = X.o_ymax
        else:  # Get extent for clipping from input tif
            o_xmin = gt[0]
            o_ymax = gt[3]
            o_xmax = o_xmin + gt[1] * ex_ds.RasterXSize
            o_ymin = o_ymax + gt[5] * ex_ds.RasterYSize

        print("Output pixel size is " + str(pixel_width) + " by " + str(pixel_height))
        ex_ds = None

    if constrain_flag:
        print(" Constrain flag currently not supported!")
        return -1

    files_processed = 1  # this really should be 1 for useful output

    # information to build up the nc meta data
    nc_rasters = {}
    for v in variables:
        nc_rasters[v] = []
    nc_time_counter = 0
    tifs_to_remove = []

    epoch = np.datetime64(0, 's')
    # if we loaded a pvd we have access to the timestep information
    if is_pvd and timesteps is not None:
        epoch = np.datetime64(int(timesteps[0].get('timestep')), 's')
    dt = 1  # model timestep, in seconds
    if timesteps is not None and len(timesteps) > 1:
        dt = int(timesteps[1].get('timestep')) - int(timesteps[0].get('timestep'))
    print(('Start epoch: %s, model dt = %i (s)' % (epoch, dt)))

    # because of how the netcdf is built we hold a set of file handles
    # before converting -- ensure the OS file limit allows it
    soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
    total_output_files = len(pvd) * (len(variables) + len(parameters))
    if soft < total_output_files or hard < total_output_files:
        print('The users soft or hard file limit is less than the total number of tmp files to be created.')
        # fix: was `'...' + total_output_files` (str + int -> TypeError)
        print(('The system ulimit should be raised to at least ' + str(total_output_files)))
        return -1

    for vtu in pvd:
        path = vtu
        vtu_file = ''
        if is_pvd:
            vtu_file = vtu.get('file')
            path = input_path[:input_path.rfind('/') + 1] + vtu_file
        else:
            # full path to vtu given: get just the vtu filename
            base = os.path.basename(path)
            vtu_file = os.path.splitext(base)[0]  # strip extension, keep name

        printProgress(files_processed, len(pvd), decimals=0)

        reader.SetFileName(path)
        # shut up a deprecated warning from vtk 8.1
        with stdchannel_redirected(sys.stderr, os.devnull):
            reader.Update()

        mesh = reader.GetOutput()

        # default the pixel size to (min+max)/2 of triangle edge lengths
        if not pixel_size:
            area_range = mesh.GetCellData().GetArray('Area').GetRange()
            pixel_size = (math.sqrt(area_range[0]) + math.sqrt(area_range[1])) / 2
            pixel_size = int(math.ceil(pixel_size))

        driver = ogr.GetDriverByName('Memory')
        output_usm = driver.CreateDataSource('out')

        srsin = osr.SpatialReference()
        if not mesh.GetFieldData().HasArray("proj4"):
            print("VTU file does not contain a proj4 field")
            return -1

        vtu_proj4 = mesh.GetFieldData().GetAbstractArray("proj4").GetValue(0)
        srsin.ImportFromProj4(vtu_proj4)
        is_geographic = srsin.IsGeographic()

        # output srs (same as input unless out_EPSG overrides it)
        srsout = osr.SpatialReference()
        srsout.ImportFromProj4(vtu_proj4)
        if out_EPSG:
            srsout.ImportFromEPSG(out_EPSG)

        trans = osr.CoordinateTransformation(srsin, srsout)
        layer = output_usm.CreateLayer('poly', srsout, ogr.wkbPolygon)
        cd = mesh.GetCellData()

        for i in range(0, cd.GetNumberOfArrays()):
            layer.CreateField(ogr.FieldDefn(cd.GetArrayName(i), ogr.OFTReal))

        # build the triangulation geometry
        for i in range(0, mesh.GetNumberOfCells()):
            v0 = mesh.GetCell(i).GetPointId(0)
            v1 = mesh.GetCell(i).GetPointId(1)
            v2 = mesh.GetCell(i).GetPointId(2)

            ring = ogr.Geometry(ogr.wkbLinearRing)
            if is_geographic:
                scale = 100000.  # undo the scaling CHM does for paraview output
                ring.AddPoint(mesh.GetPoint(v0)[0] / scale, mesh.GetPoint(v0)[1] / scale)
                ring.AddPoint(mesh.GetPoint(v1)[0] / scale, mesh.GetPoint(v1)[1] / scale)
                ring.AddPoint(mesh.GetPoint(v2)[0] / scale, mesh.GetPoint(v2)[1] / scale)
                # add the first point again to complete the ring
                ring.AddPoint(mesh.GetPoint(v0)[0] / scale, mesh.GetPoint(v0)[1] / scale)
            else:
                ring.AddPoint(mesh.GetPoint(v0)[0], mesh.GetPoint(v0)[1])
                ring.AddPoint(mesh.GetPoint(v1)[0], mesh.GetPoint(v1)[1])
                ring.AddPoint(mesh.GetPoint(v2)[0], mesh.GetPoint(v2)[1])
                # add the first point again to complete the ring
                ring.AddPoint(mesh.GetPoint(v0)[0], mesh.GetPoint(v0)[1])

            ring.Transform(trans)
            tpoly = ogr.Geometry(ogr.wkbPolygon)
            tpoly.AddGeometry(ring)

            feature = ogr.Feature(layer.GetLayerDefn())
            feature.SetGeometry(tpoly)

            if variables is None:
                # fix: was appending to None; collect every cell array name
                variables = []
                for j in range(0, cd.GetNumberOfArrays()):
                    name = cd.GetArrayName(j)
                    variables.append(name)

            for v in variables:
                try:
                    data = cd.GetArray(v).GetTuple(i)
                    feature.SetField(str(v), float(data[0]))
                except:
                    print("Variable %s not present in mesh" % v)
                    return -1

            if parameters is not None:
                for p in parameters:
                    try:
                        data = cd.GetArray(p).GetTuple(i)
                        feature.SetField(str(p), float(data[0]))
                    except:
                        # fix: message previously printed `v` (wrong variable)
                        print("Parameter %s not present in mesh" % p)
                        return -1

            layer.CreateFeature(feature)

        for var in variables:
            target_fname = os.path.join(
                output_path,
                vtu_file + '_' + var.replace(" ", "_") + str(pixel_size) +
                'x' + str(pixel_size) + '.tif')

            rasterize(layer, srsout, target_fname, pixel_size, var, all_touched)

            if nc_archive:
                df = xr.open_rasterio(target_fname).sel(band=1).drop('band')
                df = df.rename({'x': 'lon', 'y': 'lat'})
                # automatically converted to min or hours in the output nc
                df.coords['time'] = epoch + nc_time_counter * np.timedelta64(dt, 's')
                df.name = var

                nc_rasters[var].append(df)  # lazy loaded at the to_netcdf call

                # remove the tifs we produce
                tifs_to_remove.append(target_fname)

        if parameters is not None:
            # fix: was Python 2 `print parameters`
            print(parameters)
            for p in parameters:
                target_param_fname = os.path.join(
                    output_path,
                    vtu_file + '_' + p.replace(" ", "_") + str(pixel_size) +
                    'x' + str(pixel_size) + '.tif')
                rasterize(layer, srsout, target_param_fname, pixel_size, p, all_touched)

        nc_time_counter += 1
        # we don't need to dump parameters for each timestep as they are
        # currently assumed invariant with time.
        parameters = None

        # no parameters and no variables, just exit at this point
        if not variables and parameters is None:
            break

        files_processed += 1

    if nc_archive:
        datasets = []
        for var, rasters in nc_rasters.items():
            a = xr.concat(rasters, dim='time')
            datasets.append(a.to_dataset())

        arr = xr.merge(datasets)
        print('Writing netCDF file')
        fname = os.path.join(os.path.splitext(input_path)[0] + '.nc')
        arr.to_netcdf(fname, engine='netcdf4')

        # best-effort cleanup of the intermediate tifs
        for f in tifs_to_remove:
            try:
                os.remove(f)
            except:
                pass
def write_tin(cls, feature_list_a, feature_list_b, path, list_of_list=True):
    """Write TIN edges as KML line features, one line per paired point of
    *feature_list_a* and *feature_list_b*.

    :param feature_list_a: list of points, either [[x, y], ...] or, when
        ``list_of_list`` is True, [[x0, x1, ...], [y0, y1, ...]].
    :param feature_list_b: same shape as *feature_list_a*.
    :param path: output path (a trailing '.kml' is stripped; the KML
        driver adds its own extension).
    :param list_of_list: whether the inputs use the two-parallel-lists form.
    :return: True on success, None if the data source could not be created.
    :raises RuntimeError: on invalid input or feature-creation failure.
    """
    if not os.path.exists(os.path.dirname(path)):
        raise RuntimeError("the passed path does not exist: %s" % path)
    path = Helper.truncate_too_long(path)

    if not isinstance(feature_list_a, list):
        raise RuntimeError(
            "the passed parameter as feature_list_a is not a list: %s"
            % type(feature_list_a))
    if not isinstance(feature_list_b, list):
        raise RuntimeError(
            "the passed parameter as feature_list_b is not a list: %s"
            % type(feature_list_b))

    if os.path.splitext(path)[-1] == '.kml':
        path = path[:-4]

    GdalAux()
    # create the data source
    try:
        ds = GdalAux.create_ogr_data_source(
            ogr_format=GdalAux.ogr_formats['KML'], output_path=path)
        lyr = cls._create_ogr_line_lyr_and_fields(ds)
    except RuntimeError as e:
        logger.error("%s" % e)
        return

    if list_of_list:
        # convert [[xs], [ys]] to [[x, y], ...] for both inputs
        if len(feature_list_a[0]) != len(feature_list_a[1]):
            raise RuntimeError("invalid input for list of list")
        if len(feature_list_b[0]) != len(feature_list_b[1]):
            raise RuntimeError("invalid input for list of list")
        if len(feature_list_a) != len(feature_list_b):
            raise RuntimeError("invalid input for list of list")

        tmp_list_a = feature_list_a
        feature_list_a = list()
        for i, x in enumerate(tmp_list_a[0]):
            feature_list_a.append([x, tmp_list_a[1][i]])

        tmp_list_b = feature_list_b
        feature_list_b = list()
        for i, x in enumerate(tmp_list_b[0]):
            feature_list_b.append([x, tmp_list_b[1][i]])

    for i, point in enumerate(feature_list_a):
        ft = ogr.Feature(lyr.GetLayerDefn())
        ft.SetField('note', "tin edge")

        ln = ogr.Geometry(ogr.wkbLineString)
        ln.AddPoint(point[0], point[1])
        ln.AddPoint(feature_list_b[i][0], feature_list_b[i][1])

        try:
            ft.SetGeometry(ln)
        except Exception as e:
            # fix: the RuntimeError was instantiated but never raised,
            # silently dropping geometry failures
            raise RuntimeError(
                "%s > ln: %s, %s / %s, %s"
                % (e, point[0], point[1],
                   feature_list_b[i][0], feature_list_b[i][1]))

        if lyr.CreateFeature(ft) != 0:
            raise RuntimeError("Unable to create feature")
        ft.Destroy()

    return True
def _make_linear(type_, coordinates):
    """Build a linear OGR geometry of kind *type_* from 2-D coordinate
    tuples, appending the points in order."""
    result = ogr.Geometry(type_)
    for coord in coordinates:
        result.AddPoint_2D(*coord)
    return result
def test_rasterize_5():
    """Rasterize overlapping polygons and linestrings into an in-memory
    3-band raster with MERGE_ALG=ADD (burn values accumulate where
    geometries overlap) and verify the band-2 checksum."""
    # Setup working spatial reference
    sr_wkt = 'LOCAL_CS["arbitrary"]'
    sr = osr.SpatialReference(sr_wkt)

    # Create a memory raster to rasterize into.
    target_ds = gdal.GetDriverByName('MEM').Create('', 100, 100, 3,
                                                   gdal.GDT_Byte)
    target_ds.SetGeoTransform((1000, 1, 0, 1100, 0, -1))
    target_ds.SetProjection(sr_wkt)

    # Create a memory layer to rasterize from.
    rast_ogr_ds = \
        ogr.GetDriverByName('Memory').CreateDataSource('wrk')
    rast_mem_lyr = rast_ogr_ds.CreateLayer('poly', srs=sr)

    # Add polygons (they overlap, exercising the ADD merge algorithm).
    wkt_geom = 'POLYGON((1020 1030,1020 1045,1050 1045,1050 1030,1020 1030))'
    feat = ogr.Feature(rast_mem_lyr.GetLayerDefn())
    feat.SetGeometryDirectly(ogr.Geometry(wkt=wkt_geom))
    rast_mem_lyr.CreateFeature(feat)

    wkt_geom = 'POLYGON((1045 1050,1055 1050,1055 1020,1045 1020,1045 1050))'
    feat = ogr.Feature(rast_mem_lyr.GetLayerDefn())
    feat.SetGeometryDirectly(ogr.Geometry(wkt=wkt_geom))
    rast_mem_lyr.CreateFeature(feat)

    # Add linestrings.
    wkt_geom = 'LINESTRING(1000 1000, 1100 1050)'
    feat = ogr.Feature(rast_mem_lyr.GetLayerDefn())
    feat.SetGeometryDirectly(ogr.Geometry(wkt=wkt_geom))
    rast_mem_lyr.CreateFeature(feat)

    wkt_geom = 'LINESTRING(1005 1000, 1000 1050)'
    feat = ogr.Feature(rast_mem_lyr.GetLayerDefn())
    feat.SetGeometryDirectly(ogr.Geometry(wkt=wkt_geom))
    rast_mem_lyr.CreateFeature(feat)

    # Run the algorithm.
    err = gdal.RasterizeLayer(target_ds, [1, 2, 3], rast_mem_lyr,
                              burn_values=[100, 110, 120],
                              options=["MERGE_ALG=ADD"])

    assert err == 0, 'got non-zero result code from RasterizeLayer'

    # Check results: checksum recorded from a known-good run.
    expected = 13022
    checksum = target_ds.GetRasterBand(2).Checksum()
    if checksum != expected:
        print(checksum)
        # dump the raster to disk to help debug a checksum drift
        gdal.GetDriverByName('GTiff').CreateCopy('tmp/rasterize_5.tif',
                                                 target_ds)
        pytest.fail('Did not get expected image checksum')
def crawlering(self):
    """Scrape the artificial-reef / protected-reef tables, convert the DMS
    coordinates to buffered WGS84 geometries, and write one CSV row per reef
    (County, ReefName, GPSLocation, Coordinate, Date).

    NOTE(review): relies on module-level `crawler` and `soup` objects --
    behavior depends on what `crawler.link2web()` loaded into `soup`.
    """
    crawler.link2web()

    # opencsv: the output CSV path is stored in dirpath_2.txt
    txt = open("dirpath_2.txt", "r")
    filedir = txt.read()
    txt.close()  # fix: handle was previously left open
    # renamed from `file` (shadowed the builtin)
    csv_file = open(filedir, "w", encoding='ANSI')
    fieldnames = ['County', 'ReefName', 'GPSLocation', 'Coordinate', 'Date']
    csvCursor = csv.DictWriter(csv_file, fieldnames=fieldnames, delimiter=',')
    csvCursor.writeheader()

    print('--------------------------人工魚礁、保護礁區資訊--------------------------')
    cty_num = 0
    coordinateList = []

    # Walk the paired radius / reef-name table cells
    for idx, (rad, nme) in enumerate(zip(soup.find_all(w='259'),
                                         soup.find_all(w='220'))):
        nme_num = int(nme['rowspan'])  # rows spanned by this reef, e.g. 2
        if cty_num - nme_num < 0:
            # entering a new county cell; its rowspan covers several reefs
            cty_num = int(nme.find_previous_sibling()['rowspan'])  # e.g. 4
            cty_num = cty_num - nme_num  # e.g. 4-2 = 2 rows left
            county = nme.find_previous_sibling().text.strip()
        else:
            cty_num = cty_num - nme_num  # e.g. 2-2 = 0
        name = nme.text.strip()
        radius = rad.text.strip().replace(',', '')
        date = rad.find_next_sibling().text.strip()
        coordinate = nme.find_next_sibling().text.strip()
        coordinateList.append(nme.find_next_sibling().text.strip())
        coo_ = nme
        for x in range(nme_num - 1):
            coo_ = coo_.find_next('tr').find(w='239')
            coordinateList.append(coo_.text.strip())

        # Convert the textual radius to meters
        geo_shape = 1
        if radius.find('圓心') > 0:  # "circle center": treat as multipoint
            geo_shape = 0  # 'multipoint'
        if radius.find('浬') > 0:  # nautical miles -> meters
            radius = re.findall(r"\d+\.?\d*", radius)
            radius = float(radius[0]) * 1852
        elif radius.find('公尺') > 0:  # already meters
            radius = re.findall(r"\d+\.?\d*", radius)
            radius = float(radius[0])
        elif radius.find('浬') == -1 and radius.find('公尺') == -1:
            print('no buffer')
            radius = 0

        # Clean up the raw coordinate strings
        for idxx, co in enumerate(coordinateList):
            co = co.replace('\r\n ', '')
            co = co.replace('\'\'', '"')
            co = co.replace('”', '"')
            remove_words = 'ABCD、ABCD點:。'
            for word in remove_words:
                co = co.replace(word, '')
            coordinateList[idxx] = co

        print(str(idx + 1) + '.', county, name, date)
        print('radius: %s公尺' % (radius))

        point = ogr.Geometry(ogr.wkbPoint)
        line = ogr.Geometry(ogr.wkbLineString)
        ring = ogr.Geometry(ogr.wkbLinearRing)
        poly = ogr.Geometry(ogr.wkbPolygon)
        multipoint = ogr.Geometry(ogr.wkbMultiPoint)

        for point_ in coordinateList:
            if point_[0] == 'N':
                # DMS string (N.., E..) -> decimal degrees
                lat = re.split(',', point_)[0]
                lat = lat.replace(' ', '°')
                lat = re.split('°|\'|\"', lat)
                lati = float(lat[0].replace('N', '').strip()) \
                    + float(lat[1]) / 60 + float(lat[2]) / 3600
                lon = re.split(',', point_)[1]
                lon = re.split('°|\'|\"', lon)
                long = float(lon[0].replace('E', '')) \
                    + float(lon[1]) / 60 + float(lon[2]) / 3600
            else:
                lati, long = 0, 0
            if point_ == coordinateList[0]:
                Firstlat = lati
                Firstlon = long
            # fix: `geo_shape is not 0` compared identity, not value
            if len(coordinateList) == 1:
                point.AddPoint(long, lati)
            elif geo_shape != 0 and len(coordinateList) == 2:
                line.AddPoint(long, lati)
            elif geo_shape != 0 and len(coordinateList) > 2:
                ring.AddPoint(long, lati)
            elif geo_shape == 0 and len(coordinateList) > 1:
                point.AddPoint(long, lati)
                multipoint.AddGeometry(point)

        # Transform to twd97 and Buffer then Transform back to Wgs84
        if county in ['澎湖縣', '澎湖', '金門縣', '金門', '馬祖縣', '馬祖',
                      '連江縣', '連江']:
            if len(coordinateList) == 1:
                point.Transform(crawler.SpatialRefTrans(2))
                StrictArea = point.Buffer(radius)
            elif geo_shape != 0 and len(coordinateList) == 2:
                line.Transform(crawler.SpatialRefTrans(2))
                StrictArea = line.Buffer(radius)
            elif geo_shape != 0 and len(coordinateList) > 2:
                ring.AddPoint(Firstlon, Firstlat)  # close the ring
                poly.AddGeometry(ring)
                poly = poly.ConvexHull()
                poly.Transform(crawler.SpatialRefTrans(2))
                StrictArea = poly.Buffer(radius)
            elif geo_shape == 0 and len(coordinateList) > 1:
                multipoint.Transform(crawler.SpatialRefTrans(2))
                StrictArea = multipoint.Buffer(radius)
            StrictArea.Transform(crawler.SpatialRefTrans(4))
        else:
            if len(coordinateList) == 1:
                point.Transform(crawler.SpatialRefTrans(1))
                StrictArea = point.Buffer(radius)
            elif geo_shape != 0 and len(coordinateList) == 2:
                line.Transform(crawler.SpatialRefTrans(1))
                StrictArea = line.Buffer(radius)
            elif geo_shape != 0 and len(coordinateList) > 2:
                ring.AddPoint(Firstlon, Firstlat)  # close the ring
                poly.AddGeometry(ring)
                poly = poly.ConvexHull()
                poly.Transform(crawler.SpatialRefTrans(1))
                StrictArea = poly.Buffer(radius)
            elif geo_shape == 0 and len(coordinateList) > 1:
                multipoint.Transform(crawler.SpatialRefTrans(1))
                StrictArea = multipoint.Buffer(radius)
            StrictArea.Transform(crawler.SpatialRefTrans(3))

        outputcoordinateList = ','.join(coordinateList)
        csvCursor.writerow({'County': county, 'ReefName': name,
                            'GPSLocation': StrictArea,
                            'Coordinate': outputcoordinateList,
                            'Date': date})
        coordinateList = []
        print(' ')
        print(' ')

    csv_file.close()  # fix: output file was never closed/flushed
def _getLine(coords):
    """Return a 2-D OGR LineString threading through *coords*, a sequence
    of indexable (x, y) pairs."""
    ln = ogr.Geometry(type=ogr.wkbLineString)
    for pt in coords:
        ln.AddPoint_2D(pt[0], pt[1])
    return ln
def _add_inventory_layer(data_source, inventory):
    """
    :type data_source: :class:`osgeo.ogr.DataSource`.
    :param data_source: OGR data source the layer is added to.
    :type inventory: :class:`~obspy.core.inventory.Inventory`
    :param inventory: Inventory data to add as a new layer.
    """
    if not HAS_GDAL:
        raise ImportError(IMPORTERROR_MSG)
    if not GDAL_VERSION_SUFFICIENT:
        raise ImportError(GDAL_TOO_OLD_MSG)
    # [name, type, width, precision]
    # field name is 10 chars max
    # ESRI shapefile attributes are stored in dbf files, which can not
    # store datetimes, only dates, see:
    # http://www.gdal.org/drv_shapefile.html
    field_definitions = [
        ["Network", ogr.OFTString, 20, None],
        ["Station", ogr.OFTString, 20, None],
        ["Longitude", ogr.OFTReal, 16, 10],
        ["Latitude", ogr.OFTReal, 16, 10],
        ["Elevation", ogr.OFTReal, 9, 3],
        ["StartDate", ogr.OFTDate, None, None],
        ["EndDate", ogr.OFTDate, None, None],
        ["Channels", ogr.OFTString, 254, None],
    ]

    layer = _create_layer(data_source, "stations", field_definitions)

    layer_definition = layer.GetLayerDefn()
    for net in inventory:
        for sta in net:
            channel_list = ",".join(["%s.%s" % (cha.location_code, cha.code)
                                     for cha in sta])

            feature = ogr.Feature(layer_definition)

            try:
                # setting fields with `None` results in values of `0.000`
                # need to really omit setting values if they are `None`
                if net.code is not None:
                    feature.SetField(native_str("Network"),
                                     native_str(net.code))
                if sta.code is not None:
                    feature.SetField(native_str("Station"),
                                     native_str(sta.code))
                if sta.latitude is not None:
                    feature.SetField(native_str("Latitude"), sta.latitude)
                if sta.longitude is not None:
                    feature.SetField(native_str("Longitude"), sta.longitude)
                if sta.elevation is not None:
                    feature.SetField(native_str("Elevation"), sta.elevation)
                if sta.start_date is not None:
                    date = sta.start_date
                    # ESRI shapefile attributes are stored in dbf files, which
                    # can not store datetimes, only dates. We still need to use
                    # the GDAL API with precision up to seconds (aiming at
                    # other output drivers of GDAL; `100` stands for GMT)
                    feature.SetField(native_str("StartDate"), date.year,
                                     date.month, date.day, date.hour,
                                     date.minute, date.second, 100)
                if sta.end_date is not None:
                    date = sta.end_date
                    # BUG FIX: this branch previously wrote to "StartDate"
                    # again (copy/paste error), clobbering the start date and
                    # leaving "EndDate" unset.
                    feature.SetField(native_str("EndDate"), date.year,
                                     date.month, date.day, date.hour,
                                     date.minute, date.second, 100)
                if channel_list:
                    feature.SetField(native_str("Channels"),
                                     native_str(channel_list))
                if sta.latitude is not None and sta.longitude is not None:
                    point = ogr.Geometry(ogr.wkbPoint)
                    point.AddPoint(sta.longitude, sta.latitude)
                    feature.SetGeometry(point)
                layer.CreateFeature(feature)
            finally:
                # Destroy the feature to free resources
                feature.Destroy()
# Process all polygon features. feat = poly_layer.GetNextFeature() tile_ref_field = feat.GetFieldIndex('MODULE') polyid_field = feat.GetFieldIndex('POLYID') poly_count = 0 degenerate_count = 0 while feat is not None: module = modules_hash[feat.GetField(tile_ref_field)] polyid = feat.GetField(polyid_field) tlid_list = module.poly_line_links[polyid] link_coll = ogr.Geometry(type=ogr.wkbGeometryCollection) for tlid in tlid_list: geom = module.lines[tlid] link_coll.AddGeometry(geom) try: poly = ogr.BuildPolygonFromEdges(link_coll) if poly.GetGeometryRef(0).GetPointCount() < 4: degenerate_count = degenerate_count + 1 poly.Destroy() feat.Destroy() feat = poly_layer.GetNextFeature() continue #print poly.ExportToWkt()
def _add_catalog_layer(data_source, catalog):
    """
    Add event data from *catalog* as a new "earthquakes" layer.

    :type data_source: :class:`osgeo.ogr.DataSource`.
    :param data_source: OGR data source the layer is added to.
    :type catalog: :class:`~obspy.core.event.Catalog`
    :param catalog: Event data to add as a new layer.
    """
    if not HAS_GDAL:
        raise ImportError(IMPORTERROR_MSG)
    if not GDAL_VERSION_SUFFICIENT:
        raise ImportError(GDAL_TOO_OLD_MSG)
    # Field layout: [name, type, width, precision]. Field names must stay
    # within 10 characters; dbf files behind ESRI shapefiles can only hold
    # dates (no datetimes), see http://www.gdal.org/drv_shapefile.html --
    # exact times therefore also go into POSIX-timestamp real fields.
    field_definitions = [
        ["EventID", ogr.OFTString, 100, None],
        ["OriginID", ogr.OFTString, 100, None],
        ["MagID", ogr.OFTString, 100, None],
        ["Date", ogr.OFTDate, None, None],
        ["OriginTime", ogr.OFTReal, 20, 6],
        ["FirstPick", ogr.OFTReal, 20, 6],
        ["Longitude", ogr.OFTReal, 16, 10],
        ["Latitude", ogr.OFTReal, 16, 10],
        ["Depth", ogr.OFTReal, 8, 3],
        ["Magnitude", ogr.OFTReal, 8, 3],
    ]
    layer = _create_layer(data_source, "earthquakes", field_definitions)
    feature_definition = layer.GetLayerDefn()
    for event in catalog:
        # Prefer the designated origin/magnitude, fall back to the first
        # one, and finally to an empty object full of `None` attributes.
        origin = (event.preferred_origin() or
                  event.origins and event.origins[0] or
                  Origin(force_resource_id=False))
        magnitude = (event.preferred_magnitude() or
                     event.magnitudes and event.magnitudes[0] or
                     Magnitude(force_resource_id=False))
        t_origin = origin.time
        pick_times = [p.time for p in event.picks if p.time is not None]
        if pick_times:
            t_pick = min(pick_times) or None
        else:
            t_pick = None
        date = t_origin or t_pick
        feature = ogr.Feature(feature_definition)
        try:
            # `SetField(None)` would store `0.000`; each value is therefore
            # checked and genuinely skipped when it is `None`.
            if event.resource_id is not None:
                feature.SetField(native_str("EventID"),
                                 native_str(event.resource_id))
            if origin.resource_id is not None:
                feature.SetField(native_str("OriginID"),
                                 native_str(origin.resource_id))
            if t_origin is not None:
                # POSIX timestamp keeps the exact origin time.
                feature.SetField(native_str("OriginTime"), t_origin.timestamp)
            if t_pick is not None:
                # POSIX timestamp keeps the exact first-pick time.
                feature.SetField(native_str("FirstPick"), t_pick.timestamp)
            if date is not None:
                # dbf can only store the date part, but pass the full
                # precision anyway for other GDAL drivers (`100` is GMT).
                feature.SetField(native_str("Date"), date.year, date.month,
                                 date.day, date.hour, date.minute,
                                 date.second, 100)
            if origin.latitude is not None:
                feature.SetField(native_str("Latitude"), origin.latitude)
            if origin.longitude is not None:
                feature.SetField(native_str("Longitude"), origin.longitude)
            if origin.depth is not None:
                # meters -> kilometers
                feature.SetField(native_str("Depth"), origin.depth / 1e3)
            if magnitude.mag is not None:
                feature.SetField(native_str("Magnitude"), magnitude.mag)
            if magnitude.resource_id is not None:
                feature.SetField(native_str("MagID"),
                                 native_str(magnitude.resource_id))
            if origin.longitude is not None and origin.latitude is not None:
                location = ogr.Geometry(ogr.wkbPoint)
                location.AddPoint(origin.longitude, origin.latitude)
                feature.SetGeometry(location)
            layer.CreateFeature(feature)
        finally:
            # Destroy the feature to free resources
            feature.Destroy()
def ogr_wasp_roughness_from_linestring_fields(): if ogr_wasp_create_ds() != 'success': return 'skip' layer = gdaltest.wasp_ds.CreateLayer( 'mylayer', options=['WASP_FIELDS=z_left,z_right'], geom_type=ogr.wkbLineString) if layer == None: gdaltest.post_reason('unable to create layer') return 'fail' layer.CreateField(ogr.FieldDefn('dummy', ogr.OFTString)) layer.CreateField(ogr.FieldDefn('z_left', ogr.OFTReal)) layer.CreateField(ogr.FieldDefn('z_right', ogr.OFTReal)) for i in range(10): feat = ogr.Feature(layer.GetLayerDefn()) feat.SetField(0, 'dummy_' + str(i)) feat.SetField(1, float(i) - 1) feat.SetField(2, float(i)) line = ogr.Geometry(type=ogr.wkbLineString) line.AddPoint(i, 0) line.AddPoint(i, 0.5) line.AddPoint(i, 1) feat.SetGeometry(line) if layer.CreateFeature(feat) != 0: gdaltest.post_reason('unable to create feature %d' % i) return 'fail' del gdaltest.wasp_ds del layer f = open('tmp.map') for i in range(4): f.readline() i = 0 j = 0 for line in f: if not i % 2: [l, r, n] = line.split() if int(n) != 3: gdaltest.post_reason('number of points sould be 3 and is %s' % n) return 'fail' if float(r) != j or float(l) != j - 1: gdaltest.post_reason( 'roughness should be %d and %d and is %s and %s' % (j - 1, j, l, r)) return 'fail' j += 1 i += 1 if j != 10: gdaltest.post_reason('nb of feature should be 10 and is %d' % j) return 'fail' return 'success'
def GetCentreAndExtentOfRaster(DataDirectory, RasterFile):
    """
    This function takes a raster and returns the centrepoint and the extent
    in both degrees and metres.

    Args:
        DataDirectory (str): the data directory with the basin raster
        RasterFile (str): the name of the raster

    Returns:
        The lat-long of the centrepoint, the x-y- extent in both degrees
        and metres

    Author: SMM

    Date: 01/02/2018
    """
    # NOTE: removed dead code left over from CreateShapefileOfRasterFootprint
    # (an unused shapefile driver, an unused raster prefix and a misleading
    # "Trying to create a shapefile." print) -- this function creates nothing.
    print("The Data directory is: " + DataDirectory +
          " and the raster is: " + RasterFile)

    # get the filename of the outfile.
    if not DataDirectory.endswith(os.sep):
        print(
            "You forgot the separator at the end of the directory, appending..."
        )
        DataDirectory = DataDirectory + os.sep

    # get the espg of the raster
    FullFilename = DataDirectory + RasterFile
    ESPG_this_raster = GetUTMEPSG(FullFilename)
    ESPG_this_raster = str(ESPG_this_raster)
    print("The raster has coordinate of: " + ESPG_this_raster)
    # Keep only the numeric code after the colon (e.g. "epsg:32611" -> "32611")
    ESPG_this_raster_split = ESPG_this_raster.split(":")
    ESPG_this_raster = ESPG_this_raster_split[-1]
    print("This ESPG is: " + str(ESPG_this_raster))

    # Get extent of raster in projected (metre) coordinates
    [xmin, xmax, ymin, ymax] = GetRasterExtent(FullFilename)
    xproj_extent = xmax - xmin
    yproj_extent = ymax - ymin

    # Create ring of the raster corner points
    ring = ogr.Geometry(ogr.wkbLinearRing)
    ring.AddPoint(xmin, ymin)
    ring.AddPoint(xmin, ymax)
    ring.AddPoint(xmax, ymax)
    ring.AddPoint(xmax, ymin)

    # Create a coordinate transformation to WGS84 lat-long
    source = osr.SpatialReference()
    source.ImportFromEPSG(int(ESPG_this_raster))
    target = osr.SpatialReference()
    target.ImportFromEPSG(4326)
    transform = osr.CoordinateTransformation(source, target)

    # now transform the ring so you can get coordinates in lat-long
    ring.Transform(transform)

    # now get the xmin,ymin, and xmax, ymax coords in lat-long:
    # point 0 is (xmin, ymin) and point 2 is (xmax, ymax)
    pt1 = ring.GetPoint(0)
    min_long = pt1[0]
    min_lat = pt1[1]
    pt2 = ring.GetPoint(2)
    max_long = pt2[0]
    max_lat = pt2[1]

    extent_long = max_long - min_long
    extent_lat = max_lat - min_lat
    centre_long = min_long + extent_long * 0.5
    centre_lat = min_lat + extent_lat * 0.5

    return centre_lat, centre_long, extent_lat, extent_long, \
        xproj_extent, yproj_extent
def ogr_wasp_merge(): if ogr_wasp_create_ds() != 'success': return 'skip' if not ogrtest.have_geos(): gdal.PushErrorHandler('CPLQuietErrorHandler') layer = gdaltest.wasp_ds.CreateLayer('mylayer', geom_type=ogr.wkbPolygon25D) if not ogrtest.have_geos(): gdal.PopErrorHandler() if layer == None: if ogrtest.have_geos(): gdaltest.post_reason('unable to create layer') return 'fail' else: return 'success' dfn = ogr.FeatureDefn() for i in range(6): feat = ogr.Feature(dfn) ring = ogr.Geometry(type=ogr.wkbLinearRing) h = i % 2 ring.AddPoint(0, 0, h) ring.AddPoint(round(math.cos(i * math.pi / 3), 6), round(math.sin(i * math.pi / 3), 6), h) ring.AddPoint(round(math.cos((i + 1) * math.pi / 3), 6), round(math.sin((i + 1) * math.pi / 3), 6), h) ring.AddPoint(0, 0, h) poly = ogr.Geometry(type=ogr.wkbPolygon25D) poly.AddGeometry(ring) feat.SetGeometry(poly) if layer.CreateFeature(feat) != 0: gdaltest.post_reason('unable to create feature') return 'fail' del gdaltest.wasp_ds del layer f = open('tmp.map') for i in range(4): f.readline() i = 0 j = 0 res = [] for line in f: if not i % 2: [l, r, n] = [v for v in line.split()] if int(n) != 2: gdaltest.post_reason( 'number of points sould be 2 and is %d (unwanted merge ?)' % int(n)) return 'fail' if float(r) > float(l): res.append((float(l), float(r))) else: res.append((float(r), float(l))) j += 1 i += 1 if j != 6: gdaltest.post_reason('there should be 6 boundaries and there are %d' % j) return 'fail' if res != [(0, 1) for k in range(6)]: print(res) gdaltest.post_reason('wrong values f=in boundaries') return 'fail' return 'success'
def CreateShapefileOfRasterFootprint(DataDirectory, RasterFile):
    """
    This function takes a raster and creates a shapefile that is the
    footprint of the raster. Used for plotting the raster footprint on
    regional maps using basemap.

    Variously put together from:
    http://osgeo-org.1560.x6.nabble.com/gdal-dev-Creating-a-simple-shapefile-with-ogr-td3749101.html

    Args:
        DataDirectory (str): the data directory with the basin raster
        RasterFile (str): the name of the raster

    Returns:
        Shapefile of the raster footprint

    Author: SMM

    Date: 23/01/2018
    """
    print("Trying to create a shapefile.")
    print("The Data directory is: " + DataDirectory +
          " and the raster is: " + RasterFile)
    driver_name = "ESRI shapefile"
    driver = ogr.GetDriverByName(driver_name)

    # get the filename of the outfile.
    if not DataDirectory.endswith(os.sep):
        print(
            "You forgot the separator at the end of the directory, appending..."
        )
        DataDirectory = DataDirectory + os.sep

    # Get the raster prefix (filename without extension)
    SplitRasterfile = RasterFile.split(".")
    RasterPrefix = SplitRasterfile[0]

    # get the espg of the raster
    FullFilename = DataDirectory + RasterFile
    ESPG_this_raster = GetUTMEPSG(FullFilename)
    ESPG_this_raster = str(ESPG_this_raster)
    print("The raster has coordinate of: " + ESPG_this_raster)
    # Keep only the numeric code after the colon (e.g. "epsg:32611" -> "32611")
    ESPG_this_raster_split = ESPG_this_raster.split(":")
    ESPG_this_raster = ESPG_this_raster_split[-1]
    print("This ESPG is: " + str(ESPG_this_raster))

    # Get extent of raster
    [xmin, xmax, ymin, ymax] = GetRasterExtent(FullFilename)

    # Create closed ring of the raster corners
    ring = ogr.Geometry(ogr.wkbLinearRing)
    ring.AddPoint(xmin, ymin)
    ring.AddPoint(xmin, ymax)
    ring.AddPoint(xmax, ymax)
    ring.AddPoint(xmax, ymin)
    ring.AddPoint(xmin, ymin)

    # Create polygon
    poly = ogr.Geometry(ogr.wkbPolygon)
    poly.AddGeometry(ring)

    # Create a coordinate transformation to WGS84 lat-long
    source = osr.SpatialReference()
    source.ImportFromEPSG(int(ESPG_this_raster))
    target = osr.SpatialReference()
    target.ImportFromEPSG(4326)
    transform = osr.CoordinateTransformation(source, target)

    # now transform the polygon
    poly.Transform(transform)

    # create the data source
    OutFileName = DataDirectory + RasterPrefix + "_footprint.shp"
    print("The output shapefile is: " + OutFileName)
    # fixed: the shapefile driver refuses to overwrite an existing data
    # source, so a rerun would fail -- delete any previous footprint first.
    if os.path.exists(OutFileName):
        driver.DeleteDataSource(OutFileName)
    datasource = driver.CreateDataSource(OutFileName)

    # create the layer
    layer = datasource.CreateLayer(OutFileName, target, ogr.wkbPolygon)
    feature = ogr.Feature(layer.GetLayerDefn())
    feature.SetGeometry(poly)
    layer.CreateFeature(feature)

    # Clean up
    feature.Destroy()
    datasource.Destroy()
def in_geofence(self, coordinates): coords_transformed = ogr.Geometry(ogr.wkbPoint) coords_transformed.AddPoint(*coordinates) return self.polygon.Contains(coords_transformed)
def edges_from_line(geom, attrs, simplify=True, geom_attrs=True): """ Generate edges for each line in geom Written as a helper for read_shp Parameters ---------- geom: ogr line geometry To be converted into an edge or edges attrs: dict Attributes to be associated with all geoms simplify: bool If True, simplify the line as in read_shp geom_attrs: bool If True, add geom attributes to edge as in read_shp Returns ------- edges: generator of edges each edge is a tuple of form (node1_coord, node2_coord, attribute_dict) suitable for expanding into a networkx Graph add_edge call """ try: from osgeo import ogr except ImportError: raise ImportError("edges_from_line requires OGR: http://www.gdal.org/") if geom.GetGeometryType() == ogr.wkbLineString: if simplify: edge_attrs = attrs.copy() last = geom.GetPointCount() - 1 if geom_attrs: edge_attrs["Wkb"] = geom.ExportToWkb() edge_attrs["Wkt"] = geom.ExportToWkt() edge_attrs["Json"] = geom.ExportToJson() yield (geom.GetPoint_2D(0), geom.GetPoint_2D(last), edge_attrs) else: for i in range(0, geom.GetPointCount() - 1): pt1 = geom.GetPoint_2D(i) pt2 = geom.GetPoint_2D(i + 1) edge_attrs = attrs.copy() if geom_attrs: segment = ogr.Geometry(ogr.wkbLineString) segment.AddPoint_2D(pt1[0], pt1[1]) segment.AddPoint_2D(pt2[0], pt2[1]) edge_attrs["Wkb"] = segment.ExportToWkb() edge_attrs["Wkt"] = segment.ExportToWkt() edge_attrs["Json"] = segment.ExportToJson() del segment yield (pt1, pt2, edge_attrs) elif geom.GetGeometryType() == ogr.wkbMultiLineString: for i in range(geom.GetGeometryCount()): geom_i = geom.GetGeometryRef(i) for edge in edges_from_line(geom_i, attrs, simplify, geom_attrs): yield edge
else: tags = "" if "datetaken" in photo: date = photo["datetaken"] else: date = "" if "url_o" in photo: url = photo["url_o"] elif "url_o" not in photo and "url_c" in photo: url = photo["url_c"] else: url = "" point = ogr.Geometry(ogr.wkbPoint) point.SetPoint_2D(0, floatlon, floatlat) lyr_in.SetSpatialFilter(point) for feat_in in lyr_in: cnt_final += 1 add = "%s;%s;%s;%s;%s;%s;%s\n" % (fid, uid, tags, date, lat, lon, url) outFile.write(add) # points in source vs points in polygon of shapefile # if same= try bigger bbox in line 30 print "Points in Source:", cnt_source print "Points in Polygon:", cnt_final outFile.close()
fd = ogr.FieldDefn('Id', ogr.OFTString) layer.CreateField(fd) fd = ogr.FieldDefn('Info', ogr.OFTString) layer.CreateField(fd) # ---------------------------------------------------------------------------- # Write GCPs. # ---------------------------------------------------------------------------- for gcp in gcps: feat = ogr.Feature(layer.GetLayerDefn()) if pixel_out == 0: geom = ogr.Geometry(geom_type) feat.SetField('Pixel', gcp.GCPPixel) feat.SetField('Line', gcp.GCPLine) geom.SetPoint(0, gcp.GCPX, gcp.GCPY, gcp.GCPZ) else: geom = ogr.Geometry(geom_type) feat.SetField('X', gcp.GCPX) feat.SetField('Y', gcp.GCPY) feat.SetField('Z', gcp.GCPZ) geom.SetPoint(0, gcp.GCPPixel, gcp.GCPLine) feat.SetField('Id', gcp.Id) feat.SetField('Info', gcp.Info) feat.SetGeometryDirectly(geom) layer.CreateFeature(feat)
def write_to_file(self, filename, sublayer=None):
    """Save vector data to file

    :param filename: filename with extension .shp or .gml
    :type filename: str
    :param sublayer: Optional parameter for writing a sublayer. Ignored
        unless we are writing to an sqlite file.
    :type sublayer: str

    :raises: WriteLayerError

    Note: Shp limitation, if attribute names are longer than 10
    characters they will be truncated. This is due to limitations in
    the shp file driver and has to be done here since gdal v1.7 onwards
    has changed its handling of this issue:
    http://www.gdal.org/ogr/drv_shapefile.html

    **For this reason we recommend writing to spatialite.**
    """
    # Check file format
    base_name, extension = os.path.splitext(filename)

    msg = ('Invalid file type for file %s. Only extensions '
           'sqlite, shp or gml allowed.' % filename)
    verify(extension in ['.sqlite', '.shp', '.gml'], msg)
    driver = DRIVER_MAP[extension]

    # FIXME (Ole): Tempory flagging of GML issue (ticket #18)
    if extension == '.gml':
        msg = ('OGR GML driver does not store geospatial reference.'
               'This format is disabled for the time being. See '
               'https://github.com/AIFDR/riab/issues/18')
        raise WriteLayerError(msg)

    # Derive layer_name from filename (excluding preceding dirs)
    if sublayer is None or extension == '.shp':
        layer_name = os.path.split(base_name)[-1]
    else:
        layer_name = sublayer

    # Get vector data
    if self.is_polygon_data:
        geometry = self.get_geometry(as_geometry_objects=True)
    else:
        geometry = self.get_geometry()
    data = self.get_data()
    N = len(geometry)

    # Clear any previous file of this name (ogr does not overwrite)
    try:
        os.remove(filename)
    except OSError:
        pass

    # Create new file with one layer
    drv = ogr.GetDriverByName(driver)
    if drv is None:
        msg = 'OGR driver %s not available' % driver
        raise WriteLayerError(msg)

    ds = drv.CreateDataSource(filename)
    if ds is None:
        msg = 'Creation of output file %s failed' % filename
        raise WriteLayerError(msg)

    lyr = ds.CreateLayer(layer_name,
                         self.projection.spatial_reference,
                         self.geometry_type)
    if lyr is None:
        msg = 'Could not create layer %s' % layer_name
        raise WriteLayerError(msg)

    # Define attributes if any
    store_attributes = False
    fields = []
    if data is not None:
        if len(data) > 0:
            try:
                fields = data[0].keys()
            # fixed: bare `except:` also swallowed SystemExit and
            # KeyboardInterrupt; only catch real errors from data[0]
            # not being a dictionary.
            except Exception:
                msg = ('Input parameter "attributes" was specified '
                       'but it does not contain list of dictionaries '
                       'with field information as expected. The first '
                       'element is %s' % data[0])
                raise WriteLayerError(msg)
            else:
                # Establish OGR types for each element
                ogr_types = {}
                for name in fields:
                    att = data[0][name]
                    py_type = type(att)
                    msg = ('Unknown type for storing vector '
                           'data: %s, %s' % (name, str(py_type)[1:-1]))
                    verify(py_type in TYPE_MAP, msg)
                    ogr_types[name] = TYPE_MAP[py_type]
        else:
            # msg = ('Input parameter "data" was specified '
            #        'but appears to be empty')
            # raise InaSAFEError(msg)
            pass

        # Create attribute fields in layer
        store_attributes = True
        for name in fields:
            fd = ogr.FieldDefn(name, ogr_types[name])
            # FIXME (Ole): Trying to address issue #16
            #              But it doesn't work and
            #              somehow changes the values of MMI in test
            # width = max(128, len(name))
            # print name, width
            # fd.SetWidth(width)

            # Silent handling of warnings like
            # Warning 6: Normalized/laundered field name:
            # 'CONTENTS_LOSS_AUD' to 'CONTENTS_L'
            gdal.PushErrorHandler('CPLQuietErrorHandler')
            if lyr.CreateField(fd) != 0:
                msg = 'Could not create field %s' % name
                raise WriteLayerError(msg)

            # Restore error handler
            gdal.PopErrorHandler()

    # Store geometry
    geom = ogr.Geometry(self.geometry_type)
    layer_def = lyr.GetLayerDefn()
    for i in range(N):
        # Create new feature instance
        feature = ogr.Feature(layer_def)

        # Store geometry and check
        if self.is_point_data:
            x = float(geometry[i][0])
            y = float(geometry[i][1])
            geom.SetPoint_2D(0, x, y)
        elif self.is_line_data:
            geom = array_to_line(geometry[i],
                                 geometry_type=ogr.wkbLineString)
        elif self.is_polygon_data:
            # Create polygon geometry
            geom = ogr.Geometry(ogr.wkbPolygon)

            # Add outer ring
            linear_ring = array_to_line(geometry[i].outer_ring,
                                        geometry_type=ogr.wkbLinearRing)
            geom.AddGeometry(linear_ring)

            # Add inner rings if any
            for A in geometry[i].inner_rings:
                geom.AddGeometry(
                    array_to_line(A, geometry_type=ogr.wkbLinearRing))
        else:
            msg = 'Geometry type %s not implemented' % self.geometry_type
            raise WriteLayerError(msg)

        feature.SetGeometry(geom)

        G = feature.GetGeometryRef()
        if G is None:
            msg = 'Could not create GeometryRef for file %s' % filename
            raise WriteLayerError(msg)

        # Store attributes
        if store_attributes:
            for j, name in enumerate(fields):
                actual_field_name = layer_def.GetFieldDefn(j).GetNameRef()

                val = data[i][name]

                if isinstance(val, numpy.ndarray):
                    # A singleton of type <type 'numpy.ndarray'> works
                    # for gdal version 1.6 but fails for version 1.8
                    # in SetField with error: NotImplementedError:
                    # Wrong number of arguments for overloaded function
                    val = float(val)
                elif val is None:
                    val = ''

                # We do this because there is NaN problem on windows
                # NaN value must be converted to _pseudo_inf to solve the
                # problem. But, when InaSAFE read the file, it'll be
                # converted back to NaN value, so that NaN in InaSAFE is a
                # numpy.nan
                # please check https://github.com/AIFDR/inasafe/issues/269
                # for more information
                # (`val != val` is the standard NaN test.)
                if val != val:
                    val = _pseudo_inf

                feature.SetField(actual_field_name, val)

        # Save this feature
        if lyr.CreateFeature(feature) != 0:
            msg = 'Failed to create feature %i in file %s' % (i, filename)
            raise WriteLayerError(msg)

        feature.Destroy()

    # Write keywords if any
    write_keywords(self.keywords, base_name + '.keywords')
def _make_point(pt): geom = ogr.Geometry(ogr.wkbPoint) geom.AddPoint_2D(*pt) return geom