def ogr_georss_test_rss(filename, only_first_feature):
    if not gdaltest.georss_read_support:
        return 'skip'

    ds = ogr.Open(filename)
    if ds is None:
        return 'fail'

    lyr = ds.GetLayer(0)

    srs = osr.SpatialReference()
    srs.SetWellKnownGeogCS('WGS84')

    if lyr.GetSpatialRef() is None or not lyr.GetSpatialRef().IsSame(srs):
        gdaltest.post_reason('SRS is not the one expected.')
        return 'fail'

    if lyr.GetSpatialRef().ExportToWkt().find('AXIS["Latitude",NORTH],AXIS["Longitude",EAST]') != -1:
        gdaltest.post_reason('AXIS definition found with latitude/longitude order!')
        return 'fail'

    feat = lyr.GetNextFeature()
    expected_wkt = 'POINT (2 49)'
    if feat.GetGeometryRef().ExportToWkt() != expected_wkt:
        print(feat.GetGeometryRef().ExportToWkt())
        return 'fail'
    if feat.GetFieldAsString('title') != 'A point':
        return 'fail'
    if feat.GetFieldAsString('author') != 'Author':
        return 'fail'
    if feat.GetFieldAsString('link') != 'http://gdal.org':
        return 'fail'
    if feat.GetFieldAsString('pubDate') != '2008/12/07 20:13:00+02':
        return 'fail'
    if feat.GetFieldAsString('category') != 'First category':
        return 'fail'
    if feat.GetFieldAsString('category_domain') != 'first_domain':
        return 'fail'
    if feat.GetFieldAsString('category2') != 'Second category':
        return 'fail'
    if feat.GetFieldAsString('category2_domain') != 'second_domain':
        return 'fail'

    feat = lyr.GetNextFeature()
    expected_wkt = 'LINESTRING (2 48,2.1 48.1,2.2 48.0)'
    if not only_first_feature and feat.GetGeometryRef().ExportToWkt() != expected_wkt:
        print(feat.GetGeometryRef().ExportToWkt())
        return 'fail'
    if feat.GetFieldAsString('title') != 'A line':
        return 'fail'

    feat = lyr.GetNextFeature()
    expected_wkt = 'POLYGON ((2 50,2.1 50.1,2.2 48.1,2.1 46.1,2 50))'
    if not only_first_feature and feat.GetGeometryRef().ExportToWkt() != expected_wkt:
        print(feat.GetGeometryRef().ExportToWkt())
        return 'fail'
    if feat.GetFieldAsString('title') != 'A polygon':
        return 'fail'

    feat = lyr.GetNextFeature()
    expected_wkt = 'POLYGON ((2 49,2.0 49.5,2.2 49.5,2.2 49.0,2 49))'
    if not only_first_feature and feat.GetGeometryRef().ExportToWkt() != expected_wkt:
        print(feat.GetGeometryRef().ExportToWkt())
        return 'fail'
    if feat.GetFieldAsString('title') != 'A box':
        return 'fail'

    return 'success'
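# The SRS-comparison idiom the test above relies on, shown in isolation.
# A minimal sketch: building the second reference from EPSG:4326 is my
# assumption for illustration, not part of the test.
from osgeo import osr

a = osr.SpatialReference()
a.SetWellKnownGeogCS('WGS84')
b = osr.SpatialReference()
b.ImportFromEPSG(4326)
print(a.IsSame(b))  # 1: IsSame() compares the CRS definitions, not WKT strings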
def test_gdal_contour_1():
    if test_cli_utilities.get_gdal_contour_path() is None:
        pytest.skip()

    for ext in ('shp', 'dbf', 'shx'):
        try:
            os.remove('tmp/contour.' + ext)
        except OSError:
            pass

    drv = gdal.GetDriverByName('GTiff')
    sr = osr.SpatialReference()
    sr.ImportFromEPSG(4326)
    wkt = sr.ExportToWkt()

    size = 160
    precision = 1. / size

    ds = drv.Create('tmp/gdal_contour.tif', size, size, 1)
    ds.SetProjection(wkt)
    ds.SetGeoTransform([1, precision, 0, 50, 0, -precision])

    # array.array.tostring() was removed in Python 3.9; tobytes() is the
    # equivalent.
    raw_data = array.array('h', [10 for i in range(int(size / 2))]).tobytes()
    for i in range(int(size / 2)):
        ds.WriteRaster(int(size / 4), i + int(size / 4), int(size / 2), 1,
                       raw_data, buf_type=gdal.GDT_Int16, band_list=[1])

    raw_data = array.array('h', [20 for i in range(int(size / 2))]).tobytes()
    for i in range(int(size / 4)):
        ds.WriteRaster(int(size / 4) + int(size / 8),
                       i + int(size / 4) + int(size / 8),
                       int(size / 4), 1, raw_data,
                       buf_type=gdal.GDT_Int16, band_list=[1])

    raw_data = array.array('h', [25 for i in range(int(size / 4))]).tobytes()
    for i in range(int(size / 8)):
        ds.WriteRaster(int(size / 4) + int(size / 8) + int(size / 16),
                       i + int(size / 4) + int(size / 8) + int(size / 16),
                       int(size / 8), 1, raw_data,
                       buf_type=gdal.GDT_Int16, band_list=[1])
    ds = None

    (_, err) = gdaltest.runexternal_out_and_err(
        test_cli_utilities.get_gdal_contour_path() +
        ' -a elev -i 10 tmp/gdal_contour.tif tmp/contour.shp')
    assert (err is None or err == ''), 'got error/warning'

    ds = ogr.Open('tmp/contour.shp')

    expected_envelopes = [[1.25, 1.75, 49.25, 49.75],
                          [1.25 + 0.125, 1.75 - 0.125, 49.25 + 0.125, 49.75 - 0.125]]
    expected_height = [10, 20]

    lyr = ds.ExecuteSQL("select * from contour order by elev asc")
    assert lyr.GetSpatialRef().ExportToWkt() == wkt, 'Did not get expected spatial ref'
    assert lyr.GetFeatureCount() == len(expected_envelopes)

    i = 0
    feat = lyr.GetNextFeature()
    while feat is not None:
        envelope = feat.GetGeometryRef().GetEnvelope()
        assert feat.GetField('elev') == expected_height[i]
        for j in range(4):
            if abs(expected_envelopes[i][j] - envelope[j]) > precision / 2 * 1.001:
                print('i=%d, wkt=%s' % (i, feat.GetGeometryRef().ExportToWkt()))
                print(feat.GetGeometryRef().GetEnvelope())
                pytest.fail('%f, %f' % (expected_envelopes[i][j] - envelope[j],
                                        precision / 2))
        i = i + 1
        feat = lyr.GetNextFeature()

    ds.ReleaseResultSet(lyr)
    ds.Destroy()
def main():
    # Make our global vars: grib is the object that will hold our Grib class.
    global inputArgs, grib, dir_path
    dir_path = os.path.dirname(os.path.realpath(__file__))
    comparison_days = [0, -7]
    inputArgs = handle_args(sys.argv)  # All input arguments if run on the command line.

    for deltaDay in comparison_days:
        if deltaDay == 0:
            date2 = None
        else:
            date2 = ((datetime.datetime.now(pytz.timezone('US/Pacific'))) +
                     datetime.timedelta(days=deltaDay)).strftime("%Y%m%d")

        ##############
        # Debugging
        # inputArgs.date = '20180327'
        inputArgs.date = time.strftime("%Y%m%d")
        inputArgs.date2 = date2  # Comment this out for just one date.
        inputArgs.map = True  # Make the map and save the png to a folder.
        findValueAtPoint = False  # Find the values at specific lat/lng points within an Excel file.
        #################

        grib = Grib()  # Assign variable to the Grib class.
        grib.model = inputArgs.model  # Our model will always be "snodas" for this program.
        grib.displayunits = inputArgs.displayunits
        grib.basin = inputArgs.basin  # Basin can be "French_Meadows", "Hell_Hole", or "MFP"; this gets the shapefile.

        # The bounding box clips the raster to a region of interest (e.g. CA).
        # This makes the raster MUCH smaller and easier to work with. See
        # gdal.Open -> gdal.Translate in get_snowdas for where this is actually used.
        grib.bbox = [-125.0, 50.0, -115.0, 30.0]  # [upper left lon, upper left lat, lower right lon, lower right lat]

        grib = get_snowdas(grib, inputArgs.date)  # Get the snodas file and save its data into grib.
        # pngFile = makePNG()

        # Any reprojection of grib.gribAll has already been done in get_snowdas.
        # The original projection of snodas is EPSG:4326 (lat/lng), so it has
        # been changed to EPSG:3857 (x/y) in get_snowdas.
        projInfo = grib.gribAll.GetProjection()
        geoinformation = grib.gribAll.GetGeoTransform()  # Get the geoinformation from the grib file.

        xres = geoinformation[1]
        yres = geoinformation[5]
        xmin = geoinformation[0]
        xmax = geoinformation[0] + (xres * grib.gribAll.RasterXSize)
        ymin = geoinformation[3] + (yres * grib.gribAll.RasterYSize)
        ymax = geoinformation[3]

        spatialRef = osr.SpatialReference()
        spatialRef.ImportFromWkt(projInfo)
        spatialRefProj = spatialRef.ExportToProj4()

        # Create a grid of xy (or lat/lng) coordinates in the original projection.
        xy_source = np.mgrid[xmin:xmax:xres, ymax:ymin:yres]
        xx, yy = xy_source  # A numpy grid of all the x/y values.

        # This will convert your projection to lat/lng (it's this simple).
        lons, lats = Proj(spatialRefProj)(xx, yy, inverse=True)

        # Find the center point of each grid box: move over half a grid box in
        # the x direction and down (since yres is negative) in the y direction.
        # The +yres offset also means the starting point of this array trims off
        # one row in the y direction (since it's shifted off the grid).
        xy_source_centerPt = np.mgrid[xmin + (xres / 2):xmax:xres,
                                      ymax + (yres / 2):ymin:yres]
        xxC, yyC = xy_source_centerPt
        lons_centerPt, lats_centerPt = Proj(spatialRefProj)(xxC, yyC, inverse=True)

        mask = createMask(xxC, yyC, spatialRefProj)
        grib.basinTotal = calculateBasin(mask, grib, xres, yres)

        # Calculate the difference between two rasters.
        if inputArgs.date2 is not None:
            grib.basinTotal[0] = compareDates(mask, grib, xres, yres)[0]

        if grib.basin == 'Hell_Hole':
            # Part of this basin is SMUD's territory, so remove 92% of the water in it.
            grib.basin = 'Hell_Hole_SMUD'  # This is just to get the correct directory structure.
            submask = createMask(xxC, yyC, spatialRefProj)
            smudBasinTotal = calculateBasin(submask, grib, xres, yres)
            print("Extracting 92% of the SWE values from SMUD Basin...\n" +
                  "Current Basin Total: " + str(grib.basinTotal[0]))
            grib.basinTotal[0] = grib.basinTotal[0] - (0.92 * smudBasinTotal[0])
            print("Smud Total: " + str(smudBasinTotal[0]) +
                  "\n New Total: " + str(grib.basinTotal[0]))
            grib.basin = 'Hell_Hole'  # Reset back.

        # Need to do this after Hell_Hole's data has been manipulated (to account for SMUD).
        elevation_bins = calculateByElevation(mask, grib, xres, yres)

        # Send data for writing to the Excel file.
        if deltaDay == 0:
            excel_output(elevation_bins)

        if inputArgs.plot:
            makePlot(elevation_bins, deltaDay)

        print(elevation_bins)
        print(inputArgs.date, " Basin Total: ", grib.basinTotal[0])

        # findPointValue will return a dataframe with SWE values at various lat/lng points.
        df_ptVal = None
        if findValueAtPoint:
            df_ptVal = findPointValue(spatialRefProj, xy_source)

        if inputArgs.map:
            fig = plt.figure()
            ax = fig.add_subplot(111)
            m = Basemap(llcrnrlon=-122.8, llcrnrlat=37.3,
                        urcrnrlon=-119.0, urcrnrlat=40.3, ax=ax)
            m.arcgisimage(service='ESRI_Imagery_World_2D', xpixels=2000, verbose=True)
            # m.arcgisimage(service='World_Shaded_Relief', xpixels=2000, verbose=True)

            # For the inset:
            # loc => 'upper right': 1, 'upper left': 2, 'lower left': 3,
            #        'lower right': 4, 'right': 5, 'center left': 6,
            #        'center right': 7, 'lower center': 8, 'upper center': 9,
            #        'center': 10
            axin = inset_axes(m.ax, width="40%", height="40%", loc=8)
            m2 = Basemap(llcrnrlon=-120.7, llcrnrlat=38.7,
                         urcrnrlon=-120.1, urcrnrlat=39.3, ax=axin)
            m2.arcgisimage(service='ESRI_Imagery_World_2D', xpixels=2000, verbose=True)
            mark_inset(ax, axin, loc1=2, loc2=4, fc="none", ec="0.5")

            ################################### DEBUGGING AREA ###################################
            # Tests to prove a given lat/lng pair is accessing the correct grid box:
            # *********TEST 1: Test for center points
            # grib.data[0, 0] = 15  # Increase the value by some arbitrary amount so it stands out.
            # xpts, ypts = m(lons_centerPt[0, 0], lats_centerPt[0, 0])  # Dead center of grid[0, 0].
            # m.plot(xpts, ypts, 'ro')
            # *********TEST 2: Test for first grid box
            # The point at [x, y] should be in the upper right corner of the cell (it better be!).
            # xpts, ypts = m(lons[0, 0], lats[0, 0])
            # m.plot(xpts, ypts, 'bo')
            # *********TEST 3: Location of the center points of each grid box in the polygon.
            # To make this work, uncomment the variables in def create_mask.
            # debug_Xpoly_center_pts, debug_Ypoly_center_pts = m(debugCenterX, debugCenterY)
            # m.plot(debug_Xpoly_center_pts, debug_Ypoly_center_pts, 'bo')
            # *********TEST 4: Test grid box size (in lat/lng coords).
            # For use in a Basemap projection with lat/lon (e.g. EPSG:4326):
            # testX = np.array([[-120.1, -120.1], [-120.10833, -120.10833]])
            # testY = np.array([[39.0, 39.00833], [39.0, 39.00833]])
            # testVal = np.array([[4, 4], [4, 4]])
            # For use in a Basemap projection with x/y (e.g. EPSG:3857; in
            # m = Basemap just include the argument projection='merc'):
            # testX = np.array([[500975, 500975], [(500975 + 1172), (500975 + 1172)]])
            # testY = np.array([[502363, (502363 + 1172)], [502363, (502363 + 1172)]])
            # testVal = np.array([[18, 18], [18, 18]])
            # im1 = m.pcolormesh(testX, testY, testVal, cmap=plt.cm.jet,
            #                    vmin=0.1, vmax=10, latlon=False, alpha=0.5)
            # Test to see all points:
            # xtest, ytest = m(lons, lats)
            # m.plot(xtest, ytest, 'bo')
            ################################################################################################################

            hr = 0
            makeMap(lons, lats, hr, m, m2, df_ptVal, deltaDay)
    return
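# A self-contained sketch of the pixel-center -> lon/lat trick used in main()
# above, with a synthetic Web Mercator geotransform. The numbers here are
# made up for illustration only.
import numpy as np
from pyproj import Proj

xmin, ymax, xres, yres = -13600000.0, 4900000.0, 1000.0, -1000.0
nx, ny = 4, 3
xmax = xmin + nx * xres
ymin = ymax + ny * yres

# Shift by half a pixel in each direction so every sample lands on a cell center.
xxC, yyC = np.mgrid[xmin + xres / 2:xmax:xres, ymax + yres / 2:ymin:yres]
lons, lats = Proj(init='epsg:3857')(xxC, yyC, inverse=True)
print(lons.shape, lons[0, 0], lats[0, 0])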
def processAlgorithm(self, parameters, context, feedback):
    rasterPath = self.getParameterValue(self.INPUT_DEM)
    layer = QgsProcessingUtils.mapLayerFromString(
        self.getParameterValue(self.BOUNDARY_LAYER), context)
    step = self.getParameterValue(self.STEP)
    percentage = self.getParameterValue(self.USE_PERCENTAGE)
    outputPath = self.getOutputValue(self.OUTPUT_DIRECTORY)

    rasterDS = gdal.Open(rasterPath, gdal.GA_ReadOnly)
    geoTransform = rasterDS.GetGeoTransform()
    rasterBand = rasterDS.GetRasterBand(1)
    noData = rasterBand.GetNoDataValue()

    cellXSize = abs(geoTransform[1])
    cellYSize = abs(geoTransform[5])
    rasterXSize = rasterDS.RasterXSize
    rasterYSize = rasterDS.RasterYSize

    rasterBBox = QgsRectangle(geoTransform[0],
                              geoTransform[3] - cellYSize * rasterYSize,
                              geoTransform[0] + cellXSize * rasterXSize,
                              geoTransform[3])
    rasterGeom = QgsGeometry.fromRect(rasterBBox)

    crs = osr.SpatialReference()
    crs.ImportFromProj4(str(layer.crs().toProj4()))

    memVectorDriver = ogr.GetDriverByName('Memory')
    memRasterDriver = gdal.GetDriverByName('MEM')

    features = QgsProcessingUtils.getFeatures(layer, context)
    total = 100.0 / layer.featureCount() if layer.featureCount() else 0

    for current, f in enumerate(features):
        geom = f.geometry()
        intersectedGeom = rasterGeom.intersection(geom)
        if intersectedGeom.isEmpty():
            feedback.pushInfo(
                self.tr('Feature {0} does not intersect raster or '
                        'is entirely located in a NODATA area').format(f.id()))
            continue

        fName = os.path.join(outputPath,
                             'hystogram_%s_%s.csv' % (layer.name(), f.id()))

        ogrGeom = ogr.CreateGeometryFromWkt(intersectedGeom.exportToWkt())
        bbox = intersectedGeom.boundingBox()
        xMin = bbox.xMinimum()
        xMax = bbox.xMaximum()
        yMin = bbox.yMinimum()
        yMax = bbox.yMaximum()

        (startColumn, startRow) = raster.mapToPixel(xMin, yMax, geoTransform)
        (endColumn, endRow) = raster.mapToPixel(xMax, yMin, geoTransform)

        width = endColumn - startColumn
        height = endRow - startRow

        srcOffset = (startColumn, startRow, width, height)
        # Check the window size before reading: ReadAsArray on a zero-width or
        # zero-height window would fail.
        if srcOffset[2] == 0 or srcOffset[3] == 0:
            feedback.pushInfo(
                self.tr('Feature {0} is smaller than raster '
                        'cell size').format(f.id()))
            continue
        srcArray = rasterBand.ReadAsArray(*srcOffset)

        newGeoTransform = (
            geoTransform[0] + srcOffset[0] * geoTransform[1],
            geoTransform[1],
            0.0,
            geoTransform[3] + srcOffset[1] * geoTransform[5],
            0.0,
            geoTransform[5]
        )

        memVDS = memVectorDriver.CreateDataSource('out')
        memLayer = memVDS.CreateLayer('poly', crs, ogr.wkbPolygon)
        ft = ogr.Feature(memLayer.GetLayerDefn())
        ft.SetGeometry(ogrGeom)
        memLayer.CreateFeature(ft)
        ft.Destroy()

        rasterizedDS = memRasterDriver.Create('', srcOffset[2], srcOffset[3],
                                              1, gdal.GDT_Byte)
        rasterizedDS.SetGeoTransform(newGeoTransform)
        gdal.RasterizeLayer(rasterizedDS, [1], memLayer, burn_values=[1])
        rasterizedArray = rasterizedDS.ReadAsArray()

        srcArray = numpy.nan_to_num(srcArray)
        masked = numpy.ma.MaskedArray(srcArray,
                                      mask=numpy.logical_or(srcArray == noData,
                                                            numpy.logical_not(rasterizedArray)))

        self.calculateHypsometry(f.id(), fName, feedback, masked,
                                 cellXSize, cellYSize, percentage, step)

        memVDS = None
        rasterizedDS = None
        feedback.setProgress(int(current * total))

    rasterDS = None
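# raster.mapToPixel above is a QGIS processing helper; for a north-up raster
# (zero rotation terms) the equivalent arithmetic is just the inverse of the
# geotransform. A minimal sketch; the name map_to_pixel and the int truncation
# are my assumptions, not the helper's actual implementation.
def map_to_pixel(x, y, gt):
    column = int((x - gt[0]) / gt[1])  # gt[1] is the pixel width
    row = int((y - gt[3]) / gt[5])     # gt[5] is the (negative) pixel height
    return column, row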
def test_ogr_pds4_create_table_binary():
    options = ['VAR_LOGICAL_IDENTIFIER=logical_identifier',
               'VAR_TITLE=title',
               'VAR_INVESTIGATION_AREA_NAME=ian',
               'VAR_INVESTIGATION_AREA_LID_REFERENCE=INVESTIGATION_AREA_LID_REFERENCE',
               'VAR_OBSERVING_SYSTEM_NAME=osn',
               'VAR_TARGET=target',
               'VAR_TARGET_TYPE=target']

    for signedness in ['Signed', 'Unsigned']:
        for endianness in ['LSB', 'MSB']:
            ds = ogr.GetDriverByName('PDS4').CreateDataSource('/vsimem/test.xml',
                                                              options=options)
            layername = endianness
            with gdaltest.config_options({'PDS4_ENDIANNESS': endianness,
                                          'PDS4_SIGNEDNESS': signedness}):
                lyr = ds.CreateLayer(layername, options=['TABLE_TYPE=BINARY'])
                fld = ogr.FieldDefn('bool', ogr.OFTInteger)
                fld.SetSubType(ogr.OFSTBoolean)
                lyr.CreateField(fld)
                fld = ogr.FieldDefn('byte', ogr.OFTInteger)
                fld.SetWidth(2)
                lyr.CreateField(fld)
                fld = ogr.FieldDefn('int16', ogr.OFTInteger)
                fld.SetSubType(ogr.OFSTInt16)
                lyr.CreateField(fld)
                lyr.CreateField(ogr.FieldDefn('int', ogr.OFTInteger))
                lyr.CreateField(ogr.FieldDefn('int64', ogr.OFTInteger64))
                fld = ogr.FieldDefn('float', ogr.OFTReal)
                fld.SetSubType(ogr.OFSTFloat32)
                lyr.CreateField(fld)
                lyr.CreateField(ogr.FieldDefn('real', ogr.OFTReal))
                lyr.CreateField(ogr.FieldDefn('str', ogr.OFTString))
                lyr.CreateField(ogr.FieldDefn('datetime', ogr.OFTDateTime))
                lyr.CreateField(ogr.FieldDefn('date', ogr.OFTDate))
                lyr.CreateField(ogr.FieldDefn('time', ogr.OFTTime))

            sign = -1 if signedness == 'Signed' else 1
            f = ogr.Feature(lyr.GetLayerDefn())
            f['bool'] = 1
            f['byte'] = sign * 9
            f['int16'] = sign * 12345
            f['int'] = sign * 123456789
            f['int64'] = sign * 1234567890123
            f['float'] = 1.25
            f['real'] = 1.2567
            f['str'] = 'foo'
            f['datetime'] = '2019/01/24 12:34:56.789+00'
            f['date'] = '2019-01-24'
            f['time'] = '12:34:56.789'
            lyr.CreateFeature(f)
            ds = None

            f = gdal.VSIFOpenL('/vsimem/test.xml', 'rb')
            data = gdal.VSIFReadL(1, 100000, f).decode('ascii')
            gdal.VSIFCloseL(f)

            assert '_Binary' in data
            assert '_Character' not in data
            if endianness == 'LSB':
                assert 'LSB' in data, data
                assert 'MSB' not in data, data
            else:
                assert 'MSB' in data, data
                assert 'LSB' not in data, data
            if signedness == 'Signed':
                assert 'Signed' in data, data
                assert 'Unsigned' not in data, data
            else:
                assert 'Unsigned' in data, data
                assert 'Signed' not in data, data

            assert validate_xml('/vsimem/test.xml')

            ds = ogr.Open('/vsimem/test.xml')
            layername = endianness
            lyr = ds.GetLayerByName(layername)
            assert lyr.GetLayerDefn().GetFieldCount() == 11
            f = lyr.GetNextFeature()
            assert f['bool']
            assert f['byte'] == sign * 9
            assert f['int16'] == sign * 12345
            assert f['int'] == sign * 123456789
            assert f['int64'] == sign * 1234567890123
            assert f['float'] == 1.25
            assert f['real'] == 1.2567
            assert f['str'] == 'foo'
            assert f['datetime'] == '2019/01/24 12:34:56.789+00'
            assert f['date'] == '2019/01/24'
            assert f['time'] == '12:34:56.789'
            ds = None

    # Add new layer
    ds = ogr.Open('/vsimem/test.xml', update=1)
    sr = osr.SpatialReference()
    sr.SetFromUserInput('WGS84')
    lyr = ds.CreateLayer('bar', geom_type=ogr.wkbPoint25D, srs=sr,
                         options=['TABLE_TYPE=BINARY'])
    f = ogr.Feature(lyr.GetLayerDefn())
    f.SetGeometryDirectly(ogr.CreateGeometryFromWkt('POINT Z (1 2 3)'))
    lyr.CreateFeature(f)
    ds = None

    assert validate_xml('/vsimem/test.xml')

    ds = ogr.Open('/vsimem/test.xml')
    lyr = ds.GetLayerByName('bar')
    f = lyr.GetNextFeature()
    assert f.GetGeometryRef().ExportToIsoWkt() == 'POINT Z (1 2 3)'
    ds = None

    ogr.GetDriverByName('PDS4').DeleteDataSource('/vsimem/test.xml')
    gdal.Rmdir('/vsimem/test')
def resize_and_resample_dataset_uri(
        original_dataset_uri, bounding_box, out_pixel_size, output_uri,
        resample_method, output_datatype=None):
    """Resize and resample a dataset to larger or smaller pixel sizes.

    Args:
        original_dataset_uri (string): a GDAL dataset
        bounding_box (list): [upper_left_x, upper_left_y, lower_right_x, lower_right_y]
        out_pixel_size: the pixel size in projected linear units
        output_uri (string): the location of the new resampled GDAL dataset
        resample_method (string): the resampling technique, one of
            "nearest|bilinear|cubic|cubic_spline|lanczos"

    Returns:
        nothing
    """
    # The docstring must be the first statement in the function body, so the
    # deprecation notice is logged after it.
    L.critical('DEPRECATED!!! resize_and_resample_dataset_uri is deprecated. '
               'Use hb.resample_to_match (which is a wrapper).')

    resample_dict = {
        "nearest": gdal.GRA_NearestNeighbour,
        "near": gdal.GRA_NearestNeighbour,
        "nearest_neighbor": gdal.GRA_NearestNeighbour,
        "bilinear": gdal.GRA_Bilinear,
        "cubic": gdal.GRA_Cubic,
        "cubicspline": gdal.GRA_CubicSpline,
        "lanczos": gdal.GRA_Lanczos,
        "average": gdal.GRA_Average,
    }

    original_dataset = gdal.Open(original_dataset_uri)
    original_band = original_dataset.GetRasterBand(1)
    original_nodata = original_band.GetNoDataValue()

    # GDAL's Python bindings don't handle unsigned nodata values well and
    # sometimes return negative numbers; this guards against that. (The
    # None check is needed first, otherwise the modulo would raise a
    # TypeError on rasters without a nodata value.)
    if original_nodata is not None:
        if original_band.DataType == gdal.GDT_Byte:
            original_nodata %= 2**8
        if original_band.DataType == gdal.GDT_UInt16:
            original_nodata %= 2**16
        if original_band.DataType == gdal.GDT_UInt32:
            original_nodata %= 2**32

    if not output_datatype:
        output_datatype = original_band.DataType

    if original_nodata is None:
        L.debug('Nodata not defined in resize_and_resample_dataset_uri on ' +
                str(original_dataset_uri) + '. This can be correct but is '
                'dangerous because you might have the no_data_value contribute '
                'to the resampled values.')
        original_nodata = -9999

    original_sr = osr.SpatialReference()
    original_sr.ImportFromWkt(original_dataset.GetProjection())

    output_geo_transform = [
        bounding_box[0], out_pixel_size, 0.0,
        bounding_box[1], 0.0, -out_pixel_size]
    new_x_size = abs(
        int(np.round((bounding_box[2] - bounding_box[0]) / out_pixel_size)))
    new_y_size = abs(
        int(np.round((bounding_box[3] - bounding_box[1]) / out_pixel_size)))

    # Create the new x and y size.
    block_size = original_band.GetBlockSize()
    # If the original band is tiled, then its x blocksize will be different
    # than the number of columns.
    if block_size[0] != original_band.XSize \
            and original_band.XSize > 256 and original_band.YSize > 256:
        # It makes sense for a wad of invest functions to use 256x256 blocks,
        # so do that here.
        block_size[0] = 256
        block_size[1] = 256
        gtiff_creation_options = [
            'TILED=YES', 'BIGTIFF=IF_SAFER',
            'BLOCKXSIZE=%d' % block_size[0],
            'BLOCKYSIZE=%d' % block_size[1]]
    else:
        # This raster is so small or strangely aligned that the default
        # creation options will do.
        gtiff_creation_options = []

    hb.create_directories([os.path.dirname(output_uri)])
    gdal_driver = gdal.GetDriverByName('GTiff')
    output_dataset = gdal_driver.Create(
        output_uri, new_x_size, new_y_size, 1, output_datatype,
        options=gtiff_creation_options)
    output_band = output_dataset.GetRasterBand(1)
    if original_nodata is None:
        original_nodata = float(calculate_value_not_in_dataset(original_dataset))
    output_band.SetNoDataValue(original_nodata)

    # Set the geotransform
    output_dataset.SetGeoTransform(output_geo_transform)
    output_dataset.SetProjection(original_sr.ExportToWkt())

    # This needs to be a closure so we get the current time and can affect
    # state between invocations.
    def reproject_callback(df_complete, psz_message, p_progress_arg):
        """The argument names come from the GDAL API for callbacks."""
        try:
            current_time = time.time()
            if ((current_time - reproject_callback.last_time) > 5.0 or
                    (df_complete == 1.0 and reproject_callback.total_time >= 5.0)):
                # LOGGER.info(
                #     "ReprojectImage %.1f%% complete %s, psz_message %s",
                #     df_complete * 100, p_progress_arg[0], psz_message)
                print("ReprojectImage for resize_and_resample_dataset_uri " +
                      str(df_complete * 100) + " percent complete")
                reproject_callback.last_time = current_time
                reproject_callback.total_time += current_time
        except AttributeError:
            reproject_callback.last_time = time.time()
            reproject_callback.total_time = 0.0

    # Perform the projection/resampling
    gdal.ReprojectImage(
        original_dataset, output_dataset,
        original_sr.ExportToWkt(), original_sr.ExportToWkt(),
        resample_dict[resample_method], 0, 0, reproject_callback,
        [output_uri])

    # Make sure the dataset is closed and cleaned up
    original_band = None
    gdal.Dataset.__swig_destroy__(original_dataset)
    original_dataset = None

    output_dataset.FlushCache()
    gdal.Dataset.__swig_destroy__(output_dataset)
    output_dataset = None
    hb.calculate_raster_stats_uri(output_uri)
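# A hypothetical invocation of the (deprecated) helper above; the paths and
# the 0.1-degree pixel size are placeholders, not values from the original code.
resize_and_resample_dataset_uri(
    'data/input_dem.tif',              # existing GDAL raster (placeholder path)
    [-180.0, 90.0, 180.0, -90.0],      # [upper_left_x, upper_left_y, lower_right_x, lower_right_y]
    0.1,                               # output pixel size in projected units
    'data/output_dem_0p1deg.tif',      # placeholder output path
    'bilinear')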
# -*- coding: utf-8 -*-
"""
Created on Wed Apr 4 18:55:33 2012

@author: mag
"""

from osgeo import osr, gdal

infile = '/home/mag/data/OTHER/RS2 Agulhas and Lion/RS2_FQA_1xQGSS20101218_173930_00000005/'

# get the existing coordinate system
ds = gdal.Open("RADARSAT_2_CALIB:SIGMA0:" + infile + "product.xml")
old_cs = osr.SpatialReference()
old_cs.ImportFromWkt(ds.GetProjectionRef())

# create the new coordinate system
wgs84_wkt = """
GEOGCS["WGS 84",
    DATUM["WGS_1984",
        SPHEROID["WGS 84",6378137,298.257223563,
            AUTHORITY["EPSG","7030"]],
        AUTHORITY["EPSG","6326"]],
    PRIMEM["Greenwich",0,
        AUTHORITY["EPSG","8901"]],
    UNIT["degree",0.01745329251994328,
        AUTHORITY["EPSG","9122"]],
    AUTHORITY["EPSG","4326"]]"""
new_cs = osr.SpatialReference()
new_cs.ImportFromWkt(wgs84_wkt)
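# A likely continuation of the snippet above (an assumption about the script,
# not part of it): build the transformation and convert the dataset's
# upper-left corner to WGS84. Note that on GDAL 3+ the output axis order for
# EPSG:4326 is latitude-first unless OAMS_TRADITIONAL_GIS_ORDER is requested.
transform = osr.CoordinateTransformation(old_cs, new_cs)
gt = ds.GetGeoTransform()
x, y, _ = transform.TransformPoint(gt[0], gt[3])
print(x, y)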
from base64 import b64decode
import os
import re

import osmium as o
import osgeo.ogr as ogr
import osgeo.osr as osr
from shapely.wkb import loads, dumps
from shapely.prepared import prep

from osm_export_tool import GeomType, File

fab = o.geom.WKBFactory()
create_geom = lambda b: ogr.CreateGeometryFromWkb(bytes.fromhex(b))

epsg_4326 = osr.SpatialReference()
epsg_4326.ImportFromEPSG(4326)

CLOSED_WAY_KEYS = ['aeroway', 'amenity', 'boundary', 'building', 'building:part',
                   'craft', 'geological', 'historic', 'landuse', 'leisure',
                   'military', 'natural', 'office', 'place', 'shop', 'sport',
                   'tourism']
CLOSED_WAY_KEYVALS = {'highway': 'platform', 'public_transport': 'platform'}

def closed_way_is_polygon(tags):
    for key in CLOSED_WAY_KEYS:
        if key in tags:
            return True
    for key, val in CLOSED_WAY_KEYVALS.items():
        if key in tags and tags[key] == val:
            return True
    return False

def make_filename(s):
    return s.lower().replace(' ', '_')
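# Quick sanity checks for closed_way_is_polygon(); the tag dicts below are
# invented examples, not real OSM data.
assert closed_way_is_polygon({'building': 'yes'})          # key match
assert closed_way_is_polygon({'highway': 'platform'})      # key/value match
assert not closed_way_is_polygon({'highway': 'residential'})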
def test_osr_epsg_treats_as_northing_easting(epsg_code, is_northing_easting):
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(epsg_code)
    assert srs.EPSGTreatsAsNorthingEasting() == is_northing_easting
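# How such a parametrized test is typically driven; the decorator and the EPSG
# codes below are illustrative guesses, not the project's actual list.
# EPSG:32631 (WGS 84 / UTM 31N) is easting/northing; EPSG:2193 (NZTM2000) is
# registered with northing/easting axis order.
import pytest

@pytest.mark.parametrize('epsg_code,is_northing_easting',
                         [(32631, False), (2193, True)])
def example_treats_as_northing_easting(epsg_code, is_northing_easting):
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(epsg_code)
    assert srs.EPSGTreatsAsNorthingEasting() == is_northing_easting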
def epsg2wkt(epsg):
    """Return the WKT representation of the given EPSG code."""
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(epsg)
    return srs.ExportToWkt()
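# Example round trip for epsg2wkt(). With GDAL's default WKT1 output, the
# string for EPSG:4326 starts with GEOGCS["WGS 84",... (the exact text varies
# by GDAL version).
wkt = epsg2wkt(4326)
assert wkt.startswith('GEOGCS')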
def create_representation(self, input_path, input_data, workspace, cfg, src_meta):
    """Build the tiled representation of a trajectory product for all zoom levels."""
    logger.info('[{}] Start initialisation.'.format(datetime.datetime.now()))
    temporary_files = []

    # Set projections as OSR spatial references.
    # By the way, we add '+over' to the output projection if cylindrical
    # (if data is around the dateline, we'll keep things continuous until the
    # tiling, which will use a modulo to set a correct tile numbering).
    input_proj = cfg['input_proj']
    input_srs = osr.SpatialReference()
    input_srs.ImportFromEPSG(input_proj)
    output_proj = cfg['output_proj']
    output_srs = osr.SpatialReference()
    if output_proj in mtdt.CYLINDRIC_PROJ:
        tmp_srs = osr.SpatialReference()
        tmp_srs.ImportFromEPSG(output_proj)
        output_srs.ImportFromProj4(tmp_srs.ExportToProj4() + ' +over')
    else:
        output_srs.ImportFromEPSG(output_proj)

    # Get trajectory GCPs in output projection
    input_dset = gdal.Open(input_path)
    input_tf = gdal.Transformer(input_dset, None, ['MAX_GCP_ORDER=-1'])
    traj_gcps = get_trajectory_gcps(input_dset, input_srs, output_srs,
                                    input_transformer=input_tf)

    # Get trajectory resolution
    traj_res = get_trajectory_mean_resolution(input_dset, input_srs,
                                              input_transformer=input_tf)
    # traj_res = get_trajectory_output_resolutions(input_dset, input_srs, output_srs,
    #                                              input_transformer=input_tf)

    # Set output options
    map_extent = [float(ext) for ext in cfg['extent'].split(' ')]
    tilesmap = TilesMap(map_extent)
    min_zoom = cfg['output_options'].get('min-zoom', '3')
    max_zoom = cfg['output_options'].get('max-zoom', '+1')
    if max_zoom.startswith(('+', '-')):
        _max_zoom = max(tilesmap.res2zoom([traj_res, traj_res]))
        if max_zoom.startswith('-'):
            max_zoom = _max_zoom - int(max_zoom[1:])
        elif max_zoom.startswith('+'):
            max_zoom = _max_zoom + int(max_zoom[1:])
    else:
        max_zoom = int(max_zoom)
    if min_zoom.startswith(('+', '-')):
        widhei = [traj_gcps['gcpmidx'].max() - traj_gcps['gcpmidx'].min(),
                  traj_gcps['gcpmidy'].max() - traj_gcps['gcpmidy'].min()]
        _min_res = [widhei[i] / tilesmap.tile_size[i] for i in [0, 1]]
        _min_zoom = min(tilesmap.res2zoom(_min_res))
        if min_zoom.startswith('-'):
            min_zoom = _min_zoom - int(min_zoom[1:])
        elif min_zoom.startswith('+'):
            min_zoom = _min_zoom + int(min_zoom[1:])
    else:
        min_zoom = int(min_zoom)
    if min_zoom > max_zoom:
        max_zoom = min_zoom
    cfg['output_options']['min-zoom'] = str(min_zoom)
    cfg['output_options']['max-zoom'] = str(max_zoom)
    linewidth_meter = float(cfg['output_options'].get('linewidth-meter', '5000'))
    min_linewidth_pixel = int(cfg['output_options'].get('min-linewidth-pixel', '4'))
    resampling = cfg['output_options'].get('resampling', 'average')
    cfg['output_options']['linewidth-meter'] = str(linewidth_meter)
    cfg['output_options']['min-linewidth-pixel'] = str(min_linewidth_pixel)
    cfg['output_options']['resampling'] = resampling
    logger.info('[{}] End initialisation.'.format(datetime.datetime.now()))

    # Remove unwanted bands
    nb_bands = src_meta.get('nb_bands', 0)
    ispaletted = src_meta.get('ispaletted', False)
    if nb_bands == 2 and ispaletted:
        bands_ok_path = os.path.join(workspace, 'fix_bands.vrt')
        remove_bands(input_path, bands_ok_path, bands2keep=[1])
        temporary_files.append(bands_ok_path)
        src_meta['nb_bands'] = 1
        if 0 < len(src_meta['nodatavalues']):
            src_meta['nodatavalues'] = [src_meta['nodatavalues'][0]]
    else:
        bands_ok_path = input_path

    # Loop on zooms (average traj, modify traj GCPs, shape computation, warp, cut, tile)
    viewport = cfg['viewport'].split(' ')
    viewport_geom = ogr.CreateGeometryFromWkt(mtdt._get_bbox_wkt(*viewport))
    zooms_tilemap = {}
    zooms_transparency = {}
    ## NEW metadata.py
    ## Before
    # zooms_shape_geom = {}
    # zooms_shape_extent = {}
    ## Now
    zooms_meta = {}
    ## \NEW metadata.py
    for zoom in range(min_zoom, max_zoom + 1):
        logger.info('[{}] Start processing zoom {}.'.format(
            datetime.datetime.now(), zoom))
        zoom_res = tilesmap.zoom2res(zoom)

        # Average
        avrg_res = max(zoom_res)
        if resampling == 'average' and traj_res < avrg_res:
            avrg_ok_path = os.path.join(workspace,
                                        'fix_average_zoom{:02d}.tiff'.format(zoom))
            navrg = np.ceil(avrg_res / traj_res).astype('int')
            try:
                logger.info('[{}] Start averaging.'.format(datetime.datetime.now()))
                average_trajectory(bands_ok_path, avrg_ok_path, navrg)
                logger.info('[{}] End averaging.'.format(datetime.datetime.now()))
            except:
                logger.error('Could not average.')
                raise
            temporary_files.append(avrg_ok_path)
        else:
            avrg_ok_path = bands_ok_path

        # Transform GCPs
        linewidth = [max([linewidth_meter, min_linewidth_pixel * r])
                     for r in zoom_res]
        gcps_ok_path = os.path.join(workspace,
                                    'fix_gcps_zoom{:02d}.vrt'.format(zoom))
        try:
            logger.info('[{}] Start modifying gcps.'.format(datetime.datetime.now()))
            modify_trajectory_gcps(avrg_ok_path, gcps_ok_path, traj_gcps, linewidth)
            logger.info('[{}] End modifying gcps.'.format(datetime.datetime.now()))
        except:
            logger.error('Could not modify gcps.')
            raise
        temporary_files.append(gcps_ok_path)

        # Compute shape geometry in the same way it is done in metadata.py
        # (we redo it at each zoom since GCPs are changed).
        logger.info('[{}] Start computing shape.'.format(datetime.datetime.now()))
        gcps_ok_dset = gdal.Open(gcps_ok_path)
        gcps_ok_tf = gdal.Transformer(gcps_ok_dset, None, ['MAX_GCP_ORDER=-1'])
        shape = get_trajectory_shape(gcps_ok_dset, transformer=gcps_ok_tf,
                                     ndist=330, min_shape_res=750000.,
                                     max_shape_points=33)
        srs4326 = osr.SpatialReference()
        srs4326.ImportFromEPSG(4326)
        proj_tf = osr.CoordinateTransformation(output_srs, srs4326)
        lonlat_shape = proj_tf.TransformPoints(shape)
        ## NEW metadata.py
        ## Before
        # shape_geom0 = mtdt._get_shape_geometry(lonlat_shape, gcps_ok_dset,
        #                                        input_proj, output_proj)
        # shape_geom, _, _ = mtdt._get_crop_info(shape_geom0, viewport_geom)
        # shape_extent = get_shape_extent(shape_geom, lonlat_shape)
        # zooms_shape_geom[zoom] = shape_geom
        # zooms_shape_extent[zoom] = shape_extent
        # center_long = None
        # if input_proj in mtdt.CYLINDRIC_PROJ and output_proj in mtdt.STEREO_PROJ:
        #     shape_lon = [lonlat[0] for lonlat in shape]
        #     minlon, maxlon = min(shape_lon), max(shape_lon)
        #     if (maxlon > 180 and minlon > -180) or (maxlon < 180 and minlon < -180):
        #         center_long = '{}'.format((maxlon + minlon) / 2.)
        # tiling_extent = tilesmap.tiling_extent(zoom, shape_extent)
        ## Now
        output_shape_geom, bbox_infos, warp_infos = mtdt._get_output_shape(
            lonlat_shape, gcps_ok_dset, input_proj, output_proj, viewport_geom)
        zooms_meta[zoom] = {}
        zooms_meta[zoom]['lonlat_shape'] = lonlat_shape
        zooms_meta[zoom]['output_shape_geom'] = output_shape_geom
        zooms_meta[zoom]['bbox_infos'] = bbox_infos
        zooms_meta[zoom]['warp_infos'] = warp_infos
        shape_extent = warp_infos['extent']
        tiling_extent = tilesmap.tiling_extent(zoom, shape_extent)
        ## \NEW metadata.py
        logger.info('[{}] End computing shape.'.format(datetime.datetime.now()))

        # TMP : Check GDAL transformer
        # print(linewidth)
        # gdaltf = gdal.Transformer(gcps_ok_dset, None, ['MAX_GCP_ORDER=-1'])
        # gridx = np.linspace(tiling_extent[0], tiling_extent[2], num=800)
        # gridy = np.linspace(tiling_extent[1], tiling_extent[3], num=800)
        # gridxy = np.array((np.tile(gridx[:, np.newaxis], (1, gridy.size)),
        #                    np.tile(gridy[np.newaxis, :], (gridx.size, 1))))
        # dimsxy = gridxy.shape[1:3]
        # gridxy = gridxy.reshape((2, -1)).transpose()
        # pixlin = np.array(gdaltf.TransformPoints(1, gridxy)[0])
        # pix = pixlin[:, 0].reshape(dimsxy).transpose()
        # lin = pixlin[:, 1].reshape(dimsxy).transpose()
        # gcps = gcps_ok_dset.GetGCPs()
        # gcpx = np.array([gcp.GCPX for gcp in gcps])
        # gcpy = np.array([gcp.GCPY for gcp in gcps])
        # import matplotlib.pyplot as plt
        # plt.figure()
        # plt.imshow(pix, origin='lower', interpolation='nearest',
        #            extent=[gridx.min(), gridx.max(), gridy.min(), gridy.max()])
        # plt.colorbar(label='pixel'); plt.xlabel('x'); plt.ylabel('y')
        # plt.plot(gcpx, gcpy, 'k+')
        # plt.xlim((gridx.min(), gridx.max())); plt.ylim((gridy.min(), gridy.max()))
        # plt.figure()
        # plt.imshow(lin, origin='lower', interpolation='nearest',
        #            extent=[gridx.min(), gridx.max(), gridy.min(), gridy.max()])
        # plt.colorbar(label='line'); plt.xlabel('x'); plt.ylabel('y')
        # plt.plot(gcpx, gcpy, 'k+')
        # plt.xlim((gridx.min(), gridx.max())); plt.ylim((gridy.min(), gridy.max()))
        # plt.show()
        # \TMP

        # Estimate the tiles to be generated
        logger.info('[{}] Start estimating tiles.'.format(datetime.datetime.now()))
        traj_bboxes = get_trajectory_bboxes(gcps_ok_dset, nbbox=128,
                                            max_extent=tiling_extent,
                                            transformer=gcps_ok_tf)
        zoom_tiles = []
        for bbox in traj_bboxes:
            zoom_tiles.extend(tilesmap.bbox2tiles(zoom, bbox))
        tiles_list = {zoom: list(set(zoom_tiles))}
        logger.info('[{}] End estimating tiles.'.format(datetime.datetime.now()))

        # Warp
        warp_res = zoom_res
        warp_extent = tiling_extent
        if resampling == 'average':
            _resampling = 'near'
        else:
            _resampling = resampling
        isrgb = src_meta.get('isrgb', False)
        if 0 < len(src_meta['nodatavalues']) and not isrgb:
            dstnodata = src_meta['nodatavalues']
            dstalpha = False
        else:
            dstnodata = None
            dstalpha = True
        tps = src_meta.get('use_gcp', False)
        warp_ok_path = os.path.join(workspace, 'warp_zoom{:02d}.vrt'.format(zoom))
        warp(gcps_ok_path, output_srs, warp_ok_path, output_srs, warp_res,
             warp_extent, _resampling, dstnodata, dstalpha, tps)
        temporary_files.append(warp_ok_path)

        # Cut
        ## NEW metadata.py
        ## Before
        # shape_geom_type = shape_geom.GetGeometryType()
        # if shape_geom_type != ogr.wkbGeometryCollection and \
        #    (any(tiling_extent[i] < float(viewport[i]) for i in [0, 1]) or
        #     any(tiling_extent[i] > float(viewport[i]) for i in [2, 3])):
        ## Now
        if cfg['output_proj_type'] != 'cylindric' and \
           (any(tiling_extent[i] < float(viewport[i]) for i in [0, 1]) or
                any(tiling_extent[i] > float(viewport[i]) for i in [2, 3])):
            ## \NEW metadata.py
            # Make cutline
            cut_extent = viewport
            cutline_path = os.path.join(workspace,
                                        'cutline_zoom{:02d}.csv'.format(zoom))
            write_cutline(cutline_path, cut_extent)
            temporary_files.append(cutline_path)
            # Do cut
            dstnodata = src_meta.get('nodatavalues', None)
            cut_ok_path = os.path.join(workspace, 'cut_zoom{:02d}.vrt'.format(zoom))
            cut(warp_ok_path, cut_ok_path, cutline_path, dstnodata=dstnodata)
            temporary_files.append(cut_ok_path)
        else:
            cut_ok_path = warp_ok_path

        # Tile
        srcnodata = src_meta.get('nodatavalues', None)
        paletted = src_meta.get('isrgb', False)
        ## NEW metadata.py
        center_long = warp_infos['center_long']
        ## \NEW metadata.py
        debug = cfg['debug']
        tiles_list_path = os.path.join(workspace,
                                       'tiles_list_zoom{:02d}.json'.format(zoom))
        with open(tiles_list_path, 'w') as tl_file:
            json.dump(tiles_list, tl_file)
        temporary_files.append(tiles_list_path)
        tiles_ok_path = tile(cut_ok_path, workspace, output_proj, map_extent,
                             zoom, zoom, srcnodata=srcnodata, paletted=paletted,
                             center_long=center_long, debug=debug,
                             tiles_list=tiles_list_path)

        # Move tiles and read tilemap.json / transparency.json
        tiles_dir = os.path.join(workspace, 'tiles.zxy')
        if zoom == min_zoom:
            if os.path.isdir(tiles_dir):
                shutil.rmtree(tiles_dir)
            os.mkdir(tiles_dir)
        os.rename(os.path.join(tiles_ok_path, '{}'.format(zoom)),
                  os.path.join(tiles_dir, '{}'.format(zoom)))
        with open(os.path.join(tiles_ok_path, 'tilemap.json')) as tile_file:
            zooms_tilemap[zoom] = json.load(tile_file)
        with open(os.path.join(tiles_ok_path, 'transparency.json')) as transp_file:
            zooms_transparency[zoom] = json.load(transp_file)
        shutil.rmtree(tiles_ok_path)
        logger.info('[{}] End processing zoom {}.'.format(
            datetime.datetime.now(), zoom))

    # Clean temporary files. filter()/map() are lazy in Python 3, so
    # materialize the list and remove the files with an explicit loop.
    if not cfg.get('keep_intermediary_files', False):
        to_remove = list(set(x for x in temporary_files
                             if x != input_path and os.path.exists(x)))
        for path in to_remove:
            os.remove(path)
        logger.debug('These temporary files have been removed: {}'.format(to_remove))

    ## NEW metadata.py
    ## Before
    # Set bbox and shape with min zoom
    # ref_zoom = min_zoom
    # bbox = zooms_shape_extent[ref_zoom]
    # bbox_str = "POLYGON(({b[0]:f} {b[3]:f},{b[2]:f} {b[3]:f},{b[2]:f} {b[1]:f},"\
    #            "{b[0]:f} {b[1]:f},{b[0]:f} {b[3]:f}))".format(b=bbox)
    # shape_geom = zooms_shape_geom[ref_zoom]
    # shape_wkt = shape_geom.ExportToWkt().replace('POLYGON (', 'POLYGON(')
    # if not cfg['no_shape']:
    #     src_meta['shape_str'] = shape_wkt
    # src_meta['real_shape_str'] = shape_wkt
    ## Now
    # Update src_meta and set bbox_str. We use min zoom as the reference.
    ref_zoom = min_zoom
    real_shape_wkt = zooms_meta[ref_zoom]['output_shape_geom'].ExportToWkt()
    real_shape_wkt = real_shape_wkt.replace('POLYGON (', 'POLYGON(')
    if cfg['no_shape']:
        shape_wkt = 'POINT(0 0)'
    else:
        shape_wkt = real_shape_wkt
    src_meta['lonlat_shape'] = zooms_meta[ref_zoom]['lonlat_shape']
    src_meta['real_shape_str'] = real_shape_wkt
    src_meta['shape_str'] = shape_wkt
    src_meta['bbox_infos'] = zooms_meta[ref_zoom]['bbox_infos']
    src_meta['warp_infos'] = zooms_meta[ref_zoom]['warp_infos']
    bbox = zooms_meta[ref_zoom]['bbox_infos']['bbox']
    bbox_str = "POLYGON(({b[0]:f} {b[3]:f},{b[2]:f} {b[3]:f},{b[2]:f} {b[1]:f},"\
               "{b[0]:f} {b[1]:f},{b[0]:f} {b[3]:f}))".format(b=bbox)
    ## \NEW metadata.py

    # Reconstruct tilemap.json / transparency.json
    tilemap_dict = zooms_tilemap[ref_zoom]
    ## NEW metadata.py
    ## Before
    # tilemap_dict['bbox'] = bbox
    ## Now
    # Do nothing: why modify bbox in tilemap.json if we don't do it for raster tiles?
    ## \NEW metadata.py
    for z, d in zooms_tilemap.items():  # iteritems() is Python 2 only
        if z != ref_zoom:
            tilemap_dict['tilesets'].update(d['tilesets'])
    with open(os.path.join(tiles_dir, 'tilemap.json'), 'w') as tile_file:
        json.dump(tilemap_dict, tile_file, indent=2)
    transparency_dict = zooms_transparency[ref_zoom]
    for z, d in zooms_transparency.items():
        if z != ref_zoom:
            transparency_dict.update(d)
    with open(os.path.join(tiles_dir, 'transparency.json'), 'w') as transp_file:
        json.dump(transparency_dict, transp_file, indent=0)

    tiles_mask = create_tiles_mask(tiles_dir)
    logger.debug('Tiles mask: {}'.format(tiles_mask))

    resolutions = []
    for zoom, tileset in tilemap_dict['tilesets'].items():
        resolutions.append('{}:{}'.format(zoom, tileset['units_per_pixel']))
    zooms = [int(z) for z in tilemap_dict['tilesets'].keys()]
    resolutions.append('9998:{}*{}'.format(max(3, min(zooms)), max(zooms)))
    resolutions.append('9999:{}'.format(tiles_mask))

    extra_meta = {
        'resolutions': resolutions,
        'min_zoom_level': min(zooms),
        'max_zoom_level': max(zooms),
        'bbox_str': bbox_str,
        'output_path': os.path.abspath(tiles_dir)
    }

    # Workaround for cross-IDL
    ## NEW metadata.py
    ## Before
    # shape_geom = ogr.CreateGeometryFromWkt(src_meta['shape_str'])
    # if ogr.wkbGeometryCollection == shape_geom.GetGeometryType() and \
    #    2 == shape_geom.GetGeometryCount():
    #     l0, r0, b0, t0 = shape_geom.GetGeometryRef(0).GetEnvelope()  # West
    #     l1, r1, b1, t1 = shape_geom.GetGeometryRef(1).GetEnvelope()  # East
    #     if l0 + r0 > l1 + r1:
    #         # Switch coordinates so that
    #         # l0, r0, t0, b0 are the coordinates of the western shape
    #         # l1, r1, t1, b1 are the coordinates of the eastern shape
    #         l0, r0, t0, b0, l1, r1, t1, b1 = l1, r1, t1, b1, l0, r0, t0, b0
    #     logger.debug('Checking XIDL...')
    #     logger.debug('{} {} {} {} vs {} {} {} {}'.format(l0, r0, b0, t0,
    #                                                      l1, r1, b1, t1))
    #     if XIDL_FIX_LON_DELTA + r0 < l1:
    #         bbox_pattern = 'POLYGON(({} {}, {} {}, {} {}, {} {}, {} {}))'
    #         extra_meta['w_bbox'] = bbox_pattern.format(l0, t0, r0, t0,
    #                                                    r0, b0, l0, b0,
    #                                                    l0, t0)
    #         extra_meta['e_bbox'] = bbox_pattern.format(l1, t1, r1, t1,
    #                                                    r1, b1, l1, b1,
    #                                                    l1, t1)
    ## Now
    if zooms_meta[ref_zoom]['bbox_infos']['xIDL']:
        # bboxes contain [xmin, ymin, xmax, ymax]
        bbox_pattern = 'POLYGON(({} {}, {} {}, {} {}, {} {}, {} {}))'
        l0, b0, r0, t0 = zooms_meta[ref_zoom]['bbox_infos']['w_bbox']
        extra_meta['w_bbox'] = bbox_pattern.format(l0, t0, r0, t0,
                                                   r0, b0, l0, b0, l0, t0)
        l1, b1, r1, t1 = zooms_meta[ref_zoom]['bbox_infos']['e_bbox']
        extra_meta['e_bbox'] = bbox_pattern.format(l1, t1, r1, t1,
                                                   r1, b1, l1, b1, l1, t1)
    ## \NEW metadata.py
    return extra_meta
def saveTiff(self, drappingMain):
    rasterPath = QFileDialog.getSaveFileName(drappingMain, "save file dialog",
                                             "/ortho.tiff", "Images (*.tiff)")[0]
    maskedOrtho = self.ortho
    pointRaster = self.pointRaster
    im = self.image
    if rasterPath:
        cols = pointRaster.RasterXSize
        rows = pointRaster.RasterYSize
        geoTrans = pointRaster.GetGeoTransform()
        geoTrans = list(geoTrans)
        x_min = geoTrans[0]
        pixelWidth = geoTrans[1]
        y_min = geoTrans[3]
        pixelHeight = geoTrans[5]
        nDim = np.ndim(im)
        if nDim == 3:
            nBand = im.shape[2]
            driver = gdal.GetDriverByName('GTiff')
            outRaster = driver.Create(rasterPath, cols, rows, nBand, gdal.GDT_UInt16)
            outRaster.SetGeoTransform((x_min, pixelWidth, 0, y_min, 0, pixelHeight))
            for i in range(nBand):
                outband = outRaster.GetRasterBand(i + 1)
                outband.WriteArray(np.uint16(maskedOrtho[:, :, i]))
                outband.FlushCache()
            outRasterSRS = osr.SpatialReference()
            outRasterSRS.ImportFromEPSG(self.epsg)
            outRaster.SetProjection(outRasterSRS.ExportToWkt())
            outRaster = None
            # driver = gdal.GetDriverByName('GTiff')
            # outRaster = driver.Create(rasterSaveName, cols, rows, 1, gdal.GDT_UInt16)
            # outRaster.SetGeoTransform((originX, pixelWidth, 0, originY, 0, pixelHeight))
            # outband = outRaster.GetRasterBand(1)
            # outband.WriteArray(boolMat)
            # outRasterSRS = osr.SpatialReference()
            # outRasterSRS.ImportFromEPSG(self.crs.srsid())  # 2056
            # outRaster.SetProjection(outRasterSRS.ExportToWkt())
        else:
            driver = gdal.GetDriverByName('GTiff')
            outRaster = driver.Create(rasterPath, cols, rows, 1, gdal.GDT_UInt16)
            outRaster.SetGeoTransform((x_min, pixelWidth, 0, y_min, 0, pixelHeight))
            outband = outRaster.GetRasterBand(1)
            outband.WriteArray(np.uint16(maskedOrtho))
            outRasterSRS = osr.SpatialReference()
            outRasterSRS.ImportFromEPSG(self.epsg)
            outRaster.SetProjection(outRasterSRS.ExportToWkt())
            outband.FlushCache()
            outRaster = None
def generatePointRasterLayer(self):
    imXLine = self.XLine
    imYLine = self.YLine
    ortho = self.ortho
    Xmin = self.minX
    Ymin = self.minY
    Xmax = self.maxX
    Ymax = self.maxY
    resol = self.resol

    # Save the points into a new in-memory datasource
    pointDriver = ogr.GetDriverByName("MEMORY")
    pointDataSource = pointDriver.CreateDataSource('memData')
    # open the memory datasource with write access
    # tmp = pointDriver.Open('memData', 1)
    self.epsg = int(self.crs.authid().split(':')[1])
    pointLayerSRS = osr.SpatialReference()
    pointLayerSRS.ImportFromEPSG(self.epsg)
    pointLayer = pointDataSource.CreateLayer("Points", pointLayerSRS,
                                             geom_type=ogr.wkbPoint)
    # pointLayer.SetProjection(pointLayerSRS.ExportToWkt())

    # Add an ID field
    idField = ogr.FieldDefn("id", ogr.OFTInteger)
    pointLayer.CreateField(idField)

    # Create the feature and set values
    featureDefn = pointLayer.GetLayerDefn()
    feature = ogr.Feature(featureDefn)

    # Fill the layer with points
    for i in range(imXLine.shape[0]):
        points = ogr.Geometry(ogr.wkbPoint)
        points.AddPoint(float(imXLine[i]), float(imYLine[i]))
        feature.SetGeometry(points)
        feature.SetField("id", 1)
        pointLayer.CreateFeature(feature)

    # Close DataSource
    # outDataSource.Destroy()

    # Generate rasterized point layer
    # -------------------------------
    cols = ortho.shape[0]
    rows = ortho.shape[1]
    originX = Xmin
    originY = Ymax
    pixelWidth = resol
    pixelHeight = resol

    driver = gdal.GetDriverByName('MEM')
    pointRaster = driver.Create('memory', cols, rows, 1, gdal.GDT_UInt16)
    pointRaster.SetGeoTransform((originX, pixelWidth, 0, originY, 0, -pixelHeight))
    # pointBand = pointRaster.GetRasterBand(1)
    # outband.WriteArray(boolMat)
    pointRasterSRS = osr.SpatialReference()
    pointRasterSRS.ImportFromEPSG(self.epsg)
    pointRaster.SetProjection(pointRasterSRS.ExportToWkt())
    # pointBand.FlushCache()

    # Fill layer
    # ----------
    gdal.RasterizeLayer(pointRaster, [1], pointLayer)  # , outLayer)
    # pointBand = pointRaster.GetRasterBand(1)
    # array = pointBand.ReadAsArray()
    # plt.imshow(array)
    # plt.show()
    self.pointRaster = pointRaster
geotransform[2] = X pixel rotation
geotransform[3] = North/South location of Upper Left corner
geotransform[4] = Y pixel rotation
geotransform[5] = Y pixel size

Xgeo = gt(0) + Xpixel*gt(1) + Yline*gt(2)
Ygeo = gt(3) + Xpixel*gt(4) + Yline*gt(5)
"""
print(gt)

# <demo> --- stop ---

# Projection change for a point:
proj_out = osr.SpatialReference()
proj_out.ImportFromEPSG(4326)
# proj_in is a string, so it must be converted to a SpatialReference object:
proj_in = osr.SpatialReference(proj_in)
transf = osr.CoordinateTransformation(proj_in, proj_out)
punto = transf.TransformPoint(gt[0], gt[3])
print(punto)

# <demo> --- stop ---

# Using the geotransform functions
# Pixel to coordinates:
gt = (1, 1, 0, 1, 0, 1)
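# Worked example of the two formulas above with the toy geotransform
# gt = (1, 1, 0, 1, 0, 1): pixel (Xpixel=2, Yline=3) maps to
# Xgeo = 1 + 2*1 + 3*0 = 3 and Ygeo = 1 + 2*0 + 3*1 = 4.
Xpixel, Yline = 2, 3
Xgeo = gt[0] + Xpixel * gt[1] + Yline * gt[2]
Ygeo = gt[3] + Xpixel * gt[4] + Yline * gt[5]
print(Xgeo, Ygeo)  # 3 4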
def save_footprints(self, map_name):
    if self._products_df_sorted is None:
        return
    if self._apiname == 'USGS_EE':
        gs.fatal(_("USGS Earth Explorer does not support footprint download."))

    try:
        from osgeo import ogr, osr
    except ImportError as e:
        gs.fatal(_("Option <footprints> requires GDAL library: {}").format(e))

    gs.message(_("Writing footprints into <{}>...").format(map_name))
    driver = ogr.GetDriverByName("GPKG")
    tmp_name = gs.tempfile() + '.gpkg'
    data_source = driver.CreateDataSource(tmp_name)

    srs = osr.SpatialReference()
    srs.ImportFromEPSG(4326)

    # features can be polygons or multi-polygons
    layer = data_source.CreateLayer(str(map_name), srs, ogr.wkbMultiPolygon)

    # attributes
    attrs = OrderedDict([
        ("uuid", ogr.OFTString),
        ("ingestiondate", ogr.OFTString),
        ("cloudcoverpercentage", ogr.OFTInteger),
        ("producttype", ogr.OFTString),
        ("identifier", ogr.OFTString)
    ])

    # Sentinel-1 data does not have cloudcoverpercentage
    prod_types = list(self._products_df_sorted["producttype"])
    s1_types = ["SLC", "GRD"]
    if any(prod_type in prod_types for prod_type in s1_types):
        del attrs["cloudcoverpercentage"]

    for key in attrs.keys():
        field = ogr.FieldDefn(key, attrs[key])
        layer.CreateField(field)

    # features
    for idx in range(len(self._products_df_sorted['uuid'])):
        wkt = self._products_df_sorted['footprint'][idx]
        feature = ogr.Feature(layer.GetLayerDefn())
        newgeom = ogr.CreateGeometryFromWkt(wkt)
        # convert polygons to multi-polygons
        newgeomtype = ogr.GT_Flatten(newgeom.GetGeometryType())
        if newgeomtype == ogr.wkbPolygon:
            multigeom = ogr.Geometry(ogr.wkbMultiPolygon)
            multigeom.AddGeometryDirectly(newgeom)
            feature.SetGeometry(multigeom)
        else:
            feature.SetGeometry(newgeom)
        for key in attrs.keys():
            if key == 'ingestiondate':
                value = self._products_df_sorted[key][idx].strftime("%Y-%m-%dT%H:%M:%SZ")
            else:
                value = self._products_df_sorted[key][idx]
            feature.SetField(key, value)
        layer.CreateFeature(feature)
        feature = None

    data_source = None

    # coordinates of footprints are in WKT -> fp precision issues -> snap
    gs.run_command('v.import', input=tmp_name, output=map_name,
                   layer=map_name, snap=1e-10, quiet=True)
def test_osr_epsg_11():
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(2065)
def get_wkt_from_epsg_code(epsg_code):
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(int(epsg_code))
    wkt = srs.ExportToWkt()
    return wkt
def test_osr_epsg_13():
    # One exact match
    sr = osr.SpatialReference()
    sr.SetFromUserInput("""PROJCS["ETRS89 / UTM zone 32N (N-E)",
    GEOGCS["ETRS89",
        DATUM["European_Terrestrial_Reference_System_1989",
            SPHEROID["GRS 1980",6378137,298.257222101,
                AUTHORITY["EPSG","7019"]],
            TOWGS84[0,0,0,0,0,0,0],
            AUTHORITY["EPSG","6258"]],
        PRIMEM["Greenwich",0,
            AUTHORITY["EPSG","8901"]],
        UNIT["degree",0.0174532925199433,
            AUTHORITY["EPSG","9122"]],
        AUTHORITY["EPSG","4258"]],
    PROJECTION["Transverse_Mercator"],
    PARAMETER["latitude_of_origin",0],
    PARAMETER["central_meridian",9],
    PARAMETER["scale_factor",0.9996],
    PARAMETER["false_easting",500000],
    PARAMETER["false_northing",0],
    UNIT["metre",1,
        AUTHORITY["EPSG","9001"]],
    AXIS["Northing",NORTH],
    AXIS["Easting",EAST]]""")
    matches = sr.FindMatches()
    assert len(matches) == 1 and matches[0][1] == 100
    assert matches[0][0].IsSame(sr)

    # Another one
    sr = osr.SpatialReference()
    sr.ImportFromEPSG(3044)
    sr.MorphToESRI()
    sr.SetFromUserInput(sr.ExportToWkt())
    matches = sr.FindMatches()
    assert len(matches) == 1 and matches[0][1] == 100
    assert not matches[0][0].IsSame(sr)

    # Two matches (and test GEOGCS)
    # This will now match with 4126 (which is deprecated), since the datum
    # is identified to 6126 and GetEPSGGeogCS has logic to subtract 2000 to it.
    # sr.SetFromUserInput("""GEOGCS["myLKS94",
    # DATUM["Lithuania_1994_ETRS89",
    #     SPHEROID["GRS 1980",6378137,298.257222101],
    #     TOWGS84[0,0,0,0,0,0,0]],
    # PRIMEM["Greenwich",0],
    # UNIT["degree",0.0174532925199433]]""")
    # matches = sr.FindMatches()
    # if len(matches) != 2:
    #     gdaltest.post_reason('fail')
    #     print(matches)
    #     return 'fail'
    # if matches[0][0].GetAuthorityCode(None) != '4126' or matches[0][1] != 90:
    #     gdaltest.post_reason('fail')
    #     print(matches)
    #     return 'fail'
    # if matches[1][0].GetAuthorityCode(None) != '4669' or matches[1][1] != 90:
    #     gdaltest.post_reason('fail')
    #     print(matches)
    #     return 'fail'

    # Very approximate matches
    sr.SetFromUserInput("""GEOGCS["myGEOGCS",
    DATUM["my_datum",
        SPHEROID["WGS 84",6378137,298.257223563]],
    PRIMEM["Greenwich",0],
    UNIT["degree",0.0174532925199433]]
""")
    matches = sr.FindMatches()
    assert matches

    # One single match, but not similar according to IsSame()
    sr = osr.SpatialReference()
    sr.SetFromUserInput("""PROJCS["WGS 84 / UTM zone 32N",
    GEOGCS["WGS 84",
        DATUM["WGS_1984",
            SPHEROID["WGS 84",6378137,298.257223563,
                AUTHORITY["EPSG","7030"]],
            AUTHORITY["EPSG","6326"]],
        PRIMEM["Greenwich",0,
            AUTHORITY["EPSG","8901"]],
        UNIT["degree",0.0174532925199433,
            AUTHORITY["EPSG","9122"]],
        AUTHORITY["EPSG","4326"]],
    PROJECTION["Transverse_Mercator"],
    PARAMETER["latitude_of_origin",0],
    PARAMETER["central_meridian",9],
    PARAMETER["scale_factor",0.9996],
    PARAMETER["false_easting",999999999],
    PARAMETER["false_northing",0],
    UNIT["metre",1,
        AUTHORITY["EPSG","9001"]]]
""")
    matches = sr.FindMatches()
    assert len(matches) == 1 and matches[0][1] == 25
    assert matches[0][0].IsSame(sr) != 1

    # WKT has EPSG code but the definition doesn't match with the official
    # one (namely linear units are different)
    # https://github.com/OSGeo/gdal/issues/990
    sr = osr.SpatialReference()
    sr.SetFromUserInput("""PROJCS["NAD83 / Ohio North",
    GEOGCS["NAD83",
        DATUM["North_American_Datum_1983",
            SPHEROID["GRS 1980",6378137,298.257222101,
                AUTHORITY["EPSG","7019"]],
            TOWGS84[0,0,0,0,0,0,0],
            AUTHORITY["EPSG","6269"]],
        PRIMEM["Greenwich",0,
            AUTHORITY["EPSG","8901"]],
        UNIT["degree",0.0174532925199433,
            AUTHORITY["EPSG","9122"]],
        AUTHORITY["EPSG","4269"]],
    PROJECTION["Lambert_Conformal_Conic_2SP"],
    PARAMETER["standard_parallel_1",41.7],
    PARAMETER["standard_parallel_2",40.43333333333333],
    PARAMETER["latitude_of_origin",39.66666666666666],
    PARAMETER["central_meridian",-82.5],
    PARAMETER["false_easting",1968503.937007874],
    PARAMETER["false_northing",0],
    UNIT["International Foot",0.3048,
        AUTHORITY["EPSG","9002"]],
    AXIS["X",EAST],
    AXIS["Y",NORTH],
    AUTHORITY["EPSG","32122"]]
""")
    matches = sr.FindMatches()
    assert len(matches) == 1 and matches[0][1] == 25
    assert matches[0][0].IsSame(sr) != 1
def resize_and_resample_dataset_uri_hb_old(
        original_dataset_uri, bounding_box, out_pixel_size, output_uri,
        resample_method):
    """Resize and resample the given dataset.

    Args:
        original_dataset_uri (string): a GDAL dataset
        bounding_box (list): [upper_left_x, upper_left_y, lower_right_x, lower_right_y]
        out_pixel_size: the pixel size in projected linear units
        output_uri (string): the location of the new resampled GDAL dataset
        resample_method (string): the resampling technique, one of
            "nearest|bilinear|cubic|cubic_spline|lanczos"

    Returns:
        None
    """
    resample_dict = {
        "nearest": gdal.GRA_NearestNeighbour,
        "nearest_neighbor": gdal.GRA_NearestNeighbour,
        "bilinear": gdal.GRA_Bilinear,
        "cubic": gdal.GRA_Cubic,
        "cubic_spline": gdal.GRA_CubicSpline,
        "lanczos": gdal.GRA_Lanczos,
        "average": gdal.GRA_Average,
    }

    original_dataset = gdal.Open(original_dataset_uri)
    original_band = original_dataset.GetRasterBand(1)
    original_nodata = original_band.GetNoDataValue()

    if original_nodata is None:
        original_nodata = -9999

    original_sr = osr.SpatialReference()
    original_sr.ImportFromWkt(original_dataset.GetProjection())

    output_geo_transform = [
        bounding_box[0], out_pixel_size, 0.0,
        bounding_box[1], 0.0, -out_pixel_size]
    new_x_size = abs(
        int(np.round((bounding_box[2] - bounding_box[0]) / out_pixel_size)))
    new_y_size = abs(
        int(np.round((bounding_box[3] - bounding_box[1]) / out_pixel_size)))

    if new_x_size == 0:
        print("bounding_box is so small that x dimension rounds to 0; "
              "clamping to 1.")
        new_x_size = 1
    if new_y_size == 0:
        print("bounding_box is so small that y dimension rounds to 0; "
              "clamping to 1.")
        new_y_size = 1

    # create the new x and y size
    block_size = original_band.GetBlockSize()
    # If the original band is tiled, then its x blocksize will be different
    # than the number of columns
    if original_band.XSize > 256 and original_band.YSize > 256:
        # it makes sense for many functions to have 256x256 blocks
        block_size[0] = 256
        block_size[1] = 256
        gtiff_creation_options = [
            'TILED=YES', 'BIGTIFF=IF_SAFER',
            'BLOCKXSIZE=%d' % block_size[0],
            'BLOCKYSIZE=%d' % block_size[1]]
        metadata = original_band.GetMetadata('IMAGE_STRUCTURE')
        if 'PIXELTYPE' in metadata:
            gtiff_creation_options.append('PIXELTYPE=' + metadata['PIXELTYPE'])
    else:
        # it is so small or strangely aligned, use the default creation options
        gtiff_creation_options = []

    hb.create_directories([os.path.dirname(output_uri)])
    gdal_driver = gdal.GetDriverByName('GTiff')
    output_dataset = gdal_driver.Create(
        output_uri, new_x_size, new_y_size, 1, original_band.DataType,
        options=gtiff_creation_options)
    output_band = output_dataset.GetRasterBand(1)
    output_band.SetNoDataValue(original_nodata)

    # Set the geotransform
    output_dataset.SetGeoTransform(output_geo_transform)
    output_dataset.SetProjection(original_sr.ExportToWkt())

    # need to make this a closure so we get the current time and we can affect
    # state
    def reproject_callback(df_complete, psz_message, p_progress_arg):
        """The argument names come from the GDAL API for callbacks."""
        try:
            current_time = time.time()
            if ((current_time - reproject_callback.last_time) > 5.0 or
                    (df_complete == 1.0 and reproject_callback.total_time >= 5.0)):
                # print() doesn't do %-interpolation the way logging does, so
                # format the message explicitly.
                print("ReprojectImage %.1f%% complete %s, psz_message %s" %
                      (df_complete * 100, p_progress_arg[0], psz_message))
                reproject_callback.last_time = current_time
                reproject_callback.total_time += current_time
        except AttributeError:
            reproject_callback.last_time = time.time()
            reproject_callback.total_time = 0.0

    # Perform the projection/resampling
    gdal.ReprojectImage(
        original_dataset, output_dataset,
        original_sr.ExportToWkt(), original_sr.ExportToWkt(),
        resample_dict[resample_method], 0, 0, reproject_callback,
        [output_uri])

    # Make sure the dataset is closed and cleaned up
    original_band = None
    gdal.Dataset.__swig_destroy__(original_dataset)
    original_dataset = None

    output_dataset.FlushCache()
    gdal.Dataset.__swig_destroy__(output_dataset)
    output_dataset = None
    hb.calculate_raster_stats_uri(output_uri)
def test_osr_epsg_gcs_deprecated():
    sr = osr.SpatialReference()
    with gdaltest.config_option('OSR_USE_NON_DEPRECATED', 'NO'):
        sr.ImportFromEPSG(4268)
    assert sr.ExportToWkt().find('NAD27 Michigan (deprecated)') >= 0
def get_projection(srs_wkt) -> str:
    srs = osr.SpatialReference()
    srs.ImportFromWkt(srs_wkt)
    return srs.GetAttrValue('projcs')
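# Sketch of what get_projection() returns: the PROJCS node name for a
# projected CRS, and None for a purely geographic one (GetAttrValue is
# case-insensitive about 'projcs'). The EPSG codes are illustrative.
utm = osr.SpatialReference()
utm.ImportFromEPSG(32631)
print(get_projection(utm.ExportToWkt()))   # WGS 84 / UTM zone 31N
geog = osr.SpatialReference()
geog.ImportFromEPSG(4326)
print(get_projection(geog.ExportToWkt()))  # None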
def test_osr_epsg_geoccs_deprecated():
    sr = osr.SpatialReference()
    with gdaltest.config_option('OSR_USE_NON_DEPRECATED', 'NO'):
        sr.ImportFromEPSG(4346)
    assert sr.ExportToWkt().find('ETRS89 (geocentric) (deprecated)') >= 0
def __init__(self, input_file, band=1):
    gdal.AllRegister()
    self.data_set = gdal.Open(input_file)
    if self.data_set is None:
        raise Exception("Could not open file: %s" % input_file)

    # Get the transformation from projection to pixel coordinates
    self.geotransform = self.data_set.GetGeoTransform()
    self.originX = self.geotransform[0]
    self.originY = self.geotransform[3]
    self.pixel_width = self.geotransform[1]
    self.pixel_height = self.geotransform[5]
    self.bands = self.data_set.RasterCount
    self.xsize = self.data_set.RasterXSize
    self.ysize = self.data_set.RasterYSize
    self.band_type = self.data_set.GetRasterBand(1).DataType

    # Get corner locations in native coordinates
    self.ulx = self.geotransform[0]
    self.uly = self.geotransform[3]
    self.lrx = self.ulx + self.geotransform[1] * self.xsize
    self.lry = self.uly + self.geotransform[5] * self.ysize
    self.llx = self.ulx
    self.lly = self.lry
    self.urx = self.lrx
    self.ury = self.uly
    self.centerx = 0.5 * (self.ulx + self.lrx)
    self.centery = 0.5 * (self.uly + self.lry)

    # Get the projection and the Proj projection
    self.wkt_proj = self.data_set.GetProjection()
    self.spatial_reference = osr.SpatialReference(wkt=self.wkt_proj)
    self.proj4_proj = self.spatial_reference.ExportToProj4()
    self.dataset_proj = Proj(self.proj4_proj)  # Projection of this data set

    # This takes you to lon/lat
    destination_projection = '+units=m +ellps=WGS84 +datum=WGS84 +proj=longlat '
    self.destination_projection = Proj(destination_projection)

    # Get the lat/lon corners
    self.ullon, self.ullat = transform(self.dataset_proj,
                                       self.destination_projection,
                                       self.ulx, self.uly)
    self.lrlon, self.lrlat = transform(self.dataset_proj,
                                       self.destination_projection,
                                       self.lrx, self.lry)
    self.lllon, self.lllat = transform(self.dataset_proj,
                                       self.destination_projection,
                                       self.llx, self.lly)
    self.urlon, self.urlat = transform(self.dataset_proj,
                                       self.destination_projection,
                                       self.urx, self.ury)
    self.centerlon, self.centerlat = transform(self.dataset_proj,
                                               self.destination_projection,
                                               self.centerx, self.centery)

    # Get the lat/lon bounding box
    self.lonmin = min(self.ullon, self.lrlon, self.lllon, self.urlon)
    self.lonmax = max(self.ullon, self.lrlon, self.lllon, self.urlon)
    self.latmin = min(self.ullat, self.lrlat, self.lllat, self.urlat)
    self.latmax = max(self.ullat, self.lrlat, self.lllat, self.urlat)

    # Get the shapely polygons
    self.lonlat_bbox = Polygon([
        (self.lonmin, self.latmin),
        (self.lonmax, self.latmin),
        (self.lonmax, self.latmax),
        (self.lonmin, self.latmax),
    ])
    self.lonlat_poly = Polygon([
        (self.ullon, self.ullat),
        (self.lllon, self.lllat),
        (self.lrlon, self.lrlat),
        (self.urlon, self.urlat),
    ])
    self.xy_poly = Polygon([
        (self.ulx, self.uly),
        (self.llx, self.lly),
        (self.lrx, self.lry),
        (self.urx, self.ury),
    ])

    # Get the band nodata value
    self.band = self.data_set.GetRasterBand(band)
    self.nodata_value = self.band.GetNoDataValue()

    # Close the data set and band
    self.band = None
    self.data_set = None
def test_osr_epsg_1():
    # EPSG:26591 (Monte Mario (Rome) / Italy zone 1) is deprecated; by
    # default OSR substitutes its non-deprecated replacement, EPSG:3003.
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(26591)
    assert srs.GetAuthorityCode(None) == '3003'
def h5togeotiff(hdf_files, geotiff_target, dataset_name="dataset1/data1",
                data_type="float", expiration_time=None):
    """
    Converts BALTRAD HDF5 file(s) to a Mapserver-compliant GeoTIFF file.
    Reprojection information is read from the HDF5 "where/projdef"
    attribute and written to the output.

    Parameters:

    * hdf_files: source HDF5 file path, or a list/tuple of paths whose
      (float) datasets are summed
    * geotiff_target: target GeoTIFF file path
    * dataset_name: change this if other information is wanted
    * data_type: data type for the target file: "float" or "int"
    * expiration_time: if defined (datetime.datetime), skip conversion of
      datasets older than this
    """
    if not isinstance(hdf_files, (list, tuple)):
        hdf_files = [hdf_files]
    first_iteration = True
    for hdf5_source in hdf_files:
        # read h5 file
        f = h5py.File(hdf5_source, 'r')  # read only
        where = f["where"]  # coordinate variables
        what = f["what"]  # data
        # read time from h5 file
        date_string = what.attrs["date"][0:8]
        time_string = what.attrs["time"][0:4]  # ignore seconds
        starttime = datetime.strptime(date_string + "T" + time_string,
                                      "%Y%m%dT%H%M")
        if expiration_time:
            if starttime < expiration_time:
                raise H5ConversionSkip(
                    "Conversion of expired dataset (%s) skipped"
                    % str(starttime))
        dataset = f[dataset_name.split("/")[0]]
        data_1 = dataset[dataset_name.split("/")[1]]
        data = data_1["data"]
        data_what = data_1["what"]
        # read coordinates
        lon_min = where.attrs["LL_lon"]
        lon_max = where.attrs["UR_lon"]
        lat_min = where.attrs["LL_lat"]
        lat_max = where.attrs["UR_lat"]
        # non-rectangle datasets not supported (are they ever produced?)
        # if (where.attrs["LL_lon"] != where.attrs["UL_lon"] or
        #         where.attrs["LL_lat"] != where.attrs["LR_lat"] or
        #         where.attrs["LR_lon"] != where.attrs["UR_lon"] or
        #         where.attrs["UL_lat"] != where.attrs["UR_lat"]):
        #     raise Exception("non-rectangle datasets not supported")
        proj_text = str(where.attrs["projdef"])
        h5_proj = Proj(proj_text)
        lonlat_proj = Proj(init="epsg:4326")
        # transform bounding box from lonlat -> laea
        xmin, ymin = transform(lonlat_proj, h5_proj, lon_min, lat_min)
        xmax, ymax = transform(lonlat_proj, h5_proj, lon_max, lat_max)
        # shape
        x_size = data.shape[1]
        y_size = data.shape[0]
        # generate axes
        x_axis = numpy.arange(xmin, xmax, (xmax - xmin) / x_size)
        # y_help_axis = numpy.arange(ymin, ymax, (ymax - ymin) / x_size)
        y_axis = numpy.arange(ymax, ymin, (ymin - ymax) / y_size)  # reversed
        # x_help_axis = numpy.arange(xmax, xmin, (xmin - xmax) / y_size)  # reverse this also
        missing_value = data_what.attrs["nodata"]
        missing_echo = data_what.attrs["undetect"]
        if data_type == "int":
            geotiff_data = numpy.uint8(data)
            geotiff_data[numpy.where(geotiff_data == missing_echo)] = 1
            geotiff_data[numpy.where(geotiff_data == missing_value)] = 0
        else:
            offset = float(data_what.attrs["offset"])
            if first_iteration:
                geotiff_data = (numpy.float32(data[:]) +
                                numpy.float32(data_what.attrs["offset"]))
                first_iteration = False
            else:
                geotiff_data = (geotiff_data + numpy.float32(data[:]) +
                                numpy.float32(data_what.attrs["offset"]))

    # begin tiff file generation
    driver = gdal.GetDriverByName('GTiff')
    if data_type == "int":
        gdt_data_type = GDT_Byte
    else:
        gdt_data_type = GDT_Float32  # geotiff mask array?
    out = driver.Create(geotiff_target, geotiff_data.shape[1],
                        geotiff_data.shape[0], 1, gdt_data_type)
    # out.SetMetadataItem("TIFFTAG_GDAL_NODATA", str(missing_value))
    out.SetMetadataItem("TIFFTAG_DATETIME",
                        starttime.strftime("%Y-%m-%dT%H:%MZ"))
    # timestamp = datetime.utcnow().strftime("%Y-%m-%dT%H:%MZ")
    # geotiff_data[numpy.where(geotiff_data == (missing_value + offset))] = 255
    out.SetGeoTransform([xmin,  # grid must be regular!
                         (xmax - xmin) / geotiff_data.shape[1],  # grid size, get lon index!
                         0, ymax, 0,
                         (ymin - ymax) / geotiff_data.shape[0]])
    srs = osr.SpatialReference()
    srs.ImportFromProj4(proj_text)
    out.SetProjection(srs.ExportToWkt())
    # export to geotiff
    # gdal_array.BandWriteArray(out.GetRasterBand(1), geotiff_data)
    out.GetRasterBand(1).WriteArray(geotiff_data)
    # delete geotiff object to flush it to disk and free memory
    del out
    f.close()  # note: only the last source file is explicitly closed here
    return {"timestamp": starttime.strftime("%Y-%m-%dT%H:%MZ"),
            "projection": proj_text,
            "bbox_lonlat": "%f,%f,%f,%f" % (lon_min, lat_min, lon_max, lat_max),
            "bbox_original": "%f,%f,%f,%f" % (xmin, ymin, xmax, ymax)}
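# A hedged call sketch for h5togeotiff(); the file names are hypothetical.
# Passing a list sums the (float) datasets into a single composite GeoTIFF.
meta = h5togeotiff(['comp_1200.h5', 'comp_1215.h5'],
                   'radar_composite.tif', data_type='float')
print(meta['timestamp'], meta['projection'], meta['bbox_lonlat'])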
input_folder = args.i
fields_vector_file = args.f
output_folder = args.o
gdalwarp_base_cmd = (args.gdal +
                     '/gdalwarp -srcnodata 0 -dstnodata 0 -of GTiff -crop_to_cutline ')
print('---------------Stage I ----------------')
print('Warping NDVI files..')
ndvi_files = NDVIFilesSeries(input_folder).get_files()
fields_data = vop.vector_file.get_all_geometry(fields_vector_file)
if not os.path.exists(output_folder):
    os.mkdir(output_folder)
srs_4326 = osr.SpatialReference()
srs_4326.ImportFromEPSG(4326)
for fd in fields_data:
    field_folder = os.path.join(output_folder, str(fd[0]))
    if not os.path.exists(field_folder):
        os.mkdir(field_folder)
    shp_file = os.path.join(field_folder, 'field_border.shp')
    # NDVIFilesSeries.create_shp(fd[1], shp_file)
    vop.vector_file.create_vector_file(fd[1], shp_file, srs_4326)
    for nf in ndvi_files:
        year = os.path.basename(nf)[0:4]
        cropped_tif = os.path.join(os.path.join(field_folder, year),
                                   os.path.basename(nf))
        if not os.path.exists(os.path.join(field_folder, year)):
            os.mkdir(os.path.join(field_folder, year))
def main(args=None):
    global Verbose
    global CreateOptions
    global Names
    global TileWidth
    global TileHeight
    global Format
    global BandType
    global Driver
    global Extension
    global MemDriver
    global TileIndexFieldName
    global TileIndexName
    global CsvDelimiter
    global CsvFileName
    global TileIndexDriverTyp
    global Source_SRS
    global TargetDir
    global ResamplingMethod
    global Levels
    global PyramidOnly
    global UseDirForEachRow

    gdal.AllRegister()

    if args is None:
        args = sys.argv
    argv = gdal.GeneralCmdLineProcessor(args)
    if argv is None:
        return 1

    # Parse command line arguments.
    i = 1
    while i < len(argv):
        arg = argv[i]
        if arg == '-of':
            i += 1
            Format = argv[i]
        elif arg == '-ot':
            i += 1
            BandType = gdal.GetDataTypeByName(argv[i])
            if BandType == gdal.GDT_Unknown:
                print('Unknown GDAL data type: %s' % argv[i])
                return 1
        elif arg == '-co':
            i += 1
            CreateOptions.append(argv[i])
        elif arg == '-v':
            Verbose = True
        elif arg == '-targetDir':
            i += 1
            TargetDir = argv[i]
            if not os.path.exists(TargetDir):
                print("TargetDir " + TargetDir + " does not exist")
                return 1
            if TargetDir[len(TargetDir) - 1:] != os.sep:
                TargetDir = TargetDir + os.sep
        elif arg == '-ps':
            i += 1
            TileWidth = int(argv[i])
            i += 1
            TileHeight = int(argv[i])
        elif arg == '-r':
            i += 1
            ResamplingMethodString = argv[i]
            if ResamplingMethodString == "near":
                ResamplingMethod = GRA_NearestNeighbour
            elif ResamplingMethodString == "bilinear":
                ResamplingMethod = GRA_Bilinear
            elif ResamplingMethodString == "cubic":
                ResamplingMethod = GRA_Cubic
            elif ResamplingMethodString == "cubicspline":
                ResamplingMethod = GRA_CubicSpline
            elif ResamplingMethodString == "lanczos":
                ResamplingMethod = GRA_Lanczos
            else:
                print("Unknown resampling method: %s" % ResamplingMethodString)
                return 1
        elif arg == '-levels':
            i += 1
            Levels = int(argv[i])
            if Levels < 1:
                print("Invalid number of levels : %d" % Levels)
                return 1
        elif arg == '-s_srs':
            i += 1
            Source_SRS = osr.SpatialReference()
            if Source_SRS.SetFromUserInput(argv[i]) != 0:
                print('invalid -s_srs: ' + argv[i])
                return 1
        elif arg == "-pyramidOnly":
            PyramidOnly = True
        elif arg == '-tileIndex':
            i += 1
            TileIndexName = argv[i]
            parts = os.path.splitext(TileIndexName)
            if len(parts[1]) == 0:
                TileIndexName += ".shp"
        elif arg == '-tileIndexField':
            i += 1
            TileIndexFieldName = argv[i]
        elif arg == '-csv':
            i += 1
            CsvFileName = argv[i]
            parts = os.path.splitext(CsvFileName)
            if len(parts[1]) == 0:
                CsvFileName += ".csv"
        elif arg == '-csvDelim':
            i += 1
            CsvDelimiter = argv[i]
        elif arg == '-useDirForEachRow':
            UseDirForEachRow = True
        elif arg[:1] == '-':
            print('Unrecognised command option: %s' % arg)
            Usage()
            return 1
        else:
            Names.append(arg)
        i += 1

    if len(Names) == 0:
        print('No input files selected.')
        Usage()
        return 1

    if TileWidth == 0 or TileHeight == 0:
        print("Invalid tile dimension %d,%d" % (TileWidth, TileHeight))
        return 1

    if TargetDir is None:
        print("Missing Directory for Tiles -targetDir")
        Usage()
        return 1

    # create level 0 directory if needed
    if UseDirForEachRow and not PyramidOnly:
        leveldir = TargetDir + str(0) + os.sep
        if not os.path.exists(leveldir):
            os.mkdir(leveldir)

    if Levels > 0:  # prepare Dirs for pyramid
        startIndx = 1
        for levelIndx in range(startIndx, Levels + 1):
            leveldir = TargetDir + str(levelIndx) + os.sep
            if os.path.exists(leveldir):
                continue
            os.mkdir(leveldir)
            if not os.path.exists(leveldir):
                print("Cannot create level dir: %s" % leveldir)
                return 1
            if Verbose:
                print("Created level dir: %s" % leveldir)

    Driver = gdal.GetDriverByName(Format)
    if Driver is None:
        print('Format driver %s not found, pick a supported driver.' % Format)
        UsageFormat()
        return 1

    DriverMD = Driver.GetMetadata()
    Extension = DriverMD.get(DMD_EXTENSION)
    if 'DCAP_CREATE' not in DriverMD:
        MemDriver = gdal.GetDriverByName("MEM")

    tileIndexDS = getTileIndexFromFiles(Names, TileIndexDriverTyp)
    if tileIndexDS is None:
        print("Error building tile index")
        return 1
    minfo = mosaic_info(Names[0], tileIndexDS)
    ti = tile_info(minfo.xsize, minfo.ysize, TileWidth, TileHeight)

    if Source_SRS is None and len(minfo.projection) > 0:
        Source_SRS = osr.SpatialReference()
        if Source_SRS.SetFromUserInput(minfo.projection) != 0:
            print('invalid projection ' + minfo.projection)
            return 1

    if Verbose:
        minfo.report()
        ti.report()

    if not PyramidOnly:
        dsCreatedTileIndex = tileImage(minfo, ti)
        tileIndexDS.Destroy()
    else:
        dsCreatedTileIndex = tileIndexDS

    if Levels > 0:
        buildPyramid(minfo, dsCreatedTileIndex, TileWidth, TileHeight)

    if Verbose:
        print("FINISHED")
    return 0
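# This main() follows the gdal_retile.py pattern; a hedged invocation sketch
# with hypothetical paths. argv[0] is skipped by the parser, -targetDir must
# already exist, and the module-level globals (TileWidth, Names, ...) are
# assumed to be initialized elsewhere in the script.
rc = main(['retile.py', '-v', '-ps', '512', '512', '-levels', '2',
           '-r', 'near', '-targetDir', 'tiles/',
           'mosaic_a.tif', 'mosaic_b.tif'])
print('exit code:', rc)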
def transformDialog(self):
    geosrs = osr.SpatialReference()
    geosrs.ImportFromEPSG(3857)
    self.transformProj(geosrs)
except:
    pass

img = Image.open(filename)
# Create the new file.
if args.filetype == 'pdf':
    img.save(output_file, 'PDF', resolution=100.0)
elif args.filetype == 'tif':
    img.save(output_file)
test_img = io.imread(filename)
img = test_img[0]
ds = gdal.Open(output_file, gdal.GA_Update)
sr = osr.SpatialReference()
sr.SetWellKnownGeogCS('WGS84')
# Randomly sample points in the image.
gcp_list = [(np.random.randint(img.shape[0]), np.random.randint(img.shape[1]))
            for _ in range(5)]
# Create the ground control points with the latitude/longitude coordinates.
gcps = []
for gcp in gcp_list:
    lat, lon = pixel_to_lat_lon(gcp[0], gcp[1], filename)
    print(f'Lat, lon: {lat}, {lon}')
    gcps.append(gdal.GCP(lon, lat, 0, gcp[1], gcp[0]))
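# The snippet above collects GCPs but stops before attaching them; a hedged
# continuation sketch using GDAL's standard GCP API, reusing the ds, gcps,
# and sr objects from above (the dataset was opened with GA_Update):
ds.SetGCPs(gcps, sr.ExportToWkt())  # store the points plus their CRS
ds.FlushCache()
ds = None  # close so the GCPs are persisted to the file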
def gen_zonal_stats(vectors, raster, band=1, stats=None, categorical=False,
                    **kwargs):
    """Yield a dict of zonal statistics for each feature in a vector dataset."""
    logger.debug('Computing Zonal Statistics')
    if stats is None:
        stats = []  # avoid iterating over None when categorical is False

    # Should the raster be kept open or reopened for each feature?
    raster_ds = gdal.Open(raster, 0)
    raster_band = raster_ds.GetRasterBand(band)
    raster_nodata = raster_band.GetNoDataValue()
    raster_proj = raster_ds.GetProjection()
    raster_osr = osr.SpatialReference()
    raster_osr.ImportFromWkt(raster_proj)
    raster_geo = raster_ds.GetGeoTransform()
    raster_rows = raster_ds.RasterYSize
    raster_cols = raster_ds.RasterXSize
    cs = abs(raster_geo[1])
    raster_x = raster_geo[0]
    raster_y = raster_geo[3]
    raster_extent = [raster_x, raster_y - raster_rows * cs,
                     raster_x + raster_cols * cs, raster_y]

    # For now, hardcode to only process shapefile vector files
    vector_driver = ogr.GetDriverByName('ESRI Shapefile')
    vector_ds = vector_driver.Open(vectors, 0)
    vector_lyr = vector_ds.GetLayer()
    vector_osr = vector_lyr.GetSpatialRef()
    # Project vector geometry to the raster spatial reference
    vector_tx = osr.CoordinateTransformation(vector_osr, raster_osr)

    logger.debug('Raster: {}'.format(raster))
    logger.debug('  WKT: {}'.format(raster_osr.ExportToWkt()))
    logger.debug('  Rows: {}'.format(raster_rows))
    logger.debug('  Cols: {}'.format(raster_cols))
    logger.debug('  Extent: {}'.format(raster_extent))
    logger.debug('  Geo: {}'.format(raster_geo))
    logger.debug('  Cellsize: {}'.format(cs))
    logger.debug('  Snap X: {}'.format(raster_x))
    logger.debug('  Snap Y: {}'.format(raster_y))
    logger.debug('Vectors: {}'.format(vectors))
    logger.debug('  WKT: {}'.format(vector_osr.ExportToWkt()))

    # Iterate through the features
    for vector_ftr in vector_lyr:
        fid = vector_ftr.GetFID()
        # Project the geometry
        vector_geom = vector_ftr.GetGeometryRef()
        v_geom = vector_geom.Clone()
        v_geom.Transform(vector_tx)
        # Get the projected geometry extent
        extent = list(v_geom.GetEnvelope())
        # Convert to an OGR style extent (xmin, ymin, xmax, ymax)
        extent = [extent[0], extent[2], extent[1], extent[3]]
        # Expand the vector extent to the raster transform
        extent[0] = math.floor((extent[0] - raster_x) / cs) * cs + raster_x
        extent[1] = math.floor((extent[1] - raster_y) / cs) * cs + raster_y
        extent[2] = math.ceil((extent[2] - raster_x) / cs) * cs + raster_x
        extent[3] = math.ceil((extent[3] - raster_y) / cs) * cs + raster_y
        # TODO: Check if zone extent intersects the raster extent
        # Clip the zone extent to the raster extent
        extent[0] = max(extent[0], raster_extent[0])
        extent[1] = max(extent[1], raster_extent[1])
        extent[2] = min(extent[2], raster_extent[2])
        extent[3] = min(extent[3], raster_extent[3])
        # Compute raster properties
        cols = int((abs(extent[2] - extent[0]) / cs) + 0.5)
        rows = int((abs(extent[3] - extent[1]) / cs) + 0.5)
        geo = [extent[0], cs, 0, extent[3], 0, -cs]
        i = int(round((geo[0] - raster_geo[0]) / cs, 0))
        j = int(round((geo[3] - raster_geo[3]) / -cs, 0))
        # logger.debug('FID: {}'.format(fid))
        # logger.debug('  Rows: {}'.format(rows))
        # logger.debug('  Cols: {}'.format(cols))
        # logger.debug('  Extent: {}'.format(extent))
        # logger.debug('  Geo: {}'.format(geo))
        # logger.debug('  i: {}'.format(i))
        # logger.debug('  j: {}'.format(j))

        # Create an in-memory dataset/layer for each feature
        v_driver = ogr.GetDriverByName('Memory')
        v_ds = v_driver.CreateDataSource('out')
        v_lyr = v_ds.CreateLayer('poly', geom_type=ogr.wkbPolygon,
                                 srs=raster_osr)
        v_feat = ogr.Feature(v_lyr.GetLayerDefn())
        v_feat.SetGeometryDirectly(v_geom)
        v_lyr.CreateFeature(v_feat)

        # Create an in-memory raster to set from the vector data
        mask_driver = gdal.GetDriverByName('MEM')
        mask_ds = mask_driver.Create('', cols, rows, 1, gdal.GDT_Byte)
        mask_ds.SetProjection(raster_proj)
        mask_ds.SetGeoTransform(geo)
        mask_band = mask_ds.GetRasterBand(1)
        mask_band.Fill(0)
        mask_band.SetNoDataValue(0)
        gdal.RasterizeLayer(mask_ds, [1], v_lyr, burn_values=[1])

        # Read the vector mask array
        # (np.bool was removed from NumPy; the builtin bool dtype is equivalent)
        mask = mask_band.ReadAsArray(0, 0, cols, rows).astype(bool)
        # Read the data array
        array = raster_band.ReadAsArray(i, j, cols, rows)
        # Mask nodata pixels
        if (raster_nodata is not None and
                array.dtype in [np.float32, np.float64]):
            array[array == raster_nodata] = np.nan
        # Apply the zone mask
        # This might contribute to memory issues
        array = array[mask]

        if categorical and array.dtype not in [np.float32, np.float64]:
            # Compute categorical stats
            ftr_stats = dict(zip(*np.unique(array, return_counts=True)))
        else:
            ftr_stats = {stat: None for stat in stats}
            # Remove all nan values before computing statistics
            if np.any(np.isnan(array)):
                array = array[np.isfinite(array)]
            for stat in stats:
                if array.size == 0:
                    # "not np.any(array)" would also skip all-zero arrays;
                    # only skip when no pixels remain in the zone
                    continue
                elif stat == 'mean':
                    ftr_stats[stat] = float(np.mean(array))
                elif stat == 'max':
                    ftr_stats[stat] = float(np.max(array))
                elif stat == 'min':
                    ftr_stats[stat] = float(np.min(array))
                elif stat == 'median':
                    ftr_stats[stat] = float(np.median(array))
                elif stat == 'sum':
                    ftr_stats[stat] = float(np.sum(array))
                elif stat == 'std':
                    ftr_stats[stat] = float(np.std(array))
                elif stat == 'var':
                    ftr_stats[stat] = float(np.var(array))
                elif stat == 'count':
                    ftr_stats[stat] = float(np.sum(np.isfinite(array)))
                else:
                    raise ValueError('Stat {} not supported'.format(stat))

        # Cleanup
        del array, mask
        v_ds = None
        mask_band = None
        mask_ds = None
        del v_ds, v_lyr, v_feat, v_geom, v_driver
        del mask_ds, mask_band, mask_driver

        yield ftr_stats

    # Cleanup
    vector_lyr = None
    vector_ds = None
    raster_band = None
    raster_ds = None
    del vector_ds, vector_lyr
    del raster_ds, raster_band
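# A hedged usage sketch for gen_zonal_stats(); 'fields.shp' and 'ndvi.tif'
# are hypothetical inputs. stats must be supplied explicitly when
# categorical is False, otherwise no statistics are computed.
for idx, ftr_stats in enumerate(gen_zonal_stats(
        'fields.shp', 'ndvi.tif', band=1,
        stats=['mean', 'min', 'max', 'count'])):
    print('feature {}: {}'.format(idx, ftr_stats))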