def ogr2ogr(dest, src, *args, **kwargs):
    srcDS = gdal.OpenEx(src)
    opts = gdal.VectorTranslateOptions(skipFailures=True, *args, **kwargs)
    ds = gdal.VectorTranslate(dest, srcDS=srcDS, options=opts)
    # The dataset isn't written until it is closed and dereferenced
    # https://gis.stackexchange.com/questions/255586/gdal-vectortranslate-returns-empty-object
    del ds
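A quick usage sketch for the ogr2ogr() wrapper above; the filenames and the extra keyword arguments (which are simply forwarded to gdal.VectorTranslateOptions) are placeholders, not part of the original snippet.

from osgeo import gdal

# Hypothetical example: convert a shapefile to a GeoPackage, reprojecting to
# WGS84. Both paths are placeholders.
ogr2ogr("output.gpkg", "input.shp", format="GPKG", dstSRS="EPSG:4326")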
def shapefile2geojson(infile, outfile, fieldname):
    '''Translate a shapefile to GeoJSON.

    Similar to: ogr2ogr -t_srs EPSG:4326 -f GeoJSON file.json file.shp
    '''
    options = gdal.VectorTranslateOptions(format="GeoJSON", dstSRS="EPSG:4326")
    gdal.VectorTranslate(outfile, infile, options=options)
    print("Translated GeoJSON file", outfile)
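A minimal, hypothetical call to shapefile2geojson() above; the filenames and field name are placeholders (note that the fieldname parameter is not actually used inside the function).

# Placeholder inputs; shapefile2geojson() ignores the third argument.
shapefile2geojson("counties.shp", "counties.json", "NAME")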
def shp_to_csv():
    root = Path(__file__).parent
    shapefiles = [
        root / 'tmp' / each
        for each in os.listdir(root / 'tmp')
        if each.endswith('.shp')
    ]
    srcDS = gdal.OpenEx(str(shapefiles[0]))
    gdal.VectorTranslate(str(root / 'tmp' / 'nysdec_lands.csv'),
                         srcDS,
                         format='CSV',
                         dstSRS='EPSG:4326',
                         options=['-progress'],
                         layerCreationOptions=['GEOMETRY=AS_WKT'])
def as_raster_layer(self, srs, min_pixel_size, block_extent,
                    requested_pixel_size=None, data_type=None, bounds=None):
    tmp_dir = self._make_name()
    cleanup.register_temp_dir(tmp_dir)
    if not os.path.exists(tmp_dir):
        os.makedirs(tmp_dir)

    reproj_path = os.path.join(tmp_dir, self._make_name(".shp"))
    gdal.VectorTranslate(reproj_path, self._path, dstSRS=srs, reproject=True)
    if not self._raw:
        self._build_attribute_table(reproj_path, self._nodata_value)

    tmp_raster_path = os.path.join(tmp_dir, self._make_name(".tmp.tiff"))
    gdal.Rasterize(tmp_raster_path, reproj_path,
                   xRes=min_pixel_size, yRes=min_pixel_size,
                   attribute=self._id_attribute, noData=self._nodata_value,
                   creationOptions=["COMPRESS=DEFLATE"], outputBounds=bounds)

    raster_path = os.path.join(tmp_dir, self._make_name(".tiff"))
    info = gdal.Info(tmp_raster_path, format="json")
    is_float = "Float" in info["bands"][0]["type"] if self._raw else False
    output_type = (data_type if data_type is not None
                   else self._data_type if self._data_type is not None
                   else gdal.GDT_Float32 if is_float
                   else self.best_fit_data_type(self._get_min_max(tmp_raster_path)))

    gdal.Translate(raster_path, tmp_raster_path, outputType=output_type,
                   creationOptions=["COMPRESS=DEFLATE"])

    return RasterLayer(raster_path, self.attributes, self._attribute_table)
def merge_geometries_by_field(file, out, field):
    file_name = os.path.basename(file)
    if field is None:
        query = "SELECT ST_Union(geometry) AS geometry FROM '" + file_name[:-4] + "'"
    else:
        query = ("SELECT ST_Union(geometry) AS geometry, " + field +
                 " FROM '" + file_name[:-4] + "' GROUP BY " + field)
    print(file)
    ds = gdal.VectorTranslate(out, file, SQLDialect="sqlite", SQLStatement=query,
                              format="ESRI Shapefile",
                              geometryType="PROMOTE_TO_MULTI")
    ds = None
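A hedged usage sketch for merge_geometries_by_field() above, assuming a hypothetical parcels.shp with a REGION attribute; ST_Union() relies on GDAL's SQLite dialect being available with Spatialite support.

# Dissolve all features sharing the same REGION value into multipolygons
# (placeholder paths and field name).
merge_geometries_by_field("parcels.shp", "regions_dissolved.shp", "REGION")

# Passing field=None unions every geometry in the layer into a single feature.
merge_geometries_by_field("parcels.shp", "merged.shp", None)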
def as_raster_layer(self, srs, min_pixel_size, block_extent,
                    requested_pixel_size=None, data_type=None, bounds=None):
    tmp_dir = self._make_name()
    cleanup.register_temp_dir(tmp_dir)
    if not os.path.exists(tmp_dir):
        os.makedirs(tmp_dir)

    reproj_path = os.path.join(tmp_dir, self._make_name(".shp"))
    gdal.VectorTranslate(reproj_path, self._path, dstSRS=srs, reproject=True,
                         layers=[self._layer])
    if not self._raw:
        self._build_attribute_table(reproj_path, self._nodata_value)

    tmp_raster_path = os.path.join(tmp_dir, self._make_name(".tmp.tiff"))
    gdal.Rasterize(tmp_raster_path, reproj_path,
                   xRes=min_pixel_size, yRes=min_pixel_size,
                   attribute=self._id_attribute, noData=self._nodata_value,
                   creationOptions=["COMPRESS=DEFLATE"], outputBounds=bounds)

    raster_path = os.path.join(tmp_dir, self._make_name(".tiff"))
    output_type = data_type or self.best_fit_data_type(
        self._get_min_max(tmp_raster_path))

    gdal.Translate(raster_path, tmp_raster_path, outputType=output_type,
                   creationOptions=["COMPRESS=DEFLATE"])

    return RasterLayer(raster_path, self._attributes, self._attribute_table)
def processAlgorithm(self, parameters, context, feedback):
    """
    Import layer to the database
    """
    ogrLayer, layerName = self.getOgrCompatibleSource(self.INPUT, parameters,
                                                      context, feedback)
    dbname = self.parameterAsString(parameters, self.DBNAME, context)
    name = self.parameterAsString(parameters, self.NAME, context)
    forceSinglePartGeometry = self.parameterAsBool(parameters,
                                                   self.SINGLEPARTGEOMETRY,
                                                   context)

    srcDs = gdal.OpenEx(ogrLayer)
    if not srcDs:
        raise QgsProcessingException("Failed to open '%s'" % ogrLayer)

    options = []
    if forceSinglePartGeometry:
        options.append('-explodecollections')
    if os.path.exists(dbname):
        options.append('-update')

    if feedback:
        feedback.setProgressText("Importing layer")

        def callback(pct, msg, data, **kwargs):
            if msg:
                feedback.setProgressText(msg)
            feedback.setProgress(100 * pct)
    else:
        callback = None

    ds = gdal.VectorTranslate(dbname, srcDS=srcDs, format='SQLite',
                              datasetCreationOptions=['SPATIALITE=y'],
                              layerName=name, options=options,
                              callback=callback)
    if ds:
        del ds
    else:
        raise QgsProcessingException("Failed to import '%s'" % ogrLayer)

    return {}
from osgeo import gdal, ogr

srcDS = gdal.OpenEx('084090952187.kml')
ds = gdal.VectorTranslate('output.json', srcDS, format='GeoJSON')
def rasterise_vector(raster_fname, vector_fname, where_statement=None,
                     output_fname="", output_format="MEM", verbose=False):
    """Rasterises a vector file to produce a mask where some condition in the
    vector dataset is true. The mask will have the same extent and projection
    as a (provided) 'master' dataset. The selection of the feature for the
    mask is performed by a statement of the form field_name='Value', where
    the single quotes are mandatory (e.g. NAME='Ireland').

    Parameters
    ----------
    raster_fname: str
        A GDAL-compatible raster filename that will be used to extract the
        shape, projection, resolution, ... for the rasterisation.
    vector_fname: str
        The vector filename (e.g. Shapefile)
    where_statement: str
        The where statement (e.g. "NAME='Colombia'").
    output_fname: str, optional
        The output filename. If not provided, an "in-memory array" will be
        selected. If not provided and the output_format is other than `MEM`,
        an error will be raised.
    output_format: str, optional
        An output format. By default, `MEM`
    verbose: Boolean
        Whether to print some extra information

    Returns
    -------
    The mask as a Numpy array, 0 where the mask is off, 1 where it is on.
    """
    if output_fname == "" and output_format != "MEM":
        raise ValueError("You need to provide an output filename" +
                         " for format {:s}".format(output_format))
    g = gdal.Open(raster_fname)
    if g is None:
        raise IOError("Could not open file {:s}".format(raster_fname))
    raster_proj = g.GetProjectionRef()
    geoT = g.GetGeoTransform()
    if verbose:
        print(">>> Opened file {:s}".format(raster_fname))
        print(">>> Projection: {:s}".format(raster_proj))
    xs = []
    ys = []
    for x, y in [[0, 0], [0, g.RasterYSize],
                 [g.RasterXSize, g.RasterYSize], [g.RasterXSize, 0]]:
        xx, yy = gdal.ApplyGeoTransform(geoT, x, y)
        xs.append(xx)
        ys.append(yy)
    extent = [min(xs), min(ys), max(xs), max(ys)]
    xRes = geoT[1]
    yRes = geoT[-1]
    nx = g.RasterXSize
    ny = g.RasterYSize
    if verbose:
        print(">>> File size {:d} rows, {:d} columns".format(nx, ny))
        print(">>> UL corner: {:g}, {:g}".format(min(xs), max(ys)))
    src_ds = gdal.OpenEx(vector_fname)
    if src_ds is None:
        raise IOError("Can't read the vector file {}".format(vector_fname))
    v = gdal.VectorTranslate('', src_ds, format='Memory', dstSRS=raster_proj,
                             where=where_statement)
    gg = gdal.Rasterize(output_fname, v, format=output_format,
                        outputType=gdal.GDT_Byte, xRes=xRes, yRes=yRes,
                        where=where_statement,
                        outputBounds=[min(xs), min(ys), max(xs), max(ys)],
                        width=nx, height=ny, noData=0, burnValues=1)
    if gg is not None:
        if verbose:
            print("Done! {:d} non-zero pixels".format(gg.ReadAsArray().sum()))
    else:
        raise ValueError("Couldn't generate the mask. Check input parameters")
    return gg.ReadAsArray()
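A hypothetical call to rasterise_vector() above: burn the polygon matching NAME='Colombia' onto the grid of a master raster. Both filenames are placeholders.

# Placeholder filenames; the returned array is 1 inside the selected polygon
# and 0 elsewhere, on the master raster's grid.
mask = rasterise_vector("master_raster.tif", "world_borders.shp",
                        where_statement="NAME='Colombia'", verbose=True)
print(mask.shape, mask.sum())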
def build_gpkg(in_gdb, out_gpkg):
    """Build a GeoPackage from gSSURGO source gdb data."""
    src_ds = gdal.OpenEx(in_gdb)
    ds = gdal.VectorTranslate(out_gpkg, srcDS=src_ds, format="GPKG")
    del ds
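An example invocation of build_gpkg() above, assuming a gSSURGO file geodatabase has already been downloaded and extracted; the paths are placeholders.

# Placeholder paths for a state-level gSSURGO download.
build_gpkg("gSSURGO_NY.gdb", "gSSURGO_NY.gpkg")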