def create_mem(sizex, sizey, numbands, datatype=gdal.GDT_Byte, options=None):
    """ Create a new In-Memory Dataset.

    :param sizex: width of the dataset in pixels
    :param sizey: height of the dataset in pixels
    :param numbands: number of raster bands to allocate
    :param datatype: GDAL data type of the bands (default: ``GDT_Byte``)
    :param options: optional list of dataset creation options
    :returns: the newly created in-memory :class:`gdal.Dataset`
    """
    driver = gdal.GetDriverByName('MEM')
    # MEM datasets are anonymous, hence the empty filename.
    return driver.Create('', sizex, sizey, numbands, datatype,
                         options if options is not None else [])
def encode(self, dataset, frmt, encoding_params):
    """ Encode *dataset* as a GeoTIFF file in a temporary location.

    :param dataset: the source :class:`gdal.Dataset` to encode
    :param frmt: the requested output MIME type; GTiff creation options
        are only computed for ``"image/tiff"``
    :param encoding_params: keyword parameters passed on to
        :func:`_get_gtiff_options`
    :returns: tuple of the written dataset (from ``CreateCopy``) and the
        GTiff driver, so the caller can clean up via the driver
    """
    options = ()
    if frmt == "image/tiff":
        options = _get_gtiff_options(**encoding_params)

    # BUGFIX: GDAL creation options must be flat "KEY=VALUE" strings.
    # The previous code built a list of (str, value) tuples because
    # '"%s=%s" % key, value' only formatted the key.
    args = ["%s=%s" % (key, value) for key, value in options]

    path = "/tmp/%s" % uuid4().hex
    out_driver = gdal.GetDriverByName("GTiff")
    return out_driver.CreateCopy(path, dataset, True, args), out_driver
def _generate_browse_complex(parsed_exprs, fields_and_coverages, width, height,
                             bbox, crs, generator):
    """ Render one output band per parsed band expression into a tiled,
    PACKBITS-compressed GTiff browse image.

    :param parsed_exprs: parsed band expressions, one per output band
    :param fields_and_coverages: mapping of field name to the coverages
        providing that field
    :param width: output raster width in pixels
    :param height: output raster height in pixels
    :param bbox: output extent as (minx, miny, maxx, maxy)
    :param crs: coordinate reference system of the output
    :param generator: filename generator providing ``generate('tif')``
    :returns: a :class:`BrowseCreationInfo` for the written file
    """
    o_x = bbox[0]
    o_y = bbox[3]
    res_x = (bbox[2] - bbox[0]) / width
    res_y = -(bbox[3] - bbox[1]) / height

    # collect all field names referenced by any expression
    field_names = set()
    for parsed_expression in parsed_exprs:
        field_names |= set(extract_fields(parsed_expression))

    # warp each referenced field onto the target grid once, so expressions
    # sharing a field reuse the same array
    fields_and_datasets = {}
    for field_name in field_names:
        coverages = fields_and_coverages[field_name]
        field_data = warp_fields(
            coverages, field_name, bbox, crs, width, height)
        fields_and_datasets[field_name] = field_data

    out_filename = generator.generate('tif')
    # FIX: the driver was looked up twice; the first assignment was dead code.
    tiff_driver = gdal.GetDriverByName('GTiff')
    out_ds = tiff_driver.Create(
        out_filename, width, height, len(parsed_exprs), gdal.GDT_Float32,
        options=["TILED=YES", "COMPRESS=PACKBITS"])
    out_ds.SetGeoTransform([o_x, res_x, 0, o_y, 0, res_y])
    out_ds.SetProjection(osr.SpatialReference(crs).wkt)

    for band_index, parsed_expr in enumerate(parsed_exprs, start=1):
        # expressions may legitimately divide by zero on nodata areas;
        # suppress the numpy warnings for the evaluation only
        with np.errstate(divide='ignore', invalid='ignore'):
            out_data = _evaluate_expression(
                parsed_expr, fields_and_datasets, generator)

        # constant expressions evaluate to a scalar; broadcast it to a
        # full-size array before writing
        if isinstance(out_data, (int, float)):
            out_data = np.full((height, width), out_data)

        out_band = out_ds.GetRasterBand(band_index)
        out_band.WriteArray(out_data)

    return BrowseCreationInfo(out_filename, None)
def setUp(self):
    """Skip the test unless the RASDAMAN GDAL driver is available and
    rasdaman tests are enabled in the request configuration."""
    # TODO check if connection to DB server is possible
    # TODO check if datasets are configured within the DB
    gdal.AllRegister()
    driver = gdal.GetDriverByName("RASDAMAN")
    if driver is None:
        self.skipTest("Rasdaman driver is not enabled.")
    enabled = self.isRequestConfigEnabled("rasdaman_enabled")
    if not enabled:
        self.skipTest("Rasdaman tests are not enabled. Use the "
                      "configuration option 'rasdaman_enabled' to allow "
                      "rasdaman tests.")
    super(RasdamanTestCaseMixIn, self).setUp()
def __init__(self, filename, size_x, size_y, geotransform, band_num,
             data_type, projection, driver=None, creation_options=None):
    """Create and georeference a new dataset on disk.

    :param filename: path of the dataset to create
    :param size_x: raster width in pixels
    :param size_y: raster height in pixels
    :param geotransform: six-element GDAL geotransform
    :param band_num: number of bands
    :param data_type: GDAL data type of the bands
    :param projection: projection as WKT
    :param driver: GDAL driver short name (defaults to "GTiff")
    :param creation_options: optional driver creation options
    """
    gdal_driver = gdal.GetDriverByName(driver or "GTiff")
    self.dataset = gdal_driver.Create(
        filename, size_x, size_y, band_num, data_type,
        creation_options or [])
    self.dataset.SetGeoTransform(geotransform)
    self.dataset.SetProjection(projection)
def create_temp(sizex, sizey, numbands, datatype=gdal.GDT_Byte, options=None,
                temp_root=None):
    """ Create a temporary Dataset.

    A GTiff dataset with a random (UUID-based) filename is created under
    *temp_root* (the system temp directory when not given).

    :param sizex: raster width in pixels
    :param sizey: raster height in pixels
    :param numbands: number of bands
    :param datatype: GDAL data type (default: ``GDT_Byte``)
    :param options: optional creation options
    :param temp_root: directory to place the file in
    :returns: the newly created :class:`gdal.Dataset`
    """
    if temp_root is None:
        temp_root = tempfile.gettempdir()
    if options is None:
        options = []
    filename = join(temp_root, '%s.tif' % uuid4().hex)
    logger.debug("Creating temporary dataset '%s' (%dx%dx%d)"
                 % (filename, sizex, sizey, numbands))
    tiff_drv = gdal.GetDriverByName('GTiff')
    return tiff_drv.Create(filename, sizex, sizey, numbands, datatype,
                           options)
def warp_fields(coverages, field_name, bbox, crs, width, height):
    """ Warp the given field of all *coverages* onto a common target grid
    and return the merged pixel values as a single array.

    :param coverages: coverages providing *field_name*; later coverages are
        warped over earlier ones into the same output dataset
    :param field_name: name of the field (band) to extract
    :param bbox: target extent as (minx, miny, maxx, maxy)
    :param crs: target CRS; resolved to an EPSG code via ``crss``
    :param width: target raster width in pixels
    :param height: target raster height in pixels
    :returns: the warped band as a numpy array (``ReadAsArray``)
    """
    # target dataset: single band, in-memory, typed after the field of the
    # first coverage (assumes all coverages share that data type — TODO
    # confirm against callers)
    driver = gdal.GetDriverByName('MEM')
    out_ds = driver.Create(
        '', width, height, 1,
        coverages[0].range_type.get_field(field_name).data_type)
    # north-up geotransform: origin at the top-left corner, negative y-res
    out_ds.SetGeoTransform([
        bbox[0],
        (bbox[2] - bbox[0]) / width,
        0,
        bbox[3],
        0,
        -(bbox[3] - bbox[1]) / height,
    ])
    epsg = crss.parseEPSGCode(crs, [crss.fromShortCode])
    sr = osr.SpatialReference()
    sr.ImportFromEPSG(epsg)
    out_ds.SetProjection(sr.ExportToWkt())

    for coverage in coverages:
        location = coverage.get_location_for_field(field_name)
        band_index = coverage.get_band_index_for_field(field_name)

        # NOTE(review): `gdal.open_with_env` is a project wrapper —
        # presumably opens the path with the given GDAL config environment
        orig_ds = gdal.open_with_env(location.path, location.env)

        # gdal.Warp cannot select a single source band, so for multi-band
        # sources route the wanted band through a temporary in-memory VRT
        vrt_filename = None
        if orig_ds.RasterCount > 1:
            vrt_filename = '/vsimem/' + uuid4().hex
            gdal.BuildVRT(vrt_filename, orig_ds, bandList=[band_index])
            ds = gdal.Open(vrt_filename)
        else:
            ds = orig_ds

        # warp into the shared target; repeated iterations accumulate
        gdal.Warp(out_ds, ds)

        # release the dataset before unlinking its backing VRT file
        ds = None
        if vrt_filename:
            gdal.Unlink(vrt_filename)

    band = out_ds.GetRasterBand(1)
    return band.ReadAsArray()
def with_extent(filename, extent, save=None):
    """ Create a VRT and override the underlying files geolocation.

    :param filename: path of the source dataset
    :param extent: (minx, miny, maxx, maxy) to stamp onto the copy
    :param save: optional path for the VRT file; when omitted an anonymous
        (in-memory) VRT dataset is created
    :returns: the VRT :class:`gdal.Dataset` with the overridden geotransform
    """
    src_ds = gdal.OpenShared(filename)
    width, height = src_ds.RasterXSize, src_ds.RasterYSize
    driver = gdal.GetDriverByName('VRT')
    # BUGFIX: CreateCopy requires a filename string; the previous code
    # passed the default `save=None` straight through, which fails in the
    # GDAL bindings. An empty name yields an anonymous VRT dataset.
    out_ds = driver.CreateCopy(save if save is not None else '', src_ds)
    x = extent[0]
    y = extent[3]
    resx = abs(extent[2] - extent[0]) / width
    resy = abs(extent[3] - extent[1]) / height
    # NOTE(review): the origin is the top edge (extent[3]) but resy is
    # positive — north-up rasters normally carry a negative y resolution;
    # confirm this is intentional before relying on the geotransform.
    out_ds.SetGeoTransform([
        x, resx, 0,
        y, 0, resy,
    ])
    return out_ds
def create_mem_ds(width, height, data_type):
    """ Create a single-band in-memory dataset of the given size and type.

    :param width: raster width in pixels
    :param height: raster height in pixels
    :param data_type: GDAL data type of the single band
    :returns: an anonymous in-memory :class:`gdal.Dataset`
    """
    mem_driver = gdal.GetDriverByName('MEM')
    dataset = mem_driver.Create('', width, height, 1, data_type)
    return dataset
def process(self, input_filename, output_filename, geo_reference=None, generate_metadata=True):
    """ Run the full preprocessing pipeline on *input_filename* and write
    the optimized result to *output_filename*.

    :param input_filename: path of the source dataset
    :param output_filename: template for the output path; passed through
        ``self.generate_filename``
    :param geo_reference: optional geo-reference object with an
        ``apply(ds)`` method; required when the source has no geotransform
    :param generate_metadata: whether to compute the (dateline-normalized)
        footprint geometry for the result
    :returns: a :class:`PreProcessResult` with output path, footprint (or
        ``None``) and band count
    :raises ValueError: if the dataset is unreferenced and no
        *geo_reference* is given
    """
    # open the dataset and create an In-Memory Dataset as copy
    # to perform optimizations
    ds = create_mem_copy(gdal.Open(input_filename))

    gt = ds.GetGeoTransform()
    footprint_wkt = None

    if not geo_reference:
        # the identity geotransform is GDAL's placeholder for "no
        # georeferencing information"
        if gt == (0.0, 1.0, 0.0, 0.0, 0.0, 1.0):
            # TODO: maybe use a better check
            raise ValueError("No geospatial reference for unreferenced "
                             "dataset given.")
    else:
        logger.debug("Applying geo reference '%s'."
                     % type(geo_reference).__name__)
        ds, footprint_wkt = geo_reference.apply(ds)

    # apply optimizations
    for optimization in self.get_optimizations(ds):
        logger.debug("Applying optimization '%s'."
                     % type(optimization).__name__)
        try:
            new_ds = optimization(ds)
            if new_ds is not ds:
                # cleanup afterwards
                cleanup_temp(ds)
                ds = new_ds
        except:
            # deliberately broad: always release the temporary dataset,
            # then re-raise the original error
            cleanup_temp(ds)
            raise

    # generate the footprint from the dataset
    if not footprint_wkt:
        logger.debug("Generating footprint.")
        footprint_wkt = self._generate_footprint_wkt(ds)
    # check that footprint is inside of extent of generated image
    # regenerate otherwise
    else:
        tmp_extent = getExtentFromRectifiedDS(ds)
        tmp_bbox = Polygon.from_bbox((tmp_extent[0], tmp_extent[1],
                                      tmp_extent[2], tmp_extent[3]))
        tmp_footprint = GEOSGeometry(footprint_wkt)
        if not tmp_bbox.contains(tmp_footprint):
            # clip the supplied footprint to the actual image extent
            footprint_wkt = tmp_footprint.intersection(tmp_bbox).wkt

    if self.footprint_alpha:
        logger.debug("Applying optimization 'AlphaBandOptimization'.")
        opt = AlphaBandOptimization()
        opt(ds, footprint_wkt)

    output_filename = self.generate_filename(output_filename)

    logger.debug("Writing file to disc using options: %s."
                 % ", ".join(self.format_selection.creation_options))
    logger.debug("Metadata tags to be written: %s"
                 % ", ".join(ds.GetMetadata_List("") or []))

    # save the file to the disc
    driver = gdal.GetDriverByName(self.format_selection.driver_name)
    ds = driver.CreateCopy(output_filename, ds,
                           options=self.format_selection.creation_options)

    for optimization in self.get_post_optimizations(ds):
        logger.debug("Applying post-optimization '%s'."
                     % type(optimization).__name__)
        optimization(ds)

    # generate metadata if requested
    footprint = None
    if generate_metadata:
        normalized_space = Polygon.from_bbox((-180, -90, 180, 90))
        non_normalized_space = Polygon.from_bbox((180, -90, 360, 90))

        footprint = GEOSGeometry(footprint_wkt)
        #.intersection(normalized_space)
        outer = non_normalized_space.intersection(footprint)

        # any part of the footprint beyond +180° is shifted by -360° and
        # merged back, normalizing geometries that cross the dateline
        if len(outer):
            footprint = MultiPolygon(
                *map(lambda p:
                    Polygon(*map(lambda ls:
                        LinearRing(*map(lambda point:
                            (point[0] - 360, point[1]),
                            ls.coords
                        )),
                        tuple(p)
                    )),
                    (outer,)
                )
            ).union(normalized_space.intersection(footprint))
        else:
            if isinstance(footprint, Polygon):
                footprint = MultiPolygon(footprint)

        logger.info("Calculated Footprint: '%s'" % footprint.wkt)

        # use the provided footprint
        #geom = OGRGeometry(footprint_wkt)
        #exterior = []
        #for x, y in geom.exterior_ring.tuple:
        #    exterior.append(y); exterior.append(x)
        #polygon = [exterior]

    num_bands = ds.RasterCount

    # finally close the dataset and write it to the disc
    ds = None

    return PreProcessResult(output_filename, footprint, num_bands)
def get_vrt_driver():
    """ Convenience function to get the VRT driver.

    :returns: the GDAL VRT :class:`gdal.Driver`
    """
    driver = gdal.GetDriverByName("VRT")
    return driver
class NGEOPreProcessor(WMSPreProcessor):
    """ Preprocessor for ngEO products.

    Extends :class:`WMSPreProcessor` with GCP-based georeferencing,
    footprint regeneration/merging and optional merging of the result with
    a previously generated image.
    """

    def __init__(self, format_selection, overviews=True, crs=None, bands=None,
                 bandmode=RGB, footprint_alpha=False,
                 color_index=False, palette_file=None,
                 no_data_value=None, overview_resampling=None,
                 overview_levels=None, overview_minsize=None,
                 radiometric_interval_min=None, radiometric_interval_max=None,
                 sieve_max_threshold=None, simplification_factor=None,
                 temporary_directory=None):
        self.format_selection = format_selection
        self.overviews = overviews
        self.overview_resampling = overview_resampling
        self.overview_levels = overview_levels
        self.overview_minsize = overview_minsize
        self.crs = crs
        self.bands = bands
        self.bandmode = bandmode
        self.footprint_alpha = footprint_alpha
        self.color_index = color_index
        self.palette_file = palette_file
        self.no_data_value = no_data_value
        self.radiometric_interval_min = radiometric_interval_min
        self.radiometric_interval_max = radiometric_interval_max

        if sieve_max_threshold is not None:
            self.sieve_max_threshold = sieve_max_threshold
        else:
            self.sieve_max_threshold = 0

        if simplification_factor is not None:
            self.simplification_factor = simplification_factor
        else:
            # default 2 * resolution == 2 pixels
            self.simplification_factor = 2

        self.temporary_directory = temporary_directory

    def process(self, input_filename, output_filename,
                geo_reference=None, generate_metadata=True,
                merge_with=None, original_footprint=None):
        """ Preprocess *input_filename* and write it to *output_filename*,
        optionally merging with an existing image.

        :param input_filename: path of the source dataset
        :param output_filename: template for the output path; passed
            through ``self.generate_filename``
        :param geo_reference: optional geo-reference with ``apply(ds)``;
            when absent and the dataset carries GCPs, internal GCPs are used
        :param generate_metadata: whether to compute the footprint geometry
        :param merge_with: optional path of an existing image to merge the
            result into (that file is deleted afterwards)
        :param original_footprint: WKT footprint of *merge_with*; required
            when *merge_with* is given
        :returns: a :class:`PreProcessResult`
        :raises ValueError: when no georeference can be established, or
            *merge_with* is given without *original_footprint*
        """
        # open the dataset and create an In-Memory Dataset as copy
        # to perform optimizations
        ds = create_mem_copy(gdal.Open(input_filename))

        gt = ds.GetGeoTransform()
        footprint_wkt = None

        if not geo_reference:
            # identity geotransform == no georeferencing information
            if gt == (0.0, 1.0, 0.0, 0.0, 0.0, 1.0):
                if ds.GetGCPCount() > 0:
                    geo_reference = InternalGCPs()
                else:
                    raise ValueError("No geospatial reference for "
                                     "unreferenced dataset given.")

        if geo_reference:
            logger.debug("Applying geo reference '%s'."
                         % type(geo_reference).__name__)
            # footprint is always in EPSG:4326
            ds, footprint_wkt = geo_reference.apply(ds)

        # apply optimizations
        for optimization in self.get_optimizations(ds):
            logger.debug("Applying optimization '%s'."
                         % type(optimization).__name__)
            try:
                new_ds = optimization(ds)
                if new_ds is not ds:
                    # cleanup afterwards
                    cleanup_temp(ds)
                    ds = new_ds
            except:
                # deliberately broad: release the temporary dataset,
                # then re-raise
                cleanup_temp(ds)
                raise

        # generate the footprint from the dataset
        if not footprint_wkt:
            logger.debug("Generating footprint.")
            footprint_wkt = self._generate_footprint_wkt(ds)
        # check that footprint is inside of extent of generated image
        # regenerate otherwise
        else:
            tmp_extent = getExtentFromRectifiedDS(ds)
            tmp_bbox = Polygon.from_bbox(
                (tmp_extent[0], tmp_extent[1], tmp_extent[2], tmp_extent[3]))

            # transform image bbox to EPSG:4326 if necessary
            proj = ds.GetProjection()
            srs = osr.SpatialReference()
            try:
                srs.ImportFromWkt(proj)
                srs.AutoIdentifyEPSG()
                ptype = "PROJCS" if srs.IsProjected() else "GEOGCS"
                srid = int(srs.GetAuthorityCode(ptype))
                # BUGFIX: srid is an int; the previous comparison against
                # the string '4326' was always True, so the (no-op)
                # 4326 -> 4326 transformation was always performed.
                if srid != 4326:
                    out_srs = osr.SpatialReference()
                    out_srs.ImportFromEPSG(4326)
                    transform = osr.CoordinateTransformation(srs, out_srs)
                    tmp_bbox2 = ogr.CreateGeometryFromWkt(tmp_bbox.wkt)
                    tmp_bbox2.Transform(transform)
                    tmp_bbox = GEOSGeometry(tmp_bbox2.ExportToWkt())
            except (RuntimeError, TypeError) as e:
                logger.warn("Projection: %s" % proj)
                logger.warn("Failed to identify projection's EPSG code."
                            "%s: %s" % (type(e).__name__, str(e)))

            tmp_footprint = GEOSGeometry(footprint_wkt)
            if not tmp_bbox.contains(tmp_footprint):
                logger.debug("Re-generating footprint because not inside of "
                             "generated image.")
                footprint_wkt = tmp_footprint.intersection(tmp_bbox).wkt

        if self.footprint_alpha:
            logger.debug("Applying optimization 'AlphaBandOptimization'.")
            opt = AlphaBandOptimization()
            opt(ds, footprint_wkt)

        output_filename = self.generate_filename(output_filename)

        if merge_with is not None:
            if original_footprint is None:
                raise ValueError(
                    "Original footprint with to be merged image required.")

            original_ds = gdal.Open(merge_with, gdal.GA_Update)
            merger = GDALDatasetMerger([
                GDALGeometryMaskMergeSource(
                    original_ds, original_footprint,
                    temporary_directory=self.temporary_directory),
                GDALGeometryMaskMergeSource(
                    ds, footprint_wkt,
                    temporary_directory=self.temporary_directory)
            ])

            final_ds = merger.merge(output_filename,
                                    self.format_selection.driver_name,
                                    self.format_selection.creation_options)

            # cleanup previous file
            driver = original_ds.GetDriver()
            original_ds = None
            driver.Delete(merge_with)

            cleanup_temp(ds)

        else:
            logger.debug("Writing single file '%s' using options: %s."
                         % (output_filename, ", ".join(
                             self.format_selection.creation_options)))
            logger.debug("Metadata tags to be written: %s"
                         % ", ".join(ds.GetMetadata_List("") or []))

            # save the file to the disc
            driver = gdal.GetDriverByName(self.format_selection.driver_name)
            final_ds = driver.CreateCopy(
                output_filename, ds,
                options=self.format_selection.creation_options)

            # cleanup
            cleanup_temp(ds)

        for optimization in self.get_post_optimizations(final_ds):
            logger.debug("Applying post-optimization '%s'."
                         % type(optimization).__name__)
            optimization(final_ds)

        # generate metadata if requested
        footprint = None
        if generate_metadata:
            normalized_space = Polygon.from_bbox((-180, -90, 180, 90))
            non_normalized_space = Polygon.from_bbox((180, -90, 360, 90))

            footprint = GEOSGeometry(footprint_wkt)
            outer = non_normalized_space.intersection(footprint)

            # any part of the footprint beyond +180° is shifted by -360°
            # and merged back, normalizing dateline-crossing geometries
            if len(outer):
                footprint = MultiPolygon(*map(
                    lambda p: Polygon(*map(
                        lambda ls: LinearRing(*map(
                            lambda point: (point[0] - 360, point[1]),
                            ls.coords
                        )),
                        tuple(p))),
                    (outer, ))).union(
                        normalized_space.intersection(footprint))
            else:
                if isinstance(footprint, Polygon):
                    footprint = MultiPolygon(footprint)

            if original_footprint:
                logger.debug("Merging footprint.")
                footprint = footprint.union(GEOSGeometry(original_footprint))

            logger.debug("Calculated Footprint: '%s'" % footprint.wkt)

        num_bands = final_ds.RasterCount

        # finally close the dataset and write it to the disc
        final_ds = None

        return PreProcessResult(output_filename, footprint, num_bands)
def create_simple_vrt(ds, vrt_filename):
    """ Create a plain VRT copy of *ds* at *vrt_filename*.

    :param ds: the source :class:`gdal.Dataset`
    :param vrt_filename: path of the VRT file to create
    :returns: the new VRT :class:`gdal.Dataset`
    """
    driver = gdal.GetDriverByName("VRT")
    return driver.CreateCopy(vrt_filename, ds)
def create_mem_copy(ds, *args, **kwargs):
    """ Create a new In-Memory Dataset as copy from an existing dataset.

    :param ds: the source :class:`gdal.Dataset`
    :param args: positional arguments forwarded to ``CreateCopy``
    :param kwargs: keyword arguments forwarded to ``CreateCopy``
    :returns: the in-memory copy of *ds*
    """
    driver = gdal.GetDriverByName('MEM')
    # an empty filename creates an anonymous in-memory dataset
    return driver.CreateCopy('', ds, *args, **kwargs)