def save(self, chunk): """ """ # read and convert datatype with chunk.as_dataset() as dataset: band = dataset.GetRasterBand(1) active = band.GetMaskBand().ReadAsArray()[np.newaxis] array = band.ReadAsArray().astype(self.dtype)[np.newaxis] # determine inside pixels inside = np.zeros_like(active) kwargs = { 'geo_transform': dataset.GetGeoTransform(), 'projection': dataset.GetProjection(), } with datasources.Layer(self.geometry) as layer: with datasets.Dataset(inside, **kwargs) as dataset: gdal.RasterizeLayer(dataset, [1], layer, burn_values=[255]) # mask outide or inactive array[~np.logical_and(active, inside)] = self.no_data_value # write to target dataset kwargs.update(no_data_value=self.no_data_value) with datasets.Dataset(array, **kwargs) as dataset: data = dataset.ReadRaster(0, 0, chunk.width, chunk.height) args = chunk.origin + (chunk.width, chunk.height, data) self.dataset.WriteRaster(*args)
def rasterize(feature, source_dir, target_dir):
    """ Rasterize streamline shape for a single tile into raster.

    :param feature: ogr feature with a 'name' attribute.
    :param source_dir: directory containing the source shapes.
    :param target_dir: directory to write the tif into.
    """
    geo_transform = get_geo_transform(feature.geometry())
    name = feature[str('name')]
    partial_path = os.path.join(name[:3], name)
    # target path
    target_path = os.path.join(target_dir, partial_path) + '.tif'
    if os.path.exists(target_path):
        return
    # create directory
    try:
        os.makedirs(os.path.dirname(target_path))
    except OSError:
        pass  # no problem
    # open source
    source_path = os.path.join(source_dir, partial_path)
    data_source = ogr.Open(source_path)
    layer = data_source[0]
    # create target array
    kwargs = {'no_data_value': 0,
              'geo_transform': geo_transform,
              'array': np.zeros((1, 12500, 10000), 'u1'),
              'projection': osr.GetUserInputAsWKT(str('epsg:28992'))}
    with datasets.Dataset(**kwargs) as dataset:
        # burn pixel values 2..5 for the features with class
        # attributes 2, 3, 4 and 4.7 respectively
        for value, attribute in enumerate([2, 3, 4, 4.7], 2):
            layer.SetAttributeFilter(str('class={}').format(attribute))
            gdal.RasterizeLayer(dataset, [1], layer, burn_values=[value])
        GTIF.CreateCopy(target_path, dataset, options=OPTIONS)
def calculate(self, feature):
    """ Compute a hillshade for one index feature and save it as tif.

    :param feature: ogr feature with a 'BLADNR' attribute.
    """
    # target path
    leaf_number = feature[b'BLADNR']
    path = os.path.join(self.output_path,
                        leaf_number[:3],
                        '{}.tif'.format(leaf_number))
    if os.path.exists(path):
        logger.debug('Target already exists.')
        return
    # calculate
    geometry = feature.geometry()
    indices = self.group.geo_transform.get_indices(geometry)
    array = self.group.read(indices)
    # pixel sizes from the geo transform (y resolution is negative)
    resolution = self.group.geo_transform[1], self.group.geo_transform[5]
    # hillshade = zevenbergen_thorne(array=array, resolution=resolution)
    hillshade = other(array=array, resolution=resolution)
    # create directory
    try:
        os.makedirs(os.path.dirname(path))
    except OSError:
        pass  # no problem
    # save
    kwargs = {
        'projection': self.group.projection,
        'geo_transform': self.group.geo_transform.shifted(geometry),
    }
    options = [
        'tiled=yes',
        'compress=deflate',
    ]
    with datasets.Dataset(hillshade[np.newaxis, ...], **kwargs) as dataset:
        driver.CreateCopy(path, dataset, options=options)
def read(self, geometry): """ Return 2-tuple (array, index) The 'array' array is already a linear array of elements within geometry. The The 'data' array is a boolean array indicating wether the corresponding element from 'array' contains data, i.e., whose value does not correspond to the no_data_value. """ # determine kwargs to use with GDAL datasets kwargs = { 'geo_transform': self.geo_transform.shifted(geometry, inflate=True) } kwargs.update(self.kwargs) # read the data for array array_2d = self.group.read(geometry, inflate=True) # prepare a mask to select elements that are within geometry select_2d = np.zeros(array_2d.shape, dtype='u1') with datasources.Layer(geometry) as layer: with datasets.Dataset(select_2d[np.newaxis], **kwargs) as dataset: gdal.RasterizeLayer(dataset, [1], layer, burn_values=[1]) # select those elements array_1d = array_2d[select_2d.astype('b1')] # determine data or no data if array_1d.dtype.kind == 'f': select_1d = ~np.isclose(array_1d, self.no_data_value) else: select_1d = ~np.equal(array_1d, self.no_data_value) return {'array': array_1d[select_1d], 'size': array_1d.size}
def retile(self, feature): """ Retile to feature. """ # target path name = feature[str('name')] path = os.path.join(self.target_path, name[:3], '{}.tif'.format(name)) if os.path.exists(path): return # retile geometry = feature.geometry() geo_transform = self.geo_transform.shifted(geometry) try: values = self.group.read(geometry) except TypeError: return if (values == self.no_data_value).all(): return # create directory try: os.makedirs(os.path.dirname(path)) except OSError: pass # no problem # save kwargs = {'projection': self.projection, 'geo_transform': geo_transform, 'no_data_value': self.no_data_value.item()} options = ['tiled=yes', 'compress=deflate'] with datasets.Dataset(values[np.newaxis, ...], **kwargs) as dataset: driver.CreateCopy(path, dataset, options=options)
def convert(archive, name):
    """ Read a point file from archive and save it as a rasterized tif.

    The target filename is the member's basename with a '.tif' suffix.
    """
    point_records = read(archive=archive, name=name)
    dataset_kwargs = rasterize(point_records)
    # derive the tif name from the archive member name
    base = os.path.splitext(os.path.basename(name))[0]
    target = '{}.tif'.format(base)
    logger.debug('Saving to "{}".'.format(target))
    with datasets.Dataset(**dataset_kwargs) as dataset:
        DRIVER.CreateCopy(target, dataset, options=['compress=deflate'])
def save(self, fill_zeros):
    """ Cut out and save block.

    :param fill_zeros: Put zeros for no data within geometry.
    """
    outputs = self.operation.calculate(self.inputs)
    for name in outputs:
        # assign to names
        output = outputs[name]
        array = output['values'][np.newaxis]
        active = output['active'][np.newaxis]
        projection = output['projection']
        no_data_value = output['no_data_value']
        geo_transform = output['geo_transform']
        # determine inside pixels by burning 255 within self.geometry
        inside = np.zeros_like(active)
        kwargs = {
            'projection': projection,
            'geo_transform': geo_transform,
        }
        with datasources.Layer(self.geometry) as layer:
            with datasets.Dataset(inside, **kwargs) as dataset:
                gdal.RasterizeLayer(dataset, [1], layer, burn_values=[255])
        if fill_zeros:
            # set inactive pixels to zero, but outside pixels to no data
            array[np.logical_not(active)] = 0
            array[np.logical_not(inside)] = no_data_value
        else:
            # mask outside or inactive
            array[~np.logical_and(active, inside)] = no_data_value
        # write to target dataset
        tile = self.tile
        kwargs.update(no_data_value=no_data_value)
        with datasets.Dataset(array, **kwargs) as dataset:
            data = dataset.ReadRaster(0, 0, tile.width, tile.height)
            args = tile.origin + (tile.width, tile.height, data)
            self.datasets[name].WriteRaster(*args)
    # update resume file with the next tile serial
    with open(self.rpath, 'w') as resume_file:
        resume_file.write(str(self.tile.serial + 1))
def write(self):
    """ Write self.data to self.path as a compressed, tiled GeoTIFF. """
    # make sure the target directory exists
    target_dir = dirname(self.path)
    try:
        os.makedirs(target_dir)
    except OSError:
        pass  # directory already exists
    with datasets.Dataset(self.data, **self.kwargs) as dataset:
        DRIVER_GDAL_GTIFF.CreateCopy(
            self.path, dataset, options=['compress=deflate', 'tiled=yes'],
        )
def save(self):
    """ Write self.target to self.target_path as a GeoTIFF. """
    # make sure the target directory exists
    os.makedirs(dirname(self.target_path), exist_ok=True)
    # prepend a band axis and hand the array to gdal via a memory dataset
    banded = self.target[np.newaxis]
    with datasets.Dataset(banded, **self.kwargs) as dataset:
        DRIVER.CreateCopy(self.target_path, dataset, options=OPTIONS)
def fill(self, feature):
    """ Fill depressions in the dem for one tile and save it as tif.

    :param feature: ogr feature with a 'name' attribute.
    """
    # target path
    name = feature[str('name')]
    path = os.path.join(self.output_path, name[:3], '{}.tif'.format(name))
    if os.path.exists(path):
        return
    # create directory
    try:
        os.makedirs(os.path.dirname(path))
    except OSError:
        pass  # no problem
    # geometries: process a 50 m buffer to avoid edge effects
    inner_geometry = feature.geometry()
    outer_geometry = inner_geometry.Buffer(50)
    # geo transforms
    inner_geo_transform = self.geo_transform.shifted(inner_geometry)
    outer_geo_transform = self.geo_transform.shifted(outer_geometry)
    # data
    values = self.raster_group.read(outer_geometry)
    cover = self.cover_group.read(outer_geometry)
    # create mask where cover refers to water
    mask = np.zeros_like(cover, dtype='b1')
    mask.ravel()[:] = np.in1d(cover, (50, 51, 52, 156, 254))
    # set buildings to maximum dem before directions
    building = np.logical_and(cover > 1, cover < 15)
    maximum = np.finfo(values.dtype).max  # assumes float dtype - confirm
    original = values[building]
    values[building] = maximum
    # processing
    fill_simple_depressions(values)
    fill_complex_depressions(values=values, mask=mask)
    # put buildings back in place
    values[building] = original
    # cut out the unbuffered extent and prepend a band axis
    slices = outer_geo_transform.get_slices(inner_geometry)
    values = values[slices][np.newaxis]
    # save
    options = ['compress=deflate', 'tiled=yes']
    kwargs = {
        'projection': self.projection,
        'geo_transform': inner_geo_transform,
        'no_data_value': self.no_data_value.item()
    }
    with datasets.Dataset(values, **kwargs) as dataset:
        GTIF.CreateCopy(path, dataset, options=options)
def calculate(self, feature):
    """ Calculate flow directions for one tile and save them as tif.

    :param feature: ogr feature with a 'name' attribute.
    """
    # target path
    name = feature[str('name')]
    path = os.path.join(self.output_path, name[:3], '{}.tif'.format(name))
    if os.path.exists(path):
        return
    # create directory
    try:
        os.makedirs(os.path.dirname(path))
    except OSError:
        pass  # no problem
    # geometries: process a 50 m buffer to avoid edge effects
    inner_geometry = feature.geometry()
    outer_geometry = inner_geometry.Buffer(50)
    # geo transforms
    inner_geo_transform = self.geo_transform.shifted(inner_geometry)
    outer_geo_transform = self.geo_transform.shifted(outer_geometry)
    values = self.raster_group.read(outer_geometry)
    cover = self.cover_group.read(outer_geometry)
    # set buildings to maximum dem before calculating directions
    maximum = np.finfo(values.dtype).max  # assumes float dtype - confirm
    building = np.logical_and(cover > 1, cover < 15)
    values[building] = maximum
    # processing
    direction = calculate_flow_direction(values)
    # make water undefined (0 is also the no data value below)
    water = np.zeros_like(cover, dtype='b1')
    water.ravel()[:] = np.in1d(cover, (50, 51, 52, 156, 254))
    direction[water] = 0
    # make buildings undefined
    direction[building] = 0
    # cut out the unbuffered extent and prepend a band axis
    slices = outer_geo_transform.get_slices(inner_geometry)
    direction = direction[slices][np.newaxis]
    # saving
    options = ['compress=deflate', 'tiled=yes']
    kwargs = {
        'no_data_value': 0,
        'projection': self.projection,
        'geo_transform': inner_geo_transform
    }
    with datasets.Dataset(direction, **kwargs) as dataset:
        GTIF.CreateCopy(path, dataset, options=options)
def txt2tif(source_path):
    """ Rasterize a text point file into min- and max-statistic tifs.

    For each statistic a target named '<root>_<statistic>.tif' is
    written next to the source.
    """
    root = os.path.splitext(source_path)[0]
    points = np.loadtxt(source_path)
    kwargs = rasterize(points)
    stacked = kwargs.pop('array')
    for statistic in ('min', 'max'):
        # reduce the masked stack along the first axis
        reducer = getattr(np.ma, statistic)
        reduced = reducer(stacked, 0).filled(NO_DATA_VALUE)
        kwargs['array'] = reduced[np.newaxis]
        target_path = root + '_' + statistic + '.tif'
        with datasets.Dataset(**kwargs) as dataset:
            DRIVER.CreateCopy(target_path, dataset, options=OPTIONS)
def aggregate(self, index_feature):
    """ Aggregate the data for one index feature and save it as tif.

    :param index_feature: ogr feature with a 'name' attribute.

    Reads the raster for the feature's geometry, masks implausible
    values, aggregates by a factor of two per iteration and writes the
    result as a compressed, tiled GeoTIFF.
    """
    # target path
    name = index_feature[str('name')]
    path = os.path.join(self.output_path, name[:2], '{}.tif'.format(name))
    if os.path.exists(path):
        return
    # create directory
    try:
        os.makedirs(os.path.dirname(path))
    except OSError:
        pass  # no problem
    geometry = index_feature.geometry()
    factor = 2 ** self.iterations
    geo_transform = self.geo_transform.shifted(geometry).scaled(factor)
    # data
    values = self.raster_group.read(geometry)
    no_data_value = self.no_data_value
    # set errors to no data: active values outside (-1000, 1000)
    index = np.logical_and(
        values != no_data_value,
        np.logical_or(values < -1000, values > 1000),
    )
    # fixed: the original used '==' here, a no-op comparison, so the
    # error values were never actually masked
    values[index] = no_data_value
    if np.equal(values, no_data_value).all():
        return
    # aggregate repeatedly
    kwargs = {
        'func': 'mean',
        'values': values,
        'no_data_value': no_data_value
    }
    for _ in range(self.iterations):
        kwargs = utils.aggregate(**kwargs)
    # save
    values = kwargs['values'][np.newaxis]
    options = ['compress=deflate', 'tiled=yes']
    kwargs = {
        'projection': self.projection,
        'geo_transform': geo_transform,
        'no_data_value': no_data_value.item()
    }
    with datasets.Dataset(values, **kwargs) as dataset:
        GTIF.CreateCopy(path, dataset, options=options)
def shadow(self, feature):
    """ Compute a shadow mask for one tile and save it as tif.

    :param feature: ogr feature with a 'BLADNR' attribute.
    """
    # target path
    leaf_number = feature[b'BLADNR']
    path = os.path.join(self.output_path,
                        leaf_number[:3],
                        '{}.tif'.format(leaf_number))
    if os.path.exists(path):
        logger.debug('Target already exists.')
        return
    # prepare
    geometry = feature.geometry()
    size, bounds = self.get_size_and_bounds(geometry)
    array = self.group.read(bounds)
    # maximum filter makes shadows a little wider
    footprint = ndimage.generate_binary_structure(2, 1)
    array = ndimage.maximum_filter(array, footprint=footprint)
    view1 = self.get_view(array=array, size=size)
    target = np.zeros_like(view1, dtype='b1')
    # calculate shadow: mark pixels overtopped by any shifted view,
    # until no new pixels appear or the height limit is reached
    for iteration in itertools.count(1):
        view2 = self.get_view(array=array, size=size, iteration=iteration)
        index = np.logical_and(~target, view2 > view1)
        if not index.any():
            break
        if iteration * self.dz > self.mz:
            break
        target[index] = True
    # uint8 addition wraps around: True: 0, False: 255
    target = target.astype('u1') + 255
    # create directory
    try:
        os.makedirs(os.path.dirname(path))
    except OSError:
        pass  # no problem
    kwargs = {
        'no_data_value': 255,
        'projection': self.group.projection,
        'geo_transform': self.group.geo_transform.shifted(geometry),
    }
    options = [
        'tiled=yes',
        'compress=deflate',
    ]
    with datasets.Dataset(target[np.newaxis], **kwargs) as dataset:
        driver.CreateCopy(path, dataset, options=options)
def determine_floor_level(self, feature): """ Return boolean if a floor level was computed and assigned. Add assign a computed floor level to the supplied feature. :param feature: feature with floor column. """ # determine geometry and 1m buffer geometry = feature.geometry() # skip too large geometries xmin, xmax, ymin, ymax = geometry.GetEnvelope() if max(ymax - ymin, xmax - xmin) > 1000: return try: buffer_geometry = geometry.Buffer(1).Difference(geometry) except RuntimeError: # garbage geometry return # read raster data for the extent of the buffer geometry geo_transform = self.geo_transform.shifted(buffer_geometry) data = self.raster_group.read(buffer_geometry) if (data == self.no_data_value).all(): return data.shape = (1,) + data.shape # rasterize the buffer geometry into a raster mask mask = np.zeros(data.shape, 'u1') dataset_kwargs = {'geo_transform': geo_transform} dataset_kwargs.update(self.kwargs) with datasources.Layer(buffer_geometry) as layer: with datasets.Dataset(mask, **dataset_kwargs) as dataset: gdal.RasterizeLayer(dataset, [1], layer, burn_values=[1]) # rasterize the percentile try: floor = np.percentile(data[mask.nonzero()], 75) if self.floor: floor += self.floor return floor except IndexError: # no data points at all return
def single(self, feature, target): """ :param feature: vector feature :param target: raster file to write to """ # determine geometry and 1m buffer geometry = feature.geometry() try: geometry_buffer = geometry.Buffer(1).Difference(geometry) except RuntimeError: # garbage geometry return False # read raster data geo_transform = self.geo_transform.shifted(geometry_buffer) data = self.raster_group.read(geometry_buffer) if (data == self.no_data_value).all(): return False data.shape = (1, ) + data.shape # create ogr data sources with geometry and buffer data_source = self.get_ogr_data_source(geometry) data_source_buffer = self.get_ogr_data_source(geometry_buffer) # determine mask mask = np.zeros(data.shape, 'u1') dataset_kwargs = {'geo_transform': geo_transform} dataset_kwargs.update(self.kwargs) with datasets.Dataset(mask, **dataset_kwargs) as dataset: gdal.RasterizeLayer(dataset, [1], data_source_buffer[0], burn_values=[1]) # rasterize the percentile try: burn = np.percentile(data[mask.nonzero()], 75) if self.floor: burn += self.floor except IndexError: # no data points at all return False gdal.RasterizeLayer(target, [1], data_source[0], burn_values=[burn]) return True
def clip(self, path): """ Clip using OGR source at path. Clip actually puts zeros in the source outside the clip layer, so that they are excluded from the fill process. """ # create mask with ones mask = np.ones_like(self.source, dtype='b1') # rasterize data_source as zeros data_source = ogr.Open(path) array = mask[np.newaxis].view('u1') with datasets.Dataset(array, **self.kwargs) as dataset: for layer in data_source: gdal.RasterizeLayer(dataset, [1], layer, burn_values=[0]) # fill source with zeros where mask contains ones self.source[mask] = 0
def setUpClass(cls):
    """ Build shared fixtures: an Edge object, an in-memory raster and
    an in-memory shapefile covering the raster's extent. """
    # create source data: a ramp with a void in the eroded interior
    dtype = 'f4'
    fillvalue = np.finfo(dtype).max.item()
    shape = 7, 7
    sample = sum(np.indices(shape)).astype('f4')
    cls.void = ndimage.binary_erosion(np.ones(shape, dtype='b1'))
    source = np.where(cls.void, fillvalue, sample)
    # create an edge: the one-pixel ring around the void
    edge = cls.void ^ ndimage.binary_dilation(cls.void)
    indices = edge.nonzero()
    cls.edge = edges.Edge(
        indices=indices,
        values=source[indices],
        shape=source.shape,
    )
    # save a raster in memory
    driver = gdal.GetDriverByName('Gtiff')
    cls.raster = '/vsimem/raster.tif'
    array = source[np.newaxis]
    projection = osr.GetUserInputAsWKT('EPSG:28992')
    x1, y2 = 200000, 400007
    kwargs = {
        'geo_transform': (x1, 1, 0, y2, 0, -1),
        'no_data_value': fillvalue,
        'projection': projection,
    }
    with datasets.Dataset(array, **kwargs) as dataset:
        driver.CreateCopy(cls.raster, dataset)
    # save a shape in memory
    driver = ogr.GetDriverByName('ESRI Shapefile')
    cls.vector = '/vsimem/vector.shp'
    data_source = driver.CreateDataSource(cls.vector)
    y1, x2 = 400000, 200007
    wkt = POLYGON.format(x1=x1, y1=y1, x2=x2, y2=y2)
    sr = osr.SpatialReference(projection)
    geometry = ogr.CreateGeometryFromWkt(wkt, sr)
    with datasources.Layer(geometry) as layer:
        data_source.CopyLayer(layer, '')
def create_filled_rasterarray(array, burn_polygon, rasterdata, raster_path,
                              print_rast):
    """ Return array after burning rasterdata['fillvalue'] for
    burn_polygon.

    The burn happens in place, through a gdal dataset that shares the
    array's memory. If print_rast == 1 the dataset is also written to
    raster_path for debugging.
    """
    kwargs = {
        'projection': rasterdata['projection'],
        'geo_transform': rasterdata['geo_transform']
    }
    if 'no_data_value' in rasterdata:
        kwargs['no_data_value'] = rasterdata['no_data_value']
    with datasets.Dataset(array, **kwargs) as source:
        # set pixels outside geometry to 'no data'
        burn_value(dataset=source,
                   geometry=burn_polygon,
                   value=rasterdata['fillvalue'])
        # execute raster for debugging purposes
        if print_rast == 1:
            DRIVER_GDAL_GTIFF.CreateCopy(raster_path, source)
    return array
def accumulate(self, feature):
    """ Compute log-scaled flow accumulation for one tile and save it.

    :param feature: ogr feature with a 'name' attribute.
    """
    # target path
    name = feature[str('name')]
    path = os.path.join(self.output_path, name[:3], '{}.tif'.format(name))
    if os.path.exists(path):
        return
    # create directory
    try:
        os.makedirs(os.path.dirname(path))
    except OSError:
        pass  # no problem
    # geometries: process a 50 m buffer to avoid edge effects
    inner_geometry = feature.geometry()
    outer_geometry = inner_geometry.Buffer(50)
    # geo transforms
    inner_geo_transform = self.geo_transform.shifted(inner_geometry)
    outer_geo_transform = self.geo_transform.shifted(outer_geometry)
    # data
    direction = self.raster_group.read(outer_geometry)
    # processing
    accu = accumulate(direction)
    # cut out and convert; log10(x + 1) keeps zero cells at zero
    slices = outer_geo_transform.get_slices(inner_geometry)
    acculog = np.log10(accu[slices][np.newaxis] + 1).astype('f4')
    # save
    options = ['compress=deflate', 'tiled=yes']
    kwargs = {
        'projection': self.projection,
        'geo_transform': inner_geo_transform,
        'no_data_value': np.finfo(acculog.dtype).min.item()
    }
    with datasets.Dataset(acculog, **kwargs) as dataset:
        GTIF.CreateCopy(path, dataset, options=options)
def clip(kwargs, geometry): """ Clip kwargs in place. """ # do not touch original kwargs kwargs = kwargs.copy() array = kwargs.pop('array') mask = np.ones_like(array, 'u1') # create an ogr datasource source = MEM_DRIVER.CreateDataSource('') layer = source.CreateLayer(str(''), SR) defn = layer.GetLayerDefn() feature = ogr.Feature(defn) feature.SetGeometry(geometry) layer.CreateFeature(feature) # clip with datasets.Dataset(mask, **kwargs) as dataset: gdal.RasterizeLayer(dataset, [1], layer, burn_values=[0]) # alter array with result array[mask.astype('b1')] = NO_DATA_VALUE
def burn(self, items):
    """ Burn (geometry, value) pairs into self.data.

    :param items: iterable of (wkb geometry, integer value) pairs.
    """
    # source and layer
    data_source = DRIVER_OGR_MEMORY.CreateDataSource('')
    spatial_ref = osr.SpatialReference(PROJECTION)
    # layer definition with a single integer value field
    value_name = 'value'
    layer = data_source.CreateLayer(str(''), spatial_ref)
    layer.CreateField(ogr.FieldDefn(str(value_name), ogr.OFTInteger))
    layer_defn = layer.GetLayerDefn()
    # data insertion
    for geometry, value in items:
        feature = ogr.Feature(layer_defn)
        feature[str(value_name)] = value
        feature.SetGeometry(ogr.CreateGeometryFromWkb(bytes(geometry)))
        layer.CreateFeature(feature)
    # burn each feature's value attribute instead of a fixed burn value
    options = ['ATTRIBUTE={}'.format(value_name)]
    with datasets.Dataset(self.data, **self.kwargs) as dataset:
        gdal.RasterizeLayer(dataset, [1], layer, options=options)
    self.burned = True
def get_mask(self, geometry, shape):
    """ Return a boolean mask that is True inside geometry.

    :param geometry: ogr geometry.
    :param shape: shape of the mask array to burn into.

    The burned mask is repeated 3 times along the first axis
    (presumably to match a 3-band array - confirm against callers).
    """
    # create an ogr datasource holding the single geometry
    driver = ogr.GetDriverByName(str('Memory'))
    source = driver.CreateDataSource(str(''))
    sr = osr.SpatialReference(self.projection)
    layer = source.CreateLayer(str(''), sr)
    defn = layer.GetLayerDefn()
    feature = ogr.Feature(defn)
    feature.SetGeometry(geometry)
    layer.CreateFeature(feature)
    # burn where data should be
    mask = np.zeros(shape, dtype='u1')
    geo_transform = self.geo_transform.shifted(geometry)
    kwargs = {
        'geo_transform': geo_transform,
        'projection': self.projection
    }
    with datasets.Dataset(mask, **kwargs) as dataset:
        gdal.RasterizeLayer(dataset, (1, ), layer, burn_values=(1, ))
    return mask.astype('b1').repeat(3, axis=0)
def roof(index_path, point_path, source_path, target_path):
    """ For each feature in the source, classify its points and save a
    colored point cloud (laz) and a rasterized tif into target_path.

    Output files are named a.laz/a.tif, b.laz/b.tif, ... per feature.
    """
    fetcher = Fetcher(index_path=index_path, point_path=point_path)
    data_source = ogr.Open(source_path)
    layer = data_source[0]
    try:
        os.mkdir(target_path)
    except OSError:
        pass
    for char, feature in zip(string.ascii_letters, layer):
        # if char not in 'mn':
        #     continue
        geometry = feature.geometry()
        points = fetcher.fetch(geometry)
        # classify; each class gets a distinct RGB color
        classes = classify(points)
        a, b = 0, 255
        colors = np.array([[b, a, a],
                           [a, b, a],
                           [a, a, b],
                           [b, a, b]], 'u1')[classes]
        # save classified cloud by piping text through las2las
        text = '\n'.join(parse(points, colors))
        laz_path = os.path.join(target_path, char + '.laz')
        template = 'las2las -stdin -itxt -iparse xyzRGB -o {}'
        command = template.format(laz_path)
        process = subprocess.Popen(shlex.split(command),
                                   stdin=subprocess.PIPE)
        # NOTE(review): under python 3, communicate() expects bytes
        # unless text mode is enabled - confirm intended interpreter
        process.communicate(text)
        # save tif from the points with a nonzero class
        tif_path = os.path.join(target_path, char + '.tif')
        points = points[classes.astype('b1')]
        kwargs = rasterize(points=points, geometry=geometry)
        with datasets.Dataset(**kwargs) as dataset:
            TIF_DRIVER.CreateCopy(tif_path, dataset, options=OPTIONS)
        print(char, len(points))
def fix_nodata(source_paths):
    """ Replace -32767 pixels with 0 and register 0 as no data value.

    Each source is rewritten in place; the original file is kept next
    to it with an '.org' suffix.
    """
    for source_path in source_paths:
        # analyze source
        source = gdal.Open(source_path)
        array = source.ReadAsArray()[np.newaxis, ...]
        index = np.where(array == -32767)
        no_data_value = source.GetRasterBand(1).GetNoDataValue()
        if no_data_value == 0 and index[0].size == 0:
            # already fine: no data value is 0 and no -32767 pixels
            logger.debug('Skip {}'.format(source_path))
            continue
        # save modified tif
        logger.debug('Convert {}'.format(source_path))
        array[index] = 0
        kwargs = {
            'no_data_value': 0,
            'projection': source.GetProjection(),
            'geo_transform': source.GetGeoTransform()
        }
        target_path = '{}.target'.format(source_path)
        driver = source.GetDriver()
        with datasets.Dataset(array, **kwargs) as target:
            # preserve metadata and band unit type of the original
            target.SetMetadata(source.GetMetadata_List())
            target.GetRasterBand(1).SetUnitType(
                source.GetRasterBand(1).GetUnitType(),
            )
            driver.CreateCopy(target_path, target,
                              options=['compress=deflate'])
        # swap files; drop gdal's handle before renaming
        source = None
        backup_path = '{}.org'.format(source_path)
        os.rename(source_path, backup_path)
        os.rename(target_path, source_path)
def __init__(self, dataset, geometry):
    """ Rasterize geometry into target dataset extent to find
    relevant blocks.

    :param dataset: gdal dataset.
    :param geometry: ogr geometry.
    """
    w, h = dataset.GetRasterBand(1).GetBlockSize()
    geo_transform = utils.GeoTransform(dataset.GetGeoTransform())
    # create an array in which each cell represents a dataset block;
    # ceil-divide so partial blocks at the edges are included
    shape = (
        (dataset.RasterYSize - 1) // h + 1,
        (dataset.RasterXSize - 1) // w + 1,
    )
    index = np.zeros(shape, dtype='u1')
    kwargs = {
        'geo_transform': geo_transform.scaled(w, h),
        'projection': dataset.GetProjection(),
    }
    # find active blocks by rasterizing geometry
    options = ['all_touched=true']
    with datasources.Layer(geometry) as layer:
        with datasets.Dataset(index[np.newaxis], **kwargs) as ds_idx:
            gdal.RasterizeLayer(
                ds_idx, [1], layer, burn_values=[1], options=options,
            )
    # store as attributes
    self.block_size = w, h
    self.dataset_size = dataset.RasterXSize, dataset.RasterYSize
    self.geo_transform = geo_transform
    self.indices = index.nonzero()
def fill(self, feature): """ Call gdal interpolation function """ # prepare target path name = feature[str('bladnr')] path = os.path.join(self.output_path, name[:3], '{}.tif'.format(name)) if os.path.exists(path): return # create directory try: os.makedirs(os.path.dirname(path)) except OSError: pass # no problem # check for data geometry = feature.geometry() values = self.raster_group.read(geometry) if (values == self.no_data_value).all(): return kwargs = { 'projection': self.projection, 'geo_transform': self.geo_transform.shifted(geometry), 'no_data_value': self.no_data_value, } # gdal is going to use the current dir as temporary space curdir = os.getcwd() tmpdir = tempfile.mkdtemp(dir='/dev/shm') os.chdir(tmpdir) # fill no data until no voids remain iterations = 0 original_values = values.copy() # for diffing while self.no_data_value in values: # create a mask band # mask_array = (values != self.no_data_value).view('u1') # mask = datasets.create(mask_array[np.newaxis]) # mask_band = mask.GetRasterBand(1) # call the algorithm with datasets.Dataset(values[np.newaxis], **kwargs) as work: work_band = work.GetRasterBand(1) mask_band = work_band.GetMaskBand() try: gdal.FillNodata( work_band, mask_band, 100, # search distance 0, # smoothing iterations ) except RuntimeError: print(name) raise iterations += 1 # switch back current dir os.chdir(curdir) os.rmdir(tmpdir) # write diff values[values == original_values] = self.no_data_value with datasets.Dataset(values[np.newaxis], **kwargs) as result: DRIVER.CreateCopy(path, result, options=OPTIONS)
def rebase(base_path, source_path, target_path):
    """ Rebase source on base and write it to target.

    Pixels of source that are active and equal in base are blanked to
    the target no data value; the rest of source is copied. When no
    base exists the source is simply copied.
    """
    # skip existing
    if exists(target_path):
        print('{} skipped.'.format(target_path))
        return
    # skip when missing sources
    if not exists(source_path):
        print('{} not found.'.format(source_path))
        # fixed: the original fell through and called gdal.Open on the
        # missing path anyway
        return
    # prepare dirs
    try:
        os.makedirs(dirname(target_path))
    except OSError:
        pass
    # read source dataset
    source_dataset = gdal.Open(str(source_path))
    source_band = source_dataset.GetRasterBand(1)
    source_no_data_value = source_band.GetNoDataValue()
    source_array = source_band.ReadAsArray()
    # prepare target array; assumes a float dtype - confirm
    target_projection = source_dataset.GetProjection()
    target_geo_transform = source_dataset.GetGeoTransform()
    target_no_data_value = np.finfo(source_array.dtype).max
    target_array = np.full_like(source_array, target_no_data_value)
    # copy active cells
    source_mask = (source_array != source_no_data_value)
    target_array[source_mask] = source_array[source_mask]
    # rebase
    if exists(base_path):
        base_dataset = gdal.Open(str(base_path))
        base_band = base_dataset.GetRasterBand(1)
        base_no_data_value = base_band.GetNoDataValue()
        base_array = base_band.ReadAsArray()
        # combined mask has active pixels from source and base that are equal
        mask = (base_array != base_no_data_value)
        equal = (source_array == base_array)
        blank = source_mask & mask & equal
        target_array[blank] = target_no_data_value
        method = 'rebase'
    else:
        method = 'copy'
    # write
    kwargs = {
        'projection': target_projection,
        'geo_transform': target_geo_transform,
        'no_data_value': target_no_data_value.item(),
    }
    with datasets.Dataset(target_array[np.newaxis, ...], **kwargs) as dataset:
        DRIVER_GDAL_GTIFF.CreateCopy(target_path, dataset, options=OPTIONS)
    print('{} created ({}).'.format(target_path, method))
def command(index_path, source_path, target_dir, attribute):
    """ Rasterize some postgis tables.

    :param index_path: ogr source with index features ('BLADNR').
    :param source_path: ogr or 'pg:' source with the data to rasterize.
    :param target_dir: directory to write per-leaf tifs into.
    :param attribute: attribute whose value gets burned per feature.
    """
    # investigate sources
    if source_path.lower().startswith('pg:'):
        source_data_source = PGDataSource(source_path)
    else:
        source_data_source = ogr.Open(source_path)
    if source_data_source.GetDriver().GetName() == 'ESRI Shapefile':
        # seems 1.9.1 does not sort, while 1.9.2 does
        ordered_source_data_source = sorted(source_data_source,
                                            key=lambda l: l.GetName())
    else:
        ordered_source_data_source = source_data_source
    source_field_names = []
    for source_layer in ordered_source_data_source:
        # check attribute for all source layers
        source_field_names.append(
            get_field_name(layer=source_layer, attribute=attribute)
        )
    ogr_type = get_ogr_type(
        data_source=ordered_source_data_source,
        field_names=source_field_names,
    )
    # Create indexes for shapefiles if necessary
    if source_data_source.GetDriver().GetName() == 'ESRI Shapefile':
        for source_layer in ordered_source_data_source:
            source_layer_name = source_layer.GetName()
            if os.path.isfile(source_path):
                # fixed: strip the '.shp' extension; the original used
                # source_path[-4:], which kept only the extension
                source_layer_index_path = source_path[:-4] + '.qix'
            else:
                source_layer_index_path = os.path.join(
                    source_path, source_layer_name + '.qix',
                )
            if os.path.exists(source_layer_index_path):
                continue
            print('Creating spatial index on {}.'.format(source_layer_name))
            source_data_source.ExecuteSQL(
                str('CREATE SPATIAL INDEX ON {}').format(source_layer_name),
            )
    # rasterize
    index_data_source = ogr.Open(index_path)
    index_layer = index_data_source[0]
    total = index_layer.GetFeatureCount()
    print('Starting rasterize.')
    gdal.TermProgress_nocb(0)
    for count, index_feature in enumerate(index_layer, 1):
        leaf_number = index_feature[str('BLADNR')]
        target_path = os.path.join(
            target_dir, leaf_number[0:3], leaf_number + '.tif',
        )
        if os.path.exists(target_path):
            gdal.TermProgress_nocb(count / total)
            continue
        index_geometry = index_feature.geometry()
        # prepare dataset filled with the no data value
        data_type = DATA_TYPE[ogr_type]
        no_data_value = NO_DATA_VALUE[ogr_type]
        dataset = DRIVER_GDAL_MEM.Create('', 2000, 2500, 1, data_type)
        dataset.SetProjection(osr.GetUserInputAsWKT(str('epsg:28992')))
        dataset.SetGeoTransform(get_geotransform(index_geometry))
        band = dataset.GetRasterBand(1)
        band.SetNoDataValue(no_data_value)
        band.Fill(no_data_value)
        burned = False
        for i, source_layer in enumerate(ordered_source_data_source):
            source_field_name = source_field_names[i]
            source_layer.SetSpatialFilter(index_geometry)
            if not source_layer.GetFeatureCount():
                continue
            # create ogr layer if necessary
            if hasattr(source_layer, 'as_ogr_layer'):
                temp_data_source, source_layer = source_layer.as_ogr_layer(
                    name=source_field_name,
                    sr=index_layer.GetSpatialRef(),
                )
            # rasterize the attribute values
            gdal.RasterizeLayer(
                dataset, [1], source_layer,
                options=['ATTRIBUTE={}'.format(source_field_name)],
            )
            burned = True
        if burned:
            leaf_number = index_feature[str('BLADNR')]
            array = (dataset.ReadAsArray() == 255).astype('u1')
            if array.any():
                # save no data tif for inspection
                ndv_target_path = os.path.join(target_dir, 'no_data',
                                               leaf_number[0:3],
                                               leaf_number + '.tif')
                try:
                    os.makedirs(os.path.dirname(ndv_target_path))
                except OSError:
                    pass
                array.shape = 1, dataset.RasterYSize, dataset.RasterXSize
                kwargs = {
                    'array': array,
                    'no_data_value': 0,
                    'geo_transform': get_geotransform(index_geometry),
                    'projection': osr.GetUserInputAsWKT(str('epsg:28992')),
                }
                with datasets.Dataset(**kwargs) as ndv_dataset:
                    options = ['compress=deflate', 'tiled=yes']
                    DRIVER_GDAL_GTIFF.CreateCopy(ndv_target_path,
                                                 ndv_dataset,
                                                 options=options)
            # save
            try:
                os.makedirs(os.path.dirname(target_path))
            except OSError:
                pass
            DRIVER_GDAL_GTIFF.CreateCopy(
                target_path, dataset, options=['COMPRESS=DEFLATE'],
            )
        gdal.TermProgress_nocb(count / total)