def from_sources(cls, filename, sources, driver=None, creation_options=None):
    """Create a merge target dataset covering all ``sources``.

    The target spans the union of the source bounding boxes and uses the
    finest (smallest) resolution found among them.  All sources must
    share the same projection and band count as the first one.
    """
    first, rest = sources[0], sources[1:]
    res_x, res_y = first.resolution
    bbox = first.bbox

    reference_srs = osr.SpatialReference()
    reference_srs.ImportFromWkt(first.dataset.GetProjection())

    # validate the remaining sources while accumulating extent/resolution
    for source in rest:
        srs = osr.SpatialReference()
        srs.ImportFromWkt(source.dataset.GetProjection())
        if not srs.IsSame(reference_srs) or len(source) != len(first):
            raise Exception("Could not create merge target.")
        src_res_x, src_res_y = source.resolution
        bbox = bbox | source.bbox
        res_x = min(res_x, src_res_x)
        res_y = min(res_y, src_res_y)

    # derive the output raster size (rounded to the nearest pixel)
    size_x = int((bbox[2] - bbox[0]) / res_x + .5)
    size_y = int((bbox[3] - bbox[1]) / res_y + .5)
    gt = bbox[0], res_x, 0.0, bbox[3], 0.0, -res_y

    return cls(filename, size_x, size_y, gt,
               first.dataset.RasterCount,
               first.dataset.GetRasterBand(1).DataType,
               first.dataset.GetProjection(),
               driver, creation_options)
def __call__(self, src_ds): logger.info("Applying ReprojectionOptimization") # setup src_sr = osr.SpatialReference() src_sr.ImportFromWkt(src_ds.GetProjection()) dst_sr = osr.SpatialReference() dst_sr.ImportFromEPSG(self.srid) if src_sr.IsSame(dst_sr) and (src_ds.GetGeoTransform()[1] > 0) \ and (src_ds.GetGeoTransform()[5] < 0) \ and (src_ds.GetGeoTransform()[2] == 0) \ and (src_ds.GetGeoTransform()[4] == 0): logger.info( "Source and destination projection are equal and image " "is not flipped or has rotated axes. Thus, no " "reprojection is required.") return src_ds # create a temporary dataset to get information about the output size tmp_ds = gdal.AutoCreateWarpedVRT(src_ds, None, dst_sr.ExportToWkt(), gdal.GRA_Bilinear, 0.125) try: # create the output dataset dst_ds = create_temp(tmp_ds.RasterXSize, tmp_ds.RasterYSize, src_ds.RasterCount, src_ds.GetRasterBand(1).DataType, temp_root=self.temporary_directory) # initialize with no data for i in range(src_ds.RasterCount): src_band = src_ds.GetRasterBand(i + 1) if src_band.GetNoDataValue() is not None: dst_band = dst_ds.GetRasterBand(i + 1) dst_band.SetNoDataValue(src_band.GetNoDataValue()) dst_band.Fill(src_band.GetNoDataValue()) # reproject the image dst_ds.SetProjection(dst_sr.ExportToWkt()) dst_ds.SetGeoTransform(tmp_ds.GetGeoTransform()) gdal.ReprojectImage(src_ds, dst_ds, src_sr.ExportToWkt(), dst_sr.ExportToWkt(), gdal.GRA_Bilinear) tmp_ds = None # copy the metadata copy_metadata(src_ds, dst_ds) return dst_ds except: cleanup_temp(dst_ds) raise
def testGCPProjection(self):
    """Assert that response and expected datasets agree on their GCP SRS."""
    res_proj = self.res_ds.GetGCPProjection()
    if not res_proj:
        self.fail("Response Dataset has no GCP Projection defined")
    response_srs = osr.SpatialReference(res_proj)

    exp_proj = self.exp_ds.GetGCPProjection()
    if not exp_proj:
        self.fail("Expected Dataset has no GCP Projection defined")
    expected_srs = osr.SpatialReference(exp_proj)

    self.assert_(response_srs.IsSame(expected_srs))
def __call__(self, src_ds, footprint_wkt):
    """Ensure a fourth (alpha) band exists and burn the footprint into it.

    Pixels covered by ``footprint_wkt`` become fully opaque; everything
    else remains fully transparent.
    """
    logger.info("Applying AlphaBandOptimization")
    dt = src_ds.GetRasterBand(1).DataType

    if src_ds.RasterCount == 3:
        # add the missing alpha band
        src_ds.AddBand(dt)
    elif src_ds.RasterCount != 4:
        raise Exception("Cannot add alpha band, as the current number of "
                        "bands '%d' does not match" % src_ds.RasterCount)

    # start out completely transparent
    alpha_band = src_ds.GetRasterBand(4)
    alpha_band.Fill(0)

    # build an in-memory vector layer holding the footprint polygon
    sr = osr.SpatialReference()
    sr.ImportFromEPSG(4326)
    ogr_ds = ogr.GetDriverByName('Memory').CreateDataSource('wkt')
    layer = ogr_ds.CreateLayer('poly', srs=sr.sr)
    feat = ogr.Feature(layer.GetLayerDefn())
    feat.SetGeometryDirectly(ogr.Geometry(wkt=footprint_wkt))
    layer.CreateFeature(feat)

    # rasterize the polygon, burning the opaque value into the alpha band
    gdal.RasterizeLayer(src_ds, [4], layer,
                        burn_values=[get_limits(dt)[1]])
def spatial_reference(self):
    """Return the spatial reference: built from ``srid`` when set,
    otherwise delegated to the associated projection object."""
    if self.srid is None:
        return self.projection.spatial_reference
    result = osr.SpatialReference()
    result.ImportFromEPSG(self.srid)
    return result
def validateEPSGCode(string):
    """Check whether the given string is a valid EPSG code (True) or not
    (False)"""
    try:
        # Depending on GDAL version/configuration an invalid code either
        # raises RuntimeError or merely returns a non-zero OGRERR code,
        # so both paths must be checked; previously a non-raising GDAL
        # made every integer look like a valid code.
        err = osr.SpatialReference().ImportFromEPSG(int(string))
    except (ValueError, TypeError, RuntimeError):
        return False
    return err == 0
def spatial_reference(self):
    """Build an ``osr.SpatialReference`` from the stored definition.

    Supports the "WKT", "XML" and "URL" formats; an unrecognized format
    yields an empty spatial reference.
    """
    sr = osr.SpatialReference()
    if self.format == "WKT":
        sr.ImportFromWkt(self.definition)
    elif self.format == "XML":
        sr.ImportFromXML(self.definition)
    elif self.format == "URL":
        # BUG FIX: the OSR API method is ImportFromUrl;
        # ``ImportFromXUrl`` does not exist and raised AttributeError
        sr.ImportFromUrl(self.definition)
    return sr
def __init__(self, dataset, wkt, srid=None, temporary_directory=None):
    """Create a merge source whose dataset mask is derived from ``wkt``.

    A raster mask of the dataset's exact size is produced by rasterizing
    the geometry (burning 0 into an all-1 mask) and then copied, block by
    block, into a per-dataset mask band.
    """
    super(GDALGeometryMaskMergeSource, self).__init__(dataset)

    # honour an explicitly passed SRID; default to WGS84 (EPSG:4326).
    # BUG FIX: previously ``srid = 4326`` unconditionally overwrote the
    # parameter, making it ineffective.
    if srid is None:
        srid = 4326
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(srid)

    # create an in-memory datasource and add one single layer
    ogr_mem_driver = ogr.GetDriverByName("Memory")
    data_source = ogr_mem_driver.CreateDataSource("xxx")
    layer = data_source.CreateLayer("poly", srs)

    # create a single feature and add the given geometry
    feature = ogr.Feature(layer.GetLayerDefn())
    feature.SetGeometryDirectly(ogr.Geometry(wkt=str(wkt)))
    layer.CreateFeature(feature)

    temporary_ds = temporary_dataset(self.dataset.RasterXSize,
                                     self.dataset.RasterYSize, 1,
                                     temp_root=temporary_directory)

    # create a temporary raster dataset with the exact same size as the
    # dataset to be masked
    with temporary_ds as mask_dataset:
        band = mask_dataset.GetRasterBand(1)
        band.Fill(1)
        mask_dataset.SetGeoTransform(self.dataset.GetGeoTransform())
        mask_dataset.SetProjection(self.dataset.GetProjection())

        # finally rasterize the vector layer to the mask dataset
        gdal.RasterizeLayer(mask_dataset, (1, ), layer, burn_values=(0, ))

        source_mask_band = mask_dataset.GetRasterBand(1)

        self.dataset.CreateMaskBand(gdal.GMF_PER_DATASET)
        band = self.dataset.GetRasterBand(1)
        mask_band = band.GetMaskBand()

        block_x_size, block_y_size = source_mask_band.GetBlockSize()
        # use floor division so behaviour matches under Python 2 and 3
        # (``/`` on ints became true division in py3).
        # NOTE(review): partial blocks at the right/bottom edge are
        # skipped, as in the original implementation — confirm intended.
        num_x = source_mask_band.XSize // block_x_size
        num_y = source_mask_band.YSize // block_y_size

        for x, y in product(range(num_x), range(num_y)):
            mask_band.WriteArray(
                source_mask_band.ReadAsArray(x * block_x_size,
                                             y * block_y_size,
                                             block_x_size, block_y_size),
                x * block_x_size, y * block_y_size)
def crs_tolerance(srid):
    """ Get the "tolerance" of the CRS """
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(srid)
    # geographic CRSs use degrees, hence the far smaller tolerance
    return 1e-8 if srs.IsGeographic() else 1e-2
def from_gdal_dataset(cls, ds):
    """Build an instance describing the spatial grid of a GDAL dataset."""
    projection = ds.GetProjection()
    geotransform = ds.GetGeoTransform()
    # projected CRSs use x/y axis names, geographic ones long/lat
    if osr.SpatialReference(projection).IsProjected():
        names = ('x', 'y')
    else:
        names = ('long', 'lat')
    axes = [
        Axis(names[0], 'spatial', geotransform[1]),
        Axis(names[1], 'spatial', geotransform[5]),
    ]
    return cls(projection, axes)
def _determine_parameters(datasets):
    """Compute common merge parameters for a list of GDAL datasets.

    All datasets must share the projection and band count of the first.
    Returns ``(projection, origin, end, resolution, size)`` where the
    extent is the union of all dataset extents and the resolution the
    finest one found.
    """
    first = datasets[0]
    first_proj = first.GetProjection()
    first_srs = osr.SpatialReference(first_proj)
    first_gt = first.GetGeoTransform()

    res_x, res_y = first_gt[1], first_gt[5]
    o_x, o_y = first_gt[0], first_gt[3]
    e_x = o_x + res_x * first.RasterXSize
    e_y = o_y + res_y * first.RasterYSize

    for dataset in datasets[1:]:
        srs = osr.SpatialReference(dataset.GetProjection())
        # BUG FIX: validate *before* folding the dataset's values into
        # the accumulators (previously the asserts ran after use)
        assert srs.IsSame(first_srs)
        assert dataset.RasterCount == first.RasterCount

        gt = dataset.GetGeoTransform()
        dx, dy = gt[1], gt[5]
        res_x = min(dx, res_x)   # finest x resolution (positive value)
        res_y = max(dy, res_y)   # finest y resolution (negative value)
        o_x = min(gt[0], o_x)
        o_y = max(gt[3], o_y)
        e_x = max(gt[0] + dx * dataset.RasterXSize, e_x)
        e_y = min(gt[3] + dy * dataset.RasterYSize, e_y)

    x_size = int(math.ceil(abs(o_x - e_x) / res_x))
    y_size = int(math.ceil(abs(o_y - e_y) / abs(res_y)))

    return first_proj, (o_x, o_y), (e_x, e_y), (res_x, res_y), \
        (x_size, y_size)
def apply(self, ds):
    """ Set the geotransform and projection of the dataset according to
    the defined extent and SRID.
    """
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(self.srid)

    # pixel sizes derived from the extent and the raster dimensions
    x_res = (self.maxx - self.minx) / ds.RasterXSize
    y_res = (self.maxy - self.miny) / ds.RasterYSize
    ds.SetGeoTransform([self.minx, x_res, 0, self.maxy, 0, -y_res])
    ds.SetProjection(srs.ExportToWkt())
    return ds, None
def crs_bounds(srid):
    """ Get the maximum bounds of the CRS. """
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(srid)
    if srs.IsGeographic():
        return (-180.0, -90.0, 180.0, 90.0)
    # for projected CRSs fall back to a generous square spanning one
    # earth circumference in each direction
    circumference = 2 * math.pi * srs.GetSemiMajor()
    return (-circumference, -circumference, circumference, circumference)
def _create_dataset(self, identifier, extent, size, projection, footprint,
                    begin_time, end_time, coverage_type, range_type_name,
                    data_items):
    """Create, validate and save a coverage model plus its data items.

    ``projection`` is either an SRID (int) or a ``(definition, format)``
    pair; in the latter case the SRID is auto-detected where possible,
    falling back to a stored Projection record.
    """
    CoverageType = getattr(models, coverage_type)

    coverage = CoverageType()
    coverage.range_type = models.RangeType.objects.get(
        name=range_type_name)

    if isinstance(projection, int):
        coverage.srid = projection
    else:
        definition, format = projection

        # Try to identify the SRID from the given input
        try:
            sr = osr.SpatialReference(definition, format)
            coverage.srid = sr.srid
        except Exception:
            # narrowed from a bare ``except`` so SystemExit and
            # KeyboardInterrupt are no longer swallowed
            prj = models.Projection.objects.get(format=format,
                                                definition=definition)
            coverage.projection = prj

    coverage.identifier = identifier
    coverage.extent = extent
    coverage.size = size
    coverage.footprint = footprint
    coverage.begin_time = begin_time
    coverage.end_time = end_time
    # coverage.visible = kwargs["visible"]

    coverage.full_clean()
    coverage.save()

    # attach all data items
    for data_item in data_items:
        data_item.dataset = coverage
        data_item.full_clean()
        data_item.save()

    return coverage
def warp_fields(coverages, field_name, bbox, crs, width, height):
    """Warp one field of all ``coverages`` into a single array.

    Creates an in-memory target raster of ``width`` x ``height`` pixels
    covering ``bbox`` in ``crs``, warps each coverage's band for
    ``field_name`` into it in turn, and returns the resulting array.
    """
    driver = gdal.GetDriverByName('MEM')
    out_ds = driver.Create(
        '', width, height, 1,
        coverages[0].range_type.get_field(field_name).data_type)
    out_ds.SetGeoTransform([
        bbox[0],
        (bbox[2] - bbox[0]) / width,
        0,
        bbox[3],
        0,
        -(bbox[3] - bbox[1]) / height,
    ])
    epsg = crss.parseEPSGCode(crs, [crss.fromShortCode])
    sr = osr.SpatialReference()
    sr.ImportFromEPSG(epsg)
    out_ds.SetProjection(sr.ExportToWkt())

    for coverage in coverages:
        location = coverage.get_location_for_field(field_name)
        band_index = coverage.get_band_index_for_field(field_name)
        orig_ds = gdal.open_with_env(location.path, location.env)

        vrt_filename = None
        if orig_ds.RasterCount > 1:
            # multi-band file: select the single wanted band via a VRT
            vrt_filename = '/vsimem/' + uuid4().hex
            gdal.BuildVRT(vrt_filename, orig_ds, bandList=[band_index])
            ds = gdal.Open(vrt_filename)
        else:
            ds = orig_ds

        gdal.Warp(out_ds, ds)
        # drop the reference before unlinking the in-memory VRT
        ds = None
        if vrt_filename:
            gdal.Unlink(vrt_filename)

    band = out_ds.GetRasterBand(1)
    return band.ReadAsArray()
def _generate_browse_complex(parsed_exprs, fields_and_coverages, width,
                             height, bbox, crs, generator):
    """Render band expressions into a tiled GTiff browse image.

    Every field referenced by any expression is first warped into an
    array; each expression is then evaluated and written to its own
    Float32 band.  Returns a :class:`BrowseCreationInfo` for the file.
    """
    o_x = bbox[0]
    o_y = bbox[3]
    res_x = (bbox[2] - bbox[0]) / width
    res_y = -(bbox[3] - bbox[1]) / height

    # collect every field referenced by any of the expressions
    field_names = set()
    for parsed_expression in parsed_exprs:
        field_names |= set(extract_fields(parsed_expression))

    # warp all coverages per required field into plain arrays
    fields_and_datasets = {}
    for field_name in field_names:
        coverages = fields_and_coverages[field_name]
        field_data = warp_fields(
            coverages, field_name, bbox, crs, width, height)
        fields_and_datasets[field_name] = field_data

    out_filename = generator.generate('tif')
    # NOTE: the driver lookup was previously duplicated; once suffices
    tiff_driver = gdal.GetDriverByName('GTiff')
    out_ds = tiff_driver.Create(
        out_filename, width, height, len(parsed_exprs), gdal.GDT_Float32,
        options=["TILED=YES", "COMPRESS=PACKBITS"])
    out_ds.SetGeoTransform([o_x, res_x, 0, o_y, 0, res_y])
    out_ds.SetProjection(osr.SpatialReference(crs).wkt)

    for band_index, parsed_expr in enumerate(parsed_exprs, start=1):
        with np.errstate(divide='ignore', invalid='ignore'):
            out_data = _evaluate_expression(
                parsed_expr, fields_and_datasets, generator)

        # scalar expressions become a constant band
        if isinstance(out_data, (int, float)):
            out_data = np.full((height, width), out_data)

        out_band = out_ds.GetRasterBand(band_index)
        out_band.WriteArray(out_data)

    return BrowseCreationInfo(out_filename, None)
def create_coverage_layer(self, map_obj, coverage, fields, style=None,
                          ranges=None):
    """ Creates a mapserver layer object for the given coverage """
    filename_generator = FilenameGenerator(
        '/vsimem/{uuid}.{extension}', 'vrt'
    )
    # pair each requested field with the storage location providing it
    field_locations = [
        (field, coverage.get_location_for_field(field))
        for field in fields
    ]
    locations = [
        location for _, location in field_locations
    ]

    # TODO: apply subsets in time/elevation dims
    num_locations = len(set(locations))
    if num_locations == 1:
        if not coverage.grid.is_referenceable:
            # single, already rectified file: serve it directly
            location = field_locations[0][1]
            data = location.path
            ms.set_env(map_obj, location.env, True)
        else:
            # referenceable grid: rectify via a VRT at map resolution
            vrt_path = filename_generator.generate()
            e = map_obj.extent
            resx = (e.maxx - e.minx) / map_obj.width
            resy = (e.maxy - e.miny) / map_obj.height
            wkt = osr.SpatialReference(map_obj.getProjection()).wkt

            # TODO: env?
            reftools.create_rectified_vrt(
                field_locations[0][1].path, vrt_path,
                order=1, max_error=10,
                resolution=(resx, -resy), srid_or_wkt=wkt
            )
            data = vrt_path

    elif num_locations > 1:
        paths_set = set(
            field_location[1].path
            for field_location in field_locations
        )
        if len(paths_set) == 1:
            # all fields live in the same file
            location = field_locations[0][1]
            data = location.path
            ms.set_env(map_obj, location.env, True)
        else:
            # TODO
            _build_vrt(coverage.size, field_locations)

    # determine extent and spatial reference of the output layer
    if not coverage.grid.is_referenceable:
        extent = coverage.extent
        sr = coverage.grid.spatial_reference
    else:
        map_extent = map_obj.extent
        extent = (
            map_extent.minx, map_extent.miny,
            map_extent.maxx, map_extent.maxy
        )
        sr = osr.SpatialReference(map_obj.getProjection())

    layer_objs = _create_raster_layer_objs(
        map_obj, extent, sr, data, filename_generator
    )

    for i, layer_obj in enumerate(layer_objs):
        layer_obj.name = '%s__%d' % (coverage.identifier, i)
        layer_obj.setProcessingKey("CLOSE_CONNECTION", "CLOSE")

    # select the bands to be rendered
    if num_locations == 1:
        for layer_obj in layer_objs:
            layer_obj.setProcessingKey("BANDS", ",".join([
                str(coverage.get_band_index_for_field(field))
                for field in fields
            ]))
    elif num_locations > 1:
        for layer_obj in layer_objs:
            if len(field_locations) == 3:
                layer_obj.setProcessingKey("BANDS", "1,2,3")
            else:
                layer_obj.setProcessingKey("BANDS", "1")

    # make a color-scaled layer
    if len(fields) == 1:
        field = fields[0]
        if ranges:
            range_ = ranges[0]
        else:
            range_ = _get_range(field)

        for layer_obj in layer_objs:
            _create_raster_style(
                style or "blackwhite", layer_obj, range_[0], range_[1], [
                    nil_value[0] for nil_value in field.nil_values
                ]
            )
    elif len(fields) in (3, 4):
        for i, field in enumerate(fields, start=1):
            # a single given range applies to all bands
            if ranges:
                if len(ranges) == 1:
                    range_ = ranges[0]
                else:
                    range_ = ranges[i - 1]
            else:
                range_ = _get_range(field)

            for layer_obj in layer_objs:
                layer_obj.setProcessingKey(
                    "SCALE_%d" % i, "%s,%s" % range_
                )
                layer_obj.offsite = ms.colorObj(0, 0, 0)
    else:
        raise Exception("Too many bands specified")

    return filename_generator
def make_browse_layer_generator(self, map_obj, browses, map_,
                                filename_generator, group_name, ranges,
                                style):
    """Yield ``(browse, layer_objs)`` pairs for every browse in the map.

    Generated browses are rendered on the fly; pre-rendered ``Browse``
    instances are served directly from their file.  ``None`` entries are
    skipped.
    """
    for browse in browses:
        if isinstance(browse, GeneratedBrowse):
            # render the browse from its band expressions
            creation_info, filename_generator, reset_info = \
                generate_browse(
                    browse.band_expressions,
                    browse.fields_and_coverages,
                    map_.width, map_.height,
                    map_.bbox, map_.crs,
                    filename_generator
                )

            layer_objs = _create_raster_layer_objs(
                map_obj, browse.extent, browse.spatial_reference,
                creation_info.filename, filename_generator
            )

            for layer_obj in layer_objs:
                layer_obj.data = creation_info.filename

                if creation_info.env:
                    ms.set_env(map_obj, creation_info.env, True)

                if creation_info.bands:
                    layer_obj.setProcessingKey('BANDS', ','.join(
                        str(band) for band in creation_info.bands
                    ))

                if reset_info:
                    # the generated file uses the map CRS/extent; reset
                    # the layer's georeferencing metadata accordingly
                    sr = osr.SpatialReference(map_.crs)
                    extent = map_.bbox
                    layer_obj.setMetaData(
                        "wms_extent", "%f %f %f %f" % extent
                    )
                    layer_obj.setExtent(*extent)

                    if sr.srid is not None:
                        short_epsg = "EPSG:%d" % sr.srid
                        layer_obj.setMetaData("ows_srs", short_epsg)
                        layer_obj.setMetaData("wms_srs", short_epsg)

                    layer_obj.setProjection(sr.proj)

            if browse.mode == BROWSE_MODE_GRAYSCALE:
                # single band: apply a grayscale raster style
                field = browse.field_list[0]
                if ranges:
                    browse_range = ranges[0]
                elif browse.ranges[0] != (None, None):
                    browse_range = browse.ranges[0]
                else:
                    browse_range = _get_range(field)

                for layer_obj in layer_objs:
                    _create_raster_style(
                        style or "blackwhite", layer_obj,
                        browse_range[0], browse_range[1], [
                            nil_value[0]
                            for nil_value in field.nil_values
                        ]
                    )
            else:
                # multi-band: set per-band scaling keys
                browse_iter = enumerate(
                    zip(browse.field_list, browse.ranges), start=1
                )
                for i, (field, field_range) in browse_iter:
                    if ranges:
                        # a single given range applies to all bands
                        if len(ranges) == 1:
                            range_ = ranges[0]
                        else:
                            range_ = ranges[i - 1]
                    elif field_range != (None, None):
                        range_ = field_range
                    else:
                        range_ = _get_range(field)

                    for layer_obj in layer_objs:
                        layer_obj.setProcessingKey(
                            "SCALE_%d" % i,
                            "%s,%s" % tuple(range_)
                        )

        elif isinstance(browse, Browse):
            # pre-rendered browse: serve the file directly
            layer_objs = _create_raster_layer_objs(
                map_obj, browse.extent, browse.spatial_reference,
                browse.filename, filename_generator
            )
            for layer_obj in layer_objs:
                layer_obj.data = browse.filename
                ms.set_env(map_obj, browse.env, True)
        elif browse is None:
            # TODO: figure out why and deal with it?
            continue
        else:
            raise TypeError(
                'Type %s is not supported', type(browse).__name__
            )

        for layer_obj in layer_objs:
            layer_obj.group = group_name

        yield browse, layer_objs
def create_rectified_vrt(path_or_ds, vrt_path, srid_or_wkt=None,
                         resample=0, memory_limit=0.0,
                         max_error=APPROX_ERR_TOL, method=METHOD_GCP,
                         order=0, size=None, resolution=None):
    """ Creates a VRT dataset that symbolizes a rectified version of the
        passed "referenceable" GDAL dataset.

        :param path_or_ds: a :class:`GDAL Dataset
                           <eoxserver.contrib.gdal.Dataset>` or a path
                           to such
        :param vrt_path: the path to store the VRT dataset under
        :param srid_or_wkt: target SRS as an EPSG integer, a WKT string,
                            or ``None`` to use the dataset's GCP
                            projection
        :param resample: the resample method to be used; defaults to 0
                         which means a nearest neighbour resampling
        :param memory_limit: the memory limit; by default no limit is
                             used
        :param max_error: the maximum allowed error
        :param method: either of :const:`METHOD_GCP`, :const:`METHOD_TPS`
                       or :const:`METHOD_TPS_LSQ`.
        :param order: the order of the function; see
                      :func:`get_footprint_wkt` for reference
        :param size: optional ``(x, y)`` override of the output raster
                     size (mutually exclusive with ``resolution``)
        :param resolution: optional ``(x_res, y_res)`` override of the
                           output pixel size
    """
    if size and resolution:
        raise ValueError('size and resolution ar mutually exclusive')

    ds = _open_ds(path_or_ds)
    ptr = C.c_void_p(int(ds.this))

    # resolve the target SRS to a WKT string
    if isinstance(srid_or_wkt, int):
        srs = osr.SpatialReference()
        srs.ImportFromEPSG(srid_or_wkt)
        wkt = srs.ExportToWkt()
        srs = None
    elif isinstance(srid_or_wkt, str):
        wkt = srid_or_wkt
    else:
        wkt = ds.GetGCPProjection()

    # NOTE(review): the block below is a disabled manual warp setup kept
    # for reference; GDALAutoCreateWarpedVRT further down is used instead
    # transformer = _create_generic_transformer(
    #     ds, None, None, wkt, method, order
    # )

    # x_size = C.c_int()
    # y_size = C.c_int()
    # geotransform = (C.c_double * 6)()

    # GDALSuggestedWarpOutput(
    #     ptr,
    #     GDALGenImgProjTransform, transformer, geotransform,
    #     C.byref(x_size), C.byref(y_size)
    # )

    # GDALSetGenImgProjTransformerDstGeoTransform(transformer, geotransform)

    # options = GDALCreateWarpOptions()
    # options.dfWarpMemoryLimit = memory_limit
    # options.eResampleAlg = resample
    # options.pfnTransformer = GDALGenImgProjTransform
    # options.pTransformerArg = transformer
    # options.hDstDS = C.c_void_p(int(ds.this))
    # nb = options.nBandCount = ds.RasterCount

    # src_bands = C.cast(CPLMalloc(C.sizeof(C.c_int) * nb), C.POINTER(C.c_int))
    # dst_bands = C.cast(CPLMalloc(C.sizeof(C.c_int) * nb), C.POINTER(C.c_int))
    # # ctypes.cast(x, ctypes.POINTER(ctypes.c_ulong))

    # options.panSrcBands = src_bands
    # options.panDstBands = dst_bands

    # # TODO: nodata value setup
    # for i in xrange(nb):
    #     options.panSrcBands[i] = i + 1
    #     options.panDstBands[i] = i + 1

    # if max_error > 0:
    #     GDALApproxTransform = _libgdal.GDALApproxTransform

    #     options.pTransformerArg = GDALCreateApproxTransformer(
    #         options.pfnTransformer, options.pTransformerArg, max_error
    #     )
    #     options.pfnTransformer = GDALApproxTransform
    #     # TODO: correct for python
    #     #GDALApproxTransformerOwnsSubtransformer(options.pTransformerArg, False)

    # if size:
    #     extent = _to_extent(x_size.value, y_size.value, geotransform)
    #     size_x, size_y = size
    #     x_size.value = size_x
    #     y_size.value = size_y
    #     geotransform = _to_gt(size[0], size[1], extent)
    # elif resolution:
    #     extent = _to_extent(x_size.value, y_size.value, geotransform)
    #     geotransform[1] = resolution[0]
    #     geotransform[5] = resolution[1]
    #     size_x, size_y = _to_size(geotransform, extent)
    #     x_size.value = size_x
    #     y_size.value = size_y

    # vrt_ds = GDALCreateWarpedVRT(ptr, x_size, y_size, geotransform, options)

    # the C API expects bytes for strings
    if isinstance(wkt, str):
        wkt = b(wkt)

    vrt_ds = GDALAutoCreateWarpedVRT(ptr, None, wkt, resample, max_error,
                                     None)

    # GDALSetProjection(vrt_ds, wkt)
    if isinstance(vrt_path, str):
        vrt_path = b(vrt_path)

    # setting the description makes GDALClose persist the VRT to disk
    GDALSetDescription(vrt_ds, vrt_path)
    GDALClose(vrt_ds)
    # GDALDestroyWarpOptions(options)

    # if size of resolution is overridden parse the VRT and adjust settings
    if size or resolution:
        with vsi.open(vrt_path) as f:
            root = parse(f).getroot()

        size_x = int(root.attrib['rasterXSize'])
        size_y = int(root.attrib['rasterYSize'])

        gt_elem = root.find('GeoTransform')
        gt = [
            float(value.strip())
            for value in gt_elem.text.strip().split(',')
        ]

        if size:
            # keep the original extent, change the raster size
            extent = _to_extent(size_x, size_y, gt)
            size_x, size_y = size
            gt = _to_gt(size[0], size[1], extent)
        elif resolution:
            # keep the original extent, change the pixel size
            extent = _to_extent(size_x, size_y, gt)
            gt[1] = resolution[0]
            gt[5] = resolution[1]
            size_x, size_y = _to_size(gt, extent)

        # Adjust XML
        root.attrib['rasterXSize'] = str(size_x)
        root.attrib['rasterYSize'] = str(size_y)

        gt_str = ",".join(str(v) for v in gt)
        gt_elem.text = gt_str
        root.find('GDALWarpOptions/Transformer/ApproxTransformer/'
                  'BaseTransformer/GenImgProjTransformer/DstGeoTransform'
                  ).text = gt_str

        inv_gt = gdal.InvGeoTransform(gt)
        root.find('GDALWarpOptions/Transformer/ApproxTransformer/'
                  'BaseTransformer/GenImgProjTransformer/DstInvGeoTransform'
                  ).text = ",".join(str(v) for v in inv_gt)

        # write XML back to file
        with vsi.open(vrt_path, "w") as f:
            f.write(etree.tostring(root, pretty_print=True))
def process(self, input_filename, output_filename, geo_reference=None, generate_metadata=True, merge_with=None, original_footprint=None): # open the dataset and create an In-Memory Dataset as copy # to perform optimizations ds = create_mem_copy(gdal.Open(input_filename)) gt = ds.GetGeoTransform() footprint_wkt = None if not geo_reference: if gt == (0.0, 1.0, 0.0, 0.0, 0.0, 1.0): if ds.GetGCPCount() > 0: geo_reference = InternalGCPs() else: raise ValueError("No geospatial reference for " "unreferenced dataset given.") if geo_reference: logger.debug("Applying geo reference '%s'." % type(geo_reference).__name__) # footprint is always in EPSG:4326 ds, footprint_wkt = geo_reference.apply(ds) # apply optimizations for optimization in self.get_optimizations(ds): logger.debug("Applying optimization '%s'." % type(optimization).__name__) try: new_ds = optimization(ds) if new_ds is not ds: # cleanup afterwards cleanup_temp(ds) ds = new_ds except: cleanup_temp(ds) raise # generate the footprint from the dataset if not footprint_wkt: logger.debug("Generating footprint.") footprint_wkt = self._generate_footprint_wkt(ds) # check that footprint is inside of extent of generated image # regenerate otherwise else: tmp_extent = getExtentFromRectifiedDS(ds) tmp_bbox = Polygon.from_bbox( (tmp_extent[0], tmp_extent[1], tmp_extent[2], tmp_extent[3])) # transform image bbox to EPSG:4326 if necessary proj = ds.GetProjection() srs = osr.SpatialReference() try: srs.ImportFromWkt(proj) srs.AutoIdentifyEPSG() ptype = "PROJCS" if srs.IsProjected() else "GEOGCS" srid = int(srs.GetAuthorityCode(ptype)) if srid != '4326': out_srs = osr.SpatialReference() out_srs.ImportFromEPSG(4326) transform = osr.CoordinateTransformation(srs, out_srs) tmp_bbox2 = ogr.CreateGeometryFromWkt(tmp_bbox.wkt) tmp_bbox2.Transform(transform) tmp_bbox = GEOSGeometry(tmp_bbox2.ExportToWkt()) except (RuntimeError, TypeError), e: logger.warn("Projection: %s" % proj) logger.warn("Failed to identify projection's EPSG code." 
"%s: %s" % (type(e).__name__, str(e))) tmp_footprint = GEOSGeometry(footprint_wkt) if not tmp_bbox.contains(tmp_footprint): logger.debug("Re-generating footprint because not inside of " "generated image.") footprint_wkt = tmp_footprint.intersection(tmp_bbox).wkt
def generate_footprint_wkt(ds, simplification_factor=2):
    """ Generate a fooptrint from a raster, using black/no-data as
        exclusion
    """
    # create an empty boolean array initialized as 'False' to store where
    # values exist as a mask array.
    # BUG FIX: ``np.bool`` was removed in NumPy 1.24; the builtin
    # ``bool`` (alias of ``np.bool_`` as a dtype) is the supported form.
    nodata_map = np.zeros((ds.RasterYSize, ds.RasterXSize), dtype=bool)

    for idx in range(1, ds.RasterCount + 1):
        band = ds.GetRasterBand(idx)
        raster_data = band.ReadAsArray()
        nodata = band.GetNoDataValue()

        if nodata is None:
            nodata = 0

        # apply the output to the map
        nodata_map |= (raster_data != nodata)

    # create a temporary in-memory dataset and write the nodata mask
    # into its single band
    with temporary_dataset(ds.RasterXSize + 2, ds.RasterYSize + 2, 1,
                           gdal.GDT_Byte) as tmp_ds:
        copy_projection(ds, tmp_ds)
        tmp_band = tmp_ds.GetRasterBand(1)
        tmp_band.WriteArray(nodata_map.astype(np.uint8))

        # create an OGR in memory layer to hold the created polygon
        sr = osr.SpatialReference()
        sr.ImportFromWkt(ds.GetProjectionRef())
        ogr_ds = ogr.GetDriverByName('Memory').CreateDataSource('out')
        layer = ogr_ds.CreateLayer('poly', sr.sr, ogr.wkbPolygon)
        fd = ogr.FieldDefn('DN', ogr.OFTInteger)
        layer.CreateField(fd)

        # polygonize the mask band and store the result in the OGR layer
        gdal.Polygonize(tmp_band, tmp_band, layer, 0)

    if layer.GetFeatureCount() != 1:
        # if there is more than one polygon, compute the minimum
        # bounding polygon
        geometry = ogr.Geometry(ogr.wkbPolygon)
        while True:
            feature = layer.GetNextFeature()
            if not feature:
                break
            geometry = geometry.Union(feature.GetGeometryRef())

        # TODO: improve this for a better minimum bounding polygon
        geometry = geometry.ConvexHull()
    else:
        # obtain geometry from the first (and only) layer
        feature = layer.GetNextFeature()
        geometry = feature.GetGeometryRef()

    if geometry.GetGeometryType() not in (ogr.wkbPolygon,
                                          ogr.wkbMultiPolygon):
        raise RuntimeError("Error during poligonization. Wrong geometry "
                           "type.")

    # check if reprojection to latlon is necessary
    if not sr.IsGeographic():
        dst_sr = osr.SpatialReference()
        dst_sr.ImportFromEPSG(4326)
        try:
            geometry.TransformTo(dst_sr.sr)
        except RuntimeError:
            geometry.Transform(osr.CoordinateTransformation(sr.sr,
                                                            dst_sr.sr))

    gt = ds.GetGeoTransform()
    resolution = min(abs(gt[1]), abs(gt[5]))

    simplification_value = simplification_factor * resolution

    # simplify the polygon. the tolerance value is *really* vague
    try:
        # SimplifyPreserveTopology() available since OGR 1.9.0
        geometry = geometry.SimplifyPreserveTopology(simplification_value)
    except AttributeError:
        # use GeoDjango bindings if OGR is too old
        geometry = ogr.CreateGeometryFromWkt(
            GEOSGeometry(geometry.ExportToWkt()).simplify(
                simplification_value, True).wkt)

    return geometry.ExportToWkt()
def register_stac_product(location, stac_item, product_type=None,
                          storage=None, replace=False):
    """ Registers a single parsed STAC item as a Product. The product type
        to be used can be specified via the product_type_name argument.
    """
    identifier = stac_item['id']
    replaced = False
    if replace:
        if models.Product.objects.filter(identifier=identifier).exists():
            models.Product.objects.filter(identifier=identifier).delete()
            replaced = True

    geometry = stac_item['geometry']
    properties = stac_item['properties']
    assets = stac_item['assets']

    # fetch the product type by name, metadata or passed object
    if isinstance(product_type, models.ProductType):
        pass
    elif isinstance(product_type, str):
        # BUG FIX: this branch was a separate ``if`` whose ``else``
        # clobbered an already resolved ProductType instance with a
        # metadata-based lookup
        product_type = models.ProductType.objects.get(name=product_type)
    else:
        product_type = models.ProductType.objects.get(
            name=get_product_type_name(stac_item))

    if isinstance(storage, str):
        storage = backends.Storage.objects.get(name=storage)

    footprint = GEOSGeometry(json.dumps(geometry))

    if 'start_datetime' in properties and 'end_datetime' in properties:
        start_time = parse_iso8601(properties['start_datetime'])
        end_time = parse_iso8601(properties['end_datetime'])
    else:
        start_time = end_time = parse_iso8601(properties['datetime'])

    # check if the product already exists
    if models.Product.objects.filter(identifier=identifier).exists():
        if replace:
            models.Product.objects.filter(identifier=identifier).delete()
        else:
            raise RegistrationError('Product %s already exists'
                                    % identifier)

    product = models.Product.objects.create(
        identifier=identifier,
        begin_time=start_time,
        end_time=end_time,
        footprint=footprint,
        product_type=product_type,
    )

    metadata = {}

    # STAC properties translating 1:1 (or 1:n) to metadata fields
    simple_mappings = {
        'eo:cloud_cover': 'cloud_cover',
        'sar:instrument_mode': 'sensor_mode',
        'sat:relative_orbit': 'orbit_number',
        'view:incidence_angle': ['minimum_incidence_angle',
                                 'maximum_incidence_angle'],
        'view:sun_azimuth': 'illumination_azimuth_angle',
        'view:sun_elevation': 'illumination_elevation_angle',
    }

    for stac_key, field_name in simple_mappings.items():
        value = properties.get(stac_key)
        if value:
            if isinstance(field_name, str):
                metadata[field_name] = value
            else:
                for name in field_name:
                    metadata[name] = value

    # 'sar:frequency_band'
    # 'sar:center_frequency'
    # doppler_frequency ?
    # 'sar:product_type'
    #
    # 'sar:resolution_range'
    # 'sar:resolution_azimuth'
    # 'sar:pixel_spacing_range'
    # 'sar:pixel_spacing_azimuth'
    # 'sar:looks_range'
    # 'sar:looks_azimuth'
    # 'sar:looks_equivalent_number'
    # 'view:azimuth'

    # STAC properties needing a value transformation first
    complex_mappings = {
        'sar:polarizations': ('polarization_channels',
                              lambda v: ', '.join(v)),
        'sar:observation_direction': ('antenna_look_direction',
                                      lambda v: v.upper()),
        'sat:orbit_state': ('orbit_direction', lambda v: v.upper()),
    }

    for stac_key, field_desc in complex_mappings.items():
        raw_value = properties.get(stac_key)
        if raw_value:
            field_name, prep = field_desc
            value = prep(raw_value)
            if isinstance(field_name, str):
                metadata[field_name] = value
            else:
                for name in field_name:
                    metadata[name] = value

    # actually create the metadata object
    create_metadata(product, metadata)

    for asset_name, asset in assets.items():
        bands = asset.get('eo:bands')
        if not bands:
            continue
        if not isinstance(bands, list):
            bands = [bands]

        band_names = [band['name'] for band in bands]
        coverage_type = models.CoverageType.objects.get(
            Q(allowed_product_types=product_type), *[
                Q(field_types__identifier=band_name)
                for band_name in band_names
            ])

        coverage_id = '%s_%s' % (identifier, asset_name)

        # create the storage item
        parsed = urlparse(asset['href'])
        if not isabs(parsed.path):
            path = normpath(join(dirname(location), parsed.path))
        else:
            path = parsed.path

        arraydata_item = models.ArrayDataItem(
            location=path,
            storage=storage,
            band_count=len(bands),
        )

        coverage_footprint = footprint
        if 'proj:geometry' in asset:
            coverage_footprint = GEOSGeometry(
                json.dumps(asset['proj:geometry']))

        # get/create Grid
        grid_def = None
        size = None
        origin = None
        shape = asset.get('proj:shape') or properties.get('proj:shape')
        transform = asset.get('proj:transform') or \
            properties.get('proj:transform')
        epsg = asset.get('proj:epsg') or properties.get('proj:epsg')

        if shape:
            size = shape
        if transform:
            # BUG FIX: origin values are plain list items; the previous
            # ``transform[transform[0], transform[3]]`` indexed the list
            # with a tuple and raised TypeError at runtime
            origin = [transform[0], transform[3]]

        if epsg and transform:
            # NOTE(review): passing the bare EPSG int assumes the
            # project's osr wrapper accepts it — confirm
            sr = osr.SpatialReference(epsg)
            axis_names = ['x', 'y'] if sr.IsProjected() else ['long',
                                                              'lat']
            grid_def = {
                'coordinate_reference_system': epsg,
                'axis_names': axis_names,
                'axis_types': ['spatial', 'spatial'],
                'axis_offsets': [transform[1], transform[5]],
            }

        # fall back to reading metadata from the file itself
        if not grid_def or not size or not origin:
            ds = gdal_open(arraydata_item)
            reader = get_reader_by_test(ds)
            if not reader:
                raise RegistrationError(
                    'Failed to get metadata reader for coverage')
            values = reader.read(ds)
            grid_def = values['grid']
            size = values['size']
            origin = values['origin']

        grid = get_grid(grid_def)

        if models.Coverage.objects.filter(identifier=coverage_id).exists():
            if replace:
                models.Coverage.objects.filter(
                    identifier=coverage_id).delete()
            else:
                raise RegistrationError('Coverage %s already exists'
                                        % coverage_id)

        coverage = models.Coverage.objects.create(
            identifier=coverage_id,
            footprint=coverage_footprint,
            begin_time=start_time,
            end_time=end_time,
            grid=grid,
            axis_1_origin=origin[0],
            axis_2_origin=origin[1],
            axis_1_size=size[0],
            axis_2_size=size[1],
            coverage_type=coverage_type,
            parent_product=product,
        )

        arraydata_item.coverage = coverage
        arraydata_item.full_clean()
        arraydata_item.save()

    # TODO: browses if possible
    return (product, replaced)
def getFootprintRect(info, numberOfPoints=20, delimiter=" ", repeatFirst=False): """ extract geotiff image footprint of a Rectified image info - [instance of GDalInfo class] image descriptor numberOfPoints - [integer] number of points per single image size (20 by default) delimiter - [string] separator delimiting the footprint coordinates (<space> by default) repeatFirst - [boolean] if True the first point is repeated as last one to close the loop """ # ----------------------------- # projections conversion size = (info.size[0], info.size[1]) gt = GeoTransform(info.GeoTransform) cr_src = osr.SpatialReference() cr_dst = osr.SpatialReference() cr_src.ImportFromWkt(info.Projection) cr_dst.SetWellKnownGeogCS("WGS84") trn = OSRTransform(cr_src, cr_dst) # ----------------------------- # extract footprint (clockwise) foot = [] # top (L2R) for i in xrange(0, numberOfPoints): foot.append( tuple( trn.src2dst( gt.rc2xy( (0, size[1] * float(i) / float(numberOfPoints))))[:2])) # right (T2B) for i in xrange(0, numberOfPoints): foot.append( tuple( trn.src2dst( gt.rc2xy((size[0] * float(i) / float(numberOfPoints), size[1])))[:2])) # bottom (R2L) for i in xrange(0, numberOfPoints): foot.append( tuple( trn.src2dst( gt.rc2xy((size[0], size[1] * float(numberOfPoints - i) / float(numberOfPoints))))[:2])) # left (B2T) for i in xrange(0, numberOfPoints): foot.append( tuple( trn.src2dst( gt.rc2xy((size[0] * float(numberOfPoints - i) / float(numberOfPoints), 0)))[:2])) if (repeatFirst): foot.append(foot[0]) # things to be done # 1) POLAR PROJECTIONS # 2) NON-POLAR FOOTPRINT CROSSING +/-180 MERIDIAN return delimiter.join( map(lambda x: "%.6f%s%.6f" % (x[0], delimiter, x[1]), foot))
def apply(self, src_ds):
    """ Warp *src_ds* into the target projection (``self.srid``) using the
        dataset's internal GCPs, trying several transform methods/orders
        from most to least accurate.

        Builds an in-memory destination dataset via :func:`create_mem`,
        reprojects into it with ``reftools.reproject_image`` and derives a
        footprint WKT from the GCPs.
    """
    # setup: the destination spatial reference
    dst_sr = osr.SpatialReference()
    dst_sr.ImportFromEPSG(self.srid)

    logger.debug("Using internal GCP Projection.")
    num_gcps = src_ds.GetGCPCount()

    # Try to find and use the best transform method/order.
    # Orders are: -1 (TPS), 3, 2, and 1 (all GCP)
    # Loop over the min and max GCP number to order map.
    for min_gcpnum, max_gcpnum, order in [(3, None, -1), (10, None, 3),
                                          (6, None, 2), (3, None, 1)]:
        # if the number of GCP matches
        if num_gcps >= min_gcpnum and (max_gcpnum is None
                                       or num_gcps <= max_gcpnum):
            try:
                if order < 0:
                    # let the reftools suggest the right interpolator
                    rt_prm = reftools.suggest_transformer(src_ds)
                else:
                    # use the polynomial GCP interpolation as requested
                    rt_prm = {
                        "method": reftools.METHOD_GCP,
                        "order": order
                    }

                logger.debug("Trying order '%i' {method:%s,order:%s}" %
                             (order, reftools.METHOD2STR[rt_prm["method"]],
                              rt_prm["order"]))

                # get the suggested pixel size/geotransform
                size_x, size_y, gt = reftools.suggested_warp_output(
                    src_ds, None, dst_sr.ExportToWkt(), **rt_prm)
                if size_x > 100000 or size_y > 100000:
                    raise RuntimeError("Calculated size exceeds limit.")
                logger.debug("New size is '%i x %i'" % (size_x, size_y))

                # create the output dataset
                dst_ds = create_mem(size_x, size_y, src_ds.RasterCount,
                                    src_ds.GetRasterBand(1).DataType)

                # reproject the image
                dst_ds.SetProjection(dst_sr.ExportToWkt())
                dst_ds.SetGeoTransform(gt)
                reftools.reproject_image(src_ds, "", dst_ds, "", **rt_prm)

                copy_metadata(src_ds, dst_ds)

                # retrieve the footprint from the given GCPs
                footprint_wkt = reftools.get_footprint_wkt(
                    src_ds, **rt_prm)

            # fixed: was the Python-2-only ``except RuntimeError, e`` form;
            # ``as`` is valid on Python 2.6+ and 3 and matches the sibling
            # GCP-list implementation in this file
            except RuntimeError as e:
                logger.debug("Failed using order '%i'. Error was '%s'."
                             % (order, str(e)))
                # the given method was not applicable, use the next one
                continue
            else:
                logger.debug("Successfully used order '%i'" % order)
                # the transform method was successful, exit the loop
                break
    # NOTE(review): unlike the GCP-list variant of this method, no for-else
    # fallback or return statement follows the loop here -- confirm this
    # chunk is complete in the original file.
def _generate_footprint_wkt(self, ds):
    """ Generate a footprint from a raster, using black/no-data as exclusion.

        Builds a boolean "data present" mask over all bands, sieves away
        small speckles, polygonizes the mask, optionally merges multiple
        polygons into a convex hull, reprojects to lat/lon if needed and
        returns a simplified polygon as WKT.
    """
    # create an empty boolean array initialized as 'False' to store where
    # values exist as a mask array.
    # fixed: use the builtin ``bool`` -- the ``numpy.bool`` alias was
    # deprecated in NumPy 1.20 and removed in 1.24; ``bool`` behaves
    # identically on every NumPy version.
    nodata_map = numpy.zeros((ds.RasterYSize, ds.RasterXSize), dtype=bool)

    for idx in range(1, ds.RasterCount + 1):
        band = ds.GetRasterBand(idx)
        raster_data = band.ReadAsArray()
        nodata = band.GetNoDataValue()
        if nodata is None:
            nodata = 0
        # a pixel counts as "data" if any band differs from its nodata value
        nodata_map |= (raster_data != nodata)

    # create a temporary in-memory dataset and write the nodata mask
    # into its single band; the +2 leaves a one-pixel zero border so
    # polygonization produces closed rings at the image edges
    tmp_ds = create_mem(ds.RasterXSize + 2, ds.RasterYSize + 2, 1,
                        gdal.GDT_Byte)
    copy_projection(ds, tmp_ds)
    tmp_band = tmp_ds.GetRasterBand(1)
    tmp_band.WriteArray(nodata_map.astype(numpy.uint8))

    # Remove unwanted small areas of nodata
    # www.gdal.org/gdal__alg_8h.html#a33309c0a316b223bd33ae5753cc7f616
    no_pixels = tmp_ds.RasterXSize * tmp_ds.RasterYSize
    threshold = 4
    max_threshold = (no_pixels / 16)
    if self.sieve_max_threshold > 0:
        max_threshold = self.sieve_max_threshold
    # grow the sieve threshold geometrically, capped at INT32_MAX
    while threshold <= max_threshold and threshold < 2147483647:
        gdal.SieveFilter(tmp_band, None, tmp_band, threshold, 4)
        threshold *= 4

    # create an OGR in memory layer to hold the created polygon
    sr = osr.SpatialReference()
    sr.ImportFromWkt(ds.GetProjectionRef())
    ogr_ds = ogr.GetDriverByName('Memory').CreateDataSource('out')
    layer = ogr_ds.CreateLayer('poly', sr, ogr.wkbPolygon)
    fd = ogr.FieldDefn('DN', ogr.OFTInteger)
    layer.CreateField(fd)

    # polygonize the mask band (also used as its own mask) into the layer
    gdal.Polygonize(tmp_band, tmp_band, layer, 0)
    tmp_ds = None

    if layer.GetFeatureCount() > 1:
        # if there is more than one polygon, compute the minimum
        # bounding polygon
        logger.debug("Merging %s features in footprint."
                     % layer.GetFeatureCount())

        # union all features in one multi-polygon
        geometry = ogr.Geometry(ogr.wkbMultiPolygon)
        while True:
            feature = layer.GetNextFeature()
            if not feature:
                break
            geometry.AddGeometry(feature.GetGeometryRef())
        geometry = geometry.UnionCascaded()

        # TODO: improve this for a better minimum bounding polygon
        geometry = geometry.ConvexHull()

    elif layer.GetFeatureCount() < 1:
        # there was an error during polygonization
        raise RuntimeError("Error during polygonization. No feature "
                           "obtained.")
    else:
        # obtain geometry from the first (and only) layer
        feature = layer.GetNextFeature()
        geometry = feature.GetGeometryRef()

    if geometry.GetGeometryType() != ogr.wkbPolygon:
        raise RuntimeError(
            "Error during polygonization. Wrong geometry "
            "type: %s" % ogr.GeometryTypeToName(geometry.GetGeometryType()))

    # check if reprojection to latlon is necessary
    if not sr.IsGeographic():
        dst_sr = osr.SpatialReference()
        dst_sr.ImportFromEPSG(4326)
        try:
            geometry.TransformTo(dst_sr)
        except RuntimeError:
            # fall back to an explicit coordinate transformation
            geometry.Transform(osr.CoordinateTransformation(sr, dst_sr))

    gt = ds.GetGeoTransform()
    resolution = min(abs(gt[1]), abs(gt[5]))

    simplification_value = self.simplification_factor * resolution

    # simplify the polygon. the tolerance value is *really* vague
    try:
        # SimplifyPreserveTopology() available since OGR 1.9.0
        geometry = geometry.SimplifyPreserveTopology(simplification_value)
    except AttributeError:
        # use GeoDjango bindings if OGR is too old
        geometry = ogr.CreateGeometryFromWkt(
            GEOSGeometry(geometry.ExportToWkt()).simplify(
                simplification_value, True).wkt)

    return geometry.ExportToWkt()
def rect_from_subset(path_or_ds, srid, minx, miny, maxx, maxy,
                     method=METHOD_GCP, order=0):
    """ Returns the smallest area of an image for the given spatial subset.

        :param path_or_ds: a :class:`GDAL Dataset
            <eoxserver.contrib.gdal.Dataset>` or a path to such
        :param srid: the SRID the ``minx``, ``miny``, ``maxx`` and ``maxy``
            are expressed in
        :param minx: the minimum X subset coordinate
        :param miny: the minimum Y subset coordinate
        :param maxx: the maximum X subset coordinate
        :param maxy: the maximum Y subset coordinate
        :param method: either of :const:`METHOD_GCP`, :const:`METHOD_TPS` or
            :const:`METHOD_TPS_LSQ`.
        :param order: the order of the function; see
            :func:`get_footprint_wkt` for reference
        :returns: a :class:`Rect <eoxserver.core.util.rect.Rect>` portraying
            the subset in image coordinates
    """
    # fixed: previously ``ds = path_or_ds`` -- a path argument (promised by
    # the docstring) would fail on the attribute accesses below; open it
    # like the sibling ``create_rectified_vrt`` does.
    ds = _open_ds(path_or_ds)
    x_size = ds.RasterXSize
    y_size = ds.RasterYSize

    transformer = _create_referenceable_grid_transformer(ds, method, order)

    gcp_srs = osr.SpatialReference(ds.GetGCPProjection())
    subset_srs = osr.SpatialReference()
    subset_srs.ImportFromEPSG(srid)

    # transform the four image corners to GCP coordinates to estimate a
    # sampling distance along the subset boundary
    coord_array_type = (C.c_double * 4)
    x = coord_array_type()
    y = coord_array_type()
    z = coord_array_type()
    success = (C.c_int * 4)()

    # corner 0 stays at (0, 0); set the remaining three corners
    x[1] = float(x_size)
    y[1] = 0.0
    x[2] = float(x_size)
    y[2] = float(y_size)
    x[3] = 0.0
    y[3] = float(y_size)

    GDALUseTransformer(transformer, False, 4, x, y, z, success)

    # roughly one sample per percent of the image extent
    dist = min((max(x) - min(x)) / (x_size / 100),
               (max(y) - min(y)) / (y_size / 100))

    # reuse the arrays for the four subset corners
    x[0] = x[3] = minx
    x[1] = x[2] = maxx
    y[0] = y[1] = miny
    y[2] = y[3] = maxy

    ct = CoordinateTransformation(subset_srs, gcp_srs)
    OCTTransform(ct, 4, x, y, z)

    # number of boundary samples along each axis
    num_x = int(math.ceil((max(x) - min(x)) / dist))
    num_y = int(math.ceil((max(y) - min(y)) / dist))

    x_step = (maxx - minx) / num_x
    y_step = (maxy - miny) / num_y

    num_points = 4 + 2 * num_x + 2 * num_y

    coord_array_type = (C.c_double * num_points)
    x = coord_array_type()
    y = coord_array_type()
    z = coord_array_type()
    success = (C.c_int * num_points)()

    # walk the subset boundary: along miny, along maxx, along maxy,
    # along minx (corners included once each)
    x[0] = minx
    y[0] = miny
    for i in xrange(1, num_x + 1):
        x[i] = minx + i * x_step
        y[i] = miny

    x[num_x + 1] = maxx
    y[num_x + 1] = miny
    for i in xrange(1, num_y + 1):
        x[num_x + 1 + i] = maxx
        y[num_x + 1 + i] = miny + i * y_step

    x[num_x + num_y + 2] = maxx
    y[num_x + num_y + 2] = maxy
    for i in xrange(1, num_x + 1):
        x[num_x + num_y + 2 + i] = maxx - i * x_step
        y[num_x + num_y + 2 + i] = maxy

    x[num_x * 2 + num_y + 3] = minx
    y[num_x * 2 + num_y + 3] = maxy
    for i in xrange(1, num_y + 1):
        x[num_x * 2 + num_y + 3 + i] = minx
        y[num_x * 2 + num_y + 3 + i] = maxy - i * y_step

    # subset SRS -> GCP SRS, then inverse transformer -> pixel coordinates
    OCTTransform(ct, num_points, x, y, z)
    GDALUseTransformer(transformer, True, num_points, x, y, z, success)

    minx = int(math.floor(min(x)))
    miny = int(math.floor(min(y)))
    size_x = int(math.ceil(max(x) - minx) + 1)
    size_y = int(math.ceil(max(y) - miny) + 1)

    return Rect(minx, miny, size_x, size_y)
def create_rectified_vrt(path_or_ds, vrt_path, srid=None, resample=0,
                         memory_limit=0.0, max_error=APPROX_ERR_TOL,
                         method=METHOD_GCP, order=0):
    """ Creates a VRT dataset that symbolizes a rectified version of the
        passed "referenceable" GDAL dataset.

        :param path_or_ds: a :class:`GDAL Dataset
            <eoxserver.contrib.gdal.Dataset>` or a path to such
        :param vrt_path: the path to store the VRT dataset under
        :param srid: optional target SRID; when omitted the dataset's GCP
            projection is used as-is
        :param resample: the resample method to be used; defaults to 0 which
            means a nearest neighbour resampling
        :param memory_limit: the memory limit; by default no limit is used
        :param max_error: the maximum allowed error
        :param method: either of :const:`METHOD_GCP`, :const:`METHOD_TPS`
            or :const:`METHOD_TPS_LSQ`.
        :param order: the order of the function; see
            :func:`get_footprint_wkt` for reference
    """
    ds = _open_ds(path_or_ds)
    # raw C-level dataset handle for the low-level libgdal calls below
    # (``long`` -- this module targets Python 2)
    ptr = C.c_void_p(long(ds.this))

    if srid:
        # explicit target SRS requested: export it to WKT
        srs = osr.SpatialReference()
        srs.ImportFromEPSG(srid)
        wkt = srs.ExportToWkt()
        srs = None
    else:
        # otherwise rectify into the dataset's own GCP projection
        wkt = ds.GetGCPProjection()

    transformer = _create_generic_transformer(ds, None, None, wkt, method,
                                              order)

    # let GDAL suggest the output raster size and geotransform for the
    # rectified image
    x_size = C.c_int()
    y_size = C.c_int()
    geotransform = (C.c_double * 6)()
    GDALSuggestedWarpOutput(ptr, GDALGenImgProjTransform, transformer,
                            geotransform, C.byref(x_size), C.byref(y_size))

    GDALSetGenImgProjTransformerDstGeoTransform(transformer, geotransform)

    # assemble warp options around the transformer
    options = GDALCreateWarpOptions()
    options.dfWarpMemoryLimit = memory_limit
    options.eResampleAlg = resample
    options.pfnTransformer = GDALGenImgProjTransform
    options.pTransformerArg = transformer
    options.hDstDS = ds.this
    nb = options.nBandCount = ds.RasterCount
    options.panSrcBands = CPLMalloc(C.sizeof(C.c_int) * nb)
    options.panDstBands = CPLMalloc(C.sizeof(C.c_int) * nb)

    # TODO: nodata value setup
    #for i in xrange(nb):
    #    band = ds.GetRasterBand(i+1)

    if max_error > 0:
        # wrap the transformer in an approximating transformer bounded by
        # ``max_error``
        GDALApproxTransform = _libgdal.GDALApproxTransform
        options.pTransformerArg = GDALCreateApproxTransformer(
            options.pfnTransformer, options.pTransformerArg, max_error)
        options.pfnTransformer = GDALApproxTransform
        # TODO: correct for python
        #GDALApproxTransformerOwnsSubtransformer(options.pTransformerArg, False)

    #options=GDALCreateWarpOptions()
    #vrt_ds = GDALCreateWarpedVRT(ptr, x_size, y_size, geotransform, options)

    # NOTE(review): the suggested size/geotransform and the warp options
    # assembled above are NOT passed to GDALAutoCreateWarpedVRT below, which
    # recomputes its own warp setup -- presumably dead code left from the
    # commented-out GDALCreateWarpedVRT path; confirm before removing.
    vrt_ds = GDALAutoCreateWarpedVRT(ptr, None, wkt, resample, max_error,
                                     None)

    GDALSetProjection(vrt_ds, wkt)
    # the description of a VRT dataset is its file path; closing it
    # serializes the VRT to ``vrt_path``
    GDALSetDescription(vrt_ds, vrt_path)
    GDALClose(vrt_ds)
    GDALDestroyWarpOptions(options)
def isProjected(epsg):
    """Is the coordinate system projected (True) or Geographic (False)?

    ``epsg`` is the integer EPSG code of the coordinate reference system.
    """
    ref = osr.SpatialReference()
    ref.ImportFromEPSG(epsg)
    # IsProjected() yields a C-style int flag; coerce it to a Python bool
    return ref.IsProjected() != 0
def apply(self, src_ds):
    """ Attach ``self.gcps`` to *src_ds* and warp it into the target
        projection, trying several transform methods/orders from most to
        least accurate.

        Returns a ``(dst_ds, footprint_wkt)`` tuple: the warped in-memory
        dataset and the footprint (in lon/lat WKT) derived from the GCPs.
        Raises :class:`GCPTransformException` when no transform method
        succeeds.
    """
    # setup: destination SRS (falls back to the GCP SRID when self.srid is
    # unset) and the SRS the GCP coordinates are expressed in
    dst_sr = osr.SpatialReference()
    gcp_sr = osr.SpatialReference()

    dst_sr.ImportFromEPSG(self.srid if self.srid is not None
                          else self.gcp_srid)
    gcp_sr.ImportFromEPSG(self.gcp_srid)

    logger.debug("Using GCP Projection '%s'" % gcp_sr.ExportToWkt())
    logger.debug("Applying GCPs: MULTIPOINT(%s) -> MULTIPOINT(%s)"
                 % (", ".join([("(%f %f)") % (gcp.GCPX, gcp.GCPY)
                               for gcp in self.gcps]),
                    ", ".join([("(%f %f)") % (gcp.GCPPixel, gcp.GCPLine)
                               for gcp in self.gcps])))

    # set the GCPs
    src_ds.SetGCPs(self.gcps, gcp_sr.ExportToWkt())

    # Try to find and use the best transform method/order.
    # Orders are: -1 (TPS), 3, 2, and 1 (all GCP)
    # Loop over the min and max GCP number to order map.
    for min_gcpnum, max_gcpnum, order in [(3, None, -1), (10, None, 3),
                                          (6, None, 2), (3, None, 1)]:
        # if the number of GCP matches
        if len(self.gcps) >= min_gcpnum and (max_gcpnum is None
                                             or len(self.gcps) <= max_gcpnum):
            try:
                if (order < 0):
                    # let the reftools suggest the right interpolator
                    rt_prm = rt.suggest_transformer(src_ds)
                else:
                    # use the polynomial GCP interpolation as requested
                    rt_prm = {"method": rt.METHOD_GCP, "order": order}

                logger.debug("Trying order '%i' {method:%s,order:%s}" % \
                             (order, rt.METHOD2STR[rt_prm["method"]],
                              rt_prm["order"]))

                # get the suggested pixel size/geotransform
                size_x, size_y, geotransform = rt.suggested_warp_output(
                    src_ds, None, dst_sr.ExportToWkt(), **rt_prm)
                # guard against pathological transform results
                if size_x > 100000 or size_y > 100000:
                    raise RuntimeError("Calculated size exceeds limit.")
                logger.debug("New size is '%i x %i'" % (size_x, size_y))

                # create the output dataset
                dst_ds = create_mem(size_x, size_y, src_ds.RasterCount,
                                    src_ds.GetRasterBand(1).DataType)

                # reproject the image
                dst_ds.SetProjection(dst_sr.ExportToWkt())
                dst_ds.SetGeoTransform(geotransform)
                rt.reproject_image(src_ds, "", dst_ds, "", **rt_prm)

                copy_metadata(src_ds, dst_ds)

                # retrieve the footprint from the given GCPs
                footprint_wkt = rt.get_footprint_wkt(src_ds, **rt_prm)

            except RuntimeError as e:
                logger.debug("Failed using order '%i'. Error was '%s'."
                             % (order, str(e)))
                # the given method was not applicable, use the next one
                continue
            else:
                logger.debug("Successfully used order '%i'" % order)
                # the transform method was successful, exit the loop
                break
    else:
        # no method worked, so raise an error
        raise GCPTransformException(
            "Could not find a valid transform method.")

    # reproject the footprint to a lon/lat projection if necessary
    # (the footprint WKT is expressed in the GCP projection)
    if not gcp_sr.IsGeographic():
        out_sr = osr.SpatialReference()
        out_sr.ImportFromEPSG(4326)
        geom = ogr.CreateGeometryFromWkt(footprint_wkt, gcp_sr)
        geom.TransformTo(out_sr)
        footprint_wkt = geom.ExportToWkt()

    logger.debug("Calculated footprint: '%s'." % footprint_wkt)

    return dst_ds, footprint_wkt
# retrieve the footprint from the given GCPs footprint_wkt = reftools.get_footprint_wkt( src_ds, **rt_prm) except RuntimeError, e: logger.debug("Failed using order '%i'. Error was '%s'." % (order, str(e))) # the given method was not applicable, use the next one continue else: logger.debug("Successfully used order '%i'" % order) # the transform method was successful, exit the loop break else: # no method worked, so raise an error raise GCPTransformException( "Could not find a valid transform method.") # reproject the footprint to a lon/lat projection if necessary if not dst_sr.IsGeographic(): out_sr = osr.SpatialReference() out_sr.ImportFromEPSG(4326) geom = ogr.CreateGeometryFromWkt(footprint_wkt, gcp_sr) geom.TransformTo(out_sr) footprint_wkt = geom.ExportToWkt() logger.debug("Calculated footprint: '%s'." % footprint_wkt) return dst_ds, footprint_wkt