def encode(self, request, collection_id, queryset, search_context):
    """ Encode the search results as an Atom feed.

        :param request: the incoming request, used for absolute URI generation
        :param collection_id: identifier of the searched collection, used in
            the feed title
        :param queryset: the matched items to encode as entries
        :param search_context: carries pagination counts, extra namespaces
            and the originally submitted search parameters
        :returns: the serialized feed as a (pretty-printed) bytestring
    """
    # merge the default namespace map with any extras from the search context
    combined_nsmap = dict(nsmap)
    combined_nsmap.update(search_context.namespaces)
    ATOM = ElementMaker(namespace=ns_atom.uri, nsmap=combined_nsmap)

    # echo the request parameters back as namespaced os:Query attributes
    query_attributes = {
        "{%s}%s" % (combined_nsmap[prefix], name): value
        for prefix, params in search_context.parameters.items()
        for name, value in params.items()
    }

    entries = [
        self.encode_entry(request, collection_id, item)
        for item in queryset
    ]

    tree = ATOM(
        "feed",
        ATOM("id", request.build_absolute_uri()),
        ATOM("title", "%s Search" % collection_id),
        ATOM("description"),
        OS("totalResults", str(search_context.total_count)),
        OS("startIndex", str(search_context.start_index or 0)),
        OS("itemsPerPage", str(search_context.count)),
        OS("Query", role="request", **query_attributes),
        *chain(
            self.encode_feed_links(request, search_context),
            entries
        )
    )
    return etree.tostring(tree, pretty_print=True)
def encode(self, request, collection_id, queryset, search_context):
    """ Encode the search results as an Atom feed, including a
        ``rel="self"`` link pointing back at the request URI.

        :param request: the incoming request, used for absolute URI generation
        :param collection_id: identifier of the searched collection, used in
            the feed title
        :param queryset: the matched items to encode as entries
        :param search_context: carries pagination counts, extra namespaces
            and the originally submitted search parameters
        :returns: the serialized feed as a (pretty-printed) bytestring
    """
    # merge the default namespace map with any extras from the search context
    combined_nsmap = dict(nsmap)
    combined_nsmap.update(search_context.namespaces)
    ATOM = ElementMaker(namespace=ns_atom.uri, nsmap=combined_nsmap)

    self_uri = request.build_absolute_uri()

    # echo the request parameters back as namespaced os:Query attributes
    query_attributes = {
        "{%s}%s" % (combined_nsmap[prefix], name): value
        for prefix, params in search_context.parameters.items()
        for name, value in params.items()
    }

    entries = [self.encode_entry(request, item) for item in queryset]

    tree = ATOM(
        "feed",
        ATOM("id", self_uri),
        ATOM("title", "%s Search" % collection_id),
        ATOM("link", rel="self", href=self_uri),
        ATOM("description"),
        OS("totalResults", str(search_context.total_count)),
        OS("startIndex", str(search_context.start_index or 0)),
        OS("itemsPerPage", str(search_context.count)),
        OS("Query", role="request", **query_attributes),
        *chain(
            self.encode_feed_links(request, search_context),
            entries
        )
    )
    return etree.tostring(tree, pretty_print=True)
def encode(self, request, collection_id, queryset, search_context):
    """ Encode the search results as an RSS 2.0 feed.

        :param request: the incoming request, used for absolute URI generation
        :param collection_id: identifier of the searched collection, used in
            the channel title
        :param queryset: the matched items to encode as channel items
        :param search_context: carries pagination counts, extra namespaces
            and the originally submitted search parameters
        :returns: the serialized feed as a (pretty-printed) bytestring
    """
    # prepare RSS factory with additional namespaces from search context
    combined_nsmap = dict(nsmap)
    combined_nsmap.update(search_context.namespaces)
    RSS = ElementMaker(namespace=None, nsmap=combined_nsmap)

    # OpenSearch metadata, feed-level links, then one <item> per result
    channel_contents = list(self.encode_opensearch_elements(search_context))
    channel_contents.extend(self.encode_feed_links(request, search_context))
    channel_contents.extend(
        self.encode_item(request, item, search_context)
        for item in queryset
    )

    tree = RSS(
        "rss",
        RSS(
            "channel",
            RSS("title", "%s Search" % collection_id),
            RSS("link", request.build_absolute_uri()),
            RSS("description"),
            *channel_contents
        ),
        version="2.0"
    )
    return etree.tostring(tree, pretty_print=True)
def encode(self, request, collection_id, queryset, search_context):
    """ Encode the search results as an RSS 2.0 feed.

        :param request: the incoming request, used for absolute URI generation
        :param collection_id: identifier of the searched collection, used in
            the channel title
        :param queryset: the matched items to encode as channel items
        :param search_context: carries pagination counts, extra namespaces
            and the originally submitted search parameters
        :returns: the serialized feed as a (pretty-printed) bytestring
    """
    # prepare RSS factory with additional namespaces from search context
    merged_namespaces = dict(nsmap)
    merged_namespaces.update(search_context.namespaces)
    RSS = ElementMaker(namespace=None, nsmap=merged_namespaces)

    # assemble channel children: OpenSearch metadata, feed links, items
    children = chain(
        self.encode_opensearch_elements(search_context),
        self.encode_feed_links(request, search_context),
        (self.encode_item(request, item, search_context)
         for item in queryset),
    )

    channel = RSS(
        "channel",
        RSS("title", "%s Search" % collection_id),
        RSS("link", request.build_absolute_uri()),
        RSS("description"),
        *children
    )
    tree = RSS("rss", channel, version="2.0")
    return etree.tostring(tree, pretty_print=True)
def create_rectified_vrt(path_or_ds, vrt_path, srid_or_wkt=None, resample=0,
                         memory_limit=0.0, max_error=APPROX_ERR_TOL,
                         method=METHOD_GCP, order=0, size=None,
                         resolution=None):
    """ Creates a VRT dataset that symbolizes a rectified version of the
        passed "referenceable" GDAL dataset.

        :param path_or_ds: a :class:`GDAL Dataset
                           <eoxserver.contrib.gdal.Dataset>` or a path to such
        :param vrt_path: the path to store the VRT dataset under
        :param srid_or_wkt: the target projection, either as an integer EPSG
                            code or a WKT string; when ``None``, the
                            dataset's GCP projection is used
        :param resample: the resample method to be used; defaults to 0 which
                         means a nearest neighbour resampling
        :param memory_limit: the memory limit; by default no limit is used
                             (currently unused by the active implementation)
        :param max_error: the maximum allowed error
        :param method: either of :const:`METHOD_GCP`, :const:`METHOD_TPS` or
                       :const:`METHOD_TPS_LSQ` (currently unused by the
                       active implementation)
        :param order: the order of the function; see
                      :func:`get_footprint_wkt` for reference (currently
                      unused by the active implementation)
        :param size: optional ``(size_x, size_y)`` tuple to override the
                     suggested output size; mutually exclusive with
                     ``resolution``
        :param resolution: optional ``(res_x, res_y)`` tuple to override the
                           suggested output resolution; mutually exclusive
                           with ``size``
        :raises ValueError: when both ``size`` and ``resolution`` are passed
    """
    if size and resolution:
        raise ValueError('size and resolution are mutually exclusive')

    ds = _open_ds(path_or_ds)
    ptr = C.c_void_p(int(ds.this))

    # resolve the target projection: EPSG code -> WKT, a WKT string is
    # passed through, otherwise fall back to the dataset's GCP projection
    if isinstance(srid_or_wkt, int):
        srs = osr.SpatialReference()
        srs.ImportFromEPSG(srid_or_wkt)
        wkt = srs.ExportToWkt()
        srs = None
    elif isinstance(srid_or_wkt, str):
        wkt = srid_or_wkt
    else:
        wkt = ds.GetGCPProjection()

    # the C API expects bytes, not str
    if isinstance(wkt, str):
        wkt = b(wkt)

    vrt_ds = GDALAutoCreateWarpedVRT(ptr, None, wkt, resample, max_error,
                                     None)

    if isinstance(vrt_path, str):
        vrt_path = b(vrt_path)

    # setting the description persists the VRT to `vrt_path` on close
    GDALSetDescription(vrt_ds, vrt_path)
    GDALClose(vrt_ds)

    # if size or resolution is overridden, parse the VRT and adjust settings
    if size or resolution:
        with vsi.open(vrt_path) as f:
            root = parse(f).getroot()

        size_x = int(root.attrib['rasterXSize'])
        size_y = int(root.attrib['rasterYSize'])
        gt_elem = root.find('GeoTransform')
        gt = [
            float(value.strip())
            for value in gt_elem.text.strip().split(',')
        ]

        if size:
            # keep the suggested extent but force the requested pixel size
            extent = _to_extent(size_x, size_y, gt)
            size_x, size_y = size
            gt = _to_gt(size[0], size[1], extent)
        elif resolution:
            # keep the suggested extent but force the requested resolution
            extent = _to_extent(size_x, size_y, gt)
            gt[1] = resolution[0]
            gt[5] = resolution[1]
            size_x, size_y = _to_size(gt, extent)

        # adjust the XML: raster size, geotransform and the (inverse)
        # destination geotransform of the warp transformer
        root.attrib['rasterXSize'] = str(size_x)
        root.attrib['rasterYSize'] = str(size_y)
        gt_str = ",".join(str(v) for v in gt)
        gt_elem.text = gt_str
        root.find(
            'GDALWarpOptions/Transformer/ApproxTransformer/'
            'BaseTransformer/GenImgProjTransformer/DstGeoTransform'
        ).text = gt_str
        inv_gt = gdal.InvGeoTransform(gt)
        root.find(
            'GDALWarpOptions/Transformer/ApproxTransformer/'
            'BaseTransformer/GenImgProjTransformer/DstInvGeoTransform'
        ).text = ",".join(str(v) for v in inv_gt)

        # write the adjusted XML back to the file
        with vsi.open(vrt_path, "w") as f:
            f.write(etree.tostring(root, pretty_print=True))