def output(self):
    """Output object of driver."""
    # merge raw output configuration with pyramid-derived settings
    output_params = dict(
        self._raw["output"],
        type=self.output_pyramid.grid,
        pixelbuffer=self.output_pyramid.pixelbuffer,
        metatiling=self.output_pyramid.metatiling)
    # resolve a relative output path against the configuration directory
    if "path" in output_params:
        output_params["path"] = absolute_path(
            path=output_params["path"], base_dir=self.config_dir)
    if "format" not in output_params:
        raise MapcheteConfigError("output format not specified")
    if output_params["format"] not in available_output_formats():
        raise MapcheteConfigError(
            "format %s not available in %s" % (
                output_params["format"], str(available_output_formats())))
    # instantiate the driver and let it validate the configuration
    writer = load_output_writer(output_params)
    try:
        writer.is_valid_with_config(output_params)
    except Exception as e:
        logger.exception(e)
        raise MapcheteConfigError(
            "driver %s not compatible with configuration: %s" % (
                writer.METADATA["driver_name"], e))
    return writer
def test_absolute_path():
    # a relative path is joined onto base_dir
    assert absolute_path(path="file.tif", base_dir="/mnt/data") == "/mnt/data/file.tif"
    # an already absolute path ignores base_dir
    assert (
        absolute_path(path="/mnt/data/file.tif", base_dir="/mnt/other_data")
        == "/mnt/data/file.tif"
    )
    # relative paths require an absolute base_dir
    with pytest.raises(TypeError):
        absolute_path(path="file.tif", base_dir=None)
    with pytest.raises(TypeError):
        absolute_path(path="file.tif", base_dir="no/abs/dir")
    # remote URLs pass through unchanged
    assert (
        absolute_path(path="https://file.tif", base_dir="/mnt/data")
        == "https://file.tif"
    )
def _output_params(self):
    """Output params of driver."""
    # start from the raw output section and layer on pyramid settings
    params = dict(
        self._raw["output"],
        grid=self.output_pyramid.grid,
        pixelbuffer=self.output_pyramid.pixelbuffer,
        metatiling=self.output_pyramid.metatiling,
        delimiters=self._delimiters,
        mode=self.mode)
    # make a relative output path absolute using the config directory
    if "path" in params:
        params.update(
            path=absolute_path(path=params["path"], base_dir=self.config_dir))
    if "format" not in params:
        raise MapcheteConfigError("output format not specified")
    if params["format"] not in available_output_formats():
        raise MapcheteConfigError(
            "format %s not available in %s" % (
                params["format"], str(available_output_formats())))
    return params
def _guess_geometry(i, base_dir=None):
    """
    Guess and parse geometry if possible.

    Accepted inputs:

    - a WKT string
    - a GeoJSON mapping
    - a shapely geometry
    - a path to a Fiona-readable file

    Returns a ``(geometry, crs)`` tuple; crs is only set when reading from
    a file.
    """
    crs = None
    if isinstance(i, str):
        # either a WKT (multi)polygon string or a path to a vector file
        if i.upper().startswith(("POLYGON ", "MULTIPOLYGON ")):
            geom = wkt.loads(i)
        else:
            with fiona.open(absolute_path(path=i, base_dir=base_dir)) as src:
                geom = cascaded_union([shape(f["geometry"]) for f in src])
                crs = src.crs
    elif isinstance(i, dict):
        # GeoJSON mapping
        geom = shape(i)
    elif isinstance(i, BaseGeometry):
        # shapely geometry passes through
        geom = i
    else:
        raise TypeError(
            "area must be either WKT, GeoJSON mapping, shapely geometry or a "
            "Fiona-readable path.")
    if not geom.is_valid:  # pragma: no cover
        raise TypeError("area is not a valid geometry")
    # only (Multi)Polygons are meaningful as process areas
    try:
        geom = clean_geometry_type(geom, "Polygon", allow_multipart=True)
    except GeometryTypeError:
        raise GeometryTypeError(
            f"area must either be a Polygon or a MultiPolygon, not {geom.geom_type}"
        )
    return geom, crs
def input(self):
    """
    Input items used for process stored in a dictionary.

    Keys are the hashes of the input parameters, values the respective
    InputData classes.
    """
    # get input items only of initialized zoom levels
    raw_inputs = OrderedDict([
        # convert input definition to hash
        (get_hash(v), v)
        for zoom in self.init_zoom_levels
        if "input" in self._params_at_zoom[zoom]
        # to preserve file groups, "flatten" the input tree and use
        # the tree paths as keys
        for key, v in _flatten_tree(self._params_at_zoom[zoom]["input"])
        if v is not None
    ])
    # readers are opened read-only when the process mode is "readonly" or
    # when none of the initialized zooms overlaps the baselevel zooms
    # (i.e. only overview levels are about to be built)
    init_as_readonly = (
        self.mode == "readonly" or (
            # in case only overview levels are about to be built
            not len(
                set(self.baselevels["zooms"]).intersection(
                    set(self.init_zoom_levels)))
            if self.baselevels
            else False))
    initalized_inputs = OrderedDict()
    for k, v in raw_inputs.items():
        # for files and tile directories
        if isinstance(v, str):
            logger.debug("load input reader for simple input %s", v)
            try:
                reader = load_input_reader(dict(
                    path=absolute_path(path=v, base_dir=self.config_dir),
                    pyramid=self.process_pyramid,
                    pixelbuffer=self.process_pyramid.pixelbuffer,
                    delimiters=self._delimiters), readonly=init_as_readonly)
            except Exception as e:
                logger.exception(e)
                # wrap any driver failure with the offending input definition
                raise MapcheteDriverError(
                    "error when loading input %s: %s" % (v, e))
            logger.debug("input reader for simple input %s is %s", v, reader)
        # for abstract inputs
        elif isinstance(v, dict):
            logger.debug("load input reader for abstract input %s", v)
            try:
                reader = load_input_reader(dict(
                    abstract=deepcopy(v),
                    pyramid=self.process_pyramid,
                    pixelbuffer=self.process_pyramid.pixelbuffer,
                    delimiters=self._delimiters,
                    conf_dir=self.config_dir), readonly=init_as_readonly)
            except Exception as e:
                logger.exception(e)
                raise MapcheteDriverError(
                    "error when loading input %s: %s" % (v, e))
            logger.debug("input reader for abstract input %s is %s", v, reader)
        else:
            raise MapcheteConfigError("invalid input type %s", type(v))
        # trigger bbox creation
        reader.bbox(out_crs=self.process_pyramid.crs)
        initalized_inputs[k] = reader
    return initalized_inputs
def __init__(self, input_params, **kwargs):
    """Initialize."""
    super().__init__(input_params, **kwargs)
    if "abstract" in input_params:
        # input was given as an inline ("abstract") definition
        self._params = input_params["abstract"]
        self.path = absolute_path(
            path=self._params["path"],
            base_dir=input_params["conf_dir"])
        logger.debug("InputData params: %s", input_params)
        # define pyramid
        self.td_pyramid = BufferedTilePyramid(
            self._params["grid"],
            metatiling=self._params.get("metatiling", 1),
            tile_size=self._params.get("tile_size", 256),
            pixelbuffer=self._params.get("pixelbuffer", 0))
        self._read_as_tiledir_func = base._read_as_tiledir
        try:
            self._tiledir_metadata_json = read_output_metadata(
                os.path.join(self.path, "metadata.json"))
            try:
                # prefer explicit data_type from metadata.json
                self._data_type = self._tiledir_metadata_json["driver"][
                    "data_type"]
            except KeyError:
                # fall back to the driver's registered data type
                self._data_type = driver_metadata(
                    self._tiledir_metadata_json["driver"]
                    ["format"])["data_type"]
        except FileNotFoundError:
            # in case no metadata.json is available, try to guess data type via the
            # format file extension
            self._data_type = data_type_from_extension(
                self._params["extension"])
    elif "path" in input_params:
        # input was given as a plain path to a tile directory
        self.path = absolute_path(
            path=input_params["path"],
            base_dir=input_params.get("conf_dir"))
        try:
            self._tiledir_metadata_json = read_output_metadata(
                os.path.join(self.path, "metadata.json"))
        except FileNotFoundError:
            raise MapcheteConfigError(
                f"Cannot find metadata.json in {self.path}")
        # define pyramid
        self.td_pyramid = self._tiledir_metadata_json["pyramid"]
        # output writer is used as read-only reader for the tile directory
        self.output_data = load_output_writer(dict(
            self._tiledir_metadata_json["driver"],
            metatiling=self.td_pyramid.metatiling,
            pixelbuffer=self.td_pyramid.pixelbuffer,
            pyramid=self.td_pyramid,
            grid=self.td_pyramid.grid,
            path=self.path), readonly=True)
        self._params = dict(
            path=self.path,
            grid=self.td_pyramid.grid.to_dict(),
            metatiling=self.td_pyramid.metatiling,
            pixelbuffer=self.td_pyramid.pixelbuffer,
            tile_size=self.td_pyramid.tile_size,
            extension=self.output_data.file_extension.split(".")[-1],
            **self._tiledir_metadata_json["driver"])
        self._read_as_tiledir_func = self.output_data._read_as_tiledir
        self._data_type = driver_metadata(
            self._tiledir_metadata_json["driver"]["format"])["data_type"]
    # validate parameters
    validate_values(
        self._params,
        [("path", str), ("grid", (str, dict)), ("extension", str)])
    self._ext = self._params["extension"]
    # additional params
    # NOTE(review): bounds default to the full pyramid bounds if not given
    self._bounds = self._params.get("bounds", self.td_pyramid.bounds)
    self.METADATA.update(
        data_type=self._data_type,
        file_extensions=[self._params["extension"]])
    if self.METADATA.get("data_type") == "raster":
        # "bands" is accepted as a legacy alias for "count"
        self._params["count"] = self._params.get(
            "count", self._params.get("bands", None))
        validate_values(self._params, [("dtype", str), ("count", int)])
        self._profile = {
            "nodata": self._params.get("nodata", 0),
            "dtype": self._params["dtype"],
            "count": self._params["count"]
        }
    else:
        self._profile = None
def __init__(self, input_params, **kwargs):
    """Initialize."""
    super().__init__(input_params, **kwargs)
    if "abstract" in input_params:
        # input was given as an inline ("abstract") definition
        self._params = input_params["abstract"]
        self.path = absolute_path(
            path=self._params["path"],
            base_dir=input_params["conf_dir"]
        )
        logger.debug("InputData params: %s", input_params)
        # define pyramid
        self.td_pyramid = BufferedTilePyramid(
            self._params["grid"],
            metatiling=self._params.get("metatiling", 1),
            tile_size=self._params.get("tile_size", 256),
            pixelbuffer=self._params.get("pixelbuffer", 0)
        )
        self._read_as_tiledir_func = base._read_as_tiledir
    elif "path" in input_params:
        # input was given as a plain path to a tile directory
        self.path = absolute_path(
            path=input_params["path"],
            base_dir=input_params.get("conf_dir")
        )
        try:
            params = read_output_metadata(os.path.join(self.path, "metadata.json"))
        except FileNotFoundError:
            raise MapcheteConfigError(
                "Cannot find metadata.json in %s" % input_params["path"]
            )
        # define pyramid
        self.td_pyramid = params["pyramid"]
        # output writer is used as read-only reader for the tile directory
        self.output_data = load_output_writer(
            dict(
                params["driver"],
                metatiling=self.td_pyramid.metatiling,
                pixelbuffer=self.td_pyramid.pixelbuffer,
                pyramid=self.td_pyramid,
                grid=self.td_pyramid.grid,
                path=self.path
            ),
            readonly=True
        )
        self._params = dict(
            path=self.path,
            grid=self.td_pyramid.grid.to_dict(),
            metatiling=self.td_pyramid.metatiling,
            pixelbuffer=self.td_pyramid.pixelbuffer,
            tile_size=self.td_pyramid.tile_size,
            extension=self.output_data.file_extension.split(".")[-1],
            **params["driver"]
        )
        self._read_as_tiledir_func = self.output_data._read_as_tiledir
    # validate parameters
    validate_values(
        self._params,
        [
            ("path", str),
            ("grid", (str, dict)),
            ("extension", str)
        ]
    )
    self._ext = self._params["extension"]
    # additional params
    # NOTE(review): bounds default to the full pyramid bounds if not given
    self._bounds = self._params.get("bounds", self.td_pyramid.bounds)
    # file type is inferred purely from the extension here
    self._file_type = (
        "vector" if self._params["extension"] == "geojson" else "raster"
    )
    if self._file_type == "raster":
        # "bands" is accepted as a legacy alias for "count"
        self._params["count"] = self._params.get(
            "count", self._params.get("bands", None)
        )
        validate_values(self._params, [("dtype", str), ("count", int)])
        self._profile = {
            "nodata": self._params.get("nodata", 0),
            "dtype": self._params["dtype"],
            "count": self._params["count"]
        }
    else:
        self._profile = None
def input(self):
    """
    Input items used for process stored in a dictionary.

    Keys are the hashes of the input parameters, values the respective
    InputData classes.

    If process mode is `readonly` or if only overviews are about to be built,
    no inputs are required and thus not initialized due to performance reasons.
    However, process bounds which otherwise are dependant on input bounds, may
    change if not explicitly provided in process configuration.
    """
    # get input items only of initialized zoom levels
    raw_inputs = OrderedDict([
        # convert input definition to hash
        (get_hash(v), v)
        for zoom in self.init_zoom_levels
        if "input" in self._params_at_zoom[zoom]
        # to preserve file groups, "flatten" the input tree and use
        # the tree paths as keys
        for key, v in _flatten_tree(self._params_at_zoom[zoom]["input"])
        if v is not None
    ])
    initalized_inputs = OrderedDict()
    if self._init_inputs:
        for k, v in raw_inputs.items():
            # for files and tile directories
            if isinstance(v, str):
                logger.debug("load input reader for simple input %s", v)
                try:
                    reader = load_input_reader(
                        dict(path=absolute_path(path=v, base_dir=self.config_dir),
                             pyramid=self.process_pyramid,
                             pixelbuffer=self.process_pyramid.pixelbuffer,
                             delimiters=self._delimiters),
                        readonly=self.mode == "readonly")
                except Exception as e:
                    logger.exception(e)
                    # wrap any driver failure with the offending input definition
                    raise MapcheteDriverError(
                        "error when loading input %s: %s" % (v, e))
                logger.debug("input reader for simple input %s is %s", v, reader)
            # for abstract inputs
            elif isinstance(v, dict):
                logger.debug("load input reader for abstract input %s", v)
                try:
                    reader = load_input_reader(
                        dict(abstract=deepcopy(v),
                             pyramid=self.process_pyramid,
                             pixelbuffer=self.process_pyramid.pixelbuffer,
                             delimiters=self._delimiters,
                             conf_dir=self.config_dir),
                        readonly=self.mode == "readonly")
                except Exception as e:
                    logger.exception(e)
                    raise MapcheteDriverError(
                        "error when loading input %s: %s" % (v, e))
                logger.debug("input reader for abstract input %s is %s", v, reader)
            else:
                raise MapcheteConfigError("invalid input type %s", type(v))
            # trigger bbox creation
            reader.bbox(out_crs=self.process_pyramid.crs)
            initalized_inputs[k] = reader
    else:
        # inputs are skipped: keep the keys but leave readers uninitialized
        for k in raw_inputs.keys():
            initalized_inputs[k] = None
    return initalized_inputs
def __init__(self, input_params, **kwargs):
    """Initialize."""
    super(InputData, self).__init__(input_params, **kwargs)
    if "abstract" in input_params:
        # input was given as an inline ("abstract") definition
        self._params = input_params["abstract"]
        self.path = absolute_path(
            path=self._params["path"],
            base_dir=input_params["conf_dir"])
        # define pyramid
        self.td_pyramid = BufferedTilePyramid(
            self._params["type"],
            metatiling=self._params.get("metatiling", 1),
            tile_size=self._params.get("tile_size", 256),
            pixelbuffer=self._params.get("pixelbuffer", 0))
    elif "path" in input_params:
        # input was given as a plain path to a tile directory
        self.path = absolute_path(
            path=input_params["path"],
            base_dir=input_params.get("conf_dir"))
        try:
            params = read_json(os.path.join(self.path, "metadata.json"))
        except FileNotFoundError:
            raise MapcheteConfigError(
                "Cannot find metadata.json in %s" % input_params["path"])
        # define pyramid
        self.td_pyramid = BufferedTilePyramid(
            params["pyramid"]["grid"]["type"],
            metatiling=params["pyramid"].get("metatiling", 1),
            tile_size=params["pyramid"].get("tile_size", 256),
            pixelbuffer=params["pyramid"].get("pixelbuffer", 0))
        # output writer is only instantiated to derive the file extension
        output = load_output_writer(dict(
            params["driver"],
            metatiling=self.td_pyramid.metatiling,
            pixelbuffer=self.td_pyramid.pixelbuffer,
            pyramid=self.td_pyramid,
            type=self.td_pyramid.type,
            path=self.path), readonly=True)
        logger.debug(output)
        self._params = dict(
            path=self.path,
            type=params["pyramid"]["grid"]["type"],
            metatiling=params["pyramid"].get("metatiling", 1),
            pixelbuffer=params["pyramid"].get("pixelbuffer", 0),
            tile_size=params["pyramid"].get("tile_size", 256),
            extension=output.file_extension.split(".")[-1],
            **params["driver"])
    # validate parameters
    validate_values(
        self._params,
        [("path", six.string_types),
         ("type", six.string_types),
         ("extension", six.string_types)])
    # only a fixed set of file extensions is supported by this driver
    if not self._params["extension"] in [
        "tif", "vrt", "png", "jpg", "mixed", "jp2", "geojson"
    ]:
        raise MapcheteConfigError(
            "invalid file extension given: %s" % self._params["extension"])
    self._ext = self._params["extension"]
    # additional params
    # NOTE(review): bounds default to the full pyramid bounds if not given
    self._bounds = self._params.get("bounds", self.td_pyramid.bounds)
    # file type is inferred purely from the extension here
    self._file_type = (
        "vector" if self._params["extension"] == "geojson" else "raster")
    if self._file_type == "raster":
        # "bands" is accepted as a legacy alias for "count"
        self._params["count"] = self._params.get(
            "count", self._params.get("bands", None))
        validate_values(
            self._params,
            [("dtype", six.string_types), ("count", int)])
        self._profile = {
            "nodata": self._params.get("nodata", 0),
            "dtype": self._params["dtype"],
            "count": self._params["count"]
        }
    else:
        self._profile = None
def input(self): """ Input items used for process stored in a dictionary. Keys are the hashes of the input parameters, values the respective InputData classes. """ # the delimiters are used by some input drivers delimiters = dict(zoom=self.init_zoom_levels, bounds=self.init_bounds, process_bounds=self.bounds, effective_bounds=self.effective_bounds) # get input items only of initialized zoom levels raw_inputs = { # convert input definition to hash get_hash(v): v for zoom in self.init_zoom_levels if "input" in self._params_at_zoom[zoom] # to preserve file groups, "flatten" the input tree and use # the tree paths as keys for key, v in _flatten_tree(self._params_at_zoom[zoom]["input"]) if v is not None } initalized_inputs = {} for k, v in six.iteritems(raw_inputs): # for files and tile directories if isinstance(v, six.string_types): logger.debug("load input reader for simple input %s", v) try: reader = load_input_reader( dict(path=absolute_path(path=v, base_dir=self.config_dir), pyramid=self.process_pyramid, pixelbuffer=self.process_pyramid.pixelbuffer, delimiters=delimiters), readonly=self.mode == "readonly") except Exception as e: logger.exception(e) raise MapcheteDriverError(e) logger.debug("input reader for simple input %s is %s", v, reader) # for abstract inputs elif isinstance(v, dict): logger.debug("load input reader for abstract input %s", v) try: reader = load_input_reader( dict(abstract=deepcopy(v), pyramid=self.process_pyramid, pixelbuffer=self.process_pyramid.pixelbuffer, delimiters=delimiters, conf_dir=self.config_dir), readonly=self.mode == "readonly") except Exception as e: logger.exception(e) raise MapcheteDriverError(e) logger.debug("input reader for abstract input %s is %s", v, reader) else: raise MapcheteConfigError("invalid input type %s", type(v)) # trigger bbox creation reader.bbox(out_crs=self.process_pyramid.crs) initalized_inputs[k] = reader return initalized_inputs