def _input_worker(conf_dir, pyramid, pixelbuffer, kv): key, input_obj = kv if input_obj not in ["none", "None", None, ""]: # prepare input metadata LOGGER.debug("read metadata from %s" % input_obj) # for single file inputs if isinstance(input_obj, str): # get absolute paths if not remote path = input_obj if input_obj.startswith( ("s3://", "https://", "http://")) else os.path.normpath( os.path.join(conf_dir, input_obj)) LOGGER.debug("load input reader for file %s" % input_obj) _input_reader = load_input_reader( dict(path=path, pyramid=pyramid, pixelbuffer=pixelbuffer)) LOGGER.debug("input reader for file %s is %s" % (input_obj, _input_reader)) # for abstract inputs elif isinstance(input_obj, dict): LOGGER.debug("load input reader for abstract input %s" % input_obj) _input_reader = load_input_reader( dict(abstract=input_obj, pyramid=pyramid, pixelbuffer=pixelbuffer)) LOGGER.debug("input reader for abstract input %s is %s" % (input_obj, _input_reader)) # trigger input bounding box caches _input_reader.bbox(out_crs=pyramid.crs) return key, (input_obj, _input_reader) else: return key, (None, None)
def test_input_reader_errors():
    """Test errors when loading input readers."""
    # each case: (argument handed to load_input_reader, expected exception)
    cases = (
        ("not_a_dictionary", TypeError),
        ({}, errors.MapcheteDriverError),
        ({"abstract": {"format": "invalid_format"}}, errors.MapcheteDriverError),
    )
    for argument, expected_exception in cases:
        with pytest.raises(expected_exception):
            load_input_reader(argument)
def input(self):
    """
    Input items used for process stored in a dictionary.

    Keys are the hashes of the input parameters, values the respective
    InputData classes.

    Returns
    -------
    dict
        mapping of input parameter hash to initialized input reader

    Raises
    ------
    MapcheteDriverError
        if an input reader cannot be loaded
    MapcheteConfigError
        if an input definition is neither a string nor a dictionary
    """
    # the delimiters are used by some input drivers
    delimiters = dict(
        zoom=self.init_zoom_levels,
        bounds=self.init_bounds,
        process_bounds=self.bounds)
    # get input items only of initialized zoom levels
    raw_inputs = {
        # convert input definition to hash
        get_hash(v): v
        for zoom in self.init_zoom_levels
        if "input" in self._params_at_zoom[zoom]
        # to preserve file groups, "flatten" the input tree and use
        # the tree paths as keys
        for key, v in _flatten_tree(self._params_at_zoom[zoom]["input"])
        if v is not None
    }
    initialized_inputs = {}
    for k, v in six.iteritems(raw_inputs):
        if isinstance(v, six.string_types):
            # get absolute paths if not remote
            path = v if v.startswith(
                ("s3://", "https://", "http://")
            ) else os.path.normpath(os.path.join(self.config_dir, v))
            logger.debug("load input reader for file %s", v)
            try:
                reader = load_input_reader(
                    dict(
                        path=deepcopy(path),
                        pyramid=self.process_pyramid,
                        pixelbuffer=self.process_pyramid.pixelbuffer,
                        delimiters=delimiters
                    ),
                    self.mode == "readonly")
            except Exception as e:
                logger.exception(e)
                raise MapcheteDriverError(e)
            logger.debug("input reader for file %s is %s", v, reader)
        # for abstract inputs
        elif isinstance(v, dict):
            logger.debug("load input reader for abstract input %s", v)
            try:
                reader = load_input_reader(
                    dict(
                        abstract=deepcopy(v),
                        pyramid=self.process_pyramid,
                        pixelbuffer=self.process_pyramid.pixelbuffer,
                        delimiters=delimiters,
                        conf_dir=self.config_dir
                    ),
                    self.mode == "readonly")
            except Exception as e:
                logger.exception(e)
                raise MapcheteDriverError(e)
            logger.debug("input reader for abstract input %s is %s", v, reader)
        else:
            # BUG FIX: the type was previously passed as a second positional
            # argument ("... %s", type(v)) and never interpolated into the
            # message; exceptions do not lazy-format like logging calls
            raise MapcheteConfigError("invalid input type %s" % type(v))
        # trigger bbox creation
        reader.bbox(out_crs=self.process_pyramid.crs)
        initialized_inputs[k] = reader
    return initialized_inputs
def input(self):
    """
    Input items used for process stored in a dictionary.

    Keys are the hashes of the input parameters, values the respective
    InputData classes.

    Returns
    -------
    OrderedDict
        mapping of input parameter hash to initialized input reader,
        preserving configuration order

    Raises
    ------
    MapcheteDriverError
        if an input reader cannot be loaded
    MapcheteConfigError
        if an input definition is neither a string nor a dictionary
    """
    # get input items only of initialized zoom levels
    raw_inputs = OrderedDict([
        # convert input definition to hash
        (get_hash(v), v)
        for zoom in self.init_zoom_levels
        if "input" in self._params_at_zoom[zoom]
        # to preserve file groups, "flatten" the input tree and use
        # the tree paths as keys
        for key, v in _flatten_tree(self._params_at_zoom[zoom]["input"])
        if v is not None
    ])
    init_as_readonly = (
        self.mode == "readonly" or (
            # in case only overview levels are about to be built
            not len(
                set(self.baselevels["zooms"]).intersection(
                    set(self.init_zoom_levels)))
            if self.baselevels
            else False))
    initialized_inputs = OrderedDict()
    for k, v in raw_inputs.items():
        # for files and tile directories
        if isinstance(v, str):
            logger.debug("load input reader for simple input %s", v)
            try:
                reader = load_input_reader(dict(
                    path=absolute_path(path=v, base_dir=self.config_dir),
                    pyramid=self.process_pyramid,
                    pixelbuffer=self.process_pyramid.pixelbuffer,
                    delimiters=self._delimiters), readonly=init_as_readonly)
            except Exception as e:
                logger.exception(e)
                raise MapcheteDriverError(
                    "error when loading input %s: %s" % (v, e))
            logger.debug("input reader for simple input %s is %s", v, reader)
        # for abstract inputs
        elif isinstance(v, dict):
            logger.debug("load input reader for abstract input %s", v)
            try:
                reader = load_input_reader(dict(
                    abstract=deepcopy(v),
                    pyramid=self.process_pyramid,
                    pixelbuffer=self.process_pyramid.pixelbuffer,
                    delimiters=self._delimiters,
                    conf_dir=self.config_dir), readonly=init_as_readonly)
            except Exception as e:
                logger.exception(e)
                raise MapcheteDriverError(
                    "error when loading input %s: %s" % (v, e))
            logger.debug("input reader for abstract input %s is %s", v, reader)
        else:
            # BUG FIX: the type was previously passed as a second positional
            # argument ("... %s", type(v)) and never interpolated into the
            # message; exceptions do not lazy-format like logging calls
            raise MapcheteConfigError("invalid input type %s" % type(v))
        # trigger bbox creation
        reader.bbox(out_crs=self.process_pyramid.crs)
        initialized_inputs[k] = reader
    return initialized_inputs
def input(self):
    """
    Input items used for process stored in a dictionary.

    Keys are the hashes of the input parameters, values the respective
    InputData classes.

    If process mode is `readonly` or if only overviews are about to be
    built, no inputs are required and thus not initialized due to
    performance reasons. However, process bounds which otherwise are
    dependant on input bounds, may change if not explicitly provided in
    process configuration.

    Returns
    -------
    OrderedDict
        mapping of input parameter hash to initialized input reader, or to
        ``None`` when inputs are not initialized; preserves configuration
        order

    Raises
    ------
    MapcheteDriverError
        if an input reader cannot be loaded
    MapcheteConfigError
        if an input definition is neither a string nor a dictionary
    """
    # get input items only of initialized zoom levels
    raw_inputs = OrderedDict([
        # convert input definition to hash
        (get_hash(v), v)
        for zoom in self.init_zoom_levels
        if "input" in self._params_at_zoom[zoom]
        # to preserve file groups, "flatten" the input tree and use
        # the tree paths as keys
        for key, v in _flatten_tree(self._params_at_zoom[zoom]["input"])
        if v is not None
    ])
    initialized_inputs = OrderedDict()
    if self._init_inputs:
        for k, v in raw_inputs.items():
            # for files and tile directories
            if isinstance(v, str):
                logger.debug("load input reader for simple input %s", v)
                try:
                    reader = load_input_reader(
                        dict(path=absolute_path(path=v, base_dir=self.config_dir),
                             pyramid=self.process_pyramid,
                             pixelbuffer=self.process_pyramid.pixelbuffer,
                             delimiters=self._delimiters),
                        readonly=self.mode == "readonly")
                except Exception as e:
                    logger.exception(e)
                    raise MapcheteDriverError(
                        "error when loading input %s: %s" % (v, e))
                logger.debug("input reader for simple input %s is %s", v, reader)
            # for abstract inputs
            elif isinstance(v, dict):
                logger.debug("load input reader for abstract input %s", v)
                try:
                    reader = load_input_reader(
                        dict(abstract=deepcopy(v),
                             pyramid=self.process_pyramid,
                             pixelbuffer=self.process_pyramid.pixelbuffer,
                             delimiters=self._delimiters,
                             conf_dir=self.config_dir),
                        readonly=self.mode == "readonly")
                except Exception as e:
                    logger.exception(e)
                    raise MapcheteDriverError(
                        "error when loading input %s: %s" % (v, e))
                logger.debug("input reader for abstract input %s is %s", v, reader)
            else:
                # BUG FIX: the type was previously passed as a second
                # positional argument ("... %s", type(v)) and never
                # interpolated; exceptions do not lazy-format like logging
                raise MapcheteConfigError("invalid input type %s" % type(v))
            # trigger bbox creation
            reader.bbox(out_crs=self.process_pyramid.crs)
            initialized_inputs[k] = reader
    else:
        # inputs skipped for performance: keep the keys so callers can still
        # iterate the input mapping
        for k in raw_inputs.keys():
            initialized_inputs[k] = None
    return initialized_inputs