def populate_with_keys(self, dataset_keys: set, query=None):
    """Populate the dependency tree.

    Args:
        dataset_keys (set): Strings, DataIDs, DataQuerys to find dependencies for.
            The set is updated in place: resolved keys are replaced by the
            names of the corresponding nodes.
        query (DataQuery): Additional filter parameters. See
            `satpy.readers.get_key` for more details.

    Raises:
        MissingDependencies: if dependencies could not be found for one or
            more of the requested keys.

    """
    unknown_datasets = list()
    known_nodes = list()
    for key in dataset_keys.copy():
        try:
            dsq = create_filtered_query(key, query)
            node = self._create_subtree_for_key(dsq, query)
        except MissingDependencies as unknown:
            unknown_datasets.append(unknown.missing_dependencies)
        else:
            known_nodes.append(node)
            self.add_child(self._root, node)

    for key in dataset_keys.copy():
        dataset_keys.discard(key)
    for node in known_nodes:
        dataset_keys.add(node.name)
    if unknown_datasets:
        raise MissingDependencies(unknown_datasets, "Unknown datasets:")

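# Hedged usage sketch, not taken from the source: `tree` stands for an
# already-built dependency tree exposing populate_with_keys(); only the call
# pattern and the in-place update of `dataset_keys` follow from the method above.
from satpy.node import MissingDependencies

requested = {"overview", 10.8}  # names/wavelengths to resolve
try:
    tree.populate_with_keys(requested)
except MissingDependencies as err:
    # err.missing_dependencies collects whatever could not be resolved
    print("Unknown datasets:", err.missing_dependencies)
else:
    print("Resolved:", requested)  # keys have been replaced by the node names
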
def _create_subtree_from_compositors(self, dataset_key, query):
    try:
        node = self._find_compositor(dataset_key, query)
        LOG.trace("Found composite:\n\tRequested: {}\n\tFound: {}".format(dataset_key, node and node.name))
    except KeyError:
        LOG.trace("Composite not found: {}".format(dataset_key))
        raise MissingDependencies({dataset_key})
    return node

def _get_unique_matching_id(self, matching_ids, dataset_key, query):
    """Get unique matching id from `matching_ids`, for a given `dataset_key` and some optional `query`."""
    all_ids = sum(matching_ids.values(), [])
    if len(all_ids) == 0:
        raise MissingDependencies({dataset_key})
    elif len(all_ids) == 1:
        result = all_ids[0]
    else:
        sorted_ids, distances = dataset_key.sort_dataids_with_preference(all_ids, query)
        try:
            result = self._get_unique_id_from_sorted_ids(sorted_ids, distances)
        except TooManyResults:
            LOG.trace("Too many datasets matching key {} in readers {}".format(dataset_key, matching_ids.keys()))
            raise TooManyResults("Too many keys matching: {}".format(dataset_key))
        except MissingDependencies:
            raise MissingDependencies({dataset_key})
    return result

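# Illustration only (the dictionary content is made up): `matching_ids` maps
# reader names to lists of candidate IDs, and the `sum(..., [])` call above
# concatenates those lists into one flat list before the zero/one/many check.
matching_ids = {"reader_a": ["id1"], "reader_b": ["id2", "id3"]}
all_ids = sum(matching_ids.values(), [])
assert all_ids == ["id1", "id2", "id3"]  # 0 -> missing, 1 -> unique, >1 -> sort by preference
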
def _get_subtree_for_existing_key(self, dsq):
    try:
        node = self.getitem(dsq)
        LOG.trace("Found exact dataset already loaded: {}".format(node.name))
        return node
    except KeyError:
        LOG.trace("Exact dataset {} isn't loaded, will try reader...".format(dsq))
        raise MissingDependencies({dsq})

def _get_subtree_for_existing_name(self, dsq):
    try:
        # assume that there is no such thing as a "better" composite
        # version so if we find any DataIDs already loaded then
        # we want to use them
        node = self[dsq]
        LOG.trace("Composite already loaded:\n\tRequested: {}\n\tFound: {}".format(dsq, node.name))
        return node
    except KeyError:
        # composite hasn't been loaded yet, let's load it below
        LOG.trace("Composite hasn't been loaded yet, will load: {}".format(dsq))
        raise MissingDependencies({dsq})

def _create_required_subtrees(self, parent, prereqs, query=None):
    """Determine required prerequisite Nodes for a composite.

    Args:
        parent (Node): Compositor node to add these prerequisites under.
        prereqs (sequence): Strings (names), floats (wavelengths), DataQuerys
            or Nodes to analyze.

    """
    prereq_nodes, unknown_datasets = self._create_prerequisite_subtrees(parent, prereqs, query)
    if unknown_datasets:
        raise MissingDependencies(unknown_datasets)
    return prereq_nodes

def _create_subtree_for_key(self, dataset_key, query=None):
    """Find the dependencies for *dataset_key*.

    Args:
        dataset_key (str, float, DataID, DataQuery): Dataset identifier to
            locate and find any additional dependencies for.
        query (DataQuery): Additional filter parameters. See
            `satpy.readers.get_key` for more details.

    """
    # 0 check if the *exact* dataset is already loaded
    try:
        node = self._get_subtree_for_existing_key(dataset_key)
    except MissingDependencies:
        # exact dataset isn't loaded, let's load it below
        pass
    else:
        return node

    # 1 try to get *best* dataset from reader
    try:
        node = self._create_subtree_from_reader(dataset_key, query)
    except TooManyResults:
        LOG.warning("Too many possible datasets to load for {}".format(dataset_key))
        raise MissingDependencies({dataset_key})
    except MissingDependencies:
        pass
    else:
        return node

    # 2 try to find a composite by name (any version of it is good enough)
    try:
        node = self._get_subtree_for_existing_name(dataset_key)
    except MissingDependencies:
        pass
    else:
        return node

    # 3 try to find a composite that matches
    try:
        node = self._create_subtree_from_compositors(dataset_key, query)
    except MissingDependencies:
        raise
    else:
        return node

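# The method above relies on try/except/else: `return node` sits in the `else`
# clause so it only runs when that lookup stage succeeded, while a
# MissingDependencies from one stage lets control fall through to the next.
# A stripped-down, hypothetical sketch of that cascade (it ignores the
# TooManyResults handling of stage 1; the stage callables are assumptions):
from satpy.node import MissingDependencies


def _resolve_with_stages(key, stages):
    """Try each lookup stage in order and return the first node found."""
    for stage in stages:
        try:
            node = stage(key)
        except MissingDependencies:
            continue  # this stage could not resolve the key, try the next one
        else:
            return node
    raise MissingDependencies({key})
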
def test_new_missing_dependencies(self):
    """Test new MissingDependencies."""
    from satpy.node import MissingDependencies
    error = MissingDependencies('bla')
    assert error.missing_dependencies == 'bla'

def test_new_missing_dependencies_with_message(self):
    """Test new MissingDependencies with a message."""
    from satpy.node import MissingDependencies
    error = MissingDependencies('bla', "This is a message")
    assert 'This is a message' in str(error)

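# The two tests above pin down the contract of MissingDependencies: the first
# constructor argument is exposed as `missing_dependencies` and an optional
# second argument shows up in str(error).  A minimal sketch that satisfies both
# tests (the real class in satpy.node may differ, e.g. in its __str__):
class MissingDependencies(RuntimeError):
    """Exception raised when dependencies could not be found."""

    def __init__(self, missing_dependencies, *args, **kwargs):
        """Store the missing dependencies and forward any message to RuntimeError."""
        super().__init__(*args, **kwargs)
        self.missing_dependencies = missing_dependencies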