def load(self, warn=True):
    '''Load the file and return a DataHolder of its datasets.

    Parameters:
        warn -- passed through to DataHolder (presumably controls warnings
                on duplicate/sanitised names -- TODO confirm against DataHolder)

    Returns a DataHolder built from (basename, dataset) pairs plus any
    metadata found in the loaded file handle.

    Raises io_exception if the number of names does not match the number
    of datasets.
    '''
    # Capture all error messages: silence Java-side stderr while the
    # underlying loader runs, restoring the original stream afterwards.
    oldErr = _system.err
    _system.setErr(_pstream(_NoOutputStream()))
    try:
        jdh = self._loadFile()
    finally:
        _system.setErr(oldErr)

    data = asDatasetList(jdh.getList())
    names = jdh.getNames()

    # Remove bits of path so sanitising works; names that are not existing
    # paths are kept verbatim.
    from os import path as _path
    basenames = [_path.basename(n) if _path.exists(n) else n for n in names]

    if len(data) != len(basenames):
        # Call-style raise (works on both Python 2 and 3, unlike the
        # old `raise exc, msg` comma form).
        raise io_exception("Number of names does not match number of datasets")

    metadata = None
    if self.load_metadata:
        meta = jdh.getMetadata()
        if meta:
            mnames = meta.metaNames
            if mnames:
                metadata = [(k, meta.getMetaValue(k)) for k in mnames]

    return DataHolder(zip(basenames, data), metadata, warn)
def load(self):
    '''Load the file and return a DataHolder of its datasets.

    Returns a DataHolder built from (basename, dataset) pairs plus any
    metadata found in the loaded file handle.

    Raises io_exception if the number of names does not match the number
    of datasets.
    '''
    # Capture all error messages: silence Java-side stderr while the
    # underlying loader runs, restoring the original stream afterwards.
    oldErr = _system.err
    _system.setErr(_pstream(_NoOutputStream()))
    try:
        jdh = self.loadFile()
    finally:
        _system.setErr(oldErr)

    data = asDatasetList(jdh.getList())
    names = jdh.getNames()

    # Remove bits of path so sanitising works; names that are not existing
    # paths are kept verbatim.
    from os import path as _path
    basenames = [_path.basename(n) if _path.exists(n) else n for n in names]

    if len(data) != len(basenames):
        # Call-style raise (works on both Python 2 and 3, unlike the
        # old `raise exc, msg` comma form).
        raise io_exception("Number of names does not match number of datasets")

    metadata = None
    if self.load_metadata:
        meta = jdh.getMetadata()
        if meta:
            mnames = meta.metaNames
            if mnames:
                metadata = [(k, meta.getMetaValue(k)) for k in mnames]

    return DataHolder(zip(basenames, data), metadata)
def __getitem__(self, key):
    '''Return a list of datasets in tree whose names match given key'''
    matched = self.grp.getDatasets(key)
    return asDatasetList(matched)