def save_fields(fname, flds, **kwargs):
    """ save a list of fields, filetype is inferred from fname """
    ftype = VFile.detect_type(fname, mode='w',
                              prefer=kwargs.pop('prefer', None))
    ftype.save_fields(fname, flds, **kwargs)

def save_grid(fname, grd, **kwargs):
    """ save a grid, filetype is inferred from fname """
    ftype = VFile.detect_type(fname, mode='w',
                              prefer=kwargs.pop('prefer', None))
    ftype.save_grid(fname, grd, **kwargs)

def save_field(fname, fld, **kwargs):
    """ save a field, filetype is inferred from fname """
    ftype = VFile.detect_type(fname)
    ftype.save_field(fname, fld, **kwargs)

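# ---------------------------------------------------------------------------
# Usage sketch for the save_* helpers above (illustrative only; `fld`, `flds`,
# and `grd` stand in for already-constructed field / grid objects, and the
# file names and the "hdf5" prefer value are made-up examples):
#
#     save_field("out.h5", fld)                # file type inferred from fname
#     save_fields("out.h5", flds)              # save a list of fields at once
#     save_grid("out.h5", grd, prefer="hdf5")  # 'prefer' breaks ties when
#                                              # more than one writer claims
#                                              # the same extension
# ---------------------------------------------------------------------------
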
def load_files(self, fnames, index_handle=True, file_type=None, prefer=None,
               force_reload=False, _add_ref=False, **kwargs):
    """Load files, and add them to the bucket

    Initialize the object before it's put into the list; whatever is
    returned is what gets stored. Returning None means object init
    failed, so it is not added to the _objs list.

    Parameters:
        fnames: a list of file names (can contain glob patterns)
        index_handle: ??
        file_type: a class that is a subclass of VFile, or a string
            name resolved through VFile.resolve_type; if given, use
            this file type and don't use the autodetect mechanism
        prefer: passed to VFile.detect_type when autodetecting the
            file type
        force_reload: if a file is already in the bucket, reload it
        kwargs: passed to the file constructor

    Returns:
        A list of VFile instances. The length may not be the same as
        the length of fnames, and the order may not be the same, in
        order to accommodate globs and file grouping.
    """
    orig_fnames = fnames

    if not isinstance(fnames, (list, tuple)):
        fnames = [fnames]
    file_lst = []

    # glob and convert to absolute paths
    globbed_fnames = []
    for fname in fnames:
        slglob = slice_globbed_filenames(fname)
        if isinstance(slglob, string_types):
            slglob = [slglob]
        globbed_fnames += slglob
        # print(">>", fname)
        # print("==", globbed_fnames)
        # expanded_fname = os.path.expanduser(os.path.expandvars(fname))
        # absfname = os.path.abspath(expanded_fname)
        # if '*' in absfname or '?' in absfname:
        #     globbed_fnames += glob(absfname)
        # else:
        #     globbed_fnames += [absfname]
        # Is it necessary to recall abspath here? We did it before
        # the glob to make sure it didn't start with a '.' since that
        # tells glob not to fill wildcards
    fnames = globbed_fnames

    # detect file types
    types_detected = OrderedDict()
    for i, fname in enumerate(fnames):
        _ftype = None
        if file_type is None:
            _ftype = VFile.detect_type(fname, prefer=prefer)
        elif isinstance(file_type, string_types):
            _ftype = VFile.resolve_type(file_type)
        else:
            _ftype = file_type

        if not _ftype:
            raise RuntimeError("Can't determine type "
                               "for {0}".format(fname))
        value = (fname, i)
        try:
            types_detected[_ftype].append(value)
        except KeyError:
            types_detected[_ftype] = [value]

    # see if the file's already been loaded, or load it, and add it
    # to the bucket and all that good stuff
    file_lst = []
    for ftype, vals in types_detected.items():
        names = [v[0] for v in vals]
        # group all file names of a given type
        groups = ftype.group_fnames(names)

        # iterate all the groups and add them
        for group in groups:
            f = None
            handle_name = ftype.collective_name(group)

            try:
                f = self[handle_name]
                if force_reload:
                    f.reload()
            except KeyError:
                try:
                    f = ftype(group, parent_bucket=self, **kwargs)
                    f.handle_name = handle_name
                except IOError as e:
                    s = " IOError on file: {0}\n".format(handle_name)
                    s += "    File Type: {0}\n".format(ftype)
                    s += "    {0}".format(str(e))
                    logger.warning(s)
                except ValueError as e:
                    # ... why am i explicitly catching ValueErrors?
                    # i'm probably breaking something by re-raising
                    # this exception, but i didn't document what :(
                    s = " ValueError on file load: {0}\n".format(handle_name)
                    s += "    File Type: {0}\n".format(ftype)
                    s += "    {0}".format(str(e))
                    logger.warning(s)
                    # re-raise the last exception
                    raise

                self.set_item([handle_name], f, index_handle=index_handle,
                              _add_ref=_add_ref)

            file_lst.append(f)

    if len(file_lst) == 0:
        logger.warning("No files loaded for '{0}', is the path "
                       "correct?".format(orig_fnames))
    return file_lst
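
# ---------------------------------------------------------------------------
# Usage sketch for load_files (illustrative only; `bucket` stands in for an
# instance of the containing bucket class and the glob pattern is made up):
#
#     vfiles = bucket.load_files("run.3d.*.xdmf", prefer=None,
#                                force_reload=False)
#     for f in vfiles:
#         print(f.handle_name)   # collective handle assigned per file group
#
# file_type may also be given as a string name (hypothetically, e.g. "xdmf"),
# in which case it is resolved through VFile.resolve_type() instead of the
# autodetection path.
# ---------------------------------------------------------------------------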