def __call__(self, parser, namespace, values, option_string=None):
    """Validate *values* as an existing directory and store it on the namespace as a `Path`."""
    directory = Path(values)
    if not directory.exists():
        raise argparse.ArgumentError(self, 'path does not exist')
    if not directory.is_dir():
        raise argparse.ArgumentError(self, 'path is no directory')
    setattr(namespace, self.dest, directory)
def __call__(self, parser, namespace, values, option_string=None):
    """Check that *values* names an existing directory and bind it to `self.dest` as a `Path`."""
    candidate = Path(values)
    if not candidate.exists():
        raise argparse.ArgumentError(self, "path does not exist")
    if not candidate.is_dir():
        raise argparse.ArgumentError(self, "path is no directory")
    setattr(namespace, self.dest, candidate)
def create(self, path, metadata, filter_=filter_hidden, object_class=None):
    """
    Create objects in CDSTAR and register them in the catalog.

    Note that we guess the mimetype based on the filename extension, using
    `mimetypes.guess_type`. Thus, it is the caller's responsibility to add custom or
    otherwise uncommon types to the list of known types using `mimetypes.add_type`.

    :param path: File or directory whose file(s) are to be uploaded.
    :param metadata: Metadata to register with each created object.
    :param filter_: Optional predicate selecting which files to upload; falsy means "all".
    :param object_class: Optional class for the created objects, passed through to `_create`.
    :return: Generator yielding `(filename, created, obj)` triples.
    """
    path = Path(path)
    if path.is_file():
        candidates = [path]
    elif path.is_dir():
        candidates = list(walk(path, mode='files'))
    else:
        raise ValueError('path must be a file or directory')  # pragma: no cover
    for candidate in candidates:
        if not filter_ or filter_(candidate):
            created, obj = self._create(candidate, metadata, object_class=object_class)
            yield candidate, created, obj
def in_dir(cls, d, empty_tables=False):
    """
    Instantiate a dataset from the metadata found in a directory, creating the
    directory first if it does not exist.

    :param d: Path of the directory (created if missing).
    :param empty_tables: If `True`, drop all tables from the instantiated dataset.
    :return: Instance created via `cls.from_metadata`.
    :raises ValueError: If `d` exists but is not a directory.
    """
    fname = Path(d)
    if not fname.exists():
        fname.mkdir()
    # Validate explicitly: `assert` is stripped under `python -O`, so it must not
    # be relied on for input validation.
    if not fname.is_dir():
        raise ValueError('{0} is not a directory'.format(fname))
    res = cls.from_metadata(fname)
    if empty_tables:
        del res.tables[:]
    return res
def from_metadata(cls, fname):
    """
    Instantiate a dataset from a metadata file, or from a directory in which case
    the default metadata for `cls` (shipped with the package) is used.

    :param fname: Path of a metadata file or of a directory.
    :return: Instance of the first matching module's class, or of `cls` as fallback.
    """
    fname = Path(fname)
    if not fname.is_dir():
        tablegroup = TableGroup.from_file(fname)
    else:
        name = '{0}{1}'.format(cls.__name__, MD_SUFFIX)
        tablegroup = TableGroup.from_file(pkg_path('modules', name))
        # adapt the path of the metadata file such that paths to tables are resolved
        # correctly:
        tablegroup._fname = fname.joinpath(name)
    for mod in get_modules():
        if mod.match(tablegroup):
            return mod.cls(tablegroup)
    return cls(tablegroup)
def datasets(args):
    """
    cldf datasets <DIR> [ATTRS]

    List all CLDF datasets in directory <DIR>
    """
    if len(args.args) < 1:
        raise ParserError('not enough arguments')
    d = Path(args.args[0])
    if not d.exists() or not d.is_dir():
        raise ParserError('%s is not an existing directory' % d)
    attrs = args.args[1:]
    for fname in sorted(d.glob('*' + MD_SUFFIX), key=lambda p: p.name):
        md = Metadata(load(fname))
        # The data file is either referenced by URL in the metadata, or inferred
        # from the metadata filename by stripping the metadata suffix.
        data = fname.parent.joinpath(md.get_table().url or fname.name[:-len(MD_SUFFIX)])
        if not data.exists():
            continue
        print(data)
        if attrs:
            maxlen = max(len(a) for a in attrs)
            for attr in attrs:
                if md.get(attr):
                    print(' %s %s' % ((attr + ':').ljust(maxlen + 1), md[attr]))
def from_metadata(cls, fname):
    """
    Instantiate a dataset from a metadata file, or from a directory in which case
    the default metadata for `cls` (shipped with the package) is used.

    :param fname: Path of a metadata file or of a directory.
    :return: Instance of the first matching module's class, or of `cls` as fallback.
    :raises ValueError: If more than one table maps to the same component type.
    """
    fname = Path(fname)
    if fname.is_dir():
        name = '{0}{1}'.format(cls.__name__, MD_SUFFIX)
        tablegroup = TableGroup.from_file(pkg_path('modules', name))
        # adapt the path of the metadata file such that paths to tables are resolved
        # correctly:
        tablegroup._fname = fname.joinpath(name)
    else:
        tablegroup = TableGroup.from_file(fname)
    # Count tables per recognized component type; `get_tabletype` raises
    # ValueError for tables that are not components, which we simply skip.
    type_counts = Counter()
    for table in tablegroup.tables:
        try:
            type_counts[Dataset.get_tabletype(table)] += 1
        except ValueError:
            pass
    if type_counts and type_counts.most_common(1)[0][1] > 1:
        raise ValueError('{0}: duplicate components!'.format(fname))
    for mod in get_modules():
        if mod.match(tablegroup):
            return mod.cls(tablegroup)
    return cls(tablegroup)