def list_repository(index):
    """List repository index content."""
    # Load the index document from the optional file or URL. By default, the
    # index referenced by the environment is loaded.
    loader = UrlLoader() if index is None else DictLoader(util.read_index(index))
    datasets = RepositoryManager(doc=loader.load()).find()
    headers = ['Identifier', 'Name', 'Description']
    # Track the maximum width for each column, seeded with the header widths.
    widths = [len(h) + 1 for h in headers]
    data = []
    # Emit rows sorted by dataset name.
    for ds in sorted(datasets, key=lambda d: d.name):
        row = [
            ds.identifier,
            ds.name,
            ds.description if ds.description is not None else ''
        ]
        for col, value in enumerate(row):
            widths[col] = max(widths[col], len(value) + 1)
        data.append(row)
    tp.table(data, headers=headers, width=widths, style='grid', out=util.TPrinter())
def list_datasets(basedir, db, index):
    """List local store content."""
    # Load the index document if a file or URL was given; otherwise fall back
    # to the default URL loader.
    loader = UrlLoader() if index is None else DictLoader(util.read_index(index))
    store = RefStore(basedir=basedir, loader=loader, connect_url=db)
    headers = ['Name', 'Size', 'Downloaded', 'Package']
    # Track the maximum width for each column, seeded with the header widths.
    widths = [len(h) + 1 for h in headers]
    data = []
    # Emit rows sorted by dataset name.
    for ds in sorted(store.list(), key=lambda d: d.name):
        # Render the download timestamp as 'YYYY-MM-DD HH:MM:SS'.
        downloaded = ' '.join(ds.created_at.isoformat()[:19].split('T'))
        row = [
            ds.identifier,
            '{:.2a}'.format(DataSize(ds.filesize)),
            downloaded,
            '{} {}'.format(ds.package_name, ds.package_version)
        ]
        for col, value in enumerate(row):
            widths[col] = max(widths[col], len(value) + 1)
        data.append(row)
    tp.table(data, headers=headers, width=widths, style='grid', out=util.TPrinter())
def download_dataset(basedir, db, index, key): """List local store content.""" # Read the index of given. loader = DictLoader( util.read_index(index)) if index is not None else UrlLoader() store = RefStore(basedir=basedir, loader=loader, connect_url=db) store.download(key)
def show_dataset(basedir, db, index, raw, key):
    """Show descriptor for downloaded dataset."""
    # Read the index from the optional file if given; by default the URL
    # loader reads the index that is referenced by the environment.
    loader = UrlLoader() if index is None else DictLoader(util.read_index(index))
    store = RefStore(basedir=basedir, loader=loader, connect_url=db)
    dataset = store.load(key)
    util.print_dataset(dataset=dataset, raw=raw)
def show_dataset(index, raw, key):
    """Show dataset descriptor from repository index."""
    # Load the index document from the optional file or URL. By default, the
    # index referenced by the environment is loaded.
    loader = UrlLoader() if index is None else DictLoader(util.read_index(index))
    repo = RepositoryManager(doc=loader.load())
    util.print_dataset(dataset=repo.get(key), raw=raw)
def remove_dataset(basedir, db, index, force, key):
    """Remove dataset from local store."""
    if not force:  # pragma: no cover
        # Ask for interactive confirmation unless the force flag was set;
        # a negative answer aborts the command.
        msg = "Do you really want to remove dataset '{}'".format(key)
        click.confirm(msg, default=True, abort=True)
    # Read the index from the optional file if given; by default the URL
    # loader reads the index that is referenced by the environment.
    loader = UrlLoader() if index is None else DictLoader(util.read_index(index))
    store = RefStore(basedir=basedir, loader=loader, connect_url=db)
    store.remove(key)
def validate_index_file(file):
    """Validate repository index file."""
    # Raises on an invalid document; reaching the echo means success.
    doc = util.read_index(file)
    validate(doc=doc)
    click.echo('Document is valid.')
def test_read_remote_index(mock_response):
    # With the HTTP response mocked, reading a remote index should yield a
    # parsed (non-None) document.
    assert read_index(filename='index.json') is not None