def cmd_unfold_pipe(args):
    '''unfold an archive and write the items to stdout'''
    arc = args.tote
    out = sys.stdout
    conn = tote.connect(arc)
    with conn.read_file(arc) as items_in:
        with conn.write_stream(out) as items_out:
            items_out.writeall(items_in)


def cmd_import_blobs(args):
    '''read files and save their contents as blobs in the store'''
    conn = tote.connect()
    for f in args.file:
        print(f, '...')
        p = Path(f)
        with open(p, 'rb') as i:
            b = i.read()
            conn.store.save(b)


def cmd_fold_pipe(args):
    '''fold an archive and write the folded items to stdout'''
    arc = args.tote
    out = sys.stdout.buffer
    conn = tote.connect()
    with conn.read_file(arc, unfold=False) as i:
        with conn.write_stream(out) as o:
            f = conn.fold(i)
            o.writeall(f)


def cmd_put(args):
    '''put files (or stdin) into the store and write item records to stdout'''
    conn = tote.connect()
    out = conn.write_stream(sys.stdout)
    if args.path:
        paths = [Path(path) for path in args.path]
        for path in tote.list_trees(paths, recurse=args.recursive):
            out.write(conn.put_file(path))
    else:
        out.write(conn.put_stream(sys.stdin.buffer))


def cmd_list(args):
    '''list the items in an archive'''
    arc = args.tote
    files = args.file
    conn = tote.connect(arc)
    with conn.read_file(arc) as items:
        if files:
            items = tote._filter_items_by_names(items, files)
        for item in items:
            print(item.type, item.size, item.name)


def cmd_append(args):
    '''append files to an existing archive'''
    arc = args.tote
    files = args.file
    recursive = args.recursive
    u = sys.stdout
    conn = tote.connect()
    with conn.append_file(arc) as o:
        for file in tote.list_trees(files, recurse=recursive):
            print('append', file, file=u)
            f = conn.put_file(file)
            o.write(f)


def cmd_status(args):
    '''show what would change relative to the most recent checkin (dry run)'''
    conn = tote.connect()
    last_checkin = conn._most_recent_checkin()
    tote.tote_update(
        arc=last_checkin,
        paths=[conn.workdir_path],
        relative_to=conn.workdir_path,
        base_path=conn.workdir_path,
        conn=conn,
        dryrun=True,
    )


def cmd_unfold(args):
    '''unfold an archive in place, keeping the old version in a history archive'''
    arc = args.tote
    conn = tote.connect(arc)
    # write the unfolded items to a temporary .part file
    with conn.read_file(arc) as items_in:
        with conn.write_file(arc + '.part') as items_out:
            items_out.writeall(items_in)
    # record the current archive in the .history archive, then replace it
    with conn.append_file(arc + '.history') as items_out:
        items_out.write(conn.put_file(arc))
    os.rename(arc + '.part', arc)


def cmd_extract(args):
    '''extract files from archive'''
    arc = args.tote
    files = args.file
    to = args.to
    conn = tote.connect(arc)
    with conn.read_file(arc) as items_in:
        if files:
            items_in = tote._filter_items_by_names(items_in, files)
        for item in items_in:
            print(item.name)
            conn.get_file(item, out_base=to)


def cmd_cat(args):
    '''write the contents of archived items (or of items read from stdin) to stdout'''
    conn = tote.connect()
    out = sys.stdout.buffer
    if args.tote:
        for file in args.tote:
            with conn.read_file(file) as items_in:
                for item in items_in:
                    for chunk in conn.get_chunks(item):
                        out.write(chunk)
    else:
        for item in conn.read_stream(sys.stdin):
            for chunk in conn.get_chunks(item):
                out.write(chunk)


def cmd_checkin(args):
    '''check in the working directory as a new timestamped archive'''
    conn = tote.connect()
    arc_output = conn.tote_path / 'checkin' / 'default' / (
        tote.format_timestamp(safe=True) + '.tote')
    arc_output.parent.mkdir(parents=True, exist_ok=True)
    last_checkin = conn._most_recent_checkin()
    tote.tote_update(
        arc=last_checkin,
        arc_output=arc_output,
        paths=[conn.workdir_path],
        relative_to=conn.workdir_path,
        base_path=conn.workdir_path,
        conn=conn,
    )


def cmd_blob_cat(args):
    '''write a single blob from the store to stdout'''
    conn = tote.connect()
    blob = conn.store.load_blob(args.data)
    sys.stdout.buffer.write(blob)


def cmd_show_workdir(args):
    '''print the workdir and store locations for the current connection'''
    conn = tote.connect(args.path)
    print('workdir_path =', conn.workdir_path)
    print('store_path =', conn.store_path)
    print('store =', conn.store)


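# The CLI entry point is not shown in this section. The sketch below is an
# assumption rather than the project's actual wiring: it illustrates one way
# the cmd_* handlers above could be attached to argparse subcommands. The
# _build_parser/_main names, the subcommand names, and the '--to' default are
# hypothetical; the argument names mirror the attributes each handler reads
# from `args`.
def _build_parser():
    import argparse  # local import keeps this sketch self-contained

    parser = argparse.ArgumentParser(prog='tote')
    sub = parser.add_subparsers(dest='command', required=True)

    p = sub.add_parser('list', help='list the items in an archive')
    p.add_argument('tote')
    p.add_argument('file', nargs='*')
    p.set_defaults(func=cmd_list)

    p = sub.add_parser('extract', help='extract files from an archive')
    p.add_argument('tote')
    p.add_argument('file', nargs='*')
    p.add_argument('--to', default='.')  # assumed default extraction root
    p.set_defaults(func=cmd_extract)

    p = sub.add_parser('status', help='dry-run diff against the last checkin')
    p.set_defaults(func=cmd_status)

    return parser


def _main(argv=None):
    # parse the command line and dispatch to the selected cmd_* handler
    args = _build_parser().parse_args(argv)
    args.func(args)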