def do_init(self, args):
    """Initialize an empty repository"""
    print('Initializing repository at "%s"' % args.repository.orig)
    repository = self.open_repository(args.repository, create=True)
    key = key_creator(repository, args)
    manifest = Manifest(key, repository)
    manifest.key = key
    manifest.write()
    repository.commit()
    return self.exit_code
def do_init(self, args):
    """Initialize an empty repository"""
    print('Initializing repository at "%s"' % args.repository.orig)
    repository = self.open_repository(args.repository, create=True, exclusive=True)
    key = key_creator(repository, args)
    manifest = Manifest(key, repository)
    manifest.key = key
    manifest.write()
    repository.commit()
    return self.exit_code
def do_init(self, args):
    """Initialize an empty repository"""
    print('Initializing repository at "%s"' % args.repository.orig)
    repository = self.open_repository(args.repository, create=True, exclusive=True)
    key = key_creator(repository, args)
    manifest = Manifest(key, repository)
    manifest.key = key
    manifest.write()
    repository.commit()
    Cache(repository, key, manifest, warn_if_unencrypted=False)
    return self.exit_code
def do_mount(self, args):
    """Mount archive or an entire repository as a FUSE filesystem"""
    try:
        from attic.fuse import AtticOperations
    except ImportError:
        self.print_error('the "llfuse" module is required to use this feature')
        return self.exit_code

    if not os.path.isdir(args.mountpoint) or not os.access(args.mountpoint, os.R_OK | os.W_OK | os.X_OK):
        self.print_error('%s: Mountpoint must be a writable directory' % args.mountpoint)
        return self.exit_code

    repository = self.open_repository(args.src)
    manifest, key = Manifest.load(repository)
    if args.src.archive:
        archive = Archive(repository, key, manifest, args.src.archive)
    else:
        archive = None
    operations = AtticOperations(key, repository, manifest, archive)
    self.print_verbose("Mounting filesystem")
    try:
        operations.mount(args.mountpoint, args.options, args.foreground)
    except RuntimeError:
        # Relevant error message already printed to stderr by fuse
        self.exit_code = 1
    return self.exit_code
def do_list(self, args):
    """List archive or repository contents"""
    repository = self.open_repository(args.src)
    manifest, key = Manifest.load(repository)
    if args.src.archive:
        tmap = {1: 'p', 2: 'c', 4: 'd', 6: 'b', 0o10: '-', 0o12: 'l', 0o14: 's'}
        archive = Archive(repository, key, manifest, args.src.archive)
        for item in archive.iter_items():
            type = tmap.get(item[b'mode'] // 4096, '?')
            mode = format_file_mode(item[b'mode'])
            size = 0
            if type == '-':
                try:
                    size = sum(size for _, size, _ in item[b'chunks'])
                except KeyError:
                    pass
            mtime = format_time(datetime.fromtimestamp(bigint_to_int(item[b'mtime']) / 1e9))
            if b'source' in item:
                if type == 'l':
                    extra = ' -> %s' % item[b'source']
                else:
                    type = 'h'
                    extra = ' link to %s' % item[b'source']
            else:
                extra = ''
            print('%s%s %-6s %-6s %8d %s %s%s' % (type, mode, item[b'user'] or item[b'uid'],
                                                  item[b'group'] or item[b'gid'], size, mtime,
                                                  remove_surrogates(item[b'path']), extra))
    else:
        for archive in sorted(Archive.list_archives(repository, key, manifest), key=attrgetter('ts')):
            print(format_archive(archive))
    return self.exit_code
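# Note (added for clarity, not part of the original listing): the tmap keys above are
# item[b'mode'] // 4096, i.e. the S_IFMT file-type bits of st_mode shifted down by 12
# bits, so each key is simply the leading octal digit(s) of the corresponding mode
# constant.
import stat

assert stat.S_IFIFO // 4096 == 1      # 'p' named pipe
assert stat.S_IFDIR // 4096 == 4      # 'd' directory
assert stat.S_IFREG // 4096 == 0o10   # '-' regular file
assert stat.S_IFLNK // 4096 == 0o12   # 'l' symbolic link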
def do_change_passphrase(self, args):
    """Change repository key file passphrase"""
    repository = self.open_repository(args.repository)
    manifest, key = Manifest.load(repository)
    key.change_passphrase()
    return self.exit_code
def rebuild_manifest(self):
    """Rebuild the manifest object if it is missing

    Iterates through all objects in the repository looking for archive metadata blocks.
    """
    self.report_progress('Rebuilding missing manifest, this might take some time...', error=True)
    manifest = Manifest(self.key, self.repository)
    for chunk_id, _ in self.chunks.iteritems():
        cdata = self.repository.get(chunk_id)
        data = self.key.decrypt(chunk_id, cdata)
        # Some basic sanity checks of the payload before feeding it into msgpack
        if len(data) < 2 or ((data[0] & 0xf0) != 0x80) or ((data[1] & 0xe0) != 0xa0):
            continue
        if not b'cmdline' in data or not b'\xa7version\x01' in data:
            continue
        try:
            archive = msgpack.unpackb(data)
        except:
            continue
        if isinstance(archive, dict) and b'items' in archive and b'cmdline' in archive:
            self.report_progress('Found archive ' + archive[b'name'].decode('utf-8'), error=True)
            manifest.archives[archive[b'name'].decode('utf-8')] = {b'id': chunk_id, b'time': archive[b'time']}
    self.report_progress('Manifest rebuild complete', error=True)
    return manifest
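# Note (added for clarity, not part of the original listing): the byte-level sanity
# checks in rebuild_manifest work because msgpack encodes a small dict as a fixmap
# (first byte 0x80-0x8f) and a short string key as a fixstr (first byte 0xa0-0xbf);
# b'\xa7version\x01' is the key "version" (fixstr of length 7) followed by the
# positive fixint 1. A minimal probe with string keys:
import msgpack

blob = msgpack.packb({'version': 1, 'cmdline': ['attic', 'create']})
assert (blob[0] & 0xf0) == 0x80      # fixmap marker for a small map
assert (blob[1] & 0xe0) == 0xa0      # fixstr marker for the first (short) key
assert b'\xa7version\x01' in blob    # the "version": 1 pair appears verbatim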
def do_mount(self, args):
    """Mount archive or an entire repository as a FUSE filesystem"""
    try:
        from attic.fuse import AtticOperations
    except ImportError:
        self.print_error('the "llfuse" module is required to use this feature')
        return self.exit_code

    if not os.path.isdir(args.mountpoint) or not os.access(args.mountpoint, os.R_OK | os.W_OK | os.X_OK):
        self.print_error('%s: Mountpoint must be a writable directory' % args.mountpoint)
        return self.exit_code

    repository = self.open_repository(args.src)
    manifest, key = Manifest.load(repository)
    if args.src.archive:
        archive = Archive(repository, key, manifest, args.src.archive)
    else:
        archive = None
    operations = AtticOperations(key, repository, manifest, archive)
    self.print_verbose("Mounting filesystem")
    try:
        operations.mount(args.mountpoint, args.options, args.foreground)
    except RuntimeError:
        # Relevant error message already printed to stderr by fuse
        self.exit_code = 1
    return self.exit_code
def do_extract(self, args):
    """Extract archive contents"""
    repository = self.open_repository(args.archive)
    manifest, key = Manifest.load(repository)
    archive = Archive(repository, key, manifest, args.archive.archive,
                      numeric_owner=args.numeric_owner)
    patterns = adjust_patterns(args.paths, args.excludes)
    dirs = []
    for item, peek in archive.iter_items(lambda item: not exclude_path(item[b'path'], patterns)):
        while dirs and not item[b'path'].startswith(dirs[-1][b'path']):
            archive.extract_item(dirs.pop(-1))
        self.print_verbose(remove_surrogates(item[b'path']))
        try:
            if stat.S_ISDIR(item[b'mode']):
                dirs.append(item)
                archive.extract_item(item, restore_attrs=False)
            else:
                archive.extract_item(item, peek=peek)
        except IOError as e:
            self.print_error('%s: %s', remove_surrogates(item[b'path']), e)
    while dirs:
        archive.extract_item(dirs.pop(-1))
    return self.exit_code
def do_verify(self, args):
    """Verify archive consistency"""
    repository = self.open_repository(args.archive)
    manifest, key = Manifest.load(repository)
    archive = Archive(repository, key, manifest, args.archive.archive)
    patterns = adjust_patterns(args.paths, args.excludes)

    def start_cb(item):
        self.print_verbose('%s ...', remove_surrogates(item[b'path']), newline=False)

    def result_cb(item, success):
        if success:
            self.print_verbose('OK')
        else:
            self.print_verbose('ERROR')
            self.print_error('%s: verification failed' % remove_surrogates(item[b'path']))

    for item, peek in archive.iter_items(lambda item: not exclude_path(item[b'path'], patterns)):
        if stat.S_ISREG(item[b'mode']) and b'chunks' in item:
            archive.verify_file(item, start_cb, result_cb, peek=peek)
    return self.exit_code
def do_extract(self, args):
    """Extract archive contents"""
    repository = self.open_repository(args.archive)
    manifest, key = Manifest.load(repository)
    archive = Archive(repository, key, manifest, args.archive.archive,
                      numeric_owner=args.numeric_owner)
    patterns = adjust_patterns(args.paths, args.excludes)
    dirs = []
    for item, peek in archive.iter_items(lambda item: not exclude_path(item[b'path'], patterns)):
        while dirs and not item[b'path'].startswith(dirs[-1][b'path']):
            archive.extract_item(dirs.pop(-1))
        self.print_verbose(remove_surrogates(item[b'path']))
        try:
            if stat.S_ISDIR(item[b'mode']):
                dirs.append(item)
                archive.extract_item(item, restore_attrs=False)
            else:
                archive.extract_item(item, peek=peek)
        except IOError as e:
            self.print_error('%s: %s', remove_surrogates(item[b'path']), e)
    while dirs:
        archive.extract_item(dirs.pop(-1))
    return self.exit_code
def do_prune(self, args):
    """Prune repository archives according to specified rules"""
    repository = self.open_repository(args.repository)
    manifest, key = Manifest.load(repository)
    cache = Cache(repository, key, manifest)
    archives = list(sorted(Archive.list_archives(repository, key, manifest, cache),
                           key=attrgetter('ts'), reverse=True))
    if args.hourly + args.daily + args.weekly + args.monthly + args.yearly == 0:
        self.print_error('At least one of the "hourly", "daily", "weekly", "monthly" or "yearly" '
                         'settings must be specified')
        return 1
    if args.prefix:
        archives = [archive for archive in archives if archive.name.startswith(args.prefix)]
    keep = []
    if args.hourly:
        keep += prune_split(archives, '%Y-%m-%d %H', args.hourly)
    if args.daily:
        keep += prune_split(archives, '%Y-%m-%d', args.daily, keep)
    if args.weekly:
        keep += prune_split(archives, '%G-%V', args.weekly, keep)
    if args.monthly:
        keep += prune_split(archives, '%Y-%m', args.monthly, keep)
    if args.yearly:
        keep += prune_split(archives, '%Y', args.yearly, keep)
    keep.sort(key=attrgetter('ts'), reverse=True)
    to_delete = [a for a in archives if a not in keep]
    for archive in keep:
        self.print_verbose('Keeping archive "%s"' % archive.name)
    for archive in to_delete:
        self.print_verbose('Pruning archive "%s"', archive.name)
        archive.delete(cache)
    return self.exit_code
def do_create(self, args):
    """Create new archive"""
    t0 = datetime.now()
    repository = self.open_repository(args.archive, exclusive=True)
    manifest, key = Manifest.load(repository)
    cache = Cache(repository, key, manifest, do_files=args.cache_files)
    archive = Archive(repository, key, manifest, args.archive.archive, cache=cache,
                      create=True, checkpoint_interval=args.checkpoint_interval,
                      numeric_owner=args.numeric_owner, progress=args.progress)
    # Add cache dir to inode_skip list
    skip_inodes = set()
    try:
        st = os.stat(get_cache_dir())
        skip_inodes.add((st.st_ino, st.st_dev))
    except IOError:
        pass
    # Add local repository dir to inode_skip list
    if not args.archive.host:
        try:
            st = os.stat(args.archive.path)
            skip_inodes.add((st.st_ino, st.st_dev))
        except IOError:
            pass
    for path in args.paths:
        if path == '-':  # stdin
            path = 'stdin'
            self.print_verbose(path)
            try:
                archive.process_stdin(path, cache)
            except IOError as e:
                self.print_error('%s: %s', path, e)
            continue
        path = os.path.normpath(path)
        if args.dontcross:
            try:
                restrict_dev = os.lstat(path).st_dev
            except OSError as e:
                self.print_error('%s: %s', path, e)
                continue
        else:
            restrict_dev = None
        self._process(archive, cache, args.excludes, args.exclude_caches, skip_inodes,
                      path, restrict_dev)
    archive.save(timestamp=args.timestamp)
    if args.progress:
        archive.stats.show_progress(final=True)
    if args.stats:
        t = datetime.now()
        diff = t - t0
        print('-' * 78)
        print('Archive name: %s' % args.archive.archive)
        print('Archive fingerprint: %s' % hexlify(archive.id).decode('ascii'))
        print('Start time: %s' % t0.strftime('%c'))
        print('End time: %s' % t.strftime('%c'))
        print('Duration: %s' % format_timedelta(diff))
        print('Number of files: %d' % archive.stats.nfiles)
        archive.stats.print_('This archive:', cache)
        print('-' * 78)
    return self.exit_code
def do_delete(self, args):
    """Delete archive"""
    repository = self.open_repository(args.archive)
    manifest, key = Manifest.load(repository)
    cache = Cache(repository, key, manifest)
    archive = Archive(repository, key, manifest, args.archive.archive, cache=cache)
    archive.delete(cache)
    return self.exit_code
def do_create(self, args):
    """Create new archive"""
    t0 = datetime.now()
    repository = self.open_repository(args.archive, exclusive=True)
    manifest, key = Manifest.load(repository)
    cache = Cache(repository, key, manifest)
    archive = Archive(repository, key, manifest, args.archive.archive, cache=cache,
                      create=True, checkpoint_interval=args.checkpoint_interval,
                      numeric_owner=args.numeric_owner)
    # Add Attic cache dir to inode_skip list
    skip_inodes = set()
    try:
        st = os.stat(get_cache_dir())
        skip_inodes.add((st.st_ino, st.st_dev))
    except IOError:
        pass
    # Add local repository dir to inode_skip list
    if not args.archive.host:
        try:
            st = os.stat(args.archive.path)
            skip_inodes.add((st.st_ino, st.st_dev))
        except IOError:
            pass
    for path in args.paths:
        path = os.path.normpath(path)
        if args.dontcross:
            try:
                restrict_dev = os.lstat(path).st_dev
            except OSError as e:
                self.print_error('%s: %s', path, e)
                continue
        else:
            restrict_dev = None
        excludes = adjust_exclude_patterns(path, args.excludes)
        self._process(archive, cache, excludes, args.exclude_caches, skip_inodes,
                      path, restrict_dev)
    archive.save()
    if args.stats:
        t = datetime.now()
        diff = t - t0
        print('-' * 78)
        print('Archive name: %s' % args.archive.archive)
        print('Archive fingerprint: %s' % hexlify(archive.id).decode('ascii'))
        print('Start time: %s' % t0.strftime('%c'))
        print('End time: %s' % t.strftime('%c'))
        print('Duration: %s' % format_timedelta(diff))
        print('Number of files: %d' % archive.stats.nfiles)
        archive.stats.print_('This archive:', cache)
        print('-' * 78)
    return self.exit_code
def do_rename(self, args):
    """Rename an existing archive"""
    repository = self.open_repository(args.archive, exclusive=True)
    manifest, key = Manifest.load(repository)
    cache = Cache(repository, key, manifest)
    archive = Archive(repository, key, manifest, args.archive.archive, cache=cache)
    archive.rename(args.name)
    manifest.write()
    repository.commit()
    cache.commit()
    return self.exit_code
def do_prune(self, args):
    """Prune repository archives according to specified rules"""
    repository = self.open_repository(args.repository, exclusive=True)
    manifest, key = Manifest.load(repository)
    cache = Cache(repository, key, manifest)
    archives = list(sorted(Archive.list_archives(repository, key, manifest, cache),
                           key=attrgetter('ts'), reverse=True))
    if args.hourly + args.daily + args.weekly + args.monthly + args.yearly == 0 and args.within is None:
        self.print_error('At least one of the "within", "hourly", "daily", "weekly", "monthly" or "yearly" '
                         'settings must be specified')
        return 1
    if args.prefix:
        archives = [archive for archive in archives if archive.name.startswith(args.prefix)]
    keep = []
    if args.within:
        keep += prune_within(archives, args.within)
    if args.hourly:
        keep += prune_split(archives, '%Y-%m-%d %H', args.hourly, keep)
    if args.daily:
        keep += prune_split(archives, '%Y-%m-%d', args.daily, keep)
    if args.weekly:
        keep += prune_split(archives, '%G-%V', args.weekly, keep)
    if args.monthly:
        keep += prune_split(archives, '%Y-%m', args.monthly, keep)
    if args.yearly:
        keep += prune_split(archives, '%Y', args.yearly, keep)
    keep.sort(key=attrgetter('ts'), reverse=True)
    to_delete = [a for a in archives if a not in keep]
    stats = Statistics()
    for archive in keep:
        self.print_verbose('Keeping archive: %s' % format_archive(archive))
    for archive in to_delete:
        if args.dry_run:
            self.print_verbose('Would prune: %s' % format_archive(archive))
        else:
            self.print_verbose('Pruning archive: %s' % format_archive(archive))
            archive.delete(stats)
    if to_delete and not args.dry_run:
        manifest.write()
        repository.commit()
        cache.commit()
    if args.stats:
        stats.print_('Deleted data:', cache)
    return self.exit_code
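# Illustrative sketch (an assumption about prune_split's behavior, not the project's
# own implementation): each call above is taken to keep the newest archive of each
# strftime period, up to n periods, skipping archives an earlier rule already kept --
# which is why the growing `keep` list is threaded through the calls.
from operator import attrgetter

def prune_split_sketch(archives, format, n, skip=()):
    keep, last_period = [], None
    for a in sorted(archives, key=attrgetter('ts'), reverse=True):
        period = a.ts.strftime(format)
        if period != last_period:
            last_period = period
            if a not in skip:
                keep.append(a)
                if len(keep) == n:
                    break
    return keep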
def do_delete(self, args):
    """Delete an existing archive"""
    repository = self.open_repository(args.archive, exclusive=True)
    manifest, key = Manifest.load(repository)
    cache = Cache(repository, key, manifest)
    archive = Archive(repository, key, manifest, args.archive.archive, cache=cache)
    stats = Statistics()
    archive.delete(stats)
    manifest.write()
    repository.commit()
    cache.commit()
    if args.stats:
        stats.print_('Deleted data:', cache)
    return self.exit_code
def check(self, repository, repair=False):
    self.report_progress('Starting archive consistency check...')
    self.repair = repair
    self.repository = repository
    self.init_chunks()
    self.key = self.identify_key(repository)
    if not Manifest.MANIFEST_ID in self.chunks:
        self.manifest = self.rebuild_manifest()
    else:
        self.manifest, _ = Manifest.load(repository, key=self.key)
    self.rebuild_refcounts()
    self.verify_chunks()
    if not self.error_found:
        self.report_progress('Archive consistency check complete, no problems found.')
    return self.repair or not self.error_found
def do_create(self, args):
    """Create new archive"""
    t0 = datetime.now()
    repository = self.open_repository(args.archive)
    manifest, key = Manifest.load(repository)
    cache = Cache(repository, key, manifest)
    archive = Archive(repository, key, manifest, args.archive.archive, cache=cache,
                      create=True, checkpoint_interval=args.checkpoint_interval,
                      numeric_owner=args.numeric_owner)
    # Add Attic cache dir to inode_skip list
    skip_inodes = set()
    try:
        st = os.stat(get_cache_dir())
        skip_inodes.add((st.st_ino, st.st_dev))
    except IOError:
        pass
    # Add local repository dir to inode_skip list
    if not args.archive.host:
        try:
            st = os.stat(args.archive.path)
            skip_inodes.add((st.st_ino, st.st_dev))
        except IOError:
            pass
    for path in args.paths:
        path = os.path.normpath(path)
        if args.dontcross:
            try:
                restrict_dev = os.lstat(path).st_dev
            except OSError as e:
                self.print_error('%s: %s', path, e)
                continue
        else:
            restrict_dev = None
        self._process(archive, cache, args.excludes, skip_inodes, path, restrict_dev)
    archive.save()
    if args.stats:
        t = datetime.now()
        diff = t - t0
        print('-' * 40)
        print('Archive name: %s' % args.archive.archive)
        print('Archive fingerprint: %s' % hexlify(archive.id).decode('ascii'))
        print('Start time: %s' % t0.strftime('%c'))
        print('End time: %s' % t.strftime('%c'))
        print('Duration: %s' % format_timedelta(diff))
        archive.stats.print_()
        print('-' * 40)
    return self.exit_code
def do_extract(self, args):
    """Extract archive contents"""
    # be restrictive when restoring files, restore permissions later
    if sys.getfilesystemencoding() == 'ascii':
        print('Warning: File system encoding is "ascii", extracting non-ascii filenames will not be supported.')
    os.umask(0o077)
    repository = self.open_repository(args.archive)
    manifest, key = Manifest.load(repository)
    archive = Archive(repository, key, manifest, args.archive.archive,
                      numeric_owner=args.numeric_owner)
    patterns = adjust_include_patterns(args.paths, args.excludes)
    dry_run = args.dry_run
    strip_components = args.strip_components
    dirs = []
    for item in archive.iter_items(lambda item: not exclude_path(item[b'path'], patterns), preload=True):
        orig_path = item[b'path']
        if strip_components:
            item[b'path'] = os.sep.join(orig_path.split(os.sep)[strip_components:])
            if not item[b'path']:
                continue
        if not args.dry_run:
            while dirs and not item[b'path'].startswith(dirs[-1][b'path']):
                archive.extract_item(dirs.pop(-1))
        self.print_verbose(remove_surrogates(orig_path))
        try:
            if dry_run:
                archive.extract_item(item, dry_run=True)
            else:
                if stat.S_ISDIR(item[b'mode']):
                    dirs.append(item)
                    archive.extract_item(item, restore_attrs=False)
                else:
                    archive.extract_item(item)
        except IOError as e:
            self.print_error('%s: %s', remove_surrogates(orig_path), e)
    if not args.dry_run:
        while dirs:
            archive.extract_item(dirs.pop(-1))
    return self.exit_code
def do_info(self, args):
    """Show archive details such as disk space used"""
    repository = self.open_repository(args.archive)
    manifest, key = Manifest.load(repository)
    cache = Cache(repository, key, manifest)
    archive = Archive(repository, key, manifest, args.archive.archive, cache=cache)
    stats = archive.calc_stats(cache)
    print('Name:', archive.name)
    print('Fingerprint: %s' % hexlify(archive.id).decode('ascii'))
    print('Hostname:', archive.metadata[b'hostname'])
    print('Username:', archive.metadata[b'username'])
    print('Time: %s' % to_localtime(archive.ts).strftime('%c'))
    print('Command line:', remove_surrogates(' '.join(archive.metadata[b'cmdline'])))
    print('Number of files: %d' % stats.nfiles)
    stats.print_('This archive:', cache)
    return self.exit_code
def do_prune(self, args):
    """Prune repository archives according to specified rules"""
    repository = self.open_repository(args.repository)
    manifest, key = Manifest.load(repository)
    cache = Cache(repository, key, manifest)
    archives = list(sorted(Archive.list_archives(repository, key, manifest, cache),
                           key=attrgetter('ts'), reverse=True))
    if args.hourly + args.daily + args.weekly + args.monthly + args.yearly == 0 and args.within is None:
        self.print_error('At least one of the "within", "hourly", "daily", "weekly", "monthly" or "yearly" '
                         'settings must be specified')
        return 1
    if args.prefix:
        archives = [archive for archive in archives if archive.name.startswith(args.prefix)]
    keep = []
    if args.within:
        keep += prune_within(archives, args.within)
    if args.hourly:
        keep += prune_split(archives, '%Y-%m-%d %H', args.hourly, keep)
    if args.daily:
        keep += prune_split(archives, '%Y-%m-%d', args.daily, keep)
    if args.weekly:
        keep += prune_split(archives, '%G-%V', args.weekly, keep)
    if args.monthly:
        keep += prune_split(archives, '%Y-%m', args.monthly, keep)
    if args.yearly:
        keep += prune_split(archives, '%Y', args.yearly, keep)
    keep.sort(key=attrgetter('ts'), reverse=True)
    to_delete = [a for a in archives if a not in keep]
    stats = Statistics()
    for archive in keep:
        self.print_verbose('Keeping archive: %s' % format_archive(archive))
    for archive in to_delete:
        if args.dry_run:
            self.print_verbose('Would prune: %s' % format_archive(archive))
        else:
            self.print_verbose('Pruning archive: %s' % format_archive(archive))
            archive.delete(stats)
    if to_delete and not args.dry_run:
        manifest.write()
        repository.commit()
        cache.commit()
    if args.stats:
        stats.print_('Deleted data:', cache)
    return self.exit_code
def do_list(self, args):
    """List archive or repository contents"""
    repository = self.open_repository(args.src)
    manifest, key = Manifest.load(repository)
    if args.src.archive:
        tmap = {1: 'p', 2: 'c', 4: 'd', 6: 'b', 0o10: '-', 0o12: 'l', 0o14: 's'}
        archive = Archive(repository, key, manifest, args.src.archive)
        for item, _ in archive.iter_items():
            type = tmap.get(item[b'mode'] // 4096, '?')
            mode = format_file_mode(item[b'mode'])
            size = 0
            if type == '-':
                try:
                    size = sum(size for _, size, _ in item[b'chunks'])
                except KeyError:
                    pass
            mtime = format_time(datetime.fromtimestamp(item[b'mtime'] / 10**9))
            if b'source' in item:
                if type == 'l':
                    extra = ' -> %s' % item[b'source']
                else:
                    type = 'h'
                    extra = ' link to %s' % item[b'source']
            else:
                extra = ''
            print('%s%s %-6s %-6s %8d %s %s%s' % (type, mode, item[b'user'] or item[b'uid'],
                                                  item[b'group'] or item[b'gid'], size, mtime,
                                                  remove_surrogates(item[b'path']), extra))
    else:
        for archive in sorted(Archive.list_archives(repository, key, manifest), key=attrgetter('ts')):
            print('%-20s %s' % (archive.metadata[b'name'], to_localtime(archive.ts).strftime('%c')))
    return self.exit_code
def check(self, repository, repair=False, last=None):
    self.report_progress('Starting archive consistency check...')
    self.repair = repair
    self.repository = repository
    self.init_chunks()
    self.key = self.identify_key(repository)
    if Manifest.MANIFEST_ID not in self.chunks:
        self.manifest = self.rebuild_manifest()
    else:
        self.manifest, _ = Manifest.load(repository, key=self.key)
    self.rebuild_refcounts(last=last)
    if last is None:
        self.verify_chunks()
    else:
        self.report_progress('Orphaned objects check skipped (needs all archives checked)')
    if not self.error_found:
        self.report_progress('Archive consistency check complete, no problems found.')
    return self.repair or not self.error_found
def test_rename(self):
    self.create_regular_file('file1', size=1024 * 80)
    self.create_regular_file('dir2/file2', size=1024 * 80)
    self.cmd('init', self.repository_location)
    self.cmd('create', self.repository_location + '::test', 'input')
    self.cmd('create', self.repository_location + '::test.2', 'input')
    self.cmd('extract', '--dry-run', self.repository_location + '::test')
    self.cmd('extract', '--dry-run', self.repository_location + '::test.2')
    self.cmd('rename', self.repository_location + '::test', 'test.3')
    self.cmd('extract', '--dry-run', self.repository_location + '::test.2')
    self.cmd('rename', self.repository_location + '::test.2', 'test.4')
    self.cmd('extract', '--dry-run', self.repository_location + '::test.3')
    self.cmd('extract', '--dry-run', self.repository_location + '::test.4')
    # Make sure both archives have been renamed
    repository = Repository(self.repository_path)
    manifest, key = Manifest.load(repository)
    self.assert_equal(len(manifest.archives), 2)
    self.assert_in('test.3', manifest.archives)
    self.assert_in('test.4', manifest.archives)
def do_extract(self, args):
    """Extract archive contents"""
    repository = self.open_repository(args.archive)
    manifest, key = Manifest.load(repository)
    archive = Archive(repository, key, manifest, args.archive.archive,
                      numeric_owner=args.numeric_owner)
    patterns = adjust_patterns(args.paths, args.excludes)
    dry_run = args.dry_run
    strip_components = args.strip_components
    dirs = []
    for item in archive.iter_items(lambda item: not exclude_path(item[b'path'], patterns), preload=True):
        orig_path = item[b'path']
        if strip_components:
            item[b'path'] = os.sep.join(orig_path.split(os.sep)[strip_components:])
            if not item[b'path']:
                continue
        if not args.dry_run:
            while dirs and not item[b'path'].startswith(dirs[-1][b'path']):
                archive.extract_item(dirs.pop(-1))
        self.print_verbose(remove_surrogates(orig_path))
        try:
            if dry_run:
                archive.extract_item(item, dry_run=True)
            else:
                if stat.S_ISDIR(item[b'mode']):
                    dirs.append(item)
                    archive.extract_item(item, restore_attrs=False)
                else:
                    archive.extract_item(item)
        except IOError as e:
            self.print_error('%s: %s', remove_surrogates(orig_path), e)
    if not args.dry_run:
        while dirs:
            archive.extract_item(dirs.pop(-1))
    return self.exit_code
def do_extract(self, args):
    """Extract archive contents"""
    # be restrictive when restoring files, restore permissions later
    if sys.getfilesystemencoding() == 'ascii':
        print('Warning: File system encoding is "ascii", extracting non-ascii filenames will not be supported.')
    os.umask(0o077)
    repository = self.open_repository(args.archive)
    manifest, key = Manifest.load(repository)
    archive = Archive(repository, key, manifest, args.archive.archive,
                      numeric_owner=args.numeric_owner)
    patterns = adjust_patterns(args.paths, args.excludes)
    dry_run = args.dry_run
    stdout = args.stdout
    sparse = args.sparse
    strip_components = args.strip_components
    dirs = []
    for item in archive.iter_items(lambda item: not exclude_path(item[b'path'], patterns), preload=True):
        orig_path = item[b'path']
        if strip_components:
            item[b'path'] = os.sep.join(orig_path.split(os.sep)[strip_components:])
            if not item[b'path']:
                continue
        if not args.dry_run:
            while dirs and not item[b'path'].startswith(dirs[-1][b'path']):
                archive.extract_item(dirs.pop(-1), stdout=stdout)
        self.print_verbose(remove_surrogates(orig_path))
        try:
            if dry_run:
                archive.extract_item(item, dry_run=True)
            else:
                if stat.S_ISDIR(item[b'mode']):
                    dirs.append(item)
                    archive.extract_item(item, restore_attrs=False)
                else:
                    archive.extract_item(item, stdout=stdout, sparse=sparse)
        except IOError as e:
            self.print_error('%s: %s', remove_surrogates(orig_path), e)
    if not args.dry_run:
        while dirs:
            archive.extract_item(dirs.pop(-1))
    return self.exit_code
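# Worked example (added for clarity, not part of the original listing): what the
# strip_components handling above does to a stored path. With strip_components=2,
# 'home/user/docs/a.txt' becomes 'docs/a.txt'; a path with fewer components than the
# strip count collapses to '' and is skipped by the `if not item[b'path']` check.
orig_path = 'home/user/docs/a.txt'
assert '/'.join(orig_path.split('/')[2:]) == 'docs/a.txt'  # strip_components=2
assert '/'.join(orig_path.split('/')[5:]) == ''            # stripped away entirely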
def do_prune(self, args):
    """Prune repository archives according to specified rules"""
    repository = self.open_repository(args.repository)
    manifest, key = Manifest.load(repository)
    cache = Cache(repository, key, manifest)
    archives = list(sorted(Archive.list_archives(repository, key, manifest, cache),
                           key=attrgetter('ts'), reverse=True))
    if args.hourly + args.daily + args.weekly + args.monthly + args.yearly == 0:
        self.print_error('At least one of the "hourly", "daily", "weekly", "monthly" or "yearly" '
                         'settings must be specified')
        return 1
    if args.prefix:
        archives = [archive for archive in archives if archive.name.startswith(args.prefix)]
    keep = []
    if args.hourly:
        keep += prune_split(archives, '%Y-%m-%d %H', args.hourly)
    if args.daily:
        keep += prune_split(archives, '%Y-%m-%d', args.daily, keep)
    if args.weekly:
        keep += prune_split(archives, '%G-%V', args.weekly, keep)
    if args.monthly:
        keep += prune_split(archives, '%Y-%m', args.monthly, keep)
    if args.yearly:
        keep += prune_split(archives, '%Y', args.yearly, keep)
    keep.sort(key=attrgetter('ts'), reverse=True)
    to_delete = [a for a in archives if a not in keep]
    for archive in keep:
        self.print_verbose('Keeping archive "%s"' % archive.name)
    for archive in to_delete:
        self.print_verbose('Pruning archive "%s"', archive.name)
        archive.delete(cache)
    return self.exit_code
def do_verify(self, args):
    """Verify archive consistency"""
    repository = self.open_repository(args.archive)
    manifest, key = Manifest.load(repository)
    archive = Archive(repository, key, manifest, args.archive.archive)
    patterns = adjust_patterns(args.paths, args.excludes)

    def start_cb(item):
        self.print_verbose('%s ...', remove_surrogates(item[b'path']), newline=False)

    def result_cb(item, success):
        if success:
            self.print_verbose('OK')
        else:
            self.print_verbose('ERROR')
            self.print_error('%s: verification failed' % remove_surrogates(item[b'path']))

    for item, peek in archive.iter_items(lambda item: not exclude_path(item[b'path'], patterns)):
        if stat.S_ISREG(item[b'mode']) and b'chunks' in item:
            archive.verify_file(item, start_cb, result_cb, peek=peek)
    return self.exit_code
def do_delete(self, args):
    """Delete an existing repository or archive"""
    repository = self.open_repository(args.target, exclusive=True)
    manifest, key = Manifest.load(repository)
    cache = Cache(repository, key, manifest, do_files=args.cache_files)
    if args.target.archive:
        archive = Archive(repository, key, manifest, args.target.archive, cache=cache)
        stats = Statistics()
        archive.delete(stats)
        manifest.write()
        repository.commit()
        cache.commit()
        if args.stats:
            stats.print_('Deleted data:', cache)
    else:
        print("You requested to completely DELETE the repository *including* all archives it contains:")
        for archive in sorted(Archive.list_archives(repository, key, manifest), key=attrgetter('ts')):
            print(format_archive(archive))
        print("""Type "YES" if you understand this and want to continue.\n""")
        if input('Do you want to continue? ') == 'YES':
            repository.destroy()
            cache.destroy()
            print("Repository and corresponding cache were deleted.")
    return self.exit_code
def do_extract(self, args):
    """Extract archive contents"""
    repository = self.open_repository(args.archive)
    manifest, key = Manifest.load(repository)
    archive = Archive(repository, key, manifest, args.archive.archive,
                      numeric_owner=args.numeric_owner)
    patterns = adjust_patterns(args.paths, args.excludes)
    dry_run = args.dry_run
    strip_components = args.strip_components
    dirs = []
    for item in archive.iter_items(lambda item: not exclude_path(item[b'path'], patterns), preload=True):
        orig_path = item[b'path']
        if strip_components:
            item[b'path'] = os.sep.join(orig_path.split(os.sep)[strip_components:])
            if not item[b'path']:
                continue
        if not args.dry_run:
            while dirs and not item[b'path'].startswith(dirs[-1][b'path']):
                archive.extract_item(dirs.pop(-1))
        self.print_verbose(remove_surrogates(orig_path))
        try:
            if dry_run:
                archive.extract_item(item, dry_run=True)
            else:
                if stat.S_ISDIR(item[b'mode']):
                    dirs.append(item)
                    archive.extract_item(item, restore_attrs=False)
                else:
                    archive.extract_item(item)
        except IOError as e:
            self.print_error('%s: %s', remove_surrogates(orig_path), e)
    if not args.dry_run:
        while dirs:
            archive.extract_item(dirs.pop(-1))
    return self.exit_code
def open_archive(self, name):
    repository = Repository(self.repository_path)
    manifest, key = Manifest.load(repository)
    archive = Archive(repository, key, manifest, name)
    return archive, repository