def prune_archives(self, archives, repository):
    """Prune a list of (delete, archive) two-tuples, all of which must be in
    the same `Repository`.

    Archives whose *delete* flag is true are removed from the Borg repository;
    the rest are only logged as skipped. The manifest, repository and cache
    are committed afterwards, followed by the database transaction.

    :param archives: iterable of ``(delete, archive)`` tuples.
    :param repository: the `Repository` every archive must belong to.
    :return: `Statistics` accumulated by the deletions.
    """
    # TODO Maybe commit some stuff here after a while, because this can
    # seriously take some time.
    stats = Statistics()
    with open_repository(repository) as borg_repository:
        manifest, key = Manifest.load(borg_repository)
        with Cache(borg_repository, key, manifest, lock_wait=1) as cache:
            for delete, archive in archives:
                assert archive.repository == repository
                if delete:
                    log.info('Deleting archive %s [%s]', archive.name, archive.id)
                    archive.delete(manifest, stats, cache)
                else:
                    log.info('Skipping archive %s [%s]', archive.name, archive.id)
            # Persist all deletions in one go: manifest first, then the
            # repository segments, then the client-side chunk cache.
            manifest.write()
            borg_repository.commit()
            cache.commit()
    transaction.commit()
    # Fix: the deletion summary is informational, not an error condition —
    # it was previously emitted at ERROR level.
    log.info(stats.summary.format(label='Deleted data:', stats=stats))
    return stats
def ensure_cache(self, cache_path):
    """Make sure a Borg cache exists at *cache_path*, creating it if absent.

    Always finishes by validating the archive-chunks cache, regardless of
    whether a new cache had to be created.
    """
    cache_missing = not cache_path.is_dir()
    if cache_missing:
        log.info('No cache found, creating one')
        with open_repository(self.repository) as repository:
            manifest, key = Manifest.load(repository)
            # Opening the Cache with an explicit path populates it; committing
            # writes it out to disk.
            with Cache(repository, key, manifest, path=str(cache_path), lock_wait=1) as cache:
                cache.commit()
    self.check_archive_chunks_cache()
def synthesize_crypto(job):
    """Derive client-side key and manifest material for *job* and persist it.

    Verifies the opened repository's ID against the one recorded on the job,
    synthesizes a client key (skipping key export for plaintext keys), builds
    a synthetic manifest, and commits the results on the job.

    :raises RepositoryIDMismatch: if the repository on disk does not match
        the job's recorded repository ID.
    """
    with open_repository(job.repository) as repository:
        repo_id_hex = bin_to_hex(repository.id)
        if repo_id_hex != job.repository.repository_id:
            raise RepositoryIDMismatch(repo_id_hex, job.repository.repository_id)
        manifest, key = Manifest.load(repository)
        client_key = synthesize_client_key(key, repository)
        # Plaintext keys carry no secret material worth exporting to the job.
        if not isinstance(client_key, PlaintextKey):
            job.client_key_data = client_key.get_key_data()
            job.client_key_type = client_key.synthetic_type
        client_manifest = SyntheticManifest(client_key, repository.id)
        job.client_manifest_data = bin_to_hex(client_manifest.write())
        job.client_manifest_id_str = client_manifest.id_str
    transaction.get().note('Synthesized crypto for job %s' % job.id)
    transaction.commit()
def execute(self):
    """Run the configured check job, advancing the CheckJob state machine
    as each phase (repository check, data verification, archive check)
    completes or is skipped."""
    log.debug('Beginning check job on repository %r', self.repository.url)
    with open_repository(self.repository) as repository:
        if self.config.check_repository:
            self.check_repository(repository)
        if self.config.verify_data or self.config.check_archives:
            # Entering the verification phase; an archive checker is needed
            # for either of the two remaining sub-checks.
            self.job.update_state(CheckJob.State.repository_check, CheckJob.State.verify_data)
            archive_checker = self.get_archive_checker(repository)
            if self.config.verify_data:
                self.verify_data(repository, archive_checker)
            # The verify_data phase is marked complete whether or not it
            # actually ran (it may have been skipped when only
            # check_archives is configured).
            self.job.update_state(CheckJob.State.verify_data, CheckJob.State.archives_check)
            if self.config.check_archives:
                self.check_archives(repository, archive_checker)
        else:
            # NOTE(review): when neither verify_data nor check_archives is
            # configured, the job never transitioned repository_check ->
            # verify_data, yet this advances verify_data -> archives_check.
            # Presumably update_state tolerates the expected-state mismatch
            # (or the job is already in verify_data) — confirm against
            # update_state's contract.
            self.job.update_state(CheckJob.State.verify_data, CheckJob.State.archives_check)
        self.job.update_state(CheckJob.State.archives_check, CheckJob.State.done)
def handle(self, *args, **options):
    """Management-command entry point: import archives matching *options*
    from a Borg repository into the data root under the given client.

    Looks the repository up by exact name, ID prefix, or exact URL;
    raises CommandError when the client or repository cannot be found.
    """
    try:
        client = data_root().clients[options['client']]
    except KeyError:
        raise CommandError('Client %s not found' % options['client'])
    wanted = options['repository']
    for repository in data_root().repositories:
        # Accept an exact name, an ID prefix, or an exact URL.
        if repository.name == wanted or repository.id.startswith(wanted) or repository.url == wanted:
            break
    else:
        raise CommandError('Repository %s not found' % options['repository'])
    with open_repository(repository) as borg_repository:
        manifest, key = Manifest.load(borg_repository)
        with Cache(borg_repository, key, manifest, lock_wait=1) as cache:
            names = self.find_archives(manifest, options['archive'], regex=options['regex'])
            imported = 0
            progress = ProgressIndicatorPercent(
                msg='Importing archives %4.1f %%: %s', total=len(names), step=0.1)
            for name in names:
                imported += self.import_archive(manifest, cache, repository, name, client)
                progress.show(info=[name])
            progress.finish()
    print('Imported %d archives.' % imported, file=sys.stderr)