def _process_client_archive_info(self, archive_info):
    if not archive_info.name.startswith(self.job.archive_name):
        log.error('Client tried to push invalid archive %r (id=%s) to repository. Aborting.',
                  archive_info.name, bin_to_hex(archive_info.id))
        raise ValueError('BorgCube: illegal archive push %r' % archive_info.name)
    log.debug('Adding archive %r (id %s)', archive_info.name, bin_to_hex(archive_info.id))
    checkpoint_re = re.escape(self.job.archive_name) + r'\.checkpoint(\d+)?'
    if re.fullmatch(checkpoint_re, archive_info.name):
        log.debug('%r is a checkpoint - remembering that', archive_info.name)
        self._add_checkpoint(archive_info.id)
    else:
        log.debug('%r is the finalised archive', archive_info.name)
        self._final_archive = True
    if not self._cache_sync_archive(archive_info.id):
        log.error('Failed to synchronize archive %r into cache (see above), aborting.', archive_info.name)
        raise ValueError('BorgCube: cache sync failed')
    # TODO additional sanitation?
    self._manifest.archives[archive_info.name] = archive_info.id, archive_info.ts
    log.info('Added archive %r (id %s) to repository.', archive_info.name, bin_to_hex(archive_info.id))
    self._got_archive = True
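# Illustrative, standalone check of the checkpoint-name pattern used above; the
# archive name 'web1-daily' is a made-up example, not taken from this codebase:
#
#     import re
#     checkpoint_re = re.escape('web1-daily') + r'\.checkpoint(\d+)?'
#     assert re.fullmatch(checkpoint_re, 'web1-daily.checkpoint')      # checkpoint archive
#     assert re.fullmatch(checkpoint_re, 'web1-daily.checkpoint2')     # numbered checkpoint
#     assert not re.fullmatch(checkpoint_re, 'web1-daily')             # the finalised archive itself
#     assert not re.fullmatch(checkpoint_re, 'other-host.checkpoint')  # wrong prefix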
def delete(self, id, wait=True):
    """API"""
    if bin_to_hex(id) not in self.job.checkpoint_archives:
        raise ValueError('BorgCube: illegal delete(id=%s), not a checkpoint archive ID' % bin_to_hex(id))
    self.repository.delete(id, wait)
    # Drop our reference to the archive metadata chunk; after the decref it must
    # not be referenced anywhere else, so the cache entry can be removed outright.
    self._cache.chunks.decref(id)
    assert not self._cache.seen_chunk(id)
    del self._cache.chunks[id]
def synthesize_crypto(job):
    with open_repository(job.repository) as repository:
        if bin_to_hex(repository.id) != job.repository.repository_id:
            raise RepositoryIDMismatch(bin_to_hex(repository.id), job.repository.repository_id)
        manifest, key = Manifest.load(repository)
        client_key = synthesize_client_key(key, repository)
        if not isinstance(client_key, PlaintextKey):
            job.client_key_data = client_key.get_key_data()
            job.client_key_type = client_key.synthetic_type

        client_manifest = SyntheticManifest(client_key, repository.id)
        job.client_manifest_data = bin_to_hex(client_manifest.write())
        job.client_manifest_id_str = client_manifest.id_str
    transaction.get().note('Synthesized crypto for job %s' % job.id)
    transaction.commit()
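# Summary of synthesize_crypto, for orientation: it opens the job's repository,
# verifies the repository ID against the one recorded on the job, loads the real
# manifest and key, derives a synthetic client key (skipped for PlaintextKey
# repositories), and persists the serialized key material plus a freshly written
# SyntheticManifest on the job -- presumably so the client operates against this
# synthetic crypto state rather than the repository's real key and manifest.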
def import_archive(self, manifest, cache, repository, archive_name, client=None):
    with transaction.manager as txn:
        archive_info = manifest.archives[archive_name]
        fpr = bin_to_hex(archive_info.id)
        if fpr in data_root().archives:
            print('Skipping archive %s [%s], already known' % (archive_info.name, fpr), file=sys.stderr)
            return False
        archive = borg.archive.Archive(manifest.repository, manifest.key, manifest, archive_name, cache=cache)
        stats = archive.calc_stats(cache)
        duration = archive.ts_end - archive.ts
        Archive(
            id=archive.fpr,
            repository=repository,
            name=archive.name,
            client=client,
            nfiles=stats.nfiles,
            original_size=stats.osize,
            compressed_size=stats.csize,
            deduplicated_size=stats.usize,
            duration=duration,
            timestamp=archive.ts,
            timestamp_end=archive.ts_end,
        )
        txn.note('(cli) associated archive %s on repository %s with client %s'
                 % (archive_name, repository.name, client.hostname))
        return True
def _add_checkpoint(self, id):
    self.job.checkpoint_archives.append(bin_to_hex(id))
    transaction.get().note('Added checkpoint archive %s for job %s' % (bin_to_hex(id), self.job.id))
    transaction.commit()
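# Note on the bookkeeping above: checkpoint archive IDs are stored hex-encoded on
# job.checkpoint_archives, and delete() (further up) only accepts IDs found in that
# list, so a client can remove its own checkpoint archives but not arbitrary ones
# (an inference from the delete() check shown in this excerpt).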