def process_result(self, result):
    """Sync the local archive table with the archives reported by the remote repo.

    On a successful `borg list` run (returncode 0), ensures a RepoModel row
    exists for the repo URL (last element of the issued command), deletes
    local archive rows that no longer exist remotely, and inserts rows for
    remote archives not yet known locally.
    """
    if result['returncode'] != 0:
        return
    repo, created = RepoModel.get_or_create(url=result['cmd'][-1])
    if not result['data']:
        result['data'] = {}  # TODO: Workaround for tests. Can't read mock results 2x.
    remote_archives = result['data'].get('archives', [])

    # Precompute remote ids once so each local row is checked in O(1)
    # instead of scanning the full remote list per row (was O(n*m)).
    remote_ids = {remote['id'] for remote in remote_archives}

    # Delete archives that don't exist on the remote side.
    for archive in ArchiveModel.select().where(ArchiveModel.repo == repo.id):
        if archive.snapshot_id not in remote_ids:
            archive.delete_instance()

    # Add remote archives we don't have locally.
    for archive in remote_archives:
        new_archive, _ = ArchiveModel.get_or_create(
            snapshot_id=archive['id'],
            repo=repo.id,
            defaults={
                'name': archive['name'],
                'time': parser.parse(archive['time'])
            })
        new_archive.save()
def process_result(self, result):
    """Persist the archive produced by a backup run and emit a progress message.

    Accepts returncode 0 (success) or 1 (finished with warnings). When the
    result payload carries an 'archive' entry, a matching ArchiveModel row is
    created if missing; cached repo statistics are refreshed only for a newly
    created archive. Finally a user-facing progress message is emitted.
    """
    returncode = result['returncode']
    if returncode in [0, 1] and 'archive' in result['data']:
        data = result['data']
        archive_info = data['archive']
        repo_id = result['params']['repo_id']
        new_archive, created = ArchiveModel.get_or_create(
            snapshot_id=archive_info['id'],
            defaults={
                'name': archive_info['name'],
                'time': parser.parse(archive_info['start']),
                'repo': repo_id,
                'duration': archive_info['duration'],
                'size': archive_info['stats']['deduplicated_size']
            })
        new_archive.save()

        # Only refresh the cached repo statistics for a freshly created archive.
        if 'cache' in data and created:
            stats = data['cache']['stats']
            repo = RepoModel.get(id=repo_id)
            for field in ('total_size', 'unique_csize', 'unique_size',
                          'total_unique_chunks'):
                setattr(repo, field, stats[field])
            repo.save()

    if returncode == 1:
        message = self.tr('Backup finished with warnings. See logs for details.')
    else:
        message = self.tr('Backup finished.')
    self.app.backup_progress_event.emit(message)
def process_result(self, result):
    """Sync locally known snapshots with the list reported by the remote repo.

    On a successful run (returncode 0), ensures a RepoModel row exists for
    the repo URL (last element of the issued command), removes local snapshot
    rows absent from the remote listing, and adds rows for remote snapshots
    not yet known locally.
    """
    if result['returncode'] != 0:
        return
    repo, created = RepoModel.get_or_create(url=result['cmd'][-1])
    remote_snapshots = result['data'].get('archives', [])

    # Precompute remote ids once so each local row is checked in O(1)
    # instead of scanning the full remote list per row (was O(n*m)).
    remote_ids = {remote['id'] for remote in remote_snapshots}

    # Delete snapshots that don't exist on the remote side.
    for snapshot in ArchiveModel.select().where(ArchiveModel.repo == repo.id):
        if snapshot.snapshot_id not in remote_ids:
            snapshot.delete_instance()

    # Add remote snapshots we don't have locally.
    for snapshot in remote_snapshots:
        new_snapshot, _ = ArchiveModel.get_or_create(
            snapshot_id=snapshot['id'],
            defaults={
                'repo': repo.id,
                'name': snapshot['name'],
                'time': parser.parse(snapshot['time'])
            }
        )
        new_snapshot.save()