def process_result(self, result):
    """Sync the local archive table for a repo with the remote archive list.

    On success (`returncode == 0`):
    - resolves (or creates) the RepoModel row for the repo URL, which is
      the last element of the borg command line (`result['cmd'][-1]`),
    - deletes local ArchiveModel rows whose snapshot id is no longer
      present on the remote side,
    - creates local rows for remote archives we don't have yet.

    Fix: the delete loop re-filtered the full remote list for every local
    archive (O(n*m)); a set of remote ids built once gives the same
    membership answer in O(1) per archive.
    """
    if result['returncode'] == 0:
        repo, created = RepoModel.get_or_create(url=result['cmd'][-1])
        if not result['data']:
            result['data'] = {}  # TODO: Workaround for tests. Can't read mock results 2x.
        remote_archives = result['data'].get('archives', [])
        # Build the id set once; each local archive is then checked in O(1).
        remote_ids = {s['id'] for s in remote_archives}

        # Delete archives that don't exist on the remote side
        for archive in ArchiveModel.select().where(
                ArchiveModel.repo == repo.id):
            if archive.snapshot_id not in remote_ids:
                archive.delete_instance()

        # Add remote archives we don't have locally.
        for archive in remote_archives:
            new_archive, _ = ArchiveModel.get_or_create(
                snapshot_id=archive['id'],
                repo=repo.id,
                defaults={
                    'name': archive['name'],
                    'time': parser.parse(archive['time'])
                })
            new_archive.save()
def process_result(self, result):
    """Persist a newly added/initialized repo and store its passphrase.

    On success, creates (or finds) the RepoModel row for the given URL,
    seeding the encryption mode on creation, and saves the passphrase to
    the system keyring unless the repo is unencrypted.
    """
    if result['returncode'] != 0:
        return
    params = result['params']
    repo, _ = RepoModel.get_or_create(
        url=params['repo_url'],
        defaults={'encryption': params['encryption']})
    # Unencrypted repos have no passphrase worth keeping.
    if repo.encryption != 'none':
        keyring.set_password("vorta-repo", repo.url, params['password'])
    repo.save()
def process_result(self, result):
    """Record repo size stats and encryption mode from `borg info` output.

    On success, resolves (or creates) the RepoModel row for the repo URL
    (last element of the borg command line), copies the cache size
    counters if present, updates the encryption mode if reported, and
    stores the passphrase in the keyring for encrypted repos.
    """
    if result['returncode'] != 0:
        return
    repo, _ = RepoModel.get_or_create(url=result['cmd'][-1])
    data = result['data']
    if 'cache' in data:
        cache_stats = data['cache']['stats']
        # Copy each reported size counter onto the model.
        for field in ('total_size', 'unique_csize',
                      'unique_size', 'total_unique_chunks'):
            setattr(repo, field, cache_stats[field])
    if 'encryption' in data:
        repo.encryption = data['encryption']['mode']
    if repo.encryption != 'none':
        keyring.set_password("vorta-repo", repo.url, result['params']['password'])
    repo.save()
def process_result(self, result):
    """Sync local snapshot rows against the remote archive list for a repo.

    On success, resolves (or creates) the RepoModel row for the repo URL
    (`result['cmd'][-1]`), deletes local ArchiveModel rows missing on the
    remote side, and creates rows for remote snapshots not yet known.

    Fixes:
    - `repo` was passed inside `defaults`, so the get_or_create lookup
      matched on `snapshot_id` alone: an existing row with the same
      snapshot id under a *different* repo would be reused and no row
      created for this repo. `repo` now belongs to the lookup key, in
      line with the sibling process_result implementation.
    - The delete loop re-filtered the whole remote list per local row
      (O(n*m)); a set of remote ids gives the same answer in O(1) each.
    """
    if result['returncode'] == 0:
        repo, created = RepoModel.get_or_create(url=result['cmd'][-1])
        remote_snapshots = result['data'].get('archives', [])
        # Build the id set once; membership checks below are then O(1).
        remote_ids = {s['id'] for s in remote_snapshots}

        # Delete snapshots that don't exist on the remote side
        for snapshot in ArchiveModel.select().where(ArchiveModel.repo == repo.id):
            if snapshot.snapshot_id not in remote_ids:
                snapshot.delete_instance()

        # Add remote snapshots we don't have locally.
        for snapshot in remote_snapshots:
            new_snapshot, _ = ArchiveModel.get_or_create(
                snapshot_id=snapshot['id'],
                repo=repo.id,
                defaults={
                    'name': snapshot['name'],
                    'time': parser.parse(snapshot['time'])
                }
            )
            new_snapshot.save()