def app(tmpdir, qtbot, mocker):
    """Fixture: a VortaApp backed by a temporary SQLite DB with mock data.

    Patches out borg-binary detection, seeds one repo, one archive and one
    source dir, opens the main window and flags it as running under tests.
    """
    settings_db = tmpdir.join('settings.sqlite')
    database = peewee.SqliteDatabase(str(settings_db))
    vorta.models.init_db(database)

    # Don't probe for a real borg binary while testing.
    mocker.patch.object(vorta.application.VortaApp, 'set_borg_details_action',
                        return_value=None)

    repo = RepoModel(url='[email protected]:repo')
    repo.save()

    profile = BackupProfileModel.get(id=1)
    profile.repo = repo.id
    profile.save()

    archive = ArchiveModel(snapshot_id='99999', name='test-archive',
                           time=dt(2000, 1, 1, 0, 0), repo=1)
    archive.save()

    SourceFileModel(dir='/tmp/another', repo=repo).save()

    vorta_app = VortaApp([])
    vorta_app.open_main_window_action()
    qtbot.addWidget(vorta_app.main_window)
    vorta_app.main_window.tests_running = True
    return vorta_app
def init_db(qapp):
    """Reset the DB to a known state: one repo linked to the default profile,
    two archives and one source dir, then open the main window."""
    vorta.models.db.drop_tables(models)
    vorta.models.init_db()

    repo = RepoModel(url='[email protected]:repo')
    repo.save()

    profile = BackupProfileModel.get(id=1)
    profile.repo = repo.id
    profile.save()

    for snapshot_id, name in (('99999', 'test-archive'),
                              ('99998', 'test-archive1')):
        ArchiveModel(snapshot_id=snapshot_id, name=name,
                     time=dt(2000, 1, 1, 0, 0), repo=1).save()

    SourceFileModel(dir='/tmp/another', repo=repo).save()

    qapp.open_main_window_action()
def test_repo_add_success(qapp, qtbot, mocker, borg_json_output): LONG_PASSWORD = '******' # Add new repo window main = qapp.main_window main.repoTab.repo_added.disconnect() add_repo_window = AddRepoWindow(main) test_repo_url = f'vorta-test-repo.{uuid.uuid4()}.com:repo' # Random repo URL to avoid macOS keychain qtbot.keyClicks(add_repo_window.repoURL, test_repo_url) qtbot.keyClicks(add_repo_window.passwordLineEdit, LONG_PASSWORD) stdout, stderr = borg_json_output('info') popen_result = mocker.MagicMock(stdout=stdout, stderr=stderr, returncode=0) mocker.patch.object(vorta.borg.borg_thread, 'Popen', return_value=popen_result) qtbot.mouseClick(add_repo_window.saveButton, QtCore.Qt.LeftButton) with qtbot.waitSignal(add_repo_window.thread.result, timeout=3000) as blocker: pass main.repoTab.process_new_repo(blocker.args[0]) assert EventLogModel.select().count() == 1 assert RepoModel.get(id=2).url == test_repo_url from vorta.keyring.abc import get_keyring keyring = get_keyring() assert keyring.get_password("vorta-repo", RepoModel.get(id=2).url) == LONG_PASSWORD
def init_db(qapp):
    """Reset the DB: unencrypted repo, two archives, one sized source dir,
    then rebuild the main window so the UI reflects the mock data."""
    vorta.models.db.drop_tables(models)
    vorta.models.init_db()

    repo = RepoModel(url='[email protected]:repo')
    repo.encryption = 'none'
    repo.save()

    profile = BackupProfileModel.get(id=1)
    profile.repo = repo.id
    profile.dont_run_on_metered_networks = False
    profile.save()

    for snapshot_id, name in (('99999', 'test-archive'),
                              ('99998', 'test-archive1')):
        ArchiveModel(snapshot_id=snapshot_id, name=name,
                     time=dt(2000, 1, 1, 0, 0), repo=1).save()

    SourceFileModel(dir='/tmp/another', repo=repo, dir_size=100,
                    dir_files_count=18, path_isdir=True).save()

    # Re-open main window to apply mock data in UI
    qapp.main_window = MainWindow(qapp)
def test_repo_add_success(qapp, qtbot, mocker, borg_json_output):
    """Add a new repo through the repo selector and verify DB, keyring and UI."""
    # Add new repo window
    main = qapp.main_window
    # Index 1 is the '+ Initialize New Repository' action entry.
    main.repoTab.repoSelector.setCurrentIndex(1)
    add_repo_window = main.repoTab._window
    test_repo_url = f'vorta-test-repo.{uuid.uuid4()}.com:repo'  # Random repo URL to avoid macOS keychain

    qtbot.keyClicks(add_repo_window.repoURL, test_repo_url)
    qtbot.keyClicks(add_repo_window.passwordLineEdit, LONG_PASSWORD)
    qtbot.keyClicks(add_repo_window.confirmLineEdit, LONG_PASSWORD)

    # Mock the borg subprocess with canned JSON output for `borg info`.
    stdout, stderr = borg_json_output('info')
    popen_result = mocker.MagicMock(stdout=stdout, stderr=stderr, returncode=0)
    mocker.patch.object(vorta.borg.borg_thread, 'Popen', return_value=popen_result)

    qtbot.mouseClick(add_repo_window.saveButton, QtCore.Qt.LeftButton)
    with qtbot.waitSignal(add_repo_window.thread.result, timeout=3000) as _:
        pass

    assert EventLogModel.select().count() == 2
    assert RepoModel.get(id=2).url == test_repo_url
    # The passphrase must have been stored in the keyring under the repo URL.
    keyring = VortaKeyring.get_keyring()
    assert keyring.get_password("vorta-repo", RepoModel.get(id=2).url) == LONG_PASSWORD
    # The new repo is selected in the UI.
    assert main.repoTab.repoSelector.currentText() == test_repo_url
def app(tmpdir, qtbot):
    """Fixture: VortaApp with a temp DB, one repo, one archive, one source dir."""
    db_path = tmpdir.join('settings.sqlite')
    database = peewee.SqliteDatabase(str(db_path))
    vorta.models.init_db(database)

    repo = RepoModel(url='[email protected]:repo')
    repo.save()

    profile = BackupProfileModel.get(id=1)
    profile.repo = repo.id
    profile.save()

    ArchiveModel(snapshot_id='99999', name='test-archive',
                 time=dt(2000, 1, 1, 0, 0), repo=1).save()
    SourceFileModel(dir='/tmp/another', repo=repo).save()

    vorta_app = VortaApp([])
    vorta_app.main_window.show()
    qtbot.addWidget(vorta_app.main_window)
    return vorta_app
def app_with_repo(app):
    """Fixture: extend ``app`` with a linked repo and a /tmp source dir."""
    profile = app.main_window.current_profile

    repo = RepoModel(url='[email protected]:repo')
    repo.save()

    profile.repo = repo
    profile.save()

    SourceDirModel(dir='/tmp', repo=repo).save()
    return app
def process_result(self, result):
    """Mirror the remote archive list from a successful `borg list` into the
    local ArchiveModel table: prune rows gone remotely, add missing ones."""
    if result['returncode'] != 0:
        return

    repo, _ = RepoModel.get_or_create(url=result['cmd'][-1])
    if not result['data']:
        result['data'] = {}  # TODO: Workaround for tests. Can't read mock results 2x.
    remote_archives = result['data'].get('archives', [])

    # Delete archives that don't exist on the remote side.
    for archive in ArchiveModel.select().where(ArchiveModel.repo == repo.id):
        if not any(s['id'] == archive.snapshot_id for s in remote_archives):
            archive.delete_instance()

    # Add remote archives we don't have locally.
    for archive in result['data'].get('archives', []):
        new_archive, _ = ArchiveModel.get_or_create(
            snapshot_id=archive['id'],
            repo=repo.id,
            defaults={'name': archive['name'],
                      'time': parser.parse(archive['time'])})
        new_archive.save()
def process_result(self, result):
    """Update locally-known archives (name, duration, size) and repo-level
    cache stats from a successful `borg info` result.

    Fix: ``ArchiveModel.get_or_none`` can return ``None`` for a remote archive
    that has no local row (e.g. created outside Vorta); the previous code
    dereferenced it unconditionally and raised AttributeError. Such archives
    are now skipped. Dead commented-out code was removed.
    """
    if result['returncode'] != 0:
        return

    remote_archives = result['data'].get('archives', [])
    # Repo id was stored in params during BorgThread.prepare().
    repo_id = result['params']['repo_id']

    # Update archives we already track locally.
    for remote_archive in remote_archives:
        archive = ArchiveModel.get_or_none(
            snapshot_id=remote_archive['id'], repo=repo_id)
        if archive is None:
            # Not known locally — nothing to update here.
            continue
        archive.name = remote_archive['name']  # in case the name changed
        archive.duration = remote_archive['duration']
        archive.size = remote_archive['stats']['deduplicated_size']
        archive.save()

    # Repo-level stats from borg's cache section, when present.
    if 'cache' in result['data']:
        stats = result['data']['cache']['stats']
        repo = RepoModel.get(id=repo_id)
        repo.total_size = stats['total_size']
        repo.unique_csize = stats['unique_csize']
        repo.unique_size = stats['unique_size']
        repo.total_unique_chunks = stats['total_unique_chunks']
        repo.save()
def process_result(self, result):
    """Record a finished (or warning-finished) backup: create the archive row,
    refresh repo stats for brand-new archives, and emit a progress message."""
    if result['returncode'] in [0, 1] and 'archive' in result['data']:
        archive_info = result['data']['archive']
        new_archive, created = ArchiveModel.get_or_create(
            snapshot_id=archive_info['id'],
            defaults={
                'name': archive_info['name'],
                'time': parser.parse(archive_info['start']),
                'repo': result['params']['repo_id'],
                'duration': archive_info['duration'],
                'size': archive_info['stats']['deduplicated_size'],
            })
        new_archive.save()

        # Only refresh repo stats when this run actually created the archive.
        if created and 'cache' in result['data']:
            stats = result['data']['cache']['stats']
            repo = RepoModel.get(id=result['params']['repo_id'])
            repo.total_size = stats['total_size']
            repo.unique_csize = stats['unique_csize']
            repo.unique_size = stats['unique_size']
            repo.total_unique_chunks = stats['total_unique_chunks']
            repo.save()

        # returncode 1 means borg finished with warnings.
        if result['returncode'] == 1:
            message = self.tr('Backup finished with warnings. See logs for details.')
        else:
            message = self.tr('Backup finished.')
        self.app.backup_progress_event.emit(message)
def test_repo_add(app, qtbot, mocker, borg_json_output):
    """Exercise AddRepoWindow validation errors, then a successful add."""
    # Add new repo window
    main = app.main_window
    add_repo_window = AddRepoWindow(main.repoTab)

    # An URL without a colon is rejected.
    qtbot.keyClicks(add_repo_window.repoURL, 'aaa')
    qtbot.mouseClick(add_repo_window.saveButton, QtCore.Qt.LeftButton)
    assert add_repo_window.errorText.text().startswith('Please enter a valid')

    # Appending makes the URL 'aaabbb.com:repo' (valid), but the password is too short.
    qtbot.keyClicks(add_repo_window.repoURL, 'bbb.com:repo')
    qtbot.mouseClick(add_repo_window.saveButton, QtCore.Qt.LeftButton)
    assert add_repo_window.errorText.text() == 'Please use a longer password.'

    qtbot.keyClicks(add_repo_window.passwordLineEdit, 'long-password-long')
    # Mock the borg subprocess with canned JSON output for `borg info`.
    stdout, stderr = borg_json_output('info')
    popen_result = mocker.MagicMock(stdout=stdout, stderr=stderr, returncode=0)
    mocker.patch.object(vorta.borg.borg_thread, 'Popen', return_value=popen_result)
    qtbot.mouseClick(add_repo_window.saveButton, QtCore.Qt.LeftButton)
    with qtbot.waitSignal(add_repo_window.thread.result, timeout=1000) as blocker:
        pass
    main.repoTab.process_new_repo(blocker.args[0])

    # assert EventLogModel.select().count() == 2
    assert RepoModel.get(id=1).url == 'aaabbb.com:repo'
def init_db(qapp, qtbot, tmpdir_factory):
    """Fixture: point the app at a fresh temp SQLite DB seeded with mock data,
    rebuild the main window, and stop borg threads / close the DB on teardown."""
    tmp_db = tmpdir_factory.mktemp('Vorta').join('settings.sqlite')
    mock_db = peewee.SqliteDatabase(str(tmp_db), pragmas={
        'journal_mode': 'wal',
    })
    vorta.models.init_db(mock_db)

    default_profile = BackupProfileModel(name='Default')
    default_profile.save()

    new_repo = RepoModel(url='[email protected]:repo')
    new_repo.encryption = 'none'
    new_repo.save()

    default_profile.repo = new_repo.id
    default_profile.dont_run_on_metered_networks = False
    default_profile.save()

    test_archive = ArchiveModel(snapshot_id='99999', name='test-archive',
                                time=dt(2000, 1, 1, 0, 0), repo=1)
    test_archive.save()
    test_archive1 = ArchiveModel(snapshot_id='99998', name='test-archive1',
                                 time=dt(2000, 1, 1, 0, 0), repo=1)
    test_archive1.save()

    source_dir = SourceFileModel(dir='/tmp/another', repo=new_repo, dir_size=100,
                                 dir_files_count=18, path_isdir=True)
    source_dir.save()

    # Drop the old window before replacing it so Qt releases its resources.
    qapp.main_window.deleteLater()
    del qapp.main_window
    qapp.main_window = MainWindow(qapp)  # Re-open main window to apply mock data in UI

    yield qapp

    # Teardown: cancel any in-flight backup, wait for borg threads, close DB.
    qapp.backup_cancelled_event.emit()
    qtbot.waitUntil(lambda: not vorta.borg.borg_thread.BorgThread.is_running())
    mock_db.close()
def set_repos(self):
    """Rebuild the repository entries of the repo selector from the DB."""
    # Repositories are listed after the 4th entry in repoSelector; removing
    # index 4 repeatedly drops every repository entry while keeping the
    # static action/separator entries.
    for _ in range(4, self.repoSelector.count()):
        self.repoSelector.removeItem(4)
    for repo in RepoModel.select():
        self.repoSelector.addItem(repo.url, repo.id)
def process_result(self, result):
    """Apply a successful `borg rename` to the matching local archive row."""
    if result['returncode'] != 0:
        return
    # cmd ends with: <repo_url>::<old_name> <new_name>
    repo_url, old_name = result['cmd'][-2].split('::')
    new_name = result['cmd'][-1]

    repo = RepoModel.get(url=repo_url)
    archive = ArchiveModel.get(name=old_name, repo=repo)
    archive.name = new_name
    archive.save()
def process_result(self, result):
    """After a successful repo init, persist the repo and store its passphrase."""
    if result['returncode'] != 0:
        return
    params = result['params']
    repo, _ = RepoModel.get_or_create(
        url=params['repo_url'],
        defaults={'encryption': params['encryption']})
    if repo.encryption != 'none':
        # Encrypted repo: remember the passphrase in the system keyring.
        keyring.set_password("vorta-repo", repo.url, params['password'])
    repo.save()
def process_new_repo(self, result): if result['returncode'] == 0: new_repo = RepoModel.get(url=result['params']['repo_url']) profile = self.profile() profile.repo = new_repo.id profile.save() self.set_repos() self.repoSelector.setCurrentIndex(self.repoSelector.count() - 1) self.repo_added.emit() self.init_repo_stats()
def validate(self):
    """Pre-flight check for valid input and borg binary."""
    # Remote repos must look like host:path.
    if self.is_remote_repo and not re.match(r'.+:.+', self.values['repo_url']):
        self._set_status(self.tr('Please enter a valid repo URL or select a local path.'))
        return False

    existing = RepoModel.get_or_none(RepoModel.url == self.values['repo_url'])
    if existing is not None:
        self._set_status(self.tr('This repo has already been added.'))
        return False

    return True
def test_repo_unlink(qapp, qtbot):
    """Unlinking the only repo empties the table and blocks starting a backup."""
    main = qapp.main_window
    tab = main.repoTab
    main.tabWidget.setCurrentIndex(0)
    qtbot.mouseClick(tab.repoRemoveToolbutton, QtCore.Qt.LeftButton)
    # Only the 4 static selector entries (actions + separator) should remain.
    qtbot.waitUntil(lambda: tab.repoSelector.count() == 4, timeout=5000)
    assert RepoModel.select().count() == 0
    # Starting a backup without a repo must show an error message instead.
    qtbot.mouseClick(main.createStartBtn, QtCore.Qt.LeftButton)
    assert main.progressText.text() == 'Add a backup repository first.'
def test_repo_unlink(app, qtbot, monkeypatch):
    """Unlinking the only repo (confirm dialog auto-accepted) empties the table
    and blocks starting a backup."""
    # Auto-confirm the unlink message box.
    monkeypatch.setattr(QMessageBox, "exec_", lambda *args: QMessageBox.Yes)
    main = app.main_window
    tab = main.repoTab
    main.tabWidget.setCurrentIndex(0)
    qtbot.mouseClick(tab.repoRemoveToolbutton, QtCore.Qt.LeftButton)
    # Only the 4 static selector entries (actions + separator) should remain.
    qtbot.waitUntil(lambda: tab.repoSelector.count() == 4, timeout=5000)
    assert RepoModel.select().count() == 0
    # Starting a backup without a repo must show an error message instead.
    qtbot.mouseClick(main.createStartBtn, QtCore.Qt.LeftButton)
    assert main.createProgressText.text() == 'Add a backup repository first.'
def __init__(self, parent=None):
    """Set up the Repository tab: populate the repo, compression and SSH-key
    dropdowns, wire up their signals, and show stats for the current profile.

    Fix: ``repoCompression.currentIndexChanged`` was connected to
    ``compression_select_action`` twice, making the slot run twice per
    selection change; it is now connected once.
    """
    super().__init__(parent)
    self.setupUi(parent)

    # Populate dropdowns
    self.repoSelector.model().item(0).setEnabled(False)
    self.repoSelector.addItem(self.tr('+ Initialize New Repository'), 'new')
    self.repoSelector.addItem(self.tr('+ Add Existing Repository'), 'existing')
    self.repoSelector.insertSeparator(3)
    for repo in RepoModel.select():
        self.repoSelector.addItem(repo.url, repo.id)
    self.repoSelector.currentIndexChanged.connect(self.repo_select_action)
    self.repoRemoveToolbutton.clicked.connect(self.repo_unlink_action)

    # note: it is hard to describe these algorithms with attributes like low/medium/high
    # compression or speed on a unified scale. this is not 1-dimensional and also depends
    # on the input data. so we just tell what we know for sure.
    # "auto" is used for some slower / older algorithms to avoid wasting a lot of time
    # on uncompressible data.
    self.repoCompression.addItem(self.tr('LZ4 (modern, default)'), 'lz4')
    self.repoCompression.addItem(self.tr('Zstandard Level 3 (modern)'), 'zstd,3')
    self.repoCompression.addItem(self.tr('Zstandard Level 8 (modern)'), 'zstd,8')
    # zlib and lzma come from python stdlib and are there (and in borg) since long.
    # but maybe not much reason to start with these nowadays, considering zstd supports
    # a very wide range of compression levels and has great speed. if speed is more
    # important than compression, lz4 is even a little better.
    self.repoCompression.addItem(self.tr('ZLIB Level 6 (auto, legacy)'), 'auto,zlib,6')
    self.repoCompression.addItem(self.tr('LZMA Level 6 (auto, legacy)'), 'auto,lzma,6')
    self.repoCompression.addItem(self.tr('No Compression'), 'none')

    self.repoCompression.currentIndexChanged.connect(self.compression_select_action)
    self.toggle_available_compression()

    self.init_ssh()
    self.sshComboBox.currentIndexChanged.connect(self.ssh_select_action)
    self.sshKeyToClipboardButton.clicked.connect(self.ssh_copy_to_clipboard_action)

    self.init_repo_stats()
    self.populate_from_profile()
def test_create(qapp, borg_json_output, mocker, qtbot):
    """Run a backup with mocked borg output and verify DB rows and UI state."""
    main = qapp.main_window
    # Mock the borg subprocess with canned JSON output for `borg create`.
    stdout, stderr = borg_json_output('create')
    popen_result = mocker.MagicMock(stdout=stdout, stderr=stderr, returncode=0)
    mocker.patch.object(vorta.borg.borg_thread, 'Popen', return_value=popen_result)

    qtbot.mouseClick(main.createStartBtn, QtCore.Qt.LeftButton)
    qtbot.waitUntil(lambda: main.progressText.text().startswith('Backup finished.'),
                    timeout=3000)
    qtbot.waitUntil(lambda: main.createStartBtn.isEnabled(), timeout=3000)

    assert EventLogModel.select().count() == 1
    assert ArchiveModel.select().count() == 3
    assert RepoModel.get(id=1).unique_size == 15520474
    assert main.createStartBtn.isEnabled()
    assert main.archiveTab.archiveTable.rowCount() == 3
    assert main.scheduleTab.logTableWidget.rowCount() == 1
def process_result(self, result):
    """Persist repo stats and encryption mode from a successful `borg info`."""
    if result['returncode'] != 0:
        return
    repo, _ = RepoModel.get_or_create(url=result['cmd'][-1])
    data = result['data']

    if 'cache' in data:
        stats = data['cache']['stats']
        repo.total_size = stats['total_size']
        repo.unique_csize = stats['unique_csize']
        repo.unique_size = stats['unique_size']
        repo.total_unique_chunks = stats['total_unique_chunks']

    if 'encryption' in data:
        repo.encryption = data['encryption']['mode']
    if repo.encryption != 'none':
        # Encrypted repo: remember the passphrase in the system keyring.
        keyring.set_password("vorta-repo", repo.url, result['params']['password'])

    repo.save()
def process_result(self, result):
    """Mirror the remote snapshot list into the local ArchiveModel table:
    prune rows gone remotely, add missing ones."""
    if result['returncode'] != 0:
        return
    repo, _ = RepoModel.get_or_create(url=result['cmd'][-1])
    remote_snapshots = result['data'].get('archives', [])
    remote_ids = {s['id'] for s in remote_snapshots}

    # Delete snapshots that don't exist on the remote side
    for snapshot in ArchiveModel.select().where(ArchiveModel.repo == repo.id):
        if snapshot.snapshot_id not in remote_ids:
            snapshot.delete_instance()

    # Add remote snapshots we don't have locally.
    for snapshot in remote_snapshots:
        record, _ = ArchiveModel.get_or_create(
            snapshot_id=snapshot['id'],
            defaults={'repo': repo.id,
                      'name': snapshot['name'],
                      'time': parser.parse(snapshot['time'])})
        record.save()
def validate(self):
    """Pre-flight check for valid input and borg binary."""
    url = self.values['repo_url']
    # Remote repos must look like host:path.
    if self.is_remote_repo and not re.match(r'.+:.+', url):
        self._set_status(self.tr('Please enter a valid repo URL or select a local path.'))
        return False

    if RepoModel.get_or_none(RepoModel.url == url) is not None:
        self._set_status(self.tr('This repo has already been added.'))
        return False

    # Only the add-new-repo dialog enforces a minimum passphrase length,
    # and only when encryption is enabled.
    if self.__class__ == AddRepoWindow and self.values['encryption'] != 'none':
        if len(self.values['password']) < 8:
            self._set_status(self.tr('Please use a longer passphrase.'))
            return False

    return True
def repo_unlink_action(self):
    """Unlink the selected repository from the profile and delete its local
    records (archives included); the remote repository itself is untouched."""
    profile = self.profile()
    self.init_repo_stats()
    msg = QMessageBox()
    msg.setStandardButtons(QMessageBox.Ok)
    msg.setParent(self, QtCore.Qt.Sheet)
    selected_repo_id = self.repoSelector.currentData()
    selected_repo_index = self.repoSelector.currentIndex()
    # NOTE(review): repos sit after the static selector entries; verify the
    # `> 2` threshold matches the number of static entries (set_repos assumes 4).
    if selected_repo_index > 2:
        repo = RepoModel.get(id=selected_repo_id)
        ArchiveModel.delete().where(ArchiveModel.repo_id == repo.id).execute()
        profile.repo = None
        profile.save()
        repo.delete_instance(recursive=True)  # This also deletes archives.
        self.repoSelector.setCurrentIndex(0)
        self.repoSelector.removeItem(selected_repo_index)
        msg.setText(self.tr('Repository was Unlinked'))
        msg.setInformativeText(self.tr('You can always connect it again later.'))
        msg.exec_()
        self.repo_changed.emit()
        self.init_repo_stats()
def to_db(self, overwrite_profile=False, overwrite_settings=True):
    """Import this exported profile into the database.

    Migrates old schema versions, deduplicates profile id/name, restores the
    repo and its passphrase, optionally replaces settings tables, re-inserts
    source dirs, and saves the profile itself.

    Raises:
        VersionException: if the export's schema is newer than this app's.
    Returns:
        The saved BackupProfileModel instance.
    """
    profile_schema = self._profile_dict['SchemaVersion']['version']
    keyring = VortaKeyring.get_keyring()
    if SCHEMA_VERSION < profile_schema:
        # Export was produced by a newer Vorta than this one.
        raise VersionException()
    elif SCHEMA_VERSION > profile_schema:
        # Add model upgrading code here, only needed if not adding columns
        if profile_schema < 16:
            # Columns added in schema 16: backfill with sentinel values.
            for sourcedir in self._profile_dict['SourceFileModel']:
                sourcedir['dir_files_count'] = -1
                sourcedir['dir_size'] = -1
                sourcedir['path_isdir'] = False

    existing_profile = None
    if overwrite_profile:
        existing_profile = BackupProfileModel.get_or_none(BackupProfileModel.name == self.name)
        if existing_profile:
            # Reuse the existing profile's id so save() updates it in place.
            self._profile_dict['id'] = existing_profile.id
    if not overwrite_profile or not existing_profile:
        # Guarantee uniqueness of ids
        while BackupProfileModel.get_or_none(BackupProfileModel.id == self.id) is not None:
            self._profile_dict['id'] += 1
        # Add suffix in case names are the same
        if BackupProfileModel.get_or_none(BackupProfileModel.name == self.name) is not None:
            suffix = 1
            while BackupProfileModel.get_or_none(BackupProfileModel.name == f"{self.name}-{suffix}") is not None:
                suffix += 1
            self._profile_dict['name'] = f"{self.name}-{suffix}"

    # Load existing repo or restore it
    if self._profile_dict['repo']:
        repo = RepoModel.get_or_none(RepoModel.url == self.repo_url)
        if repo is None:
            # Load repo from export
            repo = dict_to_model(RepoModel, self._profile_dict['repo'])
            repo.save(force_insert=True)
        self._profile_dict['repo'] = model_to_dict(repo)

    if self.repo_password:
        # Restore the exported passphrase into the keyring; the raw password
        # must not be persisted in the profile row.
        keyring.set_password('vorta-repo', self.repo_url, self.repo_password)
        del self._profile_dict['password']

    # Delete and recreate the tables to clear them
    if overwrite_settings:
        db.drop_tables([SettingsModel, WifiSettingModel])
        db.create_tables([SettingsModel, WifiSettingModel])
        SettingsModel.insert_many(self._profile_dict['SettingsModel']).execute()
        WifiSettingModel.insert_many(self._profile_dict['WifiSettingModel']).execute()

    # Set the profile ids to match the new profile
    for source in self._profile_dict['SourceFileModel']:
        source['profile'] = self.id
    SourceFileModel.insert_many(self._profile_dict['SourceFileModel']).execute()

    # Delete added dictionaries to make it match BackupProfileModel
    del self._profile_dict['SettingsModel']
    del self._profile_dict['SourceFileModel']
    del self._profile_dict['WifiSettingModel']
    del self._profile_dict['SchemaVersion']

    # dict to profile
    new_profile = dict_to_model(BackupProfileModel, self._profile_dict)
    if overwrite_profile and existing_profile:
        force_insert = False
    else:
        force_insert = True
    new_profile.save(force_insert=force_insert)
    init_db()  # rerun db init code to perform the same operations on the new db as on application boot
    return new_profile