def test_revert_stream(mock_popen, input_file, keyring_file, secret_keyring_file):
    """revert_stream() must spawn gpg with the full decrypt command line."""
    recipient = '*****@*****.**'
    # Fake a gpg child process that exits cleanly and produces no output.
    proc = mock.Mock()
    proc.communicate.return_value = (None, None)
    proc.returncode = 0
    mock_popen.return_value = proc
    with open(str(input_file)) as stream:
        gpg = Gpg(
            stream,
            recipient,
            str(keyring_file),
            secret_keyring=str(secret_keyring_file)
        )
        with gpg.revert_stream():
            expected_cmd = [
                'gpg',
                '--no-default-keyring',
                '--trust-model', 'always',
                '--secret-keyring', gpg.secret_keyring,
                '--keyring', gpg.keyring,
                '--recipient', gpg.recipient,
                '--decrypt', '--yes', '--batch'
            ]
            mock_popen.assert_called_once_with(
                expected_cmd,
                stdin=gpg.input,
                stdout=PIPE,
                stderr=PIPE
            )
def restore_from_file(twindb_config, copy, dst_dir):
    """
    Restore a directory from a backup copy in the directory

    :param twindb_config: tool configuration
    :type twindb_config: TwinDBBackupConfig
    :param copy: Instance of BaseCopy or and inheriting classes.
    :type copy: BaseCopy
    :param dst_dir: Path to destination directory. Must exist and be empty.
    :type dst_dir: str
    """
    LOG.info('Restoring %s in %s', copy.key, dst_dir)
    mkdir_p(dst_dir)
    restore_start = time.time()

    # Prefer a local copy if one was kept; fall back to the remote destination.
    keep_local_path = twindb_config.keep_local_path
    if keep_local_path and os.path.exists(osp.join(keep_local_path, copy.key)):
        dst = Local(osp.join(keep_local_path, copy.key))
    else:
        dst = twindb_config.destination()
    stream = dst.get_stream(copy)

    # GPG modifier
    if twindb_config.gpg:
        gpg = Gpg(
            stream,
            twindb_config.gpg.recipient,
            twindb_config.gpg.keyring,
            secret_keyring=twindb_config.gpg.secret_keyring
        )
        LOG.debug('Decrypting stream')
        stream = gpg.revert_stream()
    else:
        LOG.debug('Not decrypting the stream')

    with stream as handler:
        try:
            cmd = ["tar", "zvxf", "-"]
            LOG.debug('Running %s', ' '.join(cmd))
            # tar inherits this process' stdout/stderr (no PIPE), so its
            # verbose output goes straight to the console and
            # communicate() returns (None, None).
            proc = Popen(cmd, stdin=handler, cwd=dst_dir)
            proc.communicate()
            if proc.returncode:
                # BUG FIX: the old code logged the command as a raw list
                # and printed always-None "captured" stdout/stderr.
                LOG.error('%s exited with code %d',
                          ' '.join(cmd), proc.returncode)
                return
            LOG.info('Successfully restored %s in %s', copy.key, dst_dir)
        except (OSError, DestinationError) as err:
            LOG.error('Failed to decompress %s: %s', copy.key, err)
            exit(1)

    export_info(
        twindb_config,
        data=time.time() - restore_start,
        category=ExportCategory.files,
        measure_type=ExportMeasureType.restore
    )
def restore_from_mysql_full(stream, dst_dir, config, redo_only=False):
    """
    Restore MySQL datadir from a backup copy

    :param stream: Generator that provides backup copy
    :param dst_dir: Path to destination directory. Must exist and be empty.
    :type dst_dir: str
    :param config: Tool configuration.
    :type config: ConfigParser.ConfigParser
    :param redo_only: True if the function has to do final apply of
        the redo log. For example, if you restore backup from a full copy
        it should be False. If you restore from incremental copy and
        you restore base full copy redo_only should be True.
    :type redo_only: bool
    :return: If success, return True
    :rtype: bool
    """
    # GPG modifier: decrypt only when a [gpg] section is configured.
    try:
        gpg = Gpg(
            stream,
            config.get('gpg', 'recipient'),
            config.get('gpg', 'keyring'),
            secret_keyring=config.get('gpg', 'secret_keyring')
        )
        LOG.debug('Decrypting stream')
        stream = gpg.revert_stream()
    except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
        LOG.debug('Not decrypting the stream')

    stream = Gzip(stream).revert_stream()

    with stream as handler:
        if not _extract_xbstream(handler, dst_dir):
            return False

    mem_usage = psutil.virtual_memory()
    try:
        # Let innobackupex use half of the currently available memory.
        xtrabackup_cmd = [
            'innobackupex',
            '--use-memory=%d' % (mem_usage.available/2),
            '--apply-log'
        ]
        if redo_only:
            xtrabackup_cmd.append('--redo-only')
        xtrabackup_cmd.append(dst_dir)

        LOG.debug('Running %s', ' '.join(xtrabackup_cmd))
        proc = Popen(xtrabackup_cmd, stdout=None, stderr=None)
        proc.communicate()
        if proc.returncode:
            LOG.error('%s exited with code %d',
                      " ".join(xtrabackup_cmd), proc.returncode)
        return proc.returncode == 0
    except OSError as err:
        LOG.error('Failed to prepare backup in %s: %s', dst_dir, err)
        return False
def restore_from_file(config, backup_copy, dst_dir):
    """
    Restore a directory from a backup copy in the directory

    :param config: Tool configuration.
    :type config: ConfigParser.ConfigParser
    :param backup_copy: Backup name.
    :type backup_copy: str
    :param dst_dir: Path to destination directory. Must exist and be empty.
    :type dst_dir: str
    """
    LOG.info('Restoring %s in %s', backup_copy, dst_dir)
    mkdir_p(dst_dir)
    restore_start = time.time()

    # A path that exists locally is restored from disk, otherwise from
    # the configured destination.
    if os.path.exists(backup_copy):
        dst = Local(backup_copy)
    else:
        dst = get_destination(config)
    stream = dst.get_stream(backup_copy)

    # GPG modifier
    try:
        gpg = Gpg(stream,
                  config.get('gpg', 'recipient'),
                  config.get('gpg', 'keyring'),
                  secret_keyring=config.get('gpg', 'secret_keyring'))
        LOG.debug('Decrypting stream')
        stream = gpg.revert_stream()
    except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
        LOG.debug('Not decrypting the stream')

    with stream as handler:
        try:
            cmd = ["tar", "zvxf", "-"]
            LOG.debug('Running %s', ' '.join(cmd))
            # tar inherits stdout/stderr (no PIPE), so communicate()
            # yields no captured output.
            proc = Popen(cmd, stdin=handler, cwd=dst_dir)
            proc.communicate()
            if proc.returncode:
                # BUG FIX: log the command as a string and drop the dead
                # branches that printed always-None captured output.
                LOG.error('%s exited with code %d',
                          ' '.join(cmd), proc.returncode)
                return
            LOG.info('Successfully restored %s in %s', backup_copy, dst_dir)
        except (OSError, DestinationError) as err:
            LOG.error('Failed to decompress %s: %s', backup_copy, err)
            exit(1)

    export_info(config,
                data=time.time() - restore_start,
                category=ExportCategory.files,
                measure_type=ExportMeasureType.restore)
def test_get_stream(mock_popen, input_file, keyring_file):
    """get_stream() must spawn gpg with the encrypt command line."""
    recipient = '*****@*****.**'
    with open(str(input_file)) as stream:
        gpg = Gpg(stream, recipient, str(keyring_file))
        with gpg.get_stream() as s:
            expected = [
                'gpg',
                '--no-default-keyring',
                '--trust-model', 'always',
                '--keyring', gpg.keyring,
                '--recipient', gpg.recipient,
                '--encrypt', '--yes', '--batch'
            ]
            mock_popen.assert_called_once_with(
                expected,
                stdin=gpg.input,
                stdout=PIPE,
                stderr=PIPE
            )
def _backup_stream(config, src, dst, callbacks=None):
    """Pipe the source stream through gzip, an optional local copy and
    optional GPG encryption, then save it to the destination."""
    stream = src.get_stream()

    # Gzip modifier
    stream = Gzip(stream).get_stream()
    src.suffix += '.gz'

    # KeepLocal modifier
    try:
        keep_local_path = config.get('destination', 'keep_local_path')
        kl_modifier = KeepLocal(
            stream,
            os.path.join(keep_local_path, src.get_name())
        )
        stream = kl_modifier.get_stream()
        if callbacks is not None:
            callbacks.append(
                (kl_modifier, {'keep_local_path': keep_local_path, 'dst': dst})
            )
    except ConfigParser.NoOptionError:
        LOG.debug('keep_local_path is not present in the config file')

    # GPG modifier
    try:
        gpg = Gpg(
            stream,
            config.get('gpg', 'recipient'),
            config.get('gpg', 'keyring')
        )
        stream = gpg.get_stream()
        src.suffix += '.gpg'
    except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
        pass
    except ModifierException as err:
        LOG.warning(err)
        LOG.warning('Will skip encryption')

    if not dst.save(stream, src.get_name()):
        LOG.error('Failed to save backup copy %s', src.get_name())
        exit(1)
def test_revert_stream(mock__revert_stream, input_file, keyring_file, secret_keyring_file):
    """revert_stream() must delegate the decrypt command to _revert_stream()."""
    recipient = '*****@*****.**'
    with open(str(input_file)) as stream:
        gpg = Gpg(
            stream,
            recipient,
            str(keyring_file),
            secret_keyring=str(secret_keyring_file)
        )
        gpg.revert_stream()
        expected = [
            'gpg',
            '--no-default-keyring',
            '--trust-model', 'always',
            '--secret-keyring', gpg.secret_keyring,
            '--keyring', gpg.keyring,
            '--recipient', gpg.recipient,
            '--decrypt', '--yes', '--batch'
        ]
        mock__revert_stream.assert_called_once_with(expected)
def backup_files(run_type, config):
    """Backup local directories

    :param run_type: Run type
    :type run_type: str
    :param config: Configuration
    :type config: ConfigParser.ConfigParser
    """
    for directory in get_directories_to_backup(config):
        LOG.debug('copying %s', directory)
        src = FileSource(directory, run_type)
        dst = get_destination(config)
        stream = src.get_stream()

        # Gzip modifier
        stream = Gzip(stream).get_stream()
        src.suffix += '.gz'

        # KeepLocal modifier
        try:
            keep_local_path = config.get('destination', 'keep_local_path')
            stream = KeepLocal(
                stream,
                os.path.join(keep_local_path, src.get_name())
            ).get_stream()
        except ConfigParser.NoOptionError:
            pass

        # GPG modifier
        try:
            keyring = config.get('gpg', 'keyring')
            recipient = config.get('gpg', 'recipient')
            stream = Gpg(stream, recipient, keyring).get_stream()
            src.suffix += '.gpg'
        except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
            pass
        except ModifierException as err:
            LOG.warning(err)
            LOG.warning('Will skip encryption')

        dst.save(stream, src.get_name())
        src.apply_retention_policy(dst, config, run_type)
def _backup_stream(config, src, dst, callbacks=None):
    """
    :param config: Tool config
    :type config: TwinDBBackupConfig
    :param src:
    :param dst:
    :param callbacks:
    :return:
    """
    stream = src.get_stream()

    # Compression modifier
    compressor = config.compression.get_modifier(stream)
    stream = compressor.get_stream()
    src.suffix += compressor.suffix

    # KeepLocal modifier
    keep_local_path = config.keep_local_path
    if keep_local_path:
        kl_modifier = KeepLocal(
            stream,
            osp.join(keep_local_path, src.get_name())
        )
        stream = kl_modifier.get_stream()
        if callbacks is not None:
            callbacks.append(
                (kl_modifier, {'keep_local_path': keep_local_path, 'dst': dst})
            )
    else:
        LOG.debug('keep_local_path is not present in the config file')

    # GPG modifier
    if config.gpg:
        stream = Gpg(
            stream,
            config.gpg.recipient,
            config.gpg.keyring
        ).get_stream()
        src.suffix += '.gpg'

    dst.save(stream, src.get_name())
def test_gpg_init(input_file, keyring_file):
    """The Gpg constructor must store the input stream, recipient and keyring."""
    recipient = '*****@*****.**'
    with open(str(input_file)) as stream:
        gpg = Gpg(stream, recipient, str(keyring_file))
        assert gpg.input == stream
        assert gpg.recipient == recipient
        assert gpg.keyring == str(keyring_file)
def _backup_stream(config, src, dst, callbacks=None):
    """
    :param config: Tool config
    :type config: TwinDBBackupConfig
    :param src:
    :param dst:
    :param callbacks:
    :return:
    """
    stream = src.get_stream()

    # Compression modifier
    compressor = config.compression.get_modifier(stream)
    stream = compressor.get_stream()
    src.suffix += compressor.suffix

    # KeepLocal modifier
    keep_local_path = config.keep_local_path
    if keep_local_path:
        local_copy = KeepLocal(stream, osp.join(keep_local_path, src.get_name()))
        stream = local_copy.get_stream()
        if callbacks is not None:
            callbacks.append(
                (local_copy, {"keep_local_path": keep_local_path, "dst": dst})
            )
    else:
        LOG.debug("keep_local_path is not present in the config file")

    # GPG modifier
    if config.gpg:
        stream = Gpg(stream, config.gpg.recipient, config.gpg.keyring).get_stream()
        src.suffix += ".gpg"

    dst.save(stream, src.get_name())
def restore_from_file(twindb_config, copy, dst_dir):
    """
    Restore a directory from a backup copy in the directory

    :param twindb_config: tool configuration
    :type twindb_config: TwinDBBackupConfig
    :param copy: Instance of BaseCopy or and inheriting classes.
    :type copy: BaseCopy
    :param dst_dir: Path to destination directory. Must exist and be empty.
    :type dst_dir: str
    """
    LOG.info('Restoring %s in %s', copy.key, dst_dir)
    mkdir_p(dst_dir)
    restore_start = time.time()

    # Use a kept local copy when available, otherwise the configured
    # destination.
    keep_local_path = twindb_config.keep_local_path
    if keep_local_path and os.path.exists(osp.join(keep_local_path, copy.key)):
        dst = Local(osp.join(keep_local_path, copy.key))
    else:
        dst = twindb_config.destination()
    stream = dst.get_stream(copy)

    # GPG modifier
    if twindb_config.gpg:
        gpg = Gpg(
            stream,
            twindb_config.gpg.recipient,
            twindb_config.gpg.keyring,
            secret_keyring=twindb_config.gpg.secret_keyring
        )
        LOG.debug('Decrypting stream')
        stream = gpg.revert_stream()
    else:
        LOG.debug('Not decrypting the stream')

    with stream as handler:
        try:
            cmd = ["tar", "zvxf", "-"]
            LOG.debug('Running %s', ' '.join(cmd))
            # tar inherits stdout/stderr (no PIPE), so communicate()
            # returns (None, None).
            proc = Popen(cmd, stdin=handler, cwd=dst_dir)
            proc.communicate()
            if proc.returncode:
                # BUG FIX: log the command as a string and drop the dead
                # branches that printed always-None captured output.
                LOG.error('%s exited with code %d',
                          ' '.join(cmd), proc.returncode)
                return
            LOG.info('Successfully restored %s in %s', copy.key, dst_dir)
        except (OSError, DestinationError) as err:
            LOG.error('Failed to decompress %s: %s', copy.key, err)
            exit(1)

    export_info(
        twindb_config,
        data=time.time() - restore_start,
        category=ExportCategory.files,
        measure_type=ExportMeasureType.restore
    )
def restore_from_mysql_full(stream, dst_dir, config, redo_only=False,
                            xtrabackup_binary=XTRABACKUP_BINARY,
                            xbstream_binary=XBSTREAM_BINARY):
    """
    Restore MySQL datadir from a backup copy

    :param stream: Generator that provides backup copy
    :param dst_dir: Path to destination directory. Must exist and be empty.
    :type dst_dir: str
    :param config: Tool configuration.
    :type config: TwinDBBackupConfig
    :param redo_only: True if the function has to do final apply of
        the redo log. For example, if you restore backup from a full copy
        it should be False. If you restore from incremental copy and
        you restore base full copy redo_only should be True.
    :type redo_only: bool
    :param xtrabackup_binary: path to xtrabackup binary.
    :param xbstream_binary: Path to xbstream binary
    :return: If success, return True
    :rtype: bool
    """
    # GPG modifier
    if config.gpg:
        gpg = Gpg(
            stream,
            config.gpg.recipient,
            config.gpg.keyring,
            secret_keyring=config.gpg.secret_keyring
        )
        LOG.debug('Decrypting stream')
        stream = gpg.revert_stream()
    else:
        LOG.debug('Not decrypting the stream')

    stream = config.compression.get_modifier(stream).revert_stream()

    with stream as handler:
        if not _extract_xbstream(handler, dst_dir, xbstream_binary):
            return False

    mem_usage = psutil.virtual_memory()
    try:
        # Half of the available memory goes to xtrabackup's prepare step.
        cmd = [
            xtrabackup_binary,
            '--use-memory=%d' % (mem_usage.available/2),
            '--prepare'
        ]
        if redo_only:
            cmd.append('--apply-log-only')
        cmd.extend(['--target-dir', dst_dir])

        LOG.debug('Running %s', ' '.join(cmd))
        proc = Popen(cmd, stdout=None, stderr=None)
        proc.communicate()
        if proc.returncode:
            LOG.error('%s exited with code %d', " ".join(cmd), proc.returncode)
        return proc.returncode == 0
    except OSError as err:
        LOG.error('Failed to prepare backup in %s: %s', dst_dir, err)
        return False
def restore_from_mysql_incremental(stream, dst_dir, config, tmp_dir=None,
                                   xtrabackup_binary=XTRABACKUP_BINARY,
                                   xbstream_binary=XBSTREAM_BINARY):
    """
    Restore MySQL datadir from an incremental copy.

    :param stream: Generator that provides backup copy
    :param dst_dir: Path to destination directory. Must exist and be empty.
    :type dst_dir: str
    :param config: Tool configuration.
    :type config: TwinDBBackupConfig
    :param tmp_dir: Path to temp dir
    :type tmp_dir: str
    :param xtrabackup_binary: Path to xtrabackup binary.
    :param xbstream_binary: Path to xbstream binary
    :return: If success, return True
    :rtype: bool
    """
    import shutil  # local import: only needed for temp dir cleanup

    if tmp_dir is None:
        try:
            inc_dir = tempfile.mkdtemp()
        except (IOError, OSError):
            # mkdtemp failed - leave dst_dir empty before propagating.
            empty_dir(dst_dir)
            raise
    else:
        inc_dir = tmp_dir

    # GPG modifier
    if config.gpg:
        gpg = Gpg(
            stream,
            config.gpg.recipient,
            config.gpg.keyring,
            secret_keyring=config.gpg.secret_keyring
        )
        LOG.debug('Decrypting stream')
        stream = gpg.revert_stream()
    else:
        LOG.debug('Not decrypting the stream')

    stream = config.compression.get_modifier(stream).revert_stream()

    with stream as handler:
        if not _extract_xbstream(handler, inc_dir, xbstream_binary):
            return False

    try:
        mem_usage = psutil.virtual_memory()
        try:
            # Pass 1: apply the redo log of the base copy, redo only.
            xtrabackup_cmd = [
                xtrabackup_binary,
                '--use-memory=%d' % (mem_usage.available / 2),
                '--prepare',
                '--apply-log-only',
                '--target-dir=%s' % dst_dir
            ]
            LOG.debug('Running %s', ' '.join(xtrabackup_cmd))
            xtrabackup_proc = Popen(
                xtrabackup_cmd,
                stdout=None,
                stderr=None
            )
            xtrabackup_proc.communicate()
            ret = xtrabackup_proc.returncode
            if ret:
                LOG.error(
                    '%s exited with code %d',
                    " ".join(xtrabackup_cmd),
                    ret)
                return False

            # Pass 2: merge the incremental copy into the base copy.
            xtrabackup_cmd = [
                xtrabackup_binary,
                '--use-memory=%d' % (mem_usage.available / 2),
                '--prepare',
                "--target-dir=%s" % dst_dir,
                "--incremental-dir=%s" % inc_dir
            ]
            LOG.debug('Running %s', ' '.join(xtrabackup_cmd))
            xtrabackup_proc = Popen(
                xtrabackup_cmd,
                stdout=None,
                stderr=None
            )
            xtrabackup_proc.communicate()
            ret = xtrabackup_proc.returncode
            if ret:
                LOG.error(
                    '%s exited with code %d',
                    " ".join(xtrabackup_cmd),
                    ret)
            return ret == 0
        except OSError as err:
            LOG.error('Failed to prepare backup in %s: %s', dst_dir, err)
            return False
    finally:
        # BUG FIX: the directory created by mkdtemp() was never removed
        # (the finally block was a no-op "try: pass"). Only remove it
        # when this function created it.
        if tmp_dir is None:
            try:
                shutil.rmtree(inc_dir)
            except OSError as exc:
                if exc.errno != errno.ENOENT:
                    # ENOENT - no such file or directory
                    raise  # re-raise exception
def restore_from_mysql_incremental(stream, dst_dir, config, tmp_dir=None,
                                   xtrabackup_binary=XTRABACKUP_BINARY,
                                   xbstream_binary=XBSTREAM_BINARY):
    """
    Restore MySQL datadir from an incremental copy.

    :param stream: Generator that provides backup copy
    :param dst_dir: Path to destination directory. Must exist and be empty.
    :type dst_dir: str
    :param config: Tool configuration.
    :type config: ConfigParser.ConfigParser
    :param tmp_dir: Path to temp dir
    :type tmp_dir: str
    :param xtrabackup_binary: Path to xtrabackup binary.
    :param xbstream_binary: Path to xbstream binary
    :return: If success, return True
    :rtype: bool
    """
    import shutil  # local import: only needed for temp dir cleanup

    if tmp_dir is None:
        try:
            inc_dir = tempfile.mkdtemp()
        except (IOError, OSError):
            # mkdtemp failed - leave dst_dir empty before propagating.
            empty_dir(dst_dir)
            raise
    else:
        inc_dir = tmp_dir

    # GPG modifier
    try:
        gpg = Gpg(stream,
                  config.get('gpg', 'recipient'),
                  config.get('gpg', 'keyring'),
                  secret_keyring=config.get('gpg', 'secret_keyring'))
        LOG.debug('Decrypting stream')
        stream = gpg.revert_stream()
    except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
        LOG.debug('Not decrypting the stream')

    stream = Gzip(stream).revert_stream()

    with stream as handler:
        if not _extract_xbstream(handler, inc_dir, xbstream_binary):
            return False

    try:
        mem_usage = psutil.virtual_memory()
        try:
            # Pass 1: apply the redo log of the base copy, redo only.
            xtrabackup_cmd = [
                xtrabackup_binary,
                '--use-memory=%d' % (mem_usage.available / 2),
                '--prepare',
                '--apply-log-only',
                '--target-dir=%s' % dst_dir
            ]
            LOG.debug('Running %s', ' '.join(xtrabackup_cmd))
            xtrabackup_proc = Popen(xtrabackup_cmd,
                                    stdout=None,
                                    stderr=None)
            xtrabackup_proc.communicate()
            ret = xtrabackup_proc.returncode
            if ret:
                LOG.error('%s exited with code %d',
                          " ".join(xtrabackup_cmd), ret)
                return False

            # Pass 2: merge the incremental copy into the base copy.
            xtrabackup_cmd = [
                xtrabackup_binary,
                '--use-memory=%d' % (mem_usage.available / 2),
                '--prepare',
                "--target-dir=%s" % dst_dir,
                "--incremental-dir=%s" % inc_dir
            ]
            LOG.debug('Running %s', ' '.join(xtrabackup_cmd))
            xtrabackup_proc = Popen(xtrabackup_cmd,
                                    stdout=None,
                                    stderr=None)
            xtrabackup_proc.communicate()
            ret = xtrabackup_proc.returncode
            if ret:
                LOG.error('%s exited with code %d',
                          " ".join(xtrabackup_cmd), ret)
            return ret == 0
        except OSError as err:
            LOG.error('Failed to prepare backup in %s: %s', dst_dir, err)
            return False
    finally:
        # BUG FIX: the directory created by mkdtemp() was never removed
        # (the finally block was a no-op "try: pass"). Only remove it
        # when this function created it.
        if tmp_dir is None:
            try:
                shutil.rmtree(inc_dir)
            except OSError as exc:
                if exc.errno != errno.ENOENT:
                    # ENOENT - no such file or directory
                    raise  # re-raise exception
def restore_from_mysql_full(
    stream,
    dst_dir,
    config,
    redo_only=False,
    xtrabackup_binary=XTRABACKUP_BINARY,
    xbstream_binary=XBSTREAM_BINARY,
):
    """
    Restore MySQL datadir from a backup copy

    :param stream: Generator that provides backup copy
    :param dst_dir: Path to destination directory. Must exist and be empty.
    :type dst_dir: str
    :param config: Tool configuration.
    :type config: TwinDBBackupConfig
    :param redo_only: True if the function has to do final apply of
        the redo log. For example, if you restore backup from a full copy
        it should be False. If you restore from incremental copy and
        you restore base full copy redo_only should be True.
    :type redo_only: bool
    :param xtrabackup_binary: path to xtrabackup binary.
    :param xbstream_binary: Path to xbstream binary
    :return: If success, return True
    :rtype: bool
    """
    # GPG modifier
    if config.gpg:
        gpg = Gpg(
            stream,
            config.gpg.recipient,
            config.gpg.keyring,
            secret_keyring=config.gpg.secret_keyring,
        )
        LOG.debug("Decrypting stream")
        stream = gpg.revert_stream()
    else:
        LOG.debug("Not decrypting the stream")

    # Config may override the xtrabackup binary passed by the caller.
    if config.mysql.xtrabackup_binary:
        xtrabackup_binary = config.mysql.xtrabackup_binary

    stream = config.compression.get_modifier(stream).revert_stream()

    with stream as handler:
        if not _extract_xbstream(handler, dst_dir, xbstream_binary):
            return False

    mem_usage = psutil.virtual_memory()
    try:
        cmd = [
            xtrabackup_binary,
            "--use-memory=%d" % (mem_usage.available / 2),
            "--prepare",
        ]
        if redo_only:
            cmd.append("--apply-log-only")
        cmd.extend(["--target-dir", dst_dir])

        LOG.debug("Running %s", " ".join(cmd))
        proc = Popen(cmd, stdout=None, stderr=None)
        proc.communicate()
        if proc.returncode:
            LOG.error("%s exited with code %d", " ".join(cmd), proc.returncode)
        return proc.returncode == 0
    except OSError as err:
        raise TwinDBBackupError("Failed to prepare backup in %s: %s" % (dst_dir, err))
def backup_mysql(run_type, config):
    """Take backup of local MySQL instance

    :param run_type: Run type
    :type run_type: str
    :param config: Tool configuration
    :type config: ConfigParser.ConfigParser
    :return: None
    """
    try:
        if not config.getboolean('source', 'backup_mysql'):
            raise TwinDBBackupError('MySQL backups are not enabled in config')
    except (ConfigParser.NoOptionError, TwinDBBackupError) as err:
        LOG.debug(err)
        LOG.debug('Not backing up MySQL')
        return

    dst = get_destination(config)

    try:
        full_backup = config.get('mysql', 'full_backup')
    except ConfigParser.NoOptionError:
        full_backup = 'daily'

    backup_start = time.time()
    src = MySQLSource(
        MySQLConnectInfo(config.get('mysql', 'mysql_defaults_file')),
        run_type,
        full_backup,
        dst
    )
    callbacks = []
    stream = src.get_stream()
    src_name = src.get_name()

    # Gzip modifier
    stream = Gzip(stream).get_stream()
    src_name += '.gz'

    # KeepLocal modifier
    try:
        keep_local_path = config.get('destination', 'keep_local_path')
        kl_modifier = KeepLocal(
            stream,
            os.path.join(keep_local_path, src_name)
        )
        stream = kl_modifier.get_stream()
        callbacks.append(
            (kl_modifier, {'keep_local_path': keep_local_path, 'dst': dst})
        )
    except ConfigParser.NoOptionError:
        LOG.debug('keep_local_path is not present in the config file')

    # GPG modifier
    try:
        stream = Gpg(
            stream,
            config.get('gpg', 'recipient'),
            config.get('gpg', 'keyring')
        ).get_stream()
        src_name += '.gpg'
    except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
        pass
    except ModifierException as err:
        LOG.warning(err)
        LOG.warning('Will skip encryption')

    if not dst.save(stream, src_name):
        LOG.error('Failed to save backup copy %s', src_name)
        exit(1)

    status = prepare_status(dst, src, run_type, src_name, backup_start)
    src.apply_retention_policy(dst, config, run_type, status)
    dst.status(status)

    LOG.debug('Callbacks are %r', callbacks)
    for modifier, kwargs in callbacks:
        modifier.callback(**kwargs)
def test_gpg_raises_exception_if_no_keyring(input_file, tmpdir):
    """A keyring path that does not exist must make the Gpg constructor fail."""
    missing_keyring = tmpdir.join('does_not_exit')
    with open(str(input_file)) as stream:
        with pytest.raises(ModifierException):
            Gpg(stream, 'foo@bar', str(missing_keyring))