Example #1
    def do_cleanup(self) -> int:
        """
        Cleanup of dirty remote.

        From the duplicity manpage:
        cleanup [--force] [--extra-clean] <url>
            Delete the extraneous duplicity files on the given backend.
            Non-duplicity files, or files in complete data sets will not
            be deleted. This should only be necessary after a duplicity session
            fails or is aborted prematurely. Note that --force will be
            needed to delete the files instead of just listing them.

        :return: returncode
        """
        target = "s3+http://{bucket}/{path}".format(**self._config.get(
            "remote"))  # type: ignore
        args = self._args
        runtime_env = self.get_aws_secrets()
        action = "cleanup"

        if self.dry_run:
            args.append("--dry-run")

        if self.options.get("force"):
            args.append("--force")

        if self.verbose:
            echo_info("Cleanup the backup in target: '{}'".format(target))

        return self._execute(action, *args, target, runtime_env=runtime_env)
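For reference, the target URL is nothing more than a string built from the 'remote' section of the configuration. A minimal standalone sketch of that construction, with made-up bucket and path values (not taken from the project):

remote = {"bucket": "my-bucket", "path": "backups/host1"}  # hypothetical values
target = "s3+http://{bucket}/{path}".format(**remote)
print(target)  # s3+http://my-bucket/backups/host1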
Example #2
    def do_list_current_files(self) -> int:
        """
        List current files included in the backup.

        from the docs:
        list-current-files [--time <time>] <url>
            Lists the files contained in the most current backup or backup at
            time. The information will be extracted from the signature files,
            not the archive data itself. Thus the whole archive does not have
            to be downloaded, but on the other hand if the archive has been
            deleted or corrupted, this command will not detect it.

        :return: returncode
        """
        target = "s3+http://{bucket}/{path}".format(**self._config.get(
            "remote"))  # type: ignore
        args = self._args
        action = "list-current-files"

        if self.options.get("time") is not None:
            args.extend(["--time", self.options.get("time")])

        if self.verbose:
            echo_info("Collection status of the backup in target: '{}'".format(
                target))

        return self._execute(action,
                             *args,
                             target,
                             runtime_env=self.get_aws_secrets())
Example #3
    def _execute(self, *cmd_args, runtime_env: Dict = None) -> int:
        """Execute the duplicity command."""
        command = [self.duplicity_cmd(), *cmd_args]

        if self.verbose:
            print("command used:")
            print(command)
            print("environment:")
            pprint([
                "{} = {}".format(k, v) for k, v in os.environ.items()
                if ("SECRET" not in k) and (("AWS" in k) or ("DUPLICITY" in k))
            ])

        self.last_results = subprocess.run(command,
                                           shell=NEED_SUBPROCESS_SHELL,
                                           env=runtime_env)

        try:
            self.last_results.check_returncode()
        except subprocess.CalledProcessError as e:
            echo_failure("The duplicity command exited with an error. "
                         "Command may not have succeeded.")
            if self.verbose:
                echo_info("More information on the error:\n{}".format(
                    e.output))
        return self.last_results.returncode
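_execute is a thin wrapper around subprocess.run that returns duplicity's exit status. Note that subprocess.run(..., env=...) replaces the child's environment entirely, so the dict returned by get_aws_secrets() presumably carries everything duplicity needs (or is None to inherit the current environment). A self-contained sketch of the same run-and-check pattern, independent of the class (the function name and the bare "duplicity" binary are placeholders, not the package's API):

import subprocess

def run_duplicity(action: str, *args: str, env: dict = None) -> int:
    """Run a duplicity action and report a non-zero exit status (sketch only)."""
    command = ["duplicity", action, *args]
    result = subprocess.run(command, env=env)
    if result.returncode != 0:
        print("duplicity exited with return code {}".format(result.returncode))
    return result.returncode

# e.g. run_duplicity("collection-status", "s3+http://my-bucket/backups")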
Example #4
def check_config_file(
    config_file: Union[Text, Path],
    path: Union[Text, Path, None] = None,
    exit=True,
    verbose=False,
    testing=False,
) -> Optional[Path]:
    """Validate and return the full absolute Path to the config file otherwise exit or return False.

    Will search of the configuration file int he following order:
    1. the current working directory
    2. the user configuration directory ('~/.config/duplicity_backup/')
    3. the system configuration directory ('~/etc/duplicity_backup/')

    :param config_file: filename and/or path to the config file
    :param path: helper path if the config_file if the config_file is not a full path
    :param exit: when exit is true, exit with return_code 2
    :param testing: in testing mode, no CLI verbosity
    :return: Path to the config file
    """
    config_path = search_config(config_file,
                                path=path,
                                exit=exit and not testing)

    if not config_path.exists():
        if exit:
            echo_failure(
                "Config file does not exist in '{}', please provide or "
                "create an empty one using the command `init`.".format(
                    config_file))
            sys.exit(2)
        else:
            # could be a file that does not exist
            return Path(config_path)

    # performing validation
    from cerberus import Validator
    import yaml

    validator = Validator()
    validator.allow_unknown = False
    with config_path.open() as config_fd, \
            CONFIG_SCHEMA_PATH.open() as schema_fd:
        if not validator.validate(yaml.safe_load(config_fd),
                                  yaml.safe_load(schema_fd)):
            if not testing:
                echo_failure(
                    "The configuration file is incorrectly formatted: \n{}".
                    format(validator.errors))
            if exit and not testing:
                sys.exit(2)
            return validator.errors

    if verbose and not testing:
        echo_info(
            "The configuration file is succesfully validated against the validation schema."
        )
    return config_path
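The validation above uses cerberus, with the schema loaded from CONFIG_SCHEMA_PATH. A minimal self-contained illustration of the same Validator usage (the toy schema and document below are assumptions for demonstration, not the project's real schema):

from cerberus import Validator
import yaml

schema = yaml.safe_load("""
remote:
  type: dict
  schema:
    bucket: {type: string, required: true}
    path: {type: string, required: true}
""")

document = yaml.safe_load("""
remote:
  bucket: my-bucket
  path: backups
""")

validator = Validator()
validator.allow_unknown = False
print(validator.validate(document, schema))  # True when the document matches the schema
print(validator.errors)                      # {} when valid, otherwise a dict of error messages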
Example #5
    def do_remove_older(self) -> int:
        """Remove older backup sets.

        From the docs:
        remove-older-than <time> [--force] <url>
            Delete all backup sets older than the given time. Old backup sets will not be deleted if backup sets
            newer than time depend on them. See the TIME FORMATS section for more information. Note, this action
            cannot be combined with backup or other actions, such as cleanup. Note also that --force will be
            needed to delete the files instead of just listing them.

        remove-all-but-n-full <count> [--force] <url>
            Delete all backup sets that are older than the count:th last full backup (in other words, keep the
            last count full backups and associated incremental sets). count must be larger than zero. A value
            of 1 means that only the single most recent backup chain will be kept. Note that --force will be
            needed to delete the files instead of just listing them.

        remove-all-inc-of-but-n-full <count> [--force] <url>
            Delete incremental sets of all backup sets that are older than the count:th last full backup (in
            other words, keep only old full backups and not their increments). count must be larger than zero.
            A value of 1 means that only the single most recent backup chain will be kept intact.
            Note that --force will be needed to delete the files instead of just listing them.
        """
        target = "s3+http://{bucket}/{path}".format(**self._config.get(
            "remote"))  # type: ignore
        args = self._args
        action = None

        if self.options.get("time") is not None:
            action = ["remove-older-than", self.options.get("time")]
        if self.options.get("all_but_n_full") is not None:
            action = [
                "remove-all-but-n-full",
                str(self.options.get("all_but_n_full"))
            ]
        if self.options.get("all_incremental_but_n_full") is not None:
            action = [
                "remove-all-inc-but-n-full",
                str(self.options.get("all_incremental_but_n_full")),
            ]
        if action is None:
            echo_failure("Please provide a remove action")
            if self.verbose:
                print(self.options)
            sys.exit(1)

        if self.options.get("force"):
            args.append("--force")

        if self.verbose:
            echo_info("Collection status of the backup in target: '{}'".format(
                target))

        return self._execute(*action,
                             *args,
                             target,
                             runtime_env=self.get_aws_secrets())
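Note that the three retention options are checked one after the other and each later match overwrites action, so if more than one option is passed at once the incremental variant wins. A small sketch of that selection with hypothetical option values:

options = {"time": None, "all_but_n_full": 3, "all_incremental_but_n_full": 2}

action = None
if options.get("time") is not None:
    action = ["remove-older-than", options.get("time")]
if options.get("all_but_n_full") is not None:
    action = ["remove-all-but-n-full", str(options.get("all_but_n_full"))]
if options.get("all_incremental_but_n_full") is not None:
    action = ["remove-all-inc-of-but-n-full", str(options.get("all_incremental_but_n_full"))]

print(action)  # ['remove-all-inc-of-but-n-full', '2']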
Example #6
    def do_verify(self) -> int:
        """Verify the backup.

        From the duplicity man page:
        Verify [--compare-data] [--time <time>] [--file-to-restore <rel_path>]
          <url> <local_path>
            Restore backup contents temporarily file by file and compare against
            the local path’s contents. Duplicity will exit with a non-zero error
            level if any files are different. On verbosity level info (4) or
            higher, a message for each file that has changed will be logged.

            The --file-to-restore option restricts verify to that file or folder.
            The --time option allows one to select a backup to verify against.
            The --compare-data option enables data comparison.

        :return: return_code of duplicity
        """
        from duplicity_backup_s3.utils import temp_chdir

        with temp_chdir() as target:
            source = "s3+http://{bucket}/{path}".format(
                **self._config.get("remote"))  # type: ignore
            args = self._args
            runtime_env = self.get_aws_secrets()
            action = "verify"

            if self.dry_run:
                args.append("--dry-run")

            if self.options.get("file") is not None:
                args.extend(["--file-to-restore", self.options.get("file")])

            if self.options.get("time") is not None:
                args.extend(["--time", self.options.get("time")])

            if self.verbose:
                echo_info("verifying backup in directory: {}".format(target))

            return self._execute(action,
                                 *args,
                                 source,
                                 target,
                                 runtime_env=runtime_env)
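temp_chdir comes from the package's own utils module and is used here to verify into a throw-away directory. Its implementation is not part of this example; a typical context manager with that behaviour could look roughly like the sketch below (an assumption, not the project's actual code):

import os
import tempfile
from contextlib import contextmanager

@contextmanager
def temp_chdir():
    """Create a temporary directory, chdir into it, and restore the previous cwd on exit."""
    old_cwd = os.getcwd()
    with tempfile.TemporaryDirectory() as tmp:
        os.chdir(tmp)
        try:
            yield tmp
        finally:
            os.chdir(old_cwd)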
Example #7
    def do_collection_status(self) -> int:
        """
        Check the status of the collections in backup.

        From the docs:
        collection-status <url>
            Summarize the status of the backup repository by printing the chains
            and sets found, and the number of volumes in each.

        :return: returncode
        """
        target = "s3+http://{bucket}/{path}".format(**self._config.get(
            "remote"))  # type: ignore
        action = "collection-status"

        if self.verbose:
            echo_info("Collection status of the backup in target: '{}'".format(
                target))

        return self._execute(action,
                             *self._args,
                             target,
                             runtime_env=self.get_aws_secrets())
Example #8
    def do_restore(self) -> int:
        """Restore the backup.

        From the duplicity man page:
        restore [--file-to-restore <relpath>] [--time <time>] <url> <target_folder>
              You can restore the full monty or selected folders/files from
              a specific time. Use the relative path as it is printed by
              list-current-files. Usually not needed as duplicity enters
              restore mode when it detects that the URL comes before the
              local folder.

        :return: return_code of duplicity
        """
        args = self._args
        action = "restore"
        restore_from_url = "s3+http://{bucket}/{path}".format(
            **self._config.get("remote"))  # type: ignore
        target = self.options.get("target")
        runtime_env = self.get_aws_secrets()

        if self.dry_run:
            args.append("--dry-run")

        if self.options.get("file") is not None:
            args.extend((["--file-to-restore", self.options.get("file")]))

        if self.options.get("time") is not None:
            args.extend(["--time", self.options.get("time")])

        if self.verbose:
            echo_info("restoring backup in directory: {}".format(target))

        return self._execute(action,
                             *args,
                             restore_from_url,
                             target,
                             runtime_env=runtime_env)
Example #9
def init(**options):
    """Initialise an empty configuration file."""
    config_path_options = [
        ("1. Current directory", Path.cwd()),
        ("2. User configuration directory", Path(appdirs.user_config_dir)),
        (
            "3. System configuration directory (only root)",
            Path(appdirs.site_config_dir),
        ),
    ]

    echo_info("Choose the path of the configuration file:")
    echo_info("\n".join(["{0} ({1})".format(*o) for o in config_path_options]))
    choice = int(
        click.prompt("Path", default=1, type=click.Choice(["1", "2", "3"])))
    _, config_path = config_path_options[choice - 1]
    echo_success("you choose: {}".format(config_path))

    # when choosing root, ensure you run as root
    if choice == 3 and not run_as_root():
        echo_failure(
            "You need to run this command again with `sudo` rights to manage "
            "the system wide configuration.")
        sys.exit(1)

    # when choosing current dir, let user also choose the name of the config file.
    config_filename = CONFIG_FILENAME
    if choice == 1:
        config_filename = click.prompt("Filename of the configuration file",
                                       default=CONFIG_FILENAME)

    config = check_config_file(Path(config_path / config_filename),
                               exit=False,
                               verbose=options.get("verbose"))

    if config.exists() and not click.confirm(
            "Do you want to overwrite the already existing '{}'? (the original "
            "will be backed up as '{}.backup')".format(config.name, config.name)):
        echo_info("Exiting without overwriting current config file")
        sys.exit(1)

    if config.exists():
        echo_info("Backing up old config file.")
        deprecated_config_filename = Path("{}.backup".format(config.name))
        config.replace(deprecated_config_filename)

    with CONFIG_TEMPLATE_PATH.open() as f:
        default_config = yaml.safe_load(f)

    # we can alter the default configuration here
    echo_info("Please answer some basic configuration questions to initialise "
              "a working solution.")
    default_config["aws"]["AWS_ACCESS_KEY_ID"] = click.prompt(
        "Provide the S3 (Amazon) Access Key ID",
        default=default_config["aws"]["AWS_ACCESS_KEY_ID"],
    )
    default_config["aws"]["AWS_SECRET_ACCESS_KEY"] = click.prompt(
        "Provide the S3 (Amazon) Access Key ID",
        default=default_config["aws"]["AWS_SECRET_ACCESS_KEY"],
    )
    default_config["backuproot"] = click.prompt(
        "Backup root directory (everything under it will be backed up",
        default=default_config["backuproot"],
        type=click.Path(),
    )
    default_config["remote"]["bucket"] = click.prompt(
        "Name of the s3 bucket as backup target",
        default=default_config["remote"]["bucket"],
    )
    default_config["remote"]["path"] = click.prompt(
        "Name of the path inside the bucket",
        default=default_config["remote"]["path"])

    # write config to disk
    with config.open("w") as fd:
        echo_info("Initialising an empty config file in: '{}'".format(config))
        fd.write(yaml.dump(default_config))

    if config.exists():
        check_config_file(config)
        sys.exit(0)
    else:
        echo_failure(
            "Config file does not exist in '{}', please provide.".format(
                options.get("config")))
        sys.exit(2)
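The candidate configuration directories come from appdirs; given the attribute access (appdirs.user_config_dir, appdirs.site_config_dir), the appdirs object here is presumably an AppDirs instance rather than the bare module. For reference, standard AppDirs usage looks like this (the application name is a guess):

from pathlib import Path
from appdirs import AppDirs

dirs = AppDirs("duplicity_backup_s3")  # hypothetical application name
print(Path(dirs.user_config_dir))      # e.g. ~/.config/duplicity_backup_s3 on Linux
print(Path(dirs.site_config_dir))      # e.g. /etc/xdg/duplicity_backup_s3 on Linux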