def test_default_config_provided_by_package(self):
    """The config template shipped with the package must itself validate."""
    # Importing both paths also verifies the package exposes them.
    from duplicity_backup_s3.defaults import CONFIG_TEMPLATE_PATH
    from duplicity_backup_s3.defaults import CONFIG_SCHEMA_PATH  # noqa: F401

    # Fixed: dropped the misspelled `config_tempate_path` intermediate.
    check_config_file(config_file=CONFIG_TEMPLATE_PATH, testing=True)
def test_optional_missing_key_succeed(self):
    """A config that omits optional keys (includes/excludes) still validates."""
    config_yaml = """
aws:
  AWS_ACCESS_KEY_ID: foobar_aws_key_id
  AWS_SECRET_ACCESS_KEY: foobar_aws_access_key
backuproot: /home
remote:
  bucket: ''
  path: '__test'
full_if_older_than: 7D
"""
    with NamedTemporaryFile(mode="w") as tmp:
        tmp.write(config_yaml)
        tmp.flush()
        # On success the validator hands the config path back unchanged.
        result = check_config_file(config_file=Path(tmp.name), testing=True)
        self.assertEqual(result, Path(tmp.name))
def test_incorrect_value_type_fails(self):
    """A wrongly typed value (int where a string is required) is reported."""
    config_yaml = """
aws:
  AWS_ACCESS_KEY_ID: foobar_aws_key_id
  AWS_SECRET_ACCESS_KEY: foobar_aws_access_key
backuproot: 1
remote:
  bucket: ''
  path: '__test'
full_if_older_than: 7D
"""
    with NamedTemporaryFile(mode="w") as tmp:
        tmp.write(config_yaml)
        tmp.flush()
        # On failure the validator returns the error dictionary instead.
        errors = check_config_file(config_file=Path(tmp.name), testing=True)
        self.assertDictEqual(errors, {"backuproot": ["must be of string type"]})
def test_required_missing_key_fails(self):
    """Omitting the required ``remote.bucket`` key must fail validation."""
    config_yaml = """
aws:
  AWS_ACCESS_KEY_ID: foobar_aws_key_id
  AWS_SECRET_ACCESS_KEY: foobar_aws_access_key
backuproot: /home
remote:
  path: '__test'
full_if_older_than: 7D
"""
    with NamedTemporaryFile(mode="w") as tmp:
        tmp.write(config_yaml)
        tmp.flush()
        errors = check_config_file(config_file=Path(tmp.name), testing=True)
        # Nested schema errors come back as a list of per-field dicts.
        self.assertDictEqual(errors, {"remote": [{"bucket": ["required field"]}]})
def test_vanilla_config(self):
    """A fully populated, well-formed configuration passes validation."""
    config_yaml = """
aws:
  AWS_ACCESS_KEY_ID: foobar_aws_key_id
  AWS_SECRET_ACCESS_KEY: foobar_aws_access_key
backuproot: /home
excludes:
  - _TESTFILE_TO_EXCLUDE
includes:
  - Pictures
remote:
  bucket: ''
  path: '__test'
full_if_older_than: 7D
"""
    with NamedTemporaryFile(mode="w") as tmp:
        tmp.write(config_yaml)
        tmp.flush()
        result = check_config_file(config_file=Path(tmp.name), testing=True)
        # Success is signalled by getting the path back.
        self.assertEqual(result, Path(tmp.name))
def test_config_from_production_success(self):
    """A realistic production-style config (globs, no full_if_older_than) validates."""
    config_yaml = """
aws:
  AWS_ACCESS_KEY_ID: fakekey
  AWS_SECRET_ACCESS_KEY: fakesecret
backuproot: /opt/dir/
includes:
  - /opt/dir/*-media
  - /opt/dir/var/archives
excludes:
  - "**"
remote:
  bucket: somebucket
  path: __testpath
"""
    with NamedTemporaryFile(mode="w") as tmp:
        tmp.write(config_yaml)
        tmp.flush()
        result = check_config_file(config_file=Path(tmp.name), testing=True)
        self.assertEqual(result, Path(tmp.name))
def test_extra_key_fails(self):
    """An unknown top-level key must be rejected by the schema."""
    config_yaml = """
aws:
  AWS_ACCESS_KEY_ID: foobar_aws_key_id
  AWS_SECRET_ACCESS_KEY: foobar_aws_access_key
backuproot: /home
excludes:
  - _TESTFILE_TO_EXCLUDE
includes:
  - Pictures
remote:
  bucket: ''
  path: '__test'
full_if_older_than: 7D
One_more_key: fail
"""
    with NamedTemporaryFile(mode="w") as tmp:
        tmp.write(config_yaml)
        tmp.flush()
        errors = check_config_file(config_file=Path(tmp.name), testing=True)
        self.assertDictEqual(errors, {"One_more_key": ["unknown field"]})
def status(**options):
    """Status of the backup collection."""
    # Validate the config file up front, then delegate to duplicity.
    check_config_file(options.get("config"), verbose=options.get("verbose"))
    DuplicityS3(**options).do_collection_status()
def init(**options):
    """Initialise an empty configuration file.

    Interactively asks where the configuration file should live, backs up any
    existing file, pre-fills the packaged template with answers from the user
    and validates the result. Exits 0 on success, 1 on user abort / missing
    privileges, 2 when the file could not be written.
    """
    # Fixed: this whole "choose path -> root check -> filename" sequence was
    # duplicated verbatim, prompting the user twice and discarding the first
    # set of answers.
    config_path_options = [
        ("1. Current directory", Path.cwd()),
        ("2. User configuration directory", Path(appdirs.user_config_dir)),
        (
            "3. System configuration directory (only root)",
            Path(appdirs.site_config_dir),
        ),
    ]
    echo_info("Choose the path of the configuration file:")
    echo_info("\n".join(["{0} ({1})".format(*o) for o in config_path_options]))
    choice = int(
        click.prompt("Path", default=1, type=click.Choice(["1", "2", "3"])))
    _, config_path = config_path_options[choice - 1]
    echo_success("you choose: {}".format(config_path))

    # when choosing root, ensure you run as root
    if choice == 3 and not run_as_root():
        echo_failure(
            "You need to run this command again with `sudo` rights to manage "
            "the system wide configuration.")
        sys.exit(1)

    # when choosing current dir, let user also choose the name of the config file.
    config_filename = CONFIG_FILENAME
    if choice == 1:
        config_filename = click.prompt("Filename of the configuration file",
                                       default=CONFIG_FILENAME)

    config = check_config_file(Path(config_path / config_filename),
                               exit=False,
                               verbose=options.get("verbose"))

    if config.exists() and not click.confirm(
            "Do you want to override an already existing '{}' (original will be "
            "backedup as '{}.backup'".format(config.name, config.name)):
        echo_info("Exiting without overwriting current config file")
        sys.exit(1)

    if config.exists():
        echo_info("Backing up old config file.")
        # Fixed: the backup path was a bare relative Path, which moved the
        # backup into the current working directory; keep it next to the
        # original instead.
        config.replace(config.with_name("{}.backup".format(config.name)))

    with CONFIG_TEMPLATE_PATH.open() as f:
        default_config = yaml.safe_load(f)

    # we can alter the default configuration here
    echo_info("Please answer some basic configuration questions to initialise "
              "a working solution.")
    default_config["aws"]["AWS_ACCESS_KEY_ID"] = click.prompt(
        "Provide the S3 (Amazon) Access Key ID",
        default=default_config["aws"]["AWS_ACCESS_KEY_ID"],
    )
    # Fixed: this prompt was a copy-paste of the Access Key ID prompt.
    default_config["aws"]["AWS_SECRET_ACCESS_KEY"] = click.prompt(
        "Provide the S3 (Amazon) Secret Access Key",
        default=default_config["aws"]["AWS_SECRET_ACCESS_KEY"],
    )
    default_config["backuproot"] = click.prompt(
        "Backup root directory (everything under it will be backed up)",
        default=default_config["backuproot"],
        type=click.Path(),
    )
    default_config["remote"]["bucket"] = click.prompt(
        "Name of the s3 bucket as backup target",
        default=default_config["remote"]["bucket"],
    )
    default_config["remote"]["path"] = click.prompt(
        "Name of the path inside the bucket",
        default=default_config["remote"]["path"])

    # write config to disk
    with config.open("w") as fd:
        echo_info("Initialising an empty config file in: '{}'".format(config))
        fd.write(yaml.dump(default_config))

    if config.exists():
        check_config_file(config)
        sys.exit(0)
    else:
        # Fixed: the message formatted options.get("config"), which is not the
        # path we just tried to write; report the actual target path.
        echo_failure(
            "Config file does not exist in '{}', please provide.".format(config))
        sys.exit(2)
def remove(**options):
    """Remove older backups."""
    # Validate the config file up front, then delegate to duplicity.
    check_config_file(options.get("config"), verbose=options.get("verbose"))
    DuplicityS3(**options).do_remove_older()
def cleanup(**options):
    """Cleanup the backup location."""
    # Validate the config file up front, then delegate to duplicity.
    check_config_file(options.get("config"), verbose=options.get("verbose"))
    DuplicityS3(**options).do_cleanup()
def list(**options):
    """List of the current files in the backup."""
    # NOTE: the name shadows the builtin `list`, but it is the click command
    # name and cannot be changed without breaking the CLI.
    check_config_file(options.get("config"), verbose=options.get("verbose"))
    DuplicityS3(**options).do_list_current_files()
def verify(**options):
    """Verify backup."""
    # Validate the config file up front, then delegate to duplicity.
    check_config_file(options.get("config"), verbose=options.get("verbose"))
    DuplicityS3(**options).do_verify()
def incr(**options):
    """Perform an Incremental backup."""
    # Validate the config file up front, then delegate to duplicity.
    check_config_file(options.get("config"), verbose=options.get("verbose"))
    return DuplicityS3(**options).do_incremental()
def restore(**options):
    """Perform a Restore of a backup."""
    # Validate the config file up front, then delegate to duplicity.
    check_config_file(options.get("config"), verbose=options.get("verbose"))
    return DuplicityS3(**options).do_restore()