def setUp(self):
    """Prepare one test: reset saved state, quiet logging, known cwd."""
    super(DuplicityTestCase, self).setUp()
    # Fresh per-test scratch maps for anything the test saves/overrides;
    # presumably restored by tearDown elsewhere in the class -- TODO confirm.
    self.savedConfig = {}
    self.savedEnviron = {}
    # Keep test runs quiet: warnings only, and no statistics block.
    log.setup()
    log.setverbosity(log.WARNING)
    self.set_config(u'print_statistics', 0)
    backend.import_backends()
    # All relative file references in tests resolve against the testing dir.
    os.chdir(_testing_dir)
def setUp(self):
    """Per-test setup: reset saved state, quiet logging, import backends.

    Older variant that still uses the ``set_global``/``savedGlobals``
    API; the TODO below marks those lines for removal.
    """
    super(DuplicityTestCase, self).setUp()
    # Scratch maps for environment/global values a test overrides.
    self.savedEnviron = {}
    self.savedGlobals = {}  # TODO: remove these lines
    # Warnings only, and suppress the statistics block, to keep output quiet.
    log.setup()
    log.setverbosity(log.WARNING)
    self.set_global('print_statistics', 0)
    backend.import_backends()
    # Have all file references in tests relative to our testing dir
    os.chdir(_testing_dir)
def ProcessCommandLine(cmdline_list):
    """Process command line, set globals, return action

    action will be "list-current", "collection-status", "cleanup",
    "remove-old", "restore", "verify", "full", or "inc".
    """
    # build initial gpg_profile
    globals.gpg_profile = gpg.GPGProfile()

    # parse command line
    args = parse_cmdline_options(cmdline_list)

    # if we get a different gpg-binary from the commandline then redo gpg_profile
    if globals.gpg_binary is not None:
        src = globals.gpg_profile
        # Rebuild the profile so the new binary is probed, carrying over the
        # settings already parsed into the old profile.
        globals.gpg_profile = gpg.GPGProfile(
            passphrase=src.passphrase,
            sign_key=src.sign_key,
            recipients=src.recipients,
            hidden_recipients=src.hidden_recipients)
    log.Debug(_("GPG binary is %s, version %s") %
              ((globals.gpg_binary or 'gpg'),
               "%d.%d.%d" % globals.gpg_profile.gpg_version))

    # we can now try to import all the backends
    backend.import_backends()

    # parse_cmdline_options already verified that we got exactly 1 or 2
    # non-options arguments
    assert len(args) >= 1 and len(args) <= 2, "arg count should have been checked already"

    if len(args) == 1:
        # Single-argument (URL only) actions; module-level flags were set
        # by parse_cmdline_options. Order fixes precedence between flags.
        if list_current:
            action = "list-current"
        elif collection_status:
            action = "collection-status"
        elif cleanup:
            action = "cleanup"
        elif globals.remove_time is not None:
            action = "remove-old"
        elif globals.remove_all_but_n_full_mode:
            action = "remove-all-but-n-full"
        elif globals.remove_all_inc_of_but_n_full_mode:
            action = "remove-all-inc-of-but-n-full"
        else:
            command_line_error("Too few arguments")
        globals.backend = backend.get_backend(args[0])
        if not globals.backend:
            log.FatalError(_("""Bad URL '%s'. Examples of URL strings are "scp://[email protected]:1234/path" and "file:///usr/local". 
See the man page for more information.""") % (args[0],), log.ErrorCode.bad_url)
    elif len(args) == 2:
        # Figure out whether backup or restore
        backup, local_pathname = set_backend(args[0], args[1])
        if backup:
            if full_backup:
                action = "full"
            else:
                action = "inc"
        else:
            if verify:
                action = "verify"
            else:
                action = "restore"

        process_local_dir(action, local_pathname)
        if action in ['full', 'inc', 'verify']:
            set_selection()
    elif len(args) > 2:
        raise AssertionError("this code should not be reachable")

    # Final cross-option sanity check before reporting the chosen action.
    check_consistency(action)
    log.Info(_("Main action: ") + action)
    return action
def ProcessCommandLine(cmdline_list):
    """Process command line, set globals, return action

    action will be "list-current", "collection-status", "cleanup",
    "remove-old", "restore", "verify", "full", or "inc".
    """
    # Build the initial gpg profile, then parse options (which may mutate
    # module-level globals as a side effect).
    globals.gpg_profile = gpg.GPGProfile()
    args = parse_cmdline_options(cmdline_list)

    # we can now try to import all the backends
    backend.import_backends()

    # parse_cmdline_options already verified that we got exactly 1 or 2
    # non-options arguments
    assert len(args) >= 1 and len(
        args) <= 2, "arg count should have been checked already"

    if len(args) == 1:
        # Single-argument (URL only) actions; flag order fixes precedence.
        if list_current:
            action = "list-current"
        elif collection_status:
            action = "collection-status"
        elif cleanup:
            action = "cleanup"
        elif globals.remove_time is not None:
            action = "remove-old"
        elif globals.remove_all_but_n_full_mode:
            action = "remove-all-but-n-full"
        elif globals.remove_all_inc_of_but_n_full_mode:
            action = "remove-all-inc-of-but-n-full"
        else:
            command_line_error("Too few arguments")
        globals.backend = backend.get_backend(args[0])
        if not globals.backend:
            log.FatalError(
                _("""Bad URL '%s'. Examples of URL strings are "scp://[email protected]:1234/path" and "file:///usr/local". 
See the man page for more information.""") % (args[0], ),
                log.ErrorCode.bad_url)
    elif len(args) == 2:
        # Figure out whether backup or restore
        backup, local_pathname = set_backend(args[0], args[1])
        if backup:
            if full_backup:
                action = "full"
            else:
                action = "inc"
        else:
            if verify:
                action = "verify"
            else:
                action = "restore"

        process_local_dir(action, local_pathname)
        if action in ['full', 'inc', 'verify']:
            set_selection()
    elif len(args) > 2:
        raise AssertionError("this code should not be reachable")

    # Final cross-option sanity check before reporting the chosen action.
    check_consistency(action)
    log.Info(_("Main action: ") + action)
    return action
def main():
    """Splunk custom-search-command entry point wrapping duplicity.

    Dispatches on ``sys.argv[1]``: "splunk-last-backups" reports the last
    full/incremental backup times, "splunk-file-list" lists files in a
    backup chain, anything else is forwarded to duplicity's own command
    line. Results are emitted via Intersplunk.
    """
    # Captured duplicity log lines; emitted as a single _raw event when no
    # structured results were produced.
    output = []

    def Log(s, verb_level, code=1, extra=None, force_print=False):
        # Replacement for log.Log: collect lines instead of printing.
        if verb_level <= log.getverbosity():
            output.extend(s.split("\n"))

    # def PrintCollectionStatus(col_stats, force_print=False):
    #     # raise ValueError(type(col_stats.matched_chain_pair[1]))
    #     output.append({
    #         "num_backup_sets":
    #     })
    # log.PrintCollectionStatus = PrintCollectionStatus

    results = None
    try:
        settings = dict()
        # Consume the (unused) piped search results so the protocol is happy.
        Intersplunk.readResults(None, settings, True)
        dup_time.setcurtime()
        archive_dir = os.path.join(app_dir, "local", "data", "archive")
        try:
            os.makedirs(archive_dir)
        except:
            # best-effort: directory may already exist
            pass

        if sys.argv[1] == "splunk-last-backups":
            ap = argparse.ArgumentParser()
            ap.add_argument("--time", type=int)
            ap.add_argument("backend")
            args = ap.parse_args(sys.argv[2:])

            dup_globals.gpg_profile = gpg.GPGProfile()
            # Passphrase must come from the environment -- KeyError if unset.
            dup_globals.gpg_profile.passphrase = os.environ["PASSPHRASE"]
            backend.import_backends()
            dup_globals.backend = backend.get_backend(args.backend)
            if dup_globals.backup_name is None:
                dup_globals.backup_name = commandline.generate_default_backup_name(
                    args.backend)
            commandline.set_archive_dir(archive_dir)

            results = []
            time = args.time
            col_stats = dup_collections.CollectionsStatus(
                dup_globals.backend, dup_globals.archive_dir_path,
                "list-current").set_values()
            try:
                sig_chain = col_stats.get_backup_chain_at_time(time)
            except dup_collections.CollectionsError:
                # No chain at that time: report zeros rather than failing.
                results.append({
                    "last_full_backup_time": 0,
                    "last_incr_backup_time": 0,
                })
            else:
                if sig_chain.incset_list:
                    last_incr_backup_time = max(
                        [incset.end_time for incset in sig_chain.incset_list])
                else:
                    last_incr_backup_time = 0
                results.append({
                    "last_full_backup_time": col_stats.get_last_full_backup_time(),
                    "last_incr_backup_time": last_incr_backup_time
                })
        elif sys.argv[1] == "splunk-file-list":
            ap = argparse.ArgumentParser()
            ap.add_argument("--time")
            ap.add_argument("backend")
            args = ap.parse_args(sys.argv[2:])
            # Splunk passes epoch times like "1234567890.000"; keep seconds.
            args.time = int(args.time.split(".")[0])
            dup_time.setcurtime(args.time)
            dup_globals.restore_time = args.time
            dup_globals.gpg_profile = gpg.GPGProfile()
            dup_globals.gpg_profile.passphrase = os.environ["PASSPHRASE"]
            backend.import_backends()
            dup_globals.backend = backend.get_backend(args.backend)
            if dup_globals.backup_name is None:
                dup_globals.backup_name = commandline.generate_default_backup_name(
                    args.backend)
            commandline.set_archive_dir(archive_dir)

            results = []
            col_stats = dup_collections.CollectionsStatus(
                dup_globals.backend, dup_globals.archive_dir_path,
                "list-current").set_values()
            time = args.time
            sig_chain = col_stats.get_signature_chain_at_time(time)
            path_iter = diffdir.get_combined_path_iter(
                sig_chain.get_fileobjs(time))
            for path in path_iter:
                if path.difftype != u"deleted" and path.index:
                    # Render the 9 permission bits as "rwxrwxrwx" with "-"
                    # for cleared bits, ls(1)-style.
                    # NOTE(review): bin() drops leading zero bits, so modes
                    # below 0o400 would misalign positions -- TODO confirm
                    # inputs always have the owner-read bit set.
                    mode = bin(path.mode)[2:]
                    perms = ""
                    for p, val in enumerate(mode):
                        if p in (0, 3, 6):
                            c = "r"
                        elif p in (1, 4, 7):
                            c = "w"
                        elif p in (2, 5, 8):
                            c = "x"
                        perms += c if int(val) else "-"
                    # Prefix with file-type character as ls does.
                    if path.type == "dir":
                        perms = "d" + perms
                    elif path.type == "sym":
                        perms = "l" + perms
                    else:
                        perms = "-" + perms
                    results.append({
                        "perms": perms,
                        "owner": path.stat.st_uid,
                        "group": path.stat.st_gid,
                        "size": path.stat.st_size,
                        "modtime": path.stat.st_mtime,
                        "filename": os.path.join(*path.index),
                    })
        else:
            # Fall through to a regular duplicity invocation, forcing our
            # app-local archive dir and capturing log output.
            args = ["--archive-dir", archive_dir] + sys.argv[1:]
            action = commandline.ProcessCommandLine(args)
            log.Log = Log
            try:
                dup_main.do_backup(action)
            except dup_collections.CollectionsError:
                results = []
    except SystemExit:
        # duplicity calls sys.exit(); treat as a normal finish.
        pass
    except Exception as e:
        import traceback
        # sys.stderr.write(traceback.format_exc())
        Intersplunk.generateErrorResults("Traceback: %s" % traceback.format_exc())
        return

    if output and not results:
        # Only log output was produced: wrap it as one raw Splunk event.
        import time
        results = [{"_raw": "\n".join(output), "_time": time.time()}]

    if results:
        try:
            Intersplunk.outputResults(results)
        except Exception:
            import traceback
            sys.stderr.write(traceback.format_exc())
            results = Intersplunk.generateErrorResults("Traceback: %s" %
                                                       traceback.format_exc())
            Intersplunk.outputResults(results)
from duplicity import globals
from duplicity.errors import BackendException
import re
from models import *
from datetime import datetime
import sys

# needed to handle ftp unicode output - directory names for example
# TODO may break something! better to handle unicode some way
# NOTE(review): reload(sys)/setdefaultencoding is Python 2 only.
reload(sys)
sys.setdefaultencoding('utf8')

# Module-level duplicity initialization: verbose logging, all backends.
log.setup()
backend.import_backends()
log.setverbosity(log.DEBUG)
globals.ssl_no_check_certificate = True  # TODO better to check, but way to store self-signed


def check_all():
    """Run check() on every stored Action (Django model) in turn."""
    actions = Action.objects.all()
    for action in actions:
        check(action)


def build_url(action):
    """Build and print the duplicity backend URL for *action*.

    Combines the action's storage credentials and destination path as
    ``type://login:password@host/path``.
    NOTE(review): this embeds the plaintext password in the URL and prints
    it -- consider masking before logging.
    """
    storage = action.storage
    url = storage.access_type + "://" + storage.login + ":" + storage.password + "@" + storage.url + action.dest_path
    print url
def setup():
    """Configure duplicity for unit tests: quiet logs, no statistics, backends loaded."""
    log.setup()
    # Suppress the statistics block and anything below warning level.
    globals.print_statistics = 0
    log.setverbosity(log.WARNING)
    backend.import_backends()