def runduplicity(self):
    """Run duplicity's "list-current-files" action for self.url and cache
    the resulting backup sets.

    Forwards instance attributes named in self.options / self.no_options as
    duplicity command-line flags, then populates self.col_stats,
    self.date_types and self.dircache.  Returns None; no-op when no URL is
    configured.
    """
    if self.url is None:
        return
    log.setup()
    log.setverbosity(int(self.debuglevel))
    # File descriptors may arrive as strings; duplicity wants ints.
    if self.passphrasefd:
        self.passphrasefd = int(self.passphrasefd)
    if self.passwordfd:
        self.passwordfd = int(self.passwordfd)
    # Local "file:/" targets need no backend password.
    if self.url.find("file:/") != 0:
        get_backendpassphrase(self.passwordfd)
    opts = []
    # Each option maps to an instance attribute named after the flag with
    # the dashes removed (e.g. "ssh-options" -> self.sshoptions).
    # getattr() with a None default replaces the old eval()-based lookup:
    # no arbitrary code execution, and a missing attribute simply skips the
    # flag instead of being hidden by a bare "except: pass".
    for name in self.options:
        value = getattr(self, name.replace("-", ""), None)
        if value:
            opts.append("--%s=%s" % (name, value))
    for name in self.no_options:
        value = getattr(self, name.replace("-", ""), None)
        if value:
            opts.append("--%s" % (name))
    self.options = []
    parameter = ["list-current-files", "--ssh-askpass"] + opts + [self.url]
    log.Log("processing %s" % (" ".join(parameter)), 5)
    # duplicity inspects sys.argv, so fake the program invocation.
    sys.argv = ["duplicity"] + parameter
    action = commandline.ProcessCommandLine(parameter)
    log.Log("running action %s" % (action), 5)
    globals.gpg_profile.passphrase = get_passphrase(self.passphrasefd)
    self.col_stats = collections.CollectionsStatus(
        globals.backend, globals.archive_dir, "collection-status").set_values()
    self.date_types = []
    for chain in self.col_stats.all_backup_chains:
        for s in chain.get_all_sets():
            self.date_types.append(
                (datetime.fromtimestamp(s.get_time()), s.type))
    # Seed the directory cache with one empty entry per backup set.
    for timestamp, set_type in self.date_types:
        self.dircache[date2str(timestamp) + '_' + set_type] = None
def runduplicity(self):
    """Run duplicity's "list-current-files" action for self.url and cache
    the resulting backup sets.

    Variant that mirrors flags from the instance __dict__ (via vars()),
    routes duplicity logging through duplicity_log/debug_log and a syslog
    handler, and populates self.col_stats, self.date_types and
    self.dircache.  Returns None; no-op when no URL is configured.
    """
    if not self.url:
        return
    duplicity_log.setup()
    duplicity_log.setverbosity(int(self.debuglevel))
    log.addHandler(logging.handlers.SysLogHandler(address='/dev/log'))
    # File descriptors may arrive as strings; duplicity wants ints.
    if self.passphrasefd:
        self.passphrasefd = int(self.passphrasefd)
    if self.passwordfd:
        self.passwordfd = int(self.passwordfd)
    # Local "file:/" targets need no backend password.
    if self.url.find("file:/") != 0:
        get_backendpassphrase(self.passwordfd)
    opts = []
    # Flags map to instance attributes named after the option with the
    # dashes removed (e.g. "ssh-options" -> self.sshoptions).
    self_dict = vars(self)
    for option in self.options:
        value = self_dict.get(option.replace("-", ""))
        if value:
            opts.append("--%s=%s" % (option, value))
    for option in self.no_options:
        # NOTE(review): this emits the flag whenever the attribute exists,
        # even if its value is falsy — confirm that is intended.
        if option.replace("-", "") in self_dict:
            opts.append("--%s" % (option))
    self.options = []
    parameter = ["list-current-files", "--ssh-askpass"] + opts + [self.url]
    debug_log("processing %s" % (" ".join(parameter)))
    # duplicity inspects sys.argv, so fake the program invocation.
    sys.argv = ["duplicity"] + parameter
    action = commandline.ProcessCommandLine(parameter)
    debug_log("running action %s" % (action))
    globals.gpg_profile.passphrase = get_passphrase(self.passphrasefd)
    self.col_stats = collections.CollectionsStatus(
        globals.backend, globals.archive_dir, "collection-status").set_values()
    self.date_types = [(datetime.fromtimestamp(s.get_time()), s.type)
                       for chain in self.col_stats.all_backup_chains
                       for s in chain.get_all_sets()]
    # One empty cache entry per backup set, keyed "<date>_<type>".
    # ("set_type" instead of "type" to avoid shadowing the builtin.)
    self.dircache.update({
        (date2str(stamp) + '_' + set_type): None
        for stamp, set_type in self.date_types
    })
    debug_log("initialized cache: " + str(self.date_types) + ", " +
              str(self.dircache))
def main(): output = [] def Log(s, verb_level, code=1, extra=None, force_print=False): if verb_level <= log.getverbosity(): output.extend(s.split("\n")) # def PrintCollectionStatus(col_stats, force_print=False): # # raise ValueError(type(col_stats.matched_chain_pair[1])) # output.append({ # "num_backup_sets": # }) # log.PrintCollectionStatus = PrintCollectionStatus results = None try: settings = dict() Intersplunk.readResults(None, settings, True) dup_time.setcurtime() archive_dir = os.path.join(app_dir, "local", "data", "archive") try: os.makedirs(archive_dir) except: pass if sys.argv[1] == "splunk-last-backups": ap = argparse.ArgumentParser() ap.add_argument("--time", type=int) ap.add_argument("backend") args = ap.parse_args(sys.argv[2:]) dup_globals.gpg_profile = gpg.GPGProfile() dup_globals.gpg_profile.passphrase = os.environ["PASSPHRASE"] backend.import_backends() dup_globals.backend = backend.get_backend(args.backend) if dup_globals.backup_name is None: dup_globals.backup_name = commandline.generate_default_backup_name( args.backend) commandline.set_archive_dir(archive_dir) results = [] time = args.time col_stats = dup_collections.CollectionsStatus( dup_globals.backend, dup_globals.archive_dir_path, "list-current").set_values() try: sig_chain = col_stats.get_backup_chain_at_time(time) except dup_collections.CollectionsError: results.append({ "last_full_backup_time": 0, "last_incr_backup_time": 0, }) else: if sig_chain.incset_list: last_incr_backup_time = max( [incset.end_time for incset in sig_chain.incset_list]) else: last_incr_backup_time = 0 results.append({ "last_full_backup_time": col_stats.get_last_full_backup_time(), "last_incr_backup_time": last_incr_backup_time }) elif sys.argv[1] == "splunk-file-list": ap = argparse.ArgumentParser() ap.add_argument("--time") ap.add_argument("backend") args = ap.parse_args(sys.argv[2:]) args.time = int(args.time.split(".")[0]) dup_time.setcurtime(args.time) dup_globals.restore_time = args.time dup_globals.gpg_profile 
= gpg.GPGProfile() dup_globals.gpg_profile.passphrase = os.environ["PASSPHRASE"] backend.import_backends() dup_globals.backend = backend.get_backend(args.backend) if dup_globals.backup_name is None: dup_globals.backup_name = commandline.generate_default_backup_name( args.backend) commandline.set_archive_dir(archive_dir) results = [] col_stats = dup_collections.CollectionsStatus( dup_globals.backend, dup_globals.archive_dir_path, "list-current").set_values() time = args.time sig_chain = col_stats.get_signature_chain_at_time(time) path_iter = diffdir.get_combined_path_iter( sig_chain.get_fileobjs(time)) for path in path_iter: if path.difftype != u"deleted" and path.index: mode = bin(path.mode)[2:] perms = "" for p, val in enumerate(mode): if p in (0, 3, 6): c = "r" elif p in (1, 4, 7): c = "w" elif p in (2, 5, 8): c = "x" perms += c if int(val) else "-" if path.type == "dir": perms = "d" + perms elif path.type == "sym": perms = "l" + perms else: perms = "-" + perms results.append({ "perms": perms, "owner": path.stat.st_uid, "group": path.stat.st_gid, "size": path.stat.st_size, "modtime": path.stat.st_mtime, "filename": os.path.join(*path.index), }) else: args = ["--archive-dir", archive_dir] + sys.argv[1:] action = commandline.ProcessCommandLine(args) log.Log = Log try: dup_main.do_backup(action) except dup_collections.CollectionsError: results = [] except SystemExit: pass except Exception as e: import traceback # sys.stderr.write(traceback.format_exc()) Intersplunk.generateErrorResults("Traceback: %s" % traceback.format_exc()) return if output and not results: import time results = [{"_raw": "\n".join(output), "_time": time.time()}] if results: try: Intersplunk.outputResults(results) except Exception: import traceback sys.stderr.write(traceback.format_exc()) results = Intersplunk.generateErrorResults("Traceback: %s" % traceback.format_exc()) Intersplunk.outputResults(results)
def stream_events(self, inputs, ew):
    """Modular-input entry point: run a duplicity backup of $SPLUNK_HOME/etc
    and emit one event with duplicity's backup statistics.

    Splunk Enterprise calls the modular input, streams XML describing the
    inputs to stdin, and waits for XML on stdout describing events.
    """

    def parse_backup_stats(lines):
        # Extract "Key Value" pairs from the "Backup Statistics" section of
        # duplicity's captured log output.  The section header itself is a
        # divider line ("-----[ Backup Statistics ]-----"), so it must be
        # recognized BEFORE the generic "---" check — the old ordering
        # never set in_stats and instead parsed the header as a bogus pair.
        stats = {}
        in_stats = False
        for line in lines:
            line = line.strip()
            if "Backup Statistics" in line:
                in_stats = True
                continue
            if line.startswith("---"):
                if in_stats:
                    break  # closing divider ends the section
                continue
            if not in_stats:
                continue
            parts = line.split(" ")
            if len(parts) < 2:
                continue  # blank/short lines inside the section
            stats[parts[0]] = parts[1]
        return stats

    try:
        output = []

        def Log(s, verb_level, code=1, extra=None, force_print=False):
            # Capture duplicity's log output in memory instead of printing.
            if verb_level <= 5:
                for line in s.split("\n"):
                    output.append(line)

        (backup_name, config) = inputs.inputs.popitem()
        # config key -> (duplicity flag, default value or None).
        config_to_arg = {
            "full_if_older_than": ("--full-if-older-than", "30D")
        }
        # Start from the defaults, then overlay the user's configuration.
        params = {}
        for conf_opt, (_, default) in config_to_arg.items():
            if default is not None:
                params[conf_opt] = default
        for conf_opt in config:
            if conf_opt in config_to_arg:
                params[conf_opt] = config[conf_opt]
        args = []
        for (param, value) in params.items():
            (arg, _) = config_to_arg[param]
            if value is True:
                args.append(arg)  # boolean flag, no value
            elif value:
                args.extend((arg, value))
        if "extra_duplicity_args" in config:
            args.extend(shlex.split(config["extra_duplicity_args"]))
        archive_dir = os.path.join(app_dir, "local", "data", "archive")
        backup_base_dir = make_splunkhome_path(["etc"])
        try:
            os.makedirs(archive_dir)
        except OSError:
            pass  # already exists
        # Semicolon-separated include/exclude lists become temp filelist
        # files; the NamedTemporaryFile objects stay referenced so the
        # files survive until duplicity has read them.
        if config.get("whitelist"):
            whitelist_file = tempfile.NamedTemporaryFile()
            with open(whitelist_file.name, "w") as f:
                for entry in config["whitelist"].split(";"):
                    f.write("%s\n" % os.path.expandvars(entry))
            args.extend(("--include-filelist", whitelist_file.name))
        if config.get("blacklist"):
            blacklist_file = tempfile.NamedTemporaryFile()
            with open(blacklist_file.name, "w") as f:
                for entry in config["blacklist"].split(";"):
                    f.write("%s\n" % os.path.expandvars(entry))
            args.extend(("--exclude-filelist", blacklist_file.name))
        # Never back up our own archive directory.
        args.extend(["--exclude", archive_dir])
        args.extend(["--archive-dir", archive_dir])
        args.append(backup_base_dir)
        args.append(config["target_url"])
        dup_time.setcurtime()
        action = commandline.ProcessCommandLine(args)
        log.Log = Log
        dup_main.do_backup(action)
        event = parse_backup_stats(output)
        ew.write_event(
            Event(data=json.dumps(event), sourcetype="duplicity"))
    except BaseException:
        # Deliberately broad (duplicity may raise SystemExit); report
        # everything to splunkd.log via stderr.  format_exc() returns the
        # traceback text — print_exc() returns None and would log "None".
        import traceback
        sys.stderr.write("%s\n" % traceback.format_exc())