def restore_check_paths(rpin, rpout, restoreasof=None):
    """Make sure source and destination exist, and have appropriate type"""
    if not restoreasof:
        if not rpin.lstat():
            Log.FatalError("Source file %s does not exist" % rpin.path)
    if not force and rpout.lstat() and (not rpout.isdir() or rpout.listdir()):
        Log.FatalError("Restore target %s already exists, "
                       "specify --force to overwrite." % rpout.path)
    if force and rpout.lstat() and not rpout.isdir():
        rpout.delete()

def CheckDest(dest_rp):
    """Check the destination directory, regressing it if necessary"""
    dest_rp = require_root_set(dest_rp, 0)
    need_check = checkdest_need_check(dest_rp)
    if need_check is None:
        Log.FatalError("No destination dir found at %s" % (dest_rp.path,))
    elif need_check == 0:
        Log.FatalError("Destination dir %s does not need checking"
                       % (dest_rp.path,), no_fatal_message=1, errlevel=0)
    init_user_group_mapping(dest_rp.conn)
    dest_rp.conn.regress.Regress(dest_rp)

def get_string_from_file(filename):
    """Return the contents of the given file, or None if no filename given"""
    if not filename:
        return None
    rp = rpath.RPath(Globals.local_connection, filename)
    try:
        return rp.get_data()
    except OSError, e:
        Log.FatalError("Error '%s' reading mapping file '%s'"
                       % (str(e), filename))

def get_cmd_pairs(arglist, remote_schema=None, remote_cmd=None):
    """Map the given file descriptions into command pairs

    Command pairs are tuples cmdpair of length 2.  cmdpair[0] is None
    iff it describes a local path, and cmdpair[1] is the path.

    """
    global __cmd_schema
    if remote_schema:
        __cmd_schema = remote_schema
    elif not Globals.ssh_compression:
        __cmd_schema = __cmd_schema_no_compress
    if Globals.remote_tempdir:
        __cmd_schema += (" --tempdir=" + Globals.remote_tempdir)
    if not arglist:
        return []
    desc_pairs = map(parse_file_desc, arglist)

    if filter(lambda x: x[0], desc_pairs):  # True if any host_info found
        if remote_cmd:
            Log.FatalError("The --remote-cmd flag is not compatible "
                           "with remote file descriptions.")
    elif remote_schema:
        Log("Remote schema option ignored - no remote file "
            "descriptions.", 2)

    cmdpairs = map(desc2cmd_pairs, desc_pairs)
    if remote_cmd:  # last file description gets remote_cmd
        cmdpairs[-1] = (remote_cmd, cmdpairs[-1][1])
    return cmdpairs

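# Sketch of the resulting structure (hypothetical values, assuming the usual
# ssh-based remote schema; not output produced by this module): a mixed
# local/remote invocation such as
#
#   rdiff-backup /home user@host::/backup
#
# would map through get_cmd_pairs() to
#
#   [(None, '/home'),
#    ('ssh -C user@host rdiff-backup --server', '/backup')]
#
# i.e. the first element of each pair is the command used to reach that side
# (None for local paths) and the second element is the path on that side.
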
def Compare(compare_type, src_rp, dest_rp, compare_time=None):
    """Compare metadata in src_rp with metadata of backup session

    Prints to stdout whenever a file in the src_rp directory has
    different metadata than what is recorded in the metadata for the
    appropriate session.

    Session time is read from restore_timestr if compare_time is None.

    """
    global return_val
    dest_rp = require_root_set(dest_rp, 1)
    if not compare_time:
        try:
            compare_time = Time.genstrtotime(restore_timestr)
        except Time.TimeException, exc:
            Log.FatalError(str(exc))

    mirror_rp = restore_root.new_index(restore_index)
    inc_rp = Globals.rbdir.append_path("increments", restore_index)
    backup_set_select(src_rp)  # Sets source rorp iterator
    if compare_type == "compare":
        compare_func = compare.Compare
    elif compare_type == "compare-hash":
        compare_func = compare.Compare_hash
    else:
        assert compare_type == "compare-full", compare_type
        compare_func = compare.Compare_full
    return_val = compare_func(src_rp, mirror_rp, inc_rp, compare_time)

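# Note on the three modes dispatched above (a summary only; the actual checks
# live in the compare module, not here): "compare" inspects metadata only,
# "compare-hash" additionally checks file hashes recorded in the metadata,
# and "compare-full" reads back the full file data from the repository.
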
def restore_check_backup_dir(mirror_root, src_rp=None, restore_as_of=1):
    """Make sure the backup dir rooted at mirror_root is in a consistent state"""
    if not restore_as_of and not src_rp.isincfile():
        Log.FatalError("""File %s does not look like an increment file.

Try restoring from an increment file (the filenames look like
"foobar.2001-09-01T04:49:04-07:00.diff").""" % src_rp.path)

    result = checkdest_need_check(mirror_root)
    if result is None:
        Log.FatalError("%s does not appear to be an rdiff-backup directory."
                       % (Globals.rbdir.path,))
    elif result == 1:
        Log.FatalError(
            "Previous backup to %s seems to have failed.\nRerun rdiff-backup "
            "with --check-destination-dir option to revert directory "
            "to state before unsuccessful session." % (mirror_root.path,))

def require_root_set(rp, read_only):
    """Make sure rp is or is in a valid rdiff-backup dest directory.

    Also initializes fs_abilities (read or read/write) and quoting, and
    returns a quoted rp if necessary.

    """
    if not restore_set_root(rp):
        Log.FatalError(("Bad directory %s.\n" % (rp.path,)) +
                       "It doesn't appear to be an rdiff-backup destination dir")
    try:
        Globals.rbdir.conn.fs_abilities.single_set_globals(Globals.rbdir,
                                                           read_only)
    except (OSError, IOError), exc:
        print "\n"
        Log.FatalError("Could not open rdiff-backup directory\n\n%s\n\n"
                       "due to\n\n%s" % (Globals.rbdir.path, exc))

def ListAtTime(rp):
    """List files in archive under rp that are present at restoretime"""
    rp = require_root_set(rp, 1)
    try:
        rest_time = Time.genstrtotime(restore_timestr)
    except Time.TimeException, exc:
        Log.FatalError(str(exc))
    mirror_rp = restore_root.new_index(restore_index)
    inc_rp = mirror_rp.append_path("increments", restore_index)
    for rorp in rp.conn.restore.ListAtTime(mirror_rp, inc_rp, rest_time):
        print rorp.get_indexpath()

def restore_set_root(rpin):
    """Set data dir, restore_root and index, or return None on failure

    The idea here is to keep backing up on the path until we find a
    directory that contains "rdiff-backup-data".  That is the mirror
    root.  If the path from there starts
    "rdiff-backup-data/increments*", then the index is the remainder
    minus that.  Otherwise the index is just the path minus the root.

    All this could fail if the increment file is pointed to in a funny
    way, using symlinks or somesuch.

    """
    global restore_root, restore_index, restore_root_set
    if rpin.isincfile():
        relpath = rpin.getincbase().path
    else:
        relpath = rpin.path
    if rpin.conn is not Globals.local_connection:
        # For security checking consistency, don't get absolute path
        pathcomps = relpath.split('/')
    else:
        pathcomps = os.path.join(os.getcwd(), relpath).split("/")
    if not pathcomps[0]:
        min_len_pathcomps = 2  # treat abs paths differently
    else:
        min_len_pathcomps = 1

    i = len(pathcomps)
    while i >= min_len_pathcomps:
        parent_dir = rpath.RPath(rpin.conn, "/".join(pathcomps[:i]))
        if (parent_dir.isdir() and parent_dir.readable() and
                "rdiff-backup-data" in parent_dir.listdir()):
            break
        if parent_dir.path == rpin.conn.Globals.get('restrict_path'):
            return None
        i = i - 1
    else:
        return None

    restore_root = parent_dir
    Log("Using mirror root directory %s" % restore_root.path, 6)
    if restore_root.conn is Globals.local_connection:
        Security.reset_restrict_path(restore_root)
    SetConnections.UpdateGlobal('rbdir',
                                restore_root.append_path("rdiff-backup-data"))
    if not Globals.rbdir.isdir():
        Log.FatalError("Unable to read rdiff-backup-data directory %s"
                       % Globals.rbdir.path)

    from_datadir = tuple(pathcomps[i:])
    if not from_datadir or from_datadir[0] != "rdiff-backup-data":
        restore_index = from_datadir  # in mirror, not increments
    else:
        assert (from_datadir[1] == "increments" or
                (len(from_datadir) == 2 and
                 from_datadir[1].startswith('increments'))), from_datadir
        restore_index = from_datadir[2:]
    restore_root_set = 1
    return 1

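# Minimal standalone sketch (a simplification, not part of the original
# module: plain path-component lists instead of RPath objects, and no
# connection or restrict-path handling) of the prefix walk that
# restore_set_root() performs above.
def _example_split_restore_path(pathcomps, contains_data_dir):
    """contains_data_dir(prefix) says whether that prefix holds rdiff-backup-data

    Returns (mirror_root_components, index_components), or None if no
    repository is found anywhere along the path.
    """
    i = len(pathcomps)
    while i >= 1:
        if contains_data_dir(pathcomps[:i]):
            # Everything up to here is the mirror root; the rest is the index.
            return pathcomps[:i], tuple(pathcomps[i:])
        i -= 1
    return None
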
def ListChangedSince(rp):
    """List all the files under rp that have changed since restoretime"""
    rp = require_root_set(rp, 1)
    try:
        rest_time = Time.genstrtotime(restore_timestr)
    except Time.TimeException, exc:
        Log.FatalError(str(exc))
    mirror_rp = restore_root.new_index(restore_index)
    inc_rp = mirror_rp.append_path("increments", restore_index)
    for rorp in rp.conn.restore.ListChangedSince(mirror_rp, inc_rp, rest_time):
        # This is a hack, see restore.ListChangedSince for rationale
        print rorp.index[0]

def Restore(src_rp, dest_rp, restore_as_of=None):
    """Main restoring function

    Here src_rp should be the source file (either an increment or
    mirror file), dest_rp should be the target rp to be written.

    """
    if not restore_root_set and not restore_set_root(src_rp):
        Log.FatalError("Could not find rdiff-backup repository at "
                       + src_rp.path)
    restore_check_paths(src_rp, dest_rp, restore_as_of)
    try:
        dest_rp.conn.fs_abilities.restore_set_globals(dest_rp)
    except IOError, exc:
        if exc.errno == errno.EACCES:
            print "\n"
            Log.FatalError("Could not begin restore due to\n%s" % exc)
        else:
            raise

def Verify(dest_rp, verify_time=None):
    """Check the hashes of the regular files against mirror_metadata"""
    global return_val
    dest_rp = require_root_set(dest_rp, 1)
    if not verify_time:
        try:
            verify_time = Time.genstrtotime(restore_timestr)
        except Time.TimeException, exc:
            Log.FatalError(str(exc))
    mirror_rp = restore_root.new_index(restore_index)
    inc_rp = Globals.rbdir.append_path("increments", restore_index)
    return_val = dest_rp.conn.compare.Verify(mirror_rp, inc_rp, verify_time)

def backup_set_rbdir(rpin, rpout):
    """Initialize data dir and logging"""
    global incdir
    try:
        incdir = Globals.rbdir.append_path("increments")
    except IOError, exc:
        if exc.errno == errno.EACCES:
            print "\n"
            Log.FatalError("Could not begin backup due to\n%s" % exc)
        else:
            raise

def backup_check_dirs(rpin, rpout):
    """Make sure in and out dirs exist and are directories"""
    if rpout.lstat() and not rpout.isdir():
        if not force:
            Log.FatalError("Destination %s exists and is not a "
                           "directory" % rpout.path)
        else:
            Log("Deleting %s" % rpout.path, 3)
            rpout.delete()
    if not rpout.lstat():
        try:
            if create_full_path:
                rpout.makedirs()
            else:
                rpout.mkdir()
        except os.error:
            Log.FatalError("Unable to create directory %s" % rpout.path)

    if not rpin.lstat():
        Log.FatalError("Source directory %s does not exist" % rpin.path)
    elif not rpin.isdir():
        Log.FatalError("Source %s is not a directory" % rpin.path)
    Globals.rbdir = rpout.append_path("rdiff-backup-data")

def backup_final_init(rpout):
    """Open the backup log and the error log, create increments dir"""
    global prevtime, incdir
    if Log.verbosity > 0:
        Log.open_logfile(Globals.rbdir.append("backup.log"))
    checkdest_if_necessary(rpout)
    prevtime = backup_get_mirrortime()
    if prevtime >= Time.curtime:
        Log.FatalError("""Time of last backup is not in the past.  This is probably caused
by running two backups in less than a second.  Wait a second and try again.""")
    ErrorLog.open(Time.curtimestr, compress=Globals.compression)
    if not incdir.lstat():
        incdir.mkdir()

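# Rationale note (an assumption based on the one-second resolution of the
# session timestamps seen elsewhere in this module, e.g. the increment names):
# two backups started within the same second would get identical session
# times, which is why a mirror time that is not strictly in the past aborts
# the backup here.
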
def error_check_Main(arglist):
    """Run Main on arglist, suppressing stack trace for routine errors"""
    try:
        Main(arglist)
    except SystemExit:
        raise
    except (Exception, KeyboardInterrupt), exc:
        errmsg = robust.is_routine_fatal(exc)
        if errmsg:
            Log.exception(2, 6)
            Log.FatalError(errmsg)
        else:
            Log.exception(2, 2)
            raise

def rot_check_time(time_string):
    """Check remove-older-than time_string, return time in seconds"""
    try:
        time = Time.genstrtotime(time_string)
    except Time.TimeException, exc:
        Log.FatalError(str(exc))

    times_in_secs = [inc.getinctime() for inc in
                     restore.get_inclist(Globals.rbdir.append_path("increments"))]
    times_in_secs = filter(lambda t: t < time, times_in_secs)
    if not times_in_secs:
        Log("No increments older than %s found, exiting."
            % (Time.timetopretty(time),), 3)
        return None

    times_in_secs.sort()
    inc_pretty_time = "\n".join(map(Time.timetopretty, times_in_secs))
    if len(times_in_secs) > 1 and not force:
        Log.FatalError("Found %d relevant increments, dated:\n%s"
                       "\nIf you want to delete multiple increments in this way, "
                       "use the --force option."
                       % (len(times_in_secs), inc_pretty_time))
    if len(times_in_secs) == 1:
        Log("Deleting increment at time:\n" + inc_pretty_time, 3)
    else:
        Log("Deleting increments at times:\n" + inc_pretty_time, 3)
    return times_in_secs[-1] + 1  # make sure we don't delete current increment

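# Worked example (hypothetical times, in seconds): if the increments older
# than the requested cutoff are dated 100, 200 and 300, rot_check_time()
# returns 301, one second past the newest of them, so a strictly-less-than
# comparison downstream still removes the increment dated 300 without
# touching anything newer, such as the current mirror.
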
def check_connection_version(conn, remote_cmd):
    """Log warning if connection has different version"""
    try:
        remote_version = conn.Globals.get('version')
    except connection.ConnectionError, exception:
        Log.FatalError("""%s

Couldn't start up the remote connection by executing

    %s

Remember that, under the default settings, rdiff-backup must be
installed in the PATH on the remote system.  See the man page for more
information on this.  This message may also be displayed if the remote
version of rdiff-backup is quite different from the local version (%s)."""
                       % (exception, remote_cmd, Globals.version))
    except OverflowError, exc:
        Log.FatalError("""Integer overflow while attempting to establish the
remote connection by executing

    %s

Please make sure that nothing is printed (e.g., by your login shell) when this
command executes.  Try running this command:

    %s

which should only print out the text: rdiff-backup <version>"""
                       % (remote_cmd, remote_cmd.replace("--server", "--version")))

    if remote_version != Globals.version:
        Log("Warning: Local version %s does not match remote version %s."
            % (Globals.version, remote_version), 2)

def checkdest_need_check(dest_rp):
    """Return None if no dest dir found, 1 if dest dir needs check, 0 o/w"""
    if not dest_rp.isdir() or not Globals.rbdir.isdir():
        return None
    for filename in Globals.rbdir.listdir():
        if filename not in ['chars_to_quote', 'special_escapes', 'backup.log']:
            break
    else:  # This may happen the first backup just after we test for quoting
        return None
    curmirroot = Globals.rbdir.append("current_mirror")
    curmir_incs = restore.get_inclist(curmirroot)
    if not curmir_incs:
        Log.FatalError("""Bad rdiff-backup-data dir on destination side

The rdiff-backup data directory
%s
exists, but we cannot find a valid current_mirror marker.  You can
avoid this message by removing the rdiff-backup-data directory;
however any data in it will be lost.

Probably this error was caused because the first rdiff-backup session
into a new directory failed.  If this is the case it is safe to delete
the rdiff-backup-data directory because there is no important
information in it.

""" % (Globals.rbdir.path,))
    elif len(curmir_incs) == 1:
        return 0
    else:
        if not force:
            try:
                curmir_incs[0].conn.regress.check_pids(curmir_incs)
            except (OSError, IOError), exc:
                Log.FatalError("Could not check if rdiff-backup is currently "
                               "running due to\n%s" % exc)
        assert len(curmir_incs) == 2, "Found too many current_mirror incs!"
        return 1

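# Summary of the states distinguished above (a restatement of
# checkdest_need_check(), no new behavior): a missing or effectively empty
# data dir yields None ("nothing to check"), no current_mirror marker at all
# is reported as a bad data dir, exactly one marker yields 0 ("last session
# finished cleanly"), two markers yield 1 ("interrupted session, destination
# needs regressing"), and more than two fails the assertion.
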
def checkdest_if_necessary(dest_rp):
    """Check the destination dir if necessary.

    This can/should be run before an incremental backup.

    """
    need_check = checkdest_need_check(dest_rp)
    if need_check == 1:
        Log("Previous backup seems to have failed, regressing "
            "destination now.", 2)
        try:
            dest_rp.conn.regress.Regress(dest_rp)
        except Security.Violation:
            Log.FatalError("Security violation while attempting to regress "
                           "destination, perhaps due to --restrict-read-only "
                           "or --restrict-update-only.")

def init_connection_routing(conn, conn_number, remote_cmd):
    """Called by init_connection, establish routing, conn dict"""

def fill_schema(host_info):
    """Fills host_info into the schema and returns remote command"""
    try:
        return __cmd_schema % host_info
    except TypeError:
        Log.FatalError("Invalid remote schema:\n\n%s\n" % __cmd_schema)

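# Hedged usage sketch (the schema and host string below are made-up examples,
# not necessarily this module's actual defaults): fill_schema() substitutes
# the single %s slot of the schema with the host description to build the
# remote command.
#
#   "ssh -C %s rdiff-backup --server" % "user@backup.example.net"
#   ==> "ssh -C user@backup.example.net rdiff-backup --server"
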
            Globals.server = 1
        elif opt == "--ssh-no-compression":
            Globals.set('ssh_compression', None)
        elif opt == "--tempdir":
            tempfile.tempdir = arg
        elif opt == "--terminal-verbosity":
            Log.setterm_verbosity(arg)
        elif opt == "--test-server":
            action = "test-server"
        elif opt == "--use-compatible-timestamps":
            Globals.set("use_compatible_timestamps", 1)
        elif opt == "--user-mapping-file":
            user_mapping_filename = arg
        elif opt == "-v" or opt == "--verbosity":
            Log.setverbosity(arg)
        elif opt == "--verify":
            action, restore_timestr = "verify", "now"
        elif opt == "--verify-at-time":
            action, restore_timestr = "verify", arg
        elif opt == "-V" or opt == "--version":
            print "rdiff-backup " + Globals.version
            sys.exit(0)
        else:
            Log.FatalError("Unknown option %s" % opt)
    Log("Using rdiff-backup version %s" % (Globals.version), 4)


def check_action():
    """Check to make sure action is compatible with args"""
    global action
    arg_action_dict = {0: ['server'],
                       1: ['list-increments', 'list-increment-sizes',
                           'remove-older-than', 'list-at-time',
                           'list-changed-since', 'check-destination-dir',
                           'verify'],
                       2: ['backup', 'restore', 'restore-as-of', 'compare',
                           'compare-hash', 'compare-full']}
    l = len(args)
    if l == 0 and action not in arg_action_dict[l]:
        commandline_error("No arguments given")

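# Reading of arg_action_dict above (restatement only; further checks of this
# table are truncated here): with zero positional arguments only the server
# action is valid, the one-argument entries are the listing and maintenance
# actions, and the two-argument entries are backup, restore/restore-as-of and
# the compare variants.
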
def commandline_error(message):
    Log.FatalError(message + "\nSee the rdiff-backup manual page for "
                   "more information.")

def rot_require_rbdir_base(rootrp):
    """Make sure pointing to base of rdiff-backup dir"""
    if restore_index != ():
        Log.FatalError("Increments for directory %s cannot be removed "
                       "separately.\nInstead run on entire directory %s."
                       % (rootrp.path, restore_root.path))

def sel_fl(filename):
    """Helper function for including/excluding filelists below"""
    try:
        return open(filename, "r")
    except IOError:
        Log.FatalError("Error opening file %s" % filename)
