Exemplo n.º 1
0
    def _fix_failed_initial_backup(self):
        """
        Clean up the remains of an interrupted initial backup.

        Wiping the rdiff-backup-data content is faster than attempting a
        regression, which would most likely fail anyway for an initial
        backup.
        """
        log.Log(
            "Found interrupted initial backup in data directory {dd}. "
            "Removing...".format(dd=self.data_dir), log.NOTE)
        data_entries = self.data_dir.listdir()

        # handle the increments directory before everything else
        if b'increments' in data_entries:
            data_entries.remove(b'increments')
            incs_rp = self.data_dir.append(b'increments')
            # FIXME I don't really understand the logic here: either it's
            # a failed initial backup and we can remove everything, or we
            # should fail and not continue.
            try:
                incs_rp.conn.rpath.delete_dir_no_files(incs_rp)
            except rpath.RPathException:
                log.Log("Increments dir contains files", log.INFO)
                return
            except Security.Violation:
                log.Log("Server doesn't support resuming", log.WARNING)
                return

        # remove every remaining plain file, leaving sub-directories alone
        for entry_name in data_entries:
            entry_rp = self.data_dir.append_path(entry_name)
            if not entry_rp.isdir():
                entry_rp.delete()
Exemplo n.º 2
0
    def get_metas_at_time(self, time, restrict_index=None):
        """
        Return combined metadata iter with all available metadata info

        Returns None (after a warning) when the main mirror metadata file
        is missing, in which case metadata must be read from the
        filesystem instead.
        """
        main_iter = self._get_meta_main_at_time(time, restrict_index)
        if not main_iter:
            log.Log(
                "Could not find mirror metadata file. "
                "Metadata will be read from filesystem instead", log.WARNING)
            return None

        # collect an iterator for each active secondary meta class,
        # falling back to an empty iterator when its file is missing
        secondary_iters = []
        for meta_class in get_meta_list()[1:]:
            if not meta_class.is_active():
                continue
            found_iter = self._iter_helper(time, restrict_index, meta_class)
            if found_iter:
                secondary_iters.append((meta_class, found_iter))
            else:
                log.Log(
                    "{md} file not found".format(md=meta_class.get_desc()),
                    log.WARNING)
                secondary_iters.append((meta_class, iter([])))

        # merge every secondary iterator into the main one
        for meta_class, found_iter in secondary_iters:
            main_iter = meta_class.join_iter(main_iter, found_iter)

        return main_iter
Exemplo n.º 3
0
def restore_set_globals(rpout):
    """Set fsa related globals for restore session, given in/out rps"""
    assert rpout.conn is Globals.local_connection, (
        "Action only foreseen locally and not over {conn}.".format(
            conn=rpout.conn))
    # probe the repository (read-only) and the restore target abilities
    repo_fsa = Globals.rbdir.conn.fs_abilities.get_readonly_fsa(
        'rdiff-backup repository', Globals.rbdir)
    log.Log(str(repo_fsa), log.INFO)
    target_fsa = FSAbilities('restore target', rpout)
    log.Log(str(target_fsa), log.INFO)

    # apply every restore-relevant setting derived from the two fsa probes
    settings = RestoreSetGlobals(
        Globals.rbdir.conn, rpout.conn, repo_fsa, target_fsa)
    settings.set_eas()
    settings.set_acls()
    settings.set_win_acls()
    settings.set_resource_forks()
    settings.set_carbonfile()
    settings.set_hardlinks()
    # No need to fsync anything when restoring
    settings.set_change_ownership()
    settings.set_high_perms()
    settings.set_symlink_perms()
    settings.set_chars_to_quote(Globals.rbdir)
    settings.set_special_escapes(Globals.rbdir)
    settings.set_compatible_timestamps()
Exemplo n.º 4
0
    def _create(self):
        """
        Create the base location, then the rdiff-backup-data and
        increments sub-directories.

        Returns True on success, False when any directory could not be
        created (an error is logged in that case).
        """
        if not super()._create():
            return False

        if self.data_dir.lstat():
            # the data directory already exists, check for leftovers of an
            # interrupted initial backup and clean them up
            if self._is_failed_initial_backup():
                self._fix_failed_initial_backup()
        else:
            try:
                self.data_dir.mkdir()
            except OSError as exc:
                log.Log(
                    "Could not create 'rdiff-backup-data' sub-directory "
                    "in base directory '{bd}' due to exception '{ex}'. "
                    "Please fix the access rights and retry.".format(
                        bd=self.base_dir, ex=exc), log.ERROR)
                return False
        if not self.incs_dir.lstat():
            try:
                self.incs_dir.mkdir()
            except OSError as exc:
                log.Log(
                    "Could not create 'increments' sub-directory "
                    "in data directory '{dd}' due to exception '{ex}'. "
                    "Please fix the access rights and retry.".format(
                        dd=self.data_dir, ex=exc), log.ERROR)
                return False

        return True
Exemplo n.º 5
0
    def pre_check(self):
        """
        Validate that the values given look correct.

        This method isn't meant to try to access any file system and even less
        a remote location, it is really only meant to validate the values
        beyond what argparse can do.
        Return 0 if everything looked good, else an error code.

        Try to check everything before returning and not force the user to fix
        their entries step by step.
        """
        ret_code = 0

        # the parsed action must match the class handling it
        if self.values.action != self.name:
            log.Log(
                "Action value '{av}' doesn't fit name of action class "
                "'{ac}'.".format(av=self.values.action, ac=self.name),
                log.ERROR)
            ret_code |= 1

        # a given temporary directory must actually exist
        tempdir = self.values.tempdir
        if tempdir and not os.path.isdir(tempdir):
            log.Log(
                "Temporary directory '{td}' doesn't exist.".format(
                    td=tempdir), log.ERROR)
            ret_code |= 1

        # locations can only be handled by actions with a security class
        if (self.security is None and "locations" in self.values
                and self.values.locations):
            log.Log(
                "Action '{ac}' must have a security class to handle "
                "locations".format(ac=self.name), log.ERROR)
            ret_code |= 1

        return ret_code
Exemplo n.º 6
0
 def _detect_hardlinks(self, testdir):
     """
     Probe the filesystem and set self.hardlinks accordingly.

     None means the test was skipped, False that hard links aren't
     supported (they will be copied instead), True that they work.
     """
     if not Globals.preserve_hardlinks:
         log.Log(
             "Hard linking test skipped as rdiff-backup was started "
             "with --no-hard-links option", log.INFO)
         self.hardlinks = None
         return
     link_src = testdir.append("hardlinked_file1")
     link_subdir = testdir.append("hl")
     link_subdir.mkdir()
     link_dst = link_subdir.append("hardlinked_file2")
     link_src.touch()
     try:
         link_dst.hardlink(link_src.path)
         # both paths must resolve to the very same inode
         if link_src.getinode() != link_dst.getinode():
             raise OSError(errno.EOPNOTSUPP, "Hard links don't compare")
     except (OSError, AttributeError):
         log.Log(
             "Hard linking not supported by filesystem at "
             "path {pa}, hard links will be copied instead".format(
                 pa=self.root_rp), log.NOTE)
         self.hardlinks = False
     else:
         self.hardlinks = True
Exemplo n.º 7
0
    def check(self):
        """
        Validate the restore parameters.

        Gathers as many potential issues as possible before returning the
        combined error code (0 when everything is fine); also derives the
        implied --increment / --at defaults from the restore type.
        """
        ret_code = super().check()

        # the discovered restore type and the given options must agree
        restore_type = self.source.restore_type
        if restore_type == "inc":
            if self.values.at:
                log.Log(
                    "You can't give an increment file and a time to "
                    "restore at the same time.", log.ERROR)
                ret_code |= 1
            elif not self.values.increment:
                self.values.increment = True
        elif restore_type in ("base", "subdir"):
            if self.values.increment:
                log.Log(
                    "You can't use the --increment option and _not_ "
                    "give an increment file", log.ERROR)
                ret_code |= 1
            elif not self.values.at:
                self.values.at = "now"

        # also let source directory and target repository validate themselves
        ret_code |= self.source.check()
        ret_code |= self.target.check()

        return ret_code
Exemplo n.º 8
0
    def set_special_escapes(self, repo):
        """
        Set escape_dos_devices and escape_trailing_spaces from
        rdiff-backup-data dir, just like chars_to_quote
        """
        stored_escapes = repo.get_special_escapes()
        if stored_escapes is None:
            # nothing stored yet, derive the settings from the fs abilities
            if getattr(self, "src_fsa", None) is None:
                # Single filesystem operation
                actual_edd = self.dest_fsa.escape_dos_devices
                actual_ets = self.dest_fsa.escape_trailing_spaces
            else:
                # escape only when source needs it but destination doesn't
                actual_edd = (self.src_fsa.escape_dos_devices
                              and not self.dest_fsa.escape_dos_devices)
                actual_ets = (self.src_fsa.escape_trailing_spaces
                              and not self.dest_fsa.escape_trailing_spaces)
        else:
            actual_edd = ("escape_dos_devices" in stored_escapes)
            actual_ets = ("escape_trailing_spaces" in stored_escapes)

        Globals.set_all('escape_dos_devices', actual_edd)
        log.Log("Backup: escape_dos_devices = {dd}".format(dd=actual_edd),
                log.INFO)

        Globals.set_all('escape_trailing_spaces', actual_ets)
        log.Log("Backup: escape_trailing_spaces = {ts}".format(ts=actual_ets),
                log.INFO)
Exemplo n.º 9
0
    def setup(self):
        """
        Prepare the restore session.

        Sets up the parent class, source and target, the fs-ability
        globals, quoting, user/group mapping and logging, then determines
        the restore time and the selections.
        Returns 0 on success, else the first non-zero error code.
        """
        # in setup we return as soon as we detect an issue to avoid changing
        # too much
        return_code = super().setup()
        if return_code != 0:
            return return_code

        return_code = self.source.setup()
        if return_code != 0:
            return return_code

        return_code = self.target.setup()
        if return_code != 0:
            return return_code

        # TODO validate how much of the following lines and methods
        # should go into the directory/repository modules
        try:
            self.target.base_dir.conn.fs_abilities.restore_set_globals(
                self.target.base_dir)
        except OSError as exc:
            log.Log(
                "Could not begin restore due to exception '{ex}'".format(
                    ex=exc), log.ERROR)
            return 1
        self.source.init_quoting(self.values.chars_to_quote)
        self._init_user_group_mapping(self.target.base_dir.conn)
        if log.Log.verbosity > 0:
            try:  # the source repository could be read-only
                log.Log.open_logfile(
                    self.source.data_dir.append("restore.log"))
            except (log.LoggerError, Security.Violation) as exc:
                # failing to open the logfile is only worth a warning,
                # not an abort of the whole restore
                log.Log(
                    "Unable to open logfile due to exception '{ex}'".format(
                        ex=exc), log.WARNING)

        # we need now to identify the actual time of restore
        self.inc_rpath = self.source.data_dir.append_path(
            b'increments', self.source.restore_index)
        if self.values.at:
            # an explicit time string was given, parse it relative to the
            # increments directory
            self.action_time = self._get_parsed_time(self.values.at,
                                                     ref_rp=self.inc_rpath)
            if self.action_time is None:
                return 1
        elif self.values.increment:
            # the restore time is implied by the given increment file itself
            self.action_time = self.source.orig_path.getinctime()
        else:  # this should have been catched in the check method
            log.Log(
                "This shouldn't happen but neither restore time nor "
                "an increment have been identified so far", log.ERROR)
            return 1
        (select_opts, select_data) = selection.get_prepared_selections(
            self.values.selections)
        # We must set both sides because restore filtering is different from
        # select filtering.  For instance, if a file is excluded it should
        # not be deleted from the target directory.
        self.source.set_select(select_opts, select_data, self.target.base_dir)
        self.target.set_select(select_opts, select_data)

        return 0  # all is good
Exemplo n.º 10
0
    def run(self):
        """
        Execute the restore; return 0 on success, else 1.
        """
        # This is more a check than a part of run, but because backup does
        # the regress in the run section, we also do the check here...
        if self.source.needs_regress():
            # source could be read-only, so we don't try to regress it
            log.Log(
                "Previous backup to {rp} seems to have failed. "
                "Use rdiff-backup to 'regress' first the failed backup, "
                "then try again to restore".format(rp=self.source.base_dir),
                log.ERROR)
            return 1
        try:
            if Globals.get_api_version() >= 201:
                self._operate_restore()
            else:  # compat200
                restore.Restore(
                    self.source.base_dir.new_index(self.source.restore_index),
                    self.inc_rpath, self.target.base_dir, self.action_time)
        except OSError as exc:
            log.Log(
                "Could not complete restore due to exception '{ex}'".format(
                    ex=exc), log.ERROR)
            return 1
        log.Log("Restore successfully finished", log.INFO)
        return 0
Exemplo n.º 11
0
    def set_special_escapes(self, rbdir):
        """Set escape_dos_devices and escape_trailing_spaces from
        rdiff-backup-data dir, just like chars_to_quote"""
        escapes_rp = rbdir.append("special_escapes")
        if escapes_rp.lstat():
            # the file lists the escapes in effect, one per line
            stored_escapes = escapes_rp.get_string().split("\n")
            actual_edd = ("escape_dos_devices" in stored_escapes)
            actual_ets = ("escape_trailing_spaces" in stored_escapes)
        else:
            log.Log(
                "The special escapes file '{ef}' was not found, "
                "will assume need to escape DOS devices and trailing "
                "spaces based on file systems".format(ef=escapes_rp),
                log.WARNING)
            if getattr(self, "src_fsa", None) is None:
                # Single filesystem operation
                actual_edd = self.dest_fsa.escape_dos_devices
                actual_ets = self.dest_fsa.escape_trailing_spaces
            else:
                # escape only when source needs it but destination doesn't
                actual_edd = (self.src_fsa.escape_dos_devices
                              and not self.dest_fsa.escape_dos_devices)
                actual_ets = (self.src_fsa.escape_trailing_spaces
                              and not self.dest_fsa.escape_trailing_spaces)

        Globals.set_all('escape_dos_devices', actual_edd)
        log.Log("Backup: escape_dos_devices = {dd}".format(dd=actual_edd),
                log.INFO)

        Globals.set_all('escape_trailing_spaces', actual_ets)
        log.Log("Backup: escape_trailing_spaces = {ts}".format(ts=actual_ets),
                log.INFO)
Exemplo n.º 12
0
 def end_process_directory(self):
     """Finish processing a directory"""
     regress_file = self.rf
     meta_rorp = regress_file.metadata_rorp
     mirror = regress_file.mirror_rp
     if meta_rorp.isdir():
         if mirror.isdir():
             # both sides are directories, only attributes may differ
             mirror.setdata()
             if not meta_rorp.equal_loose(mirror):
                 log.Log("Regressing attributes of path {pa}".format(
                     pa=regress_file), log.INFO)
                 rpath.copy_attribs(meta_rorp, mirror)
         else:
             # mirror is a non-directory, recreate the dir from metadata
             mirror.delete()
             log.Log("Regressing file {fi}".format(fi=mirror), log.INFO)
             rpath.copy_with_attribs(meta_rorp, mirror)
     else:  # replacing a dir with some other kind of file
         assert mirror.isdir(), (
             "Mirror '{mrp!r}' can only be a directory.".format(mrp=mirror))
         log.Log("Replacing directory {di}".format(di=regress_file),
                 log.INFO)
         if meta_rorp.isreg():
             self._restore_orig_regfile(regress_file)
         else:
             mirror.delete()
             rpath.copy_with_attribs(meta_rorp, mirror)
     if regress_file.regress_inc:
         log.Log("Deleting increment {ic}".format(ic=regress_file), log.INFO)
         regress_file.regress_inc.delete()
Exemplo n.º 13
0
def backup_set_globals(rpin, force):
    """Given rps for source filesystem and repository, set fsa globals

    This should be run on the destination connection, because we may
    need to write a new chars_to_quote file.

    """
    assert Globals.rbdir.conn is Globals.local_connection, (
        "Action only foreseen locally and not over {conn}.".format(
            conn=Globals.rbdir.conn))
    # probe the (read-only) source and the writable repository abilities
    source_fsa = rpin.conn.fs_abilities.get_readonly_fsa('source', rpin)
    log.Log(str(source_fsa), log.INFO)
    repo_fsa = FSAbilities('destination', Globals.rbdir)
    log.Log(str(repo_fsa), log.INFO)

    # apply every backup-relevant setting derived from the two fsa probes
    settings = BackupSetGlobals(
        rpin.conn, Globals.rbdir.conn, source_fsa, repo_fsa)
    settings.set_eas()
    settings.set_acls()
    settings.set_win_acls()
    settings.set_resource_forks()
    settings.set_carbonfile()
    settings.set_hardlinks()
    settings.set_fsync_directories()
    settings.set_change_ownership()
    settings.set_high_perms()
    settings.set_symlink_perms()
    settings.set_chars_to_quote(Globals.rbdir)
    settings.set_special_escapes(Globals.rbdir)
    settings.set_compatible_timestamps()
Exemplo n.º 14
0
 def _clear_rp(self, rp):
     """Delete all the extended attributes in rpath"""
     try:
         for attr_name in xattr.list(rp.path, rp.issym()):
             try:
                 xattr.remove(rp.path, attr_name, rp.issym())
             except PermissionError:  # errno.EACCES
                 # SELinux attributes cannot be removed, and we don't want
                 # to bail out or be too noisy at low log levels.
                 log.Log(
                     "Not allowed to remove extended attribute "
                     "{ea} from path {pa}".format(ea=attr_name, pa=rp),
                     log.DEBUG)
                 continue
             except OSError as exc:
                 # can happen because trusted.SGI_ACL_FILE is deleted
                 # together with system.posix_acl_access on XFS file systems.
                 if exc.errno != errno.ENODATA:
                     raise  # anything else is a real failure
     except io.UnsupportedOperation:  # errno.EOPNOTSUPP or errno.EPERM
         return  # if not supported, consider empty
     except FileNotFoundError as exc:
         log.Log(
             "Unable to clear extended attributes on path {pa} due to "
             "exception '{ex}', ignoring".format(pa=rp, ex=exc), log.NOTE)
         return
Exemplo n.º 15
0
    def _print_reports(self, report_iter, parsable=False):
        """
        Given an iter of CompareReport objects, print them to screen.

        Output a list in YAML format if parsable is True.
        """
        assert not Globals.server, "This function shouldn't run as server."
        yaml_entries = []
        changed_count = 0
        for report in report_iter:
            changed_count += 1
            # an empty index stands for the repository root itself
            index_str = (b"/".join(report.index or ()) or b".").decode(
                errors="replace")
            if parsable:
                yaml_entries.append({
                    "reason": report.reason,
                    "path": index_str
                })
            else:
                print("{rr}: {ip}".format(rr=report.reason, ip=index_str))

        if parsable:
            print(
                yaml.safe_dump(yaml_entries,
                               explicit_start=True,
                               explicit_end=True))
        if changed_count:
            log.Log(
                "Directory has {fd} file differences to backup".format(
                    fd=changed_count), log.WARNING)
            return 1
        else:
            log.Log("No changes found. Directory matches backup data",
                    log.NOTE)
            return 0
Exemplo n.º 16
0
def _files_rorp_eq(src_rorp,
                   dest_rorp,
                   compare_hardlinks=True,
                   compare_ownership=False,
                   compare_eas=False,
                   compare_acls=False):
    """Combined eq func returns true if two files compare same"""
    # both sides must exist at all
    if not src_rorp:
        log.Log("Source rorp missing: %s" % str(dest_rorp), 3)
        return False
    if not dest_rorp:
        log.Log("Dest rorp missing: %s" % str(src_rorp), 3)
        return False
    # basic attribute comparison, optionally including ownership
    if not src_rorp._equal_verbose(dest_rorp,
                                   compare_ownership=compare_ownership):
        return False
    if compare_hardlinks and not _hardlink_rorp_eq(src_rorp, dest_rorp):
        return False
    # optional extended-attribute and ACL comparisons, logged on mismatch
    extra_checks = (
        (compare_eas, _ea_compare_rps, "Different EAs in files %s and %s"),
        (compare_acls, _acl_compare_rps, "Different ACLs in files %s and %s"),
    )
    for enabled, check_func, message in extra_checks:
        if enabled and not check_func(src_rorp, dest_rorp):
            log.Log(
                message %
                (src_rorp.get_indexpath(), dest_rorp.get_indexpath()), 3)
            return False
    return True
Exemplo n.º 17
0
 def _operate_regress(self, try_regress=True):
     """
     Check the given repository and regress it if necessary

     Returns 0 when no regression was needed or it succeeded, 1 when it
     was needed but couldn't (or wasn't allowed to) be done.
     """
     compat200 = Globals.get_api_version() < 201
     if compat200:
         regress_needed = self.repo.needs_regress_compat200()
     else:
         regress_needed = self.repo.needs_regress()

     if not regress_needed:
         log.Log("Given repository doesn't need to be regressed",
                 log.NOTE)
         return 0  # all is good

     if not try_regress:
         return 1
     log.Log("Previous backup seems to have failed, regressing "
             "destination now", log.WARNING)
     if compat200:
         try:
             self.repo.base_dir.conn.regress.Regress(self.repo.base_dir)
             return 0
         except Security.Violation:
             log.Log(
                 "Security violation while attempting to regress "
                 "destination, perhaps due to --restrict-read-only or "
                 "--restrict-update-only", log.ERROR)
             return 1
     else:
         return self.repo.regress()
Exemplo n.º 18
0
    def _detect_acls(self, rp):
        """
        Set self.acls based on rp.

        Does not write. Needs to be local

        self.acls becomes None when the test was skipped, False when
        POSIX ACLs aren't usable, True when they are.
        """
        if not Globals.acls_active:
            log.Log(
                "POSIX ACLs test skipped as rdiff-backup was started "
                "with --no-acls option", log.INFO)
            self.acls = None
            return

        # pylibacl is an optional dependency, probe for it first
        try:
            import posix1e
        except ImportError:
            log.Log(
                "Unable to import module posix1e from pylibacl package. "
                "POSIX ACLs not supported on filesystem at "
                "path {pa}".format(pa=rp), log.INFO)
            self.acls = False
            return

        # reading an ACL from the path tells whether the fs supports them
        try:
            posix1e.ACL(file=rp.path)
        except OSError as exc:
            log.Log(
                "POSIX ACLs not supported by filesystem at path {pa} "
                "due to exception '{ex}'".format(pa=rp, ex=exc), log.INFO)
            self.acls = False
        else:
            self.acls = True
Exemplo n.º 19
0
def get_acl_lists_from_rp(rp):
    """Returns (acl_list, def_acl_list) from an rpath.  Call locally"""
    assert rp.conn is Globals.local_connection, (
        "Get ACLs of path should only be done locally not over {conn}.".format(
            conn=rp.conn))
    try:
        acl = posix1e.ACL(file=rp.path)
    except (FileNotFoundError, UnicodeEncodeError) as exc:
        log.Log(
            "Unable to read ACL from path {pa} due to exception '{ex}'".format(
                pa=rp, ex=exc), log.NOTE)
        acl = None
    except OSError as exc:
        # a filesystem without ACL support simply yields no ACL
        if exc.errno != errno.EOPNOTSUPP:
            raise
        acl = None
    if rp.isdir():
        # only directories can carry a default ACL
        try:
            def_acl = posix1e.ACL(filedef=os.fsdecode(rp.path))
        except (FileNotFoundError, UnicodeEncodeError) as exc:
            log.Log(
                "Unable to read default ACL from path {pa} due to "
                "exception '{ex}'".format(pa=rp, ex=exc), log.NOTE)
            def_acl = None
        except OSError as exc:
            if exc.errno != errno.EOPNOTSUPP:
                raise
            def_acl = None
    else:
        def_acl = None
    return (acl and _acl_to_list(acl), def_acl and _acl_to_list(def_acl))
Exemplo n.º 20
0
    def _detect_win_acls(self, dir_rp, write):
        """
        Test if windows access control lists are supported

        Sets self.win_acls to None (test skipped), False (unsupported)
        or True.  When write is true, also verifies that security info
        can be written back, not only read.
        """
        assert dir_rp.conn is Globals.local_connection, (
            "Action only foreseen locally and not over {conn}.".format(
                conn=dir_rp.conn))
        assert dir_rp.lstat(), "Path '{rp}' must exist to test ACLs.".format(
            rp=dir_rp)
        if not Globals.win_acls_active:
            log.Log(
                "Windows ACLs test skipped as rdiff-backup was started "
                "with --no-acls option", log.INFO)
            self.win_acls = None
            return

        # pywin32 is an optional dependency, probe for it first
        try:
            import win32security
            import pywintypes
        except ImportError:
            log.Log(
                "Unable to import win32security module. Windows ACLs not "
                "supported by filesystem at path {pa}".format(pa=dir_rp),
                log.INFO)
            self.win_acls = False
            return
        try:
            # read owner, group and DACL from the directory to prove that
            # the filesystem exposes security information at all
            sd = win32security.GetNamedSecurityInfo(
                os.fsdecode(dir_rp.path), win32security.SE_FILE_OBJECT,
                win32security.OWNER_SECURITY_INFORMATION
                | win32security.GROUP_SECURITY_INFORMATION
                | win32security.DACL_SECURITY_INFORMATION)
            acl = sd.GetSecurityDescriptorDacl()
            acl.GetAceCount()  # to verify that it works
            if write:
                # write the very same security info back to verify that
                # setting ACLs works too
                win32security.SetNamedSecurityInfo(
                    os.fsdecode(dir_rp.path), win32security.SE_FILE_OBJECT,
                    win32security.OWNER_SECURITY_INFORMATION
                    | win32security.GROUP_SECURITY_INFORMATION
                    | win32security.DACL_SECURITY_INFORMATION,
                    sd.GetSecurityDescriptorOwner(),
                    sd.GetSecurityDescriptorGroup(),
                    sd.GetSecurityDescriptorDacl(), None)
        except (OSError, AttributeError, pywintypes.error):
            log.Log(
                "Unable to load a Windows ACL. Windows ACLs not supported "
                "by filesystem at path {pa}".format(pa=dir_rp), log.INFO)
            self.win_acls = False
            return

        try:
            acl_win.init_acls()  # FIXME there should be no cross-dependency
        except (OSError, AttributeError, pywintypes.error):
            log.Log(
                "Unable to init win_acls. Windows ACLs not supported by "
                "filesystem at path {pa}".format(pa=dir_rp), log.INFO)
            self.win_acls = False
            return
        self.win_acls = True
Exemplo n.º 21
0
    def load_from_rp(self, rp, skip_inherit_only=True):
        """
        Load the Windows ACL of the given rpath into this object.

        Reads the security descriptor of rp, optionally drops
        inherit-only ACEs, and stores the result as an SDDL string
        (encoded to bytes) in self.__acl.  On read failure the method
        logs and returns without touching self.__acl; on conversion
        failure it stores an empty value.
        """
        self.index = rp.index

        # Sometimes, we are asked to load from an rpath when ACL's
        # are not supported. Ignore the request in this case.
        if not pywintypes:
            return

        try:
            sd = GetNamedSecurityInfo(os.fsdecode(rp.path), SE_FILE_OBJECT,
                                      ACL.flags)
        except (OSError, pywintypes.error) as exc:
            log.Log(
                "Unable to read ACL from path {pa} due to "
                "exception '{ex}'".format(pa=rp, ex=exc), log.INFO)
            return

        if skip_inherit_only:
            # skip the inherit_only aces
            acl = sd.GetSecurityDescriptorDacl()
            if acl:
                n = acl.GetAceCount()
                # traverse the ACL in reverse, so the indices stay correct
                while n:
                    n -= 1
                    # GetAce returns ((ace_type, ace_flags), ...); the
                    # flags tell whether the ACE is inherit-only
                    ace_flags = acl.GetAce(n)[0][1]
                    if ace_flags & INHERIT_ONLY_ACE:
                        acl.DeleteAce(n)
            sd.SetSecurityDescriptorDacl(1, acl, 0)

            # do the same for the SACL, if we were asked to handle it
            if ACL.flags & SACL_SECURITY_INFORMATION:
                acl = sd.GetSecurityDescriptorSacl()
                if acl:
                    n = acl.GetAceCount()
                    # traverse the ACL in reverse, so the indices stay correct
                    while n:
                        n -= 1
                        ace_flags = acl.GetAce(n)[0][1]
                        if ace_flags & INHERIT_ONLY_ACE:
                            acl.DeleteAce(n)
                    sd.SetSecurityDescriptorSacl(1, acl, 0)

        # mark absent DACL/SACL as "not present" in the descriptor, so the
        # SDDL conversion below doesn't fail on them
        if not sd.GetSecurityDescriptorDacl():
            sd.SetSecurityDescriptorDacl(0, None, 0)
        if (ACL.flags & SACL_SECURITY_INFORMATION
            ) and not sd.GetSecurityDescriptorSacl():
            sd.SetSecurityDescriptorSacl(0, None, 0)

        try:
            self.__acl = ConvertSecurityDescriptorToStringSecurityDescriptor(
                sd, SDDL_REVISION_1, ACL.flags)
        except (OSError, pywintypes.error) as exc:
            log.Log(
                "Unable to convert ACL of path {pa} to string "
                "due to exception '{ex}'".format(pa=rp, ex=exc), log.INFO)
            self.__acl = ''
        self.__acl = os.fsencode(self.__acl)
Exemplo n.º 22
0
    def _compare_ctq_file(self, rbdir, suggested_ctq):
        """
        Compare chars_to_quote previous, enforced and suggested

        Returns the actual quoting to be used

        Precedence is: Globals.chars_to_quote override first, then what
        the repository's chars_to_quote file recorded, then what the
        file system suggests.  A mismatch between a new decision and the
        recorded one is fatal because it would require migrating the
        repository's quoting.
        """
        ctq_rp = rbdir.append(b"chars_to_quote")
        if not ctq_rp.lstat():  # the chars_to_quote file doesn't exist
            if Globals.chars_to_quote is None:
                # no override given, take what the file system suggests
                actual_ctq = suggested_ctq
            else:
                actual_ctq = Globals.chars_to_quote
                if actual_ctq != suggested_ctq:
                    log.Log(
                        "File system at '{fs}' suggested quoting '{sq}' "
                        "but override quoting '{oq}' will be used. "
                        "Assuming you know what you are doing".format(
                            fs=ctq_rp, sq=suggested_ctq, oq=actual_ctq),
                        log.NOTE)
            # persist the decision for the following backups
            ctq_rp.write_bytes(actual_ctq)
            return actual_ctq

        previous_ctq = ctq_rp.get_bytes()

        if Globals.chars_to_quote is None:
            if suggested_ctq and suggested_ctq != previous_ctq:
                # the file system has new specific requirements
                actual_ctq = suggested_ctq
            else:
                actual_ctq = previous_ctq
                if previous_ctq and not suggested_ctq:
                    log.Log(
                        "File system at '{fs}' no longer needs quoting "
                        "but we will retain for backwards "
                        "compatibility".format(fs=ctq_rp), log.NOTE)
        else:
            actual_ctq = Globals.chars_to_quote  # Globals override
            if actual_ctq != suggested_ctq:
                log.Log(
                    "File system at '{fs}' suggested quoting '{sq}' "
                    "but override quoting '{oq}' will be used. "
                    "Assuming you know what you are doing".format(
                        fs=ctq_rp, sq=suggested_ctq, oq=actual_ctq), log.NOTE)

        # the quoting didn't change so all is good
        if actual_ctq == previous_ctq:
            return actual_ctq
        else:
            log.Log.FatalError(
                "The repository quoting '{rq}' would need to be migrated from "
                "old quoting chars '{oq}' to new quoting chars '{nq}'. "
                "This may mean that the repository has been moved between "
                "different file systems.".format(rq=ctq_rp,
                                                 oq=previous_ctq,
                                                 nq=actual_ctq))
Exemplo n.º 23
0
    def _create(self):
        """
        Create the repository's on-disk directory structure.

        Builds on the parent class' creation of the base directory, then
        detects and removes the remains of an interrupted initial backup,
        and finally makes sure the data ('rdiff-backup-data') and
        increments sub-directories exist.

        Returns True on success, False on any condition that should stop
        the backup (existing lockfile without --force, or a failure to
        create one of the sub-directories).
        """
        # create the underlying location/directory
        if not super()._create():
            return False

        if self._is_failed_initial_backup():
            # poor man's locking mechanism to protect starting backup
            # independently from the API version
            self.lockfile.setdata()  # refresh the cached stat information
            if self.lockfile.lstat():
                # lockfile present: another initial backup may still be running
                if self.force:
                    log.Log(
                        "An initial backup in a strange state with "
                        "lockfile {lf}. Enforcing continuation, "
                        "hopefully you know what you're doing".format(
                            lf=self.lockfile), log.WARNING)
                else:
                    log.Log(
                        "An initial backup in a strange state with "
                        "lockfile {lf}. Either it's just an initial backup "
                        "running, wait a bit and try again later, or "
                        "something is really wrong. --force will remove "
                        "the complete repo, at your own risk".format(
                            lf=self.lockfile), log.ERROR)
                    return False
            log.Log(
                "Found interrupted initial backup in data directory {dd}. "
                "Removing...".format(dd=self.data_dir), log.NOTE)
            self._clean_failed_initial_backup()

        # define a few essential subdirectories
        if not self.data_dir.lstat():
            try:
                self.data_dir.mkdir()
            except OSError as exc:
                log.Log(
                    "Could not create 'rdiff-backup-data' sub-directory "
                    "in base directory '{bd}' due to exception '{ex}'. "
                    "Please fix the access rights and retry.".format(
                        bd=self.base_dir, ex=exc), log.ERROR)
                return False
        if not self.incs_dir.lstat():
            try:
                self.incs_dir.mkdir()
            except OSError as exc:
                log.Log(
                    "Could not create 'increments' sub-directory "
                    "in data directory '{dd}' due to exception '{ex}'. "
                    "Please fix the access rights and retry.".format(
                        dd=self.data_dir, ex=exc), log.ERROR)
                return False

        return True
Exemplo n.º 24
0
    def setup(self):
        """
        Prepare the backup action: compression settings, source directory,
        target repository, connections, logging and file selection.

        Returns 0 on success, else the non-zero return code of the first
        failing sub-step (processing stops at the first failure).
        """
        # in setup we return as soon as we detect an issue to avoid changing
        # too much
        return_code = super().setup()
        if return_code != 0:
            return return_code

        return_code = self._set_no_compression_regexp()
        if return_code != 0:
            return return_code

        return_code = self.dir.setup()
        if return_code != 0:
            return return_code

        # user/group mapping options handed over to the target repository
        owners_map = {
            "users_map": self.values.user_mapping_file,
            "groups_map": self.values.group_mapping_file,
            "preserve_num_ids": self.values.preserve_numerical_ids
        }
        return_code = self.repo.setup(self.dir, owners_map=owners_map)
        if return_code != 0:
            return return_code

        # TODO validate how much of the following lines and methods
        # should go into the directory/repository modules
        if Globals.get_api_version() < 201:  # compat200
            SetConnections.BackupInitConnections(self.dir.base_dir.conn,
                                                 self.repo.base_dir.conn)
            self.repo.base_dir.conn.fs_abilities.backup_set_globals(
                self.dir.base_dir, self.values.force)
            self.repo.setup_quoting()

        # refuse to run if the previous backup time is not strictly in the past
        previous_time = self.repo.get_mirror_time()
        if previous_time >= Time.getcurtime():
            log.Log("The last backup is not in the past. Aborting.", log.ERROR)
            return 1
        if log.Log.verbosity > 0:  # a logfile is only useful with verbosity
            try:  # the target repository must be writable
                log.Log.open_logfile(self.repo.data_dir.append("backup.log"))
            except (log.LoggerError, Security.Violation) as exc:
                log.Log("Unable to open logfile due to '{ex}'".format(ex=exc),
                        log.ERROR)
                return 1
        log.ErrorLog.open(Time.getcurtimestr(),
                          compress=self.values.compression)

        # turn the user-provided selection options into the source selection
        (select_opts, select_data) = selection.get_prepared_selections(
            self.values.selections)
        self.dir.set_select(select_opts, select_data)
        self._warn_if_infinite_recursion(self.dir.base_dir, self.repo.base_dir)

        return 0
Exemplo n.º 25
0
    def _compare_ctq_file(self, repo, suggested_ctq):
        """
        Determine which chars_to_quote string to apply to the repository.

        Combines the repository's previously recorded quoting, a possible
        override from Globals and the quoting suggested by the file
        system; returns the chars_to_quote string actually to be used, or
        fails fatally if a quoting migration would be required.
        """
        override_ctq = Globals.chars_to_quote
        previous_ctq = repo.get_chars_to_quote()

        if previous_ctq is None:  # there was no previous chars_to_quote
            if override_ctq is None:
                actual_ctq = suggested_ctq
            else:
                actual_ctq = override_ctq
                if actual_ctq != suggested_ctq:
                    log.Log(
                        "File system at '{fs}' suggested quoting '{sq}' "
                        "but override quoting '{oq}' will be used. "
                        "Assuming you know what you are doing".format(
                            fs=repo, sq=suggested_ctq, oq=actual_ctq),
                        log.NOTE)
            repo.set_chars_to_quote(actual_ctq)
            return actual_ctq

        if override_ctq is not None:
            actual_ctq = override_ctq  # Globals override
            if actual_ctq != suggested_ctq:
                log.Log(
                    "File system at '{fs}' suggested quoting '{sq}' "
                    "but override quoting '{oq}' will be used. "
                    "Assuming you know what you are doing".format(
                        fs=repo, sq=suggested_ctq, oq=actual_ctq), log.NOTE)
        elif suggested_ctq and suggested_ctq != previous_ctq:
            # the file system has new specific requirements
            actual_ctq = suggested_ctq
        else:
            actual_ctq = previous_ctq
            if previous_ctq and not suggested_ctq:
                log.Log(
                    "File system at '{fs}' no longer needs quoting "
                    "but we will retain for backwards "
                    "compatibility".format(fs=repo), log.NOTE)

        if actual_ctq != previous_ctq:
            log.Log.FatalError(
                "The repository quoting '{rq}' would need to be migrated from "
                "old quoting chars '{oq}' to new quoting chars '{nq}'. "
                "This may mean that the repository has been moved between "
                "different file systems, and isn't supported".format(
                    rq=repo, oq=previous_ctq, nq=actual_ctq))
        # the quoting didn't change so all is good
        return actual_ctq
Exemplo n.º 26
0
 def _is_existing(self):
     """
     Validate that the source location is present and is a directory.

     Logs an error and returns False on the first failed check, else
     returns True.
     """
     source = self.base_dir
     if not source.lstat():
         log.Log("Source path {sp} does not exist".format(
             sp=source), log.ERROR)
         return False
     if not source.isdir():
         log.Log("Source path {sp} is not a directory".format(
             sp=source), log.ERROR)
         return False
     return True
Exemplo n.º 27
0
    def write_to_rp(self, rp):
        """
        Apply the stored Windows ACL (an SDDL string) to the path rp.

        Does nothing if no ACL is stored. Conversion or application
        failures are logged at INFO level and abort the write without
        raising.
        """
        if not self.__acl:
            return

        try:
            sd = ConvertStringSecurityDescriptorToSecurityDescriptor(
                os.fsdecode(self.__acl), SDDL_REVISION_1)
        except (OSError, pywintypes.error) as exc:
            log.Log(
                "Unable to convert ACL string '{st!r}' to security "
                "descriptor due to exception '{ex}'".format(st=self.__acl,
                                                            ex=exc), log.INFO)
            # BUGFIX: without this return, 'sd' is unbound below and the
            # logged failure turns into a NameError
            return

        # Enable next block of code for dirs after we have a mechanism in
        # backup.py (and similar) to do a first pass to see if a directory
        # has SE_DACL_PROTECTED. In that case, we will need to
        #       1) dest_rorp.write_win_acl(source_rorp.get_win_acl())
        #               --> And clear existing dest_rorp one while doing so
        #       2) Check if backup user has Admin privs to write dest_rorp
        #               --> May need to use Win32 AccessCheck() API
        #       3) If not, add Admin write privs to dest_rorp and add dir
        #               to dir_perms_list-equivalent
        #       4) THEN, allow the pre_process() function to finish and the
        #               files be copied over. Those files which wish to
        #               will now inherit the correct ACE objects.
        #       5) If dir was on dir_perms_list-equivalent, drop the write
        #               write permission we added.
        #       6) When copy_attribs is called in end_process, make sure
        #               that the write_win_acl() call isn't made this time
        # The reason we will need to do this is because otherwise, the files
        # which are created during step 4 will reference the ACE entries
        # which we clear during step 6. We need to clear them *before* the
        # children files/subdirs are created and generate the appropriate
        # DACL so the inheritance magic can happen during step 4.

        (flags, revision) = sd.GetSecurityDescriptorControl()
        # for files with a protected DACL, clear the existing ACL first
        if (not rp.isdir() and flags & SE_DACL_PROTECTED):
            self._clear_rp(rp)

        try:
            SetNamedSecurityInfo(os.fsdecode(rp.path), SE_FILE_OBJECT,
                                 ACL.flags, sd.GetSecurityDescriptorOwner(),
                                 sd.GetSecurityDescriptorGroup(),
                                 sd.GetSecurityDescriptorDacl(),
                                 (ACL.flags & SACL_SECURITY_INFORMATION)
                                 and sd.GetSecurityDescriptorSacl() or None)
        except (OSError, pywintypes.error) as exc:
            log.Log(
                "Unable to set ACL on path '{pa}' "
                "due to exception '{ex}'".format(pa=rp, ex=exc), log.INFO)
Exemplo n.º 28
0
    def _detect_eas(self, rp, write):
        """
        Set extended attributes from rp. Tests writing if write is true.
        """
        assert rp.conn is Globals.local_connection, (
            "Action only foreseen locally and not over {conn}.".format(
                conn=rp.conn))
        assert rp.lstat(), "Path '{rp}' must exist to test EAs.".format(rp=rp)
        if not Globals.eas_active:
            log.Log(
                "Extended attributes test skipped as rdiff-backup was "
                "started with --no-eas option", log.INFO)
            self.eas = None
            return

        # prefer the pyxattr compatibility layer, fall back to plain xattr
        try:
            import xattr.pyxattr_compat as xattr
        except ImportError:
            try:
                import xattr
            except ImportError:
                log.Log(
                    "Unable to import module (py)xattr. Extended attributes "
                    "not supported on filesystem at path {pa}".format(pa=rp),
                    log.INFO)
                self.eas = False
                return

        probe_value = b"test val"
        try:
            xattr.list(rp.path)
            if write:
                xattr.set(rp.path, b"user.test", probe_value)
                echoed_value = xattr.get(rp.path, b"user.test")
        except OSError as exc:
            log.Log(
                "Extended attributes not supported by filesystem at "
                "path {pa} due to exception '{ex}'".format(pa=rp, ex=exc),
                log.NOTE)
            self.eas = False
            return

        if write and echoed_value != probe_value:
            log.Log(
                "Extended attributes support is broken on filesystem at "
                "path {pa}. Please upgrade the filesystem driver, contact "
                "the developers, or use the --no-eas option to disable "
                "extended attributes support and suppress this "
                "message".format(pa=rp), log.WARNING)
            self.eas = False
        else:
            self.eas = True
Exemplo n.º 29
0
    def set_special_escapes(self, repo):
        """
        Configure escaping of DOS devices and trailing periods/spaces.

        These are escaped like regular filenames, and only when the
        destination needs it while the source does not — otherwise the
        offending names could not have been created in the first place.
        The decision is recorded in the repository so that a volume
        escaped once keeps being escaped even when later handled by an
        OS that would not require it.
        """

        suggested_edd = (self.dest_fsa.escape_dos_devices
                         and not self.src_fsa.escape_dos_devices)
        suggested_ets = (self.dest_fsa.escape_trailing_spaces
                         and not self.src_fsa.escape_trailing_spaces)

        recorded = repo.get_special_escapes()
        if recorded is None:
            # nothing recorded yet: take the suggestion and persist it
            actual_edd, actual_ets = suggested_edd, suggested_ets
            recorded = set()
            if actual_edd:
                recorded.add("escape_dos_devices")
            if actual_ets:
                recorded.add("escape_trailing_spaces")
            repo.set_special_escapes(recorded)
        else:
            # the recorded escapes win over the current suggestion
            actual_edd = ("escape_dos_devices" in recorded)
            actual_ets = ("escape_trailing_spaces" in recorded)

            if actual_edd != suggested_edd and not suggested_edd:
                log.Log(
                    "System no longer needs DOS devices to be escaped, "
                    "but we will retain for backwards compatibility",
                    log.WARNING)
            if actual_ets != suggested_ets and not suggested_ets:
                log.Log(
                    "System no longer needs trailing spaces or periods to be "
                    "escaped, but we will retain for backwards compatibility",
                    log.WARNING)

        Globals.set_all('escape_dos_devices', actual_edd)
        log.Log("Backup: escape_dos_devices = {dd}".format(dd=actual_edd),
                log.INFO)

        Globals.set_all('escape_trailing_spaces', actual_ets)
        log.Log("Backup: escape_trailing_spaces = {ts}".format(ts=actual_ets),
                log.INFO)
Exemplo n.º 30
0
 def _check_hash(self, copy_report, diff_rorp):
     """Verify the digest in copy_report against the one stored in diff_rorp"""
     if not diff_rorp.isreg():
         return
     if not diff_rorp.has_sha1():
         log.Log("Hash for file {fi} missing, cannot check".format(
             fi=diff_rorp), log.WARNING)
         return
     recorded_hash = diff_rorp.get_sha1()
     if copy_report.sha1_digest == recorded_hash:
         log.Log("Hash {ha} of file {fi} verified".format(
             ha=recorded_hash, fi=diff_rorp), log.DEBUG)
     else:
         log.Log("Calculated hash {ch} of file {fi} "
                 "doesn't match recorded hash {rh}".format(
                     ch=copy_report.sha1_digest, fi=diff_rorp,
                     rh=recorded_hash), log.WARNING)