Example #1
    def recreate_attr(self, regress_time):
        """
        Recreate the mirror_metadata snapshot at regress_time by patching

        We write to a tempfile first.  Otherwise, in case of a crash, it
        would seem we would have an intact snapshot and partial diff, not
        the reverse.
        """
        temprp = [self.data_dir.get_temp_rpath()]

        def callback(rp):
            temprp[0] = rp

        # Before API 201, metafiles couldn't be left uncompressed
        writer = self._meta_main_class(
            temprp[0], 'wb',
            compress=(Globals.compression
                      or Globals.get_api_version() < 201),
            check_path=0, callback=callback)
        for rorp in self._get_meta_main_at_time(regress_time, None):
            writer.write_object(rorp)
        writer.close()

        finalrp = self.data_dir.append(
            b"mirror_metadata.%b.snapshot.gz" % Time.timetobytes(regress_time))
        assert not finalrp.lstat(), (
            "Metadata path '{mrp}' shouldn't exist.".format(mrp=finalrp))
        rpath.rename(temprp[0], finalrp)
        if Globals.fsync_directories:
            self.data_dir.fsync()
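
The docstring's crash-safety argument is the standard write-to-temp-then-rename pattern. Below is a minimal, hedged sketch of the same idea using only the Python standard library; the helper name and the use of os.replace are illustrative and not part of rdiff-backup's rpath API:

import os
import tempfile

def write_snapshot_atomically(data: bytes, final_path: str) -> None:
    """Illustrative only: write data to a temp file, then rename into place."""
    dir_name = os.path.dirname(final_path) or "."
    fd, tmp_path = tempfile.mkstemp(dir=dir_name)
    try:
        with os.fdopen(fd, "wb") as tmp_file:
            tmp_file.write(data)
            tmp_file.flush()
            os.fsync(tmp_file.fileno())   # make sure the bytes hit the disk
        os.replace(tmp_path, final_path)  # atomic rename: old or new, never half
    except BaseException:
        os.unlink(tmp_path)
        raise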
Example #2
    def setup(self):
        # in setup we return as soon as we detect an issue to avoid changing
        # too much
        return_code = super().setup()
        if return_code != 0:
            return return_code

        return_code = self.repo.setup()
        if return_code != 0:
            return return_code

        # set the filesystem properties of the repository
        self.repo.base_dir.conn.fs_abilities.single_set_globals(
            self.repo.base_dir, 1)  # read_only=True
        self.repo.init_quoting(self.values.chars_to_quote)

        if Globals.get_api_version() < 201:  # compat200
            self.mirror_rpath = self.repo.base_dir.new_index(
                self.repo.restore_index)
        self.inc_rpath = self.repo.data_dir.append_path(
            b'increments', self.repo.restore_index)

        # FIXME move method _get_parsed_time to Repo and remove inc_rpath?
        self.action_time = self._get_parsed_time(self.values.at,
                                                 ref_rp=self.inc_rpath)
        if self.action_time is None:
            return 1

        return 0  # all is good
Example #3
    def setup(self):
        # in setup we return as soon as we detect an issue to avoid changing
        # too much
        return_code = super().setup()
        if return_code != 0:
            return return_code

        return_code = self.repo.setup()
        if return_code != 0:
            return return_code

        # set the filesystem properties of the repository
        if Globals.get_api_version() < 201:  # compat200
            self.repo.base_dir.conn.fs_abilities.single_set_globals(
                self.repo.base_dir, 0)  # read_only=False
            self.repo.setup_quoting()

        # TODO validate how much of the following lines and methods
        # should go into the directory/repository modules
        if log.Log.verbosity > 0:
            try:  # the source repository must be writable
                log.Log.open_logfile(
                    self.repo.data_dir.append(self.name + ".log"))
            except (log.LoggerError, Security.Violation) as exc:
                log.Log(
                    "Unable to open logfile due to exception '{ex}'".format(
                        ex=exc), log.ERROR)
                return 1

        return 0
Example #4
    def run(self):
        # do regress the target directory if necessary
        if self._operate_regress():
            # regress was necessary and failed
            return 1
        previous_time = self.repo.get_mirror_time(refresh=True)
        if previous_time < 0 or previous_time >= Time.getcurtime():
            log.Log(
                "Either there is more than one current_mirror or "
                "the last backup is not in the past. Aborting.", log.ERROR)
            return 1
        if Globals.get_api_version() < 201:  # compat200
            if previous_time:
                Time.setprevtime_compat200(previous_time)
                self.repo.base_dir.conn.Main.backup_touch_curmirror_local(
                    self.dir.base_dir, self.repo.base_dir)
                backup.mirror_and_increment_compat200(self.dir.base_dir,
                                                      self.repo.base_dir,
                                                      self.repo.incs_dir)
                self.repo.base_dir.conn.Main.backup_remove_curmirror_local()
            else:
                backup.mirror_compat200(self.dir.base_dir, self.repo.base_dir)
                self.repo.base_dir.conn.Main.backup_touch_curmirror_local(
                    self.dir.base_dir, self.repo.base_dir)
            self.repo.base_dir.conn.Main.backup_close_statistics(time.time())
        else:  # API 201 and higher
            self._operate_backup(previous_time)

        return 0
Example #5
    def setup(self):
        # in setup we return as soon as we detect an issue to avoid changing
        # too much
        return_code = super().setup()
        if return_code & Globals.RET_CODE_ERR:
            return return_code

        return_code = self.repo.setup()
        if return_code & Globals.RET_CODE_ERR:
            return return_code

        # set the filesystem properties of the repository
        if Globals.get_api_version() < 201:  # compat200
            self.repo.base_dir.conn.fs_abilities.single_set_globals(
                self.repo.base_dir, 1)  # read_only=True
            self.repo.setup_quoting()

        if self.values.entity == "files":
            if self.values.changed_since:
                self.action_time = self._get_parsed_time(
                    self.values.changed_since, ref_rp=self.repo.ref_inc)
            elif self.values.at:
                self.action_time = self._get_parsed_time(
                    self.values.at, ref_rp=self.repo.ref_inc)
            if self.action_time is None:
                return Globals.RET_CODE_ERR

        return Globals.RET_CODE_OK
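
Unlike the earlier setup() examples that return plain 0/1, this variant tests return codes with `& Globals.RET_CODE_ERR`, which suggests bit-flag style codes that can carry warnings and errors at the same time. A hedged sketch of that idea with assumed values; the real constants live in Globals and may differ:

# Assumed flag values for illustration only.
RET_CODE_OK = 0
RET_CODE_WARN = 1
RET_CODE_ERR = 2

def run_steps(steps):
    """Accumulate bit-flag return codes and stop on the first real error."""
    ret_code = RET_CODE_OK
    for step in steps:
        ret_code |= step()           # warnings stay visible in the final code
        if ret_code & RET_CODE_ERR:  # an error aborts, a warning does not
            break
    return ret_code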
Example #6
    def setup_quoting(self):
        """
        Set QuotedRPath versions of important RPaths if chars_to_quote is set.

        Return True if quoting needed to be done, False otherwise.
        """
        # FIXME the problem is that the chars_to_quote can come from the command
        # line but can also be a value coming from the repository itself,
        # set globally by the fs_abilities.xxx_set_globals functions.
        if not Globals.chars_to_quote:
            return False

        if Globals.get_api_version() < 201:  # compat200
            FilenameMapping.set_init_quote_vals()
            self.base_dir = FilenameMapping.get_quotedrpath(self.base_dir)
            self.data_dir = FilenameMapping.get_quotedrpath(self.data_dir)
            self.incs_dir = FilenameMapping.get_quotedrpath(self.incs_dir)
        else:
            self.base_dir = map_filenames.get_quotedrpath(self.base_dir)
            self.data_dir = map_filenames.get_quotedrpath(self.data_dir)
            self.incs_dir = map_filenames.get_quotedrpath(self.incs_dir)

        Globals.set_all('rbdir', self.data_dir)  # compat200

        return True
Example #7
    def run(self):

        # This is more a check than a part of run, but because backup does
        # the regress in the run section, we also do the check here...
        if self.source.needs_regress():
            # source could be read-only, so we don't try to regress it
            log.Log(
                "Previous backup to {rp} seems to have failed. "
                "Use rdiff-backup to 'regress' first the failed backup, "
                "then try again to restore".format(rp=self.source.base_dir),
                log.ERROR)
            return 1
        try:
            if Globals.get_api_version() < 201:  # compat200
                restore.Restore(
                    self.source.base_dir.new_index(self.source.restore_index),
                    self.inc_rpath, self.target.base_dir, self.action_time)
            else:
                self._operate_restore()
        except OSError as exc:
            log.Log(
                "Could not complete restore due to exception '{ex}'".format(
                    ex=exc), log.ERROR)
            return 1
        else:
            log.Log("Restore successfully finished", log.INFO)
            return 0
Example #8
def get_cmd_pairs(locations,
                  remote_schema=None,
                  ssh_compression=True,
                  remote_tempdir=None,
                  term_verbosity=None):
    """Map the given file descriptions into command pairs

    Command pairs are tuples cmdpair of length 2: cmdpair[0] is
    None iff it describes a local path, and cmdpair[1] is the path.

    """

    # This is the schema that determines how rdiff-backup will open a
    # pipe to the remote system.  If the file is given as A::B, the host
    # placeholder {h} (or %s) will be substituted with A in the schema.
    if remote_schema:
        cmd_schema = remote_schema
    else:
        if ssh_compression:
            cmd_schema = b"ssh -C {h} rdiff-backup"
        else:
            cmd_schema = b"ssh {h} rdiff-backup"
        if remote_tempdir:
            cmd_schema += (b" --tempdir=" + remote_tempdir)
        # we could wait until the verbosity is "transferred" to the remote side
        # but we might miss important messages at the beginning of the process
        if term_verbosity is not None:
            cmd_schema += b" --terminal-verbosity %d" % term_verbosity
        if Globals.get_api_version() > 200:  # compat200
            cmd_schema += b" server"
        else:
            cmd_schema += b" --server"

    if not locations:
        return []
    desc_triples = list(map(parse_location, locations))

    # was any error string returned as the third element of a triple?
    for err in [triple[2] for triple in desc_triples if triple[2]]:
        raise SetConnectionsException(err)

    if remote_schema and not [x for x in desc_triples if x[0]]:
        # remote schema defined but no remote location found
        log.Log("Remote schema option ignored - no remote file descriptions",
                log.WARNING)

    # strip the error field from the triples to get pairs
    desc_pairs = [triple[:2] for triple in desc_triples]

    def desc2cmd_pairs(desc_pair):
        """Return pair (remote_cmd, filename) from desc_pair"""
        host_info, filename = desc_pair
        if not host_info:
            return (None, filename)
        else:
            return (_fill_schema(host_info, cmd_schema), filename)

    cmd_pairs = list(map(desc2cmd_pairs, desc_pairs))

    return cmd_pairs
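
A hypothetical call, to show the shape of the result described in the docstring; the exact remote command depends on _fill_schema() and on the API version, so the byte strings below are only indicative:

pairs = get_cmd_pairs([b"backuphost::/srv/repo", b"/home/me/data"],
                      ssh_compression=False, term_verbosity=3)
# roughly:
# [(b"ssh backuphost rdiff-backup --terminal-verbosity 3 server", b"/srv/repo"),
#  (None, b"/home/me/data")]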
Example #9
    def _operate_regress(self, try_regress=True):
        """
        Check the given repository and regress it if necessary
        """
        if Globals.get_api_version() < 201:  # compat200
            if self.repo.needs_regress_compat200():
                if not try_regress:
                    return 1
                log.Log("Previous backup seems to have failed, regressing "
                        "destination now", log.WARNING)
                try:
                    self.repo.base_dir.conn.regress.Regress(self.repo.base_dir)
                    return 0
                except Security.Violation:
                    log.Log(
                        "Security violation while attempting to regress "
                        "destination, perhaps due to --restrict-read-only or "
                        "--restrict-update-only", log.ERROR)
                    return 1
            else:
                log.Log("Given repository doesn't need to be regressed",
                        log.NOTE)
                return 0  # all is good
        else:
            if self.repo.needs_regress():
                if not try_regress:
                    return 1
                log.Log("Previous backup seems to have failed, regressing "
                        "destination now", log.WARNING)
                return self.repo.regress()
            else:
                log.Log("Given repository doesn't need to be regressed",
                        log.NOTE)
                return 0  # all is good
Example #10
    def set_select(self, select_opts, select_data):
        """
        Set the selection and selection data on the directory

        Accepts a tuple of two lists:
        * one of selection tuples made of (selection method, parameter)
        * and one of the contents of the selection files

        Saves the selections list and makes it ready for usage on the source
        side over its connection.
        """

        # FIXME not sure we couldn't support symbolic links nowadays on Windows
        # knowing that it would require specific handling when reading the link:
        #   File "rdiff_backup\rpath.py", line 771, in symlink
        #   TypeError: symlink: src should be string, bytes or os.PathLike, not NoneType
        # I suspect that not all users can read symlinks with os.readlink
        if (self.base_dir.conn.os.name == 'nt'
                and ("--exclude-symbolic-links", None) not in select_opts):
            log.Log("Symbolic links excluded by default on Windows", log.NOTE)
            select_opts.insert(0, ("--exclude-symbolic-links", None))
        if Globals.get_api_version() < 201:  # compat200
            self.base_dir.conn.backup.SourceStruct.set_source_select(
                self.base_dir, select_opts, *list(map(io.BytesIO,
                                                      select_data)))
        else:  # FIXME we're retransforming bytes into a file pointer
            self._shadow.set_select(self.base_dir, select_opts,
                                    *list(map(io.BytesIO, select_data)))
Example #11
    def init_owners_mapping(self,
                            users_map=None,
                            groups_map=None,
                            preserve_num_ids=False):
        """
        initialize mapping of users and groups (aka owners)

        Shadow function for _repo_shadow.RepoShadow/_dir_shadow.DirShadow

        users_map and groups_map are file descriptors opened in text mode
        """
        if users_map is not None:
            users_map = users_map.read()
        if groups_map is not None:
            groups_map = groups_map.read()
        if Globals.get_api_version() < 201:  # compat200
            self.base_dir.conn.user_group.init_user_mapping(
                users_map, preserve_num_ids)
            self.base_dir.conn.user_group.init_group_mapping(
                groups_map, preserve_num_ids)
        else:
            self._shadow.init_owners_mapping(users_map, groups_map,
                                             preserve_num_ids)

        return 0  # all is good
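
A hypothetical call site, since the docstring requires text-mode file objects; `location` stands for an instance of the class above, and the file names and the usual "old_name:new_name" line format are assumptions for illustration:

with open("users.map", "r") as users_f, open("groups.map", "r") as groups_f:
    location.init_owners_mapping(users_map=users_f,
                                 groups_map=groups_f,
                                 preserve_num_ids=False)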
Example #12
    def setup(self, src_repo, owners_map=None):
        ret_code = super().setup()
        if ret_code != 0:
            return ret_code

        if Globals.get_api_version() >= 201:  # compat200
            if self.base_dir.conn is Globals.local_connection:
                # should be more efficient than going through the connection
                from rdiffbackup.locations import _dir_shadow
                self._shadow = _dir_shadow.WriteDirShadow
            else:
                self._shadow = self.base_dir.conn._dir_shadow.WriteDirShadow
            self.fs_abilities = self._shadow.get_fs_abilities(self.base_dir)
            if not self.fs_abilities:
                return 1  # something was wrong
            else:
                log.Log(
                    "--- Write directory file system capabilities ---\n" +
                    str(self.fs_abilities), log.INFO)

            return fs_abilities.Repo2DirSetGlobals(src_repo, self)()

        if owners_map is not None:
            ret_code = self.init_owners_mapping(**owners_map)
            if ret_code != 0:
                return ret_code

        return 0  # all is good
Example #13
    def setup(self):
        # in setup we return as soon as we detect an issue to avoid changing
        # too much
        return_code = super().setup()
        if return_code & Globals.RET_CODE_ERR:
            return return_code

        return_code = self.dir.setup()
        if return_code & Globals.RET_CODE_ERR:
            return return_code

        return_code = self.repo.setup(self.dir)
        if return_code & Globals.RET_CODE_ERR:
            return return_code

        # set the filesystem properties of the repository
        if Globals.get_api_version() < 201:  # compat200
            self.repo.base_dir.conn.fs_abilities.single_set_globals(
                self.repo.base_dir, 1)  # read_only=True
            self.repo.setup_quoting()

        (select_opts, select_data) = selection.get_prepared_selections(
            self.values.selections)
        self.dir.set_select(select_opts, select_data)

        # FIXME move method _get_parsed_time to Repo?
        self.action_time = self._get_parsed_time(self.values.at,
                                                 ref_rp=self.repo.ref_inc)
        if self.action_time is None:
            return Globals.RET_CODE_ERR

        return Globals.RET_CODE_OK
Example #14
    def _writer_helper(self, typestr, time, meta_class, force=False):
        """
        Return a writer object, or None if the meta class isn't active.

        For testing purposes, the force option allows skipping the activity
        validation.
        """
        if time is None:
            timestr = Time.getcurtimestr()
        else:
            timestr = Time.timetobytes(time)
        triple = map(os.fsencode, (meta_class.get_prefix(), timestr, typestr))
        filename = b'.'.join(triple)
        rp = self.data_dir.append(filename)
        assert not rp.lstat(), "File '{rp}' shouldn't exist.".format(rp=rp)
        assert rp.isincfile(), (
            "Path '{irp}' must be an increment file.".format(irp=rp))
        if meta_class.is_active() or force:
            # Before API 201, metafiles couldn't be left uncompressed
            return meta_class(rp, 'w',
                              compress=(Globals.compression
                                        or Globals.get_api_version() < 201),
                              callback=self._add_incrp)
        else:
            return None
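
A standalone illustration of the increment file name assembled above; the prefix, timestamp string and type are placeholder values rather than the output of a real repository:

import os

prefix = "mirror_metadata"
timestr = "2024-01-01T12:00:00+00:00"
typestr = "snapshot"
filename = b'.'.join(map(os.fsencode, (prefix, timestr, typestr)))
# -> b'mirror_metadata.2024-01-01T12:00:00+00:00.snapshot'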
Example #15
    def _list_files_at_time(self):
        """List files in archive under rp that are present at restoretime"""
        if Globals.get_api_version() < 201:
            rorp_iter = self.repo.base_dir.conn.restore.ListAtTime(
                self.mirror_rpath, self.inc_rpath, self.action_time)
        else:
            rorp_iter = self.repo.list_files_at_time(self.action_time)
        for rorp in rorp_iter:
            print(str(rorp))
Example #16
    def test_default_actual_api(self):
        """validate that the default version is the actual one or the one explicitly set"""
        output = subprocess.check_output([RBBin, b'info'])
        api_version = yaml.safe_load(output)['exec']['api_version']
        self.assertEqual(Globals.get_api_version(), api_version['default'])
        api_param = os.fsencode(str(api_version['max']))
        output = subprocess.check_output(
            [RBBin, b'--api-version', api_param, b'info'])
        out_info = yaml.safe_load(output)
        self.assertEqual(out_info['exec']['api_version']['actual'],
                         api_version['max'])
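
The assertions only touch a few keys of the `rdiff-backup info` output; the structure they imply, written as a Python dict with illustrative values, is roughly:

# Keys exercised by the test above; the numbers are illustrative only.
out_info = {
    "exec": {
        "api_version": {
            "default": 200,  # compared against Globals.get_api_version()
            "max": 201,      # passed back via --api-version
            "actual": 201,   # expected to equal "max" in the second call
        },
    },
}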
Example #17
    def _list_files_changed_since(self):
        """List all the files under rp that have changed since restoretime"""
        if Globals.get_api_version() < 201:
            rorp_iter = self.repo.base_dir.conn.restore.ListChangedSince(
                self.mirror_rpath, self.inc_rpath, self.action_time)
        else:
            rorp_iter = self.repo.list_files_changed_since(self.action_time)
        for rorp in rorp_iter:
            # This is a hack, see restore.ListChangedSince for rationale
            print(str(rorp))
Example #18
    def setup(self, src_dir=None, owners_map=None):
        if self.must_be_writable and not self._create():
            return 1

        if (self.can_be_sub_path
                and self.base_dir.conn is Globals.local_connection):
            Security.reset_restrict_path(self.base_dir)

        Globals.set_all('rbdir', self.data_dir)  # compat200

        if Globals.get_api_version() >= 201:  # compat200
            if self.base_dir.conn is Globals.local_connection:
                # should be more efficient than going through the connection
                from rdiffbackup.locations import _repo_shadow
                self._shadow = _repo_shadow.RepoShadow
            else:
                self._shadow = self.base_dir.conn._repo_shadow.RepoShadow
            if self.must_be_writable:
                self.fs_abilities = self._shadow.get_fs_abilities_readwrite(
                    self.base_dir)
            else:
                self.fs_abilities = self._shadow.get_fs_abilities_readonly(
                    self.base_dir)
            if not self.fs_abilities:
                return 1  # something was wrong
            else:
                log.Log(
                    "--- Repository file system capabilities ---\n" +
                    str(self.fs_abilities), log.INFO)

            if src_dir is None:
                self.remote_transfer = None  # just in case
                ret_code = fs_abilities.SingleRepoSetGlobals(self)()
                if ret_code != 0:
                    return ret_code
            else:
                # FIXME this shouldn't be necessary, and the setting of variables
                # across the connection should happen through the shadow
                Globals.set_all("backup_writer", self.base_dir.conn)
                self.base_dir.conn.Globals.set_local("isbackup_writer", True)
                # this is the new way, more dedicated but not sufficient yet
                self.remote_transfer = (src_dir.base_dir.conn
                                        is not self.base_dir.conn)
                ret_code = fs_abilities.Dir2RepoSetGlobals(src_dir, self)()
                if ret_code != 0:
                    return ret_code
            self.setup_quoting()
            self.setup_paths()

        if owners_map is not None:
            ret_code = self.init_owners_mapping(**owners_map)
            if ret_code != 0:
                return ret_code

        return 0  # all is good
Example #19
def get_meta_object(*params):
    """
    Return a Metadata object corresponding to the current type

    Necessary to guarantee compatibility between rdiff-backup 2.0 and 2.1+
    """
    if Globals.get_api_version() < 201:  # compat200
        from rdiff_backup import eas_acls
        return eas_acls.AccessControlLists(*params)
    else:
        return AccessControlLists(*params)
Example #20
def set_current_time(reftime=None):
    """
    Sets the current time in curtime and curtimestr on all systems
    """
    if reftime is None:
        reftime = time.time()
    if Globals.get_api_version() < 201:  # compat200
        for conn in Globals.connections:
            conn.Time.setcurtime_local(int(reftime))
    else:
        Globals.set_all("current_time", reftime)
        Globals.set_all("current_time_string", timetostring(reftime))
Example #21
    def setup(self):
        # in setup we return as soon as we detect an issue to avoid changing
        # too much
        return_code = super().setup()
        if return_code != 0:
            return return_code

        return_code = self._set_no_compression_regexp()
        if return_code != 0:
            return return_code

        return_code = self.dir.setup()
        if return_code != 0:
            return return_code

        owners_map = {
            "users_map": self.values.user_mapping_file,
            "groups_map": self.values.group_mapping_file,
            "preserve_num_ids": self.values.preserve_numerical_ids
        }
        return_code = self.repo.setup(self.dir, owners_map=owners_map)
        if return_code != 0:
            return return_code

        # TODO validate how much of the following lines and methods
        # should go into the directory/repository modules
        if Globals.get_api_version() < 201:  # compat200
            SetConnections.BackupInitConnections(self.dir.base_dir.conn,
                                                 self.repo.base_dir.conn)
            self.repo.base_dir.conn.fs_abilities.backup_set_globals(
                self.dir.base_dir, self.values.force)
            self.repo.setup_quoting()

        previous_time = self.repo.get_mirror_time()
        if previous_time >= Time.getcurtime():
            log.Log("The last backup is not in the past. Aborting.", log.ERROR)
            return 1
        if log.Log.verbosity > 0:
            try:  # the target repository must be writable
                log.Log.open_logfile(self.repo.data_dir.append("backup.log"))
            except (log.LoggerError, Security.Violation) as exc:
                log.Log("Unable to open logfile due to '{ex}'".format(ex=exc),
                        log.ERROR)
                return 1
        log.ErrorLog.open(Time.getcurtimestr(),
                          compress=self.values.compression)

        (select_opts, select_data) = selection.get_prepared_selections(
            self.values.selections)
        self.dir.set_select(select_opts, select_data)
        self._warn_if_infinite_recursion(self.dir.base_dir, self.repo.base_dir)

        return 0
Example #22
    def _operate_regress(self, try_regress=True,
                         noticeable=False, force=False):
        """
        Check the given repository and regress it if necessary

        Parameter force enforces a regress even if the repo doesn't need it.
        """
        if noticeable:
            regress_verbosity = log.NOTE
        else:
            regress_verbosity = log.INFO
        if Globals.get_api_version() < 201:  # compat200
            if self.repo.needs_regress_compat200():
                if not try_regress:
                    return Globals.RET_CODE_ERR
                log.Log("Previous backup seems to have failed, regressing "
                        "destination now", log.WARNING)
                try:
                    self.repo.base_dir.conn.regress.Regress(self.repo.base_dir)
                    return Globals.RET_CODE_OK
                except Security.Violation:
                    log.Log(
                        "Security violation while attempting to regress "
                        "destination, perhaps due to --restrict-read-only or "
                        "--restrict-update-only", log.ERROR)
                    return Globals.RET_CODE_ERR
            else:
                log.Log("Given repository doesn't need to be regressed",
                        regress_verbosity)
                return Globals.RET_CODE_OK
        else:
            if self.repo.needs_regress():
                if not try_regress:
                    return Globals.RET_CODE_ERR
                log.Log("Previous backup seems to have failed, regressing "
                        "destination now", log.WARNING)
                return self.repo.regress()
            elif force:
                if self.repo.force_regress():
                    log.Log("Given repository doesn't need to be regressed, "
                            "but enforcing regression", log.WARNING)
                    return self.repo.regress()
                else:
                    log.Log("Given repository doesn't need and can't be "
                            "regressed even if forced", log.WARNING)
                    return Globals.RET_CODE_WARN
            else:
                log.Log("Given repository doesn't need to be regressed",
                        regress_verbosity)
                return Globals.RET_CODE_OK
Example #23
    def setup(self):
        ret_code = super().setup()
        if ret_code != 0:
            return ret_code

        if Globals.get_api_version() >= 201:  # compat200
            if self.base_dir.conn is Globals.local_connection:
                # should be more efficient than going through the connection
                from rdiffbackup.locations import _dir_shadow
                self._shadow = _dir_shadow.ShadowReadDir
            else:
                self._shadow = self.base_dir.conn._dir_shadow.ShadowReadDir

        return 0  # all is good
Example #24
    def get_mirror_time(self, must_exist=False, refresh=False):
        """
        Shadow function for RepoShadow.get_mirror_time
        """
        if Globals.get_api_version() < 201:  # compat200
            incbase = self.data_dir.append_path(b"current_mirror")
            mirror_rps = incbase.get_incfiles_list()
            if mirror_rps:
                if len(mirror_rps) == 1:
                    return mirror_rps[0].getinctime()
                else:  # there is a failed backup and 2+ current_mirror files
                    return -1
            else:  # it's the first backup
                return 0  # is always in the past
        else:
            return self._shadow.get_mirror_time(must_exist, refresh)
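
A sketch of how callers read the result, following the checks made in the backup-oriented examples above; `repo` stands for an instance of the class above:

previous_time = repo.get_mirror_time(refresh=True)
if previous_time < 0:
    pass  # more than one current_mirror marker: a previous backup failed
elif previous_time == 0:
    pass  # no previous backup yet, so any reference time is "in the past"
else:
    pass  # timestamp of the last backup, expected to be before the current time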
Example #25
    def run(self):
        """
        Check the given repository and remove old increments
        """

        action_time = self._get_parsed_time(self.values.older_than)
        if action_time is None:
            return 1
        elif action_time < 0:  # no increment is old enough
            return 0
        if Globals.get_api_version() < 201:
            manage.delete_earlier_than(self.repo.base_dir, action_time)
        else:
            self.repo.remove_increments_older_than(action_time)

        return 0
Example #26
    def _list_increments(self):
        """
        Print out a summary of the increments and their times
        """
        incs = self.repo.get_increments()
        if self.values.parsable_output:
            if Globals.get_api_version() < 201:
                for inc in incs:
                    print("{ti} {it}".format(ti=inc["time"], it=inc["type"]))
            else:
                print(yaml.safe_dump(incs,
                                     explicit_start=True, explicit_end=True))
        else:
            print("Found {ni} increments:".format(ni=len(incs) - 1))
            for inc in incs[:-1]:
                print("    {ib}   {ti}".format(
                    ib=inc["base"], ti=Time.timetopretty(inc["time"])))
            print("Current mirror: {ti}".format(
                ti=Time.timetopretty(incs[-1]["time"])))  # time of the mirror
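
Both branches assume the same shape for `incs`: a list of dicts with at least "base", "time" and "type" keys whose last entry describes the current mirror itself, which is why the summary reports len(incs) - 1 increments. A placeholder illustration, not real repository data:

incs = [
    {"base": "<oldest increment>", "time": 1700000000, "type": "<increment type>"},
    {"base": "<newer increment>", "time": 1703000000, "type": "<increment type>"},
    {"base": "<mirror>", "time": 1706745600, "type": "<mirror type>"},
]
# non-parsable output: "Found 2 increments:", one line per increment,
# then "Current mirror: <pretty time of the last entry>"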
Example #27
    def setup(self):
        if self.must_be_writable and not self._create():
            return 1

        if (self.can_be_sub_path
                and self.base_dir.conn is Globals.local_connection):
            Security.reset_restrict_path(self.base_dir)

        SetConnections.UpdateGlobal('rbdir', self.data_dir)  # compat200

        if Globals.get_api_version() >= 201:  # compat200
            if self.base_dir.conn is Globals.local_connection:
                # should be more efficient than going through the connection
                from rdiffbackup.locations import _repo_shadow
                self._shadow = _repo_shadow.ShadowRepo
            else:
                self._shadow = self.base_dir.conn._repo_shadow.ShadowRepo

        return 0  # all is good
Example #28
    def _get_parsed_time(self, timestr, ref_rp=None):
        """
        Parse time string, potentially using the given remote path as reference

        Returns None if the time string couldn't be parsed, else the time in
        seconds.
        The reference remote path is used when the time string is expressed as
        a number of past backups.
        """
        try:
            if Globals.get_api_version() < 201:  # compat200
                return Time.genstrtotime(timestr, rp=ref_rp)
            else:
                sessions = self.repo.get_increment_times(ref_rp)
                return Time.genstrtotime(timestr, session_times=sessions)
        except Time.TimeException as exc:
            log.Log("Time string '{ts}' couldn't be parsed "
                    "due to '{ex}'".format(ts=timestr, ex=exc), log.ERROR)
            return None
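
A hedged usage sketch; `action` and `inc_rp` are placeholders for an action instance and a reference increment path. An absolute or interval time string parses on its own, while the "number of past backups" form mentioned in the docstring is the one that needs the reference:

seconds = action._get_parsed_time("10D")                 # e.g. ten days ago
seconds = action._get_parsed_time("3B", ref_rp=inc_rp)   # three backups back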
Example #29
    def setup(self):
        ret_code = super().setup()
        if ret_code != 0:
            return ret_code

        if Globals.get_api_version() >= 201:  # compat200
            if self.base_dir.conn is Globals.local_connection:
                # should be more efficient than going through the connection
                from rdiffbackup.locations import _dir_shadow
                self._shadow = _dir_shadow.ReadDirShadow
            else:
                self._shadow = self.base_dir.conn._dir_shadow.ReadDirShadow
            self.fs_abilities = self._shadow.get_fs_abilities(self.base_dir)
            if not self.fs_abilities:
                return 1  # something was wrong
            else:
                log.Log(
                    "--- Read directory file system capabilities ---\n" +
                    str(self.fs_abilities), log.INFO)

        return 0  # all is good
Example #30
    def set_select(self, select_opts, select_data, target_rp):
        """
        Set the selection and selection data on the repository

        Accepts a tuple of two lists:
        * one of selection tuples made of (selection method, parameter)
        * and one of the contents of the selection files
        And an rpath of the target directory to map the selection criteria.

        Saves the selections list and makes it ready for usage on the source
        side over its connection.
        """

        # FIXME we're retransforming bytes into a file pointer
        if select_opts:
            if Globals.get_api_version() >= 201:  # compat200
                self._shadow.set_select(
                    target_rp, select_opts, *list(map(io.BytesIO, select_data)))
            else:
                self.base_dir.conn.restore.MirrorStruct.set_mirror_select(
                    target_rp, select_opts, *list(map(io.BytesIO, select_data)))
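
Both set_select variants, here and in the directory example above, ship the raw content of the selection files as bytes and wrap each element in io.BytesIO before handing it over. A standalone illustration of that wrapping, with made-up file content:

import io

select_data = [b"+ /home/me/projects\n- /home/me/.cache\n"]
file_objs = list(map(io.BytesIO, select_data))   # bytes -> readable file objects
print(file_objs[0].readline())                   # b'+ /home/me/projects\n'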