Example #1
def patch_diff_tarfile(base_path, diff_tarfile, restrict_index=()):
    """Patch given Path object using delta tarfile (as in tarfile.TarFile)

    If restrict_index is set, ignore any deltas in diff_tarfile that
    don't start with restrict_index.

    """
    if base_path.exists():
        path_iter = selection.Select(base_path).set_iter()
    else:
        path_iter = empty_iter()  # probably untarring full backup

    diff_path_iter = difftar2path_iter(diff_tarfile)
    if restrict_index:
        diff_path_iter = filter_path_iter(diff_path_iter, restrict_index)
    collated = diffdir.collate2iters(path_iter, diff_path_iter)

    ITR = IterTreeReducer(PathPatcher, [base_path])
    for basis_path, diff_ropath in collated:
        if basis_path:
            log.Info(
                _("Patching %s") % (util.ufn(basis_path.get_relative_path())),
                log.InfoCode.patch_file_patching,
                util.escape(basis_path.get_relative_path()))
            ITR(basis_path.index, basis_path, diff_ropath)
        else:
            log.Info(
                _("Patching %s") % (util.ufn(diff_ropath.get_relative_path())),
                log.InfoCode.patch_file_patching,
                util.escape(diff_ropath.get_relative_path()))
            ITR(diff_ropath.index, basis_path, diff_ropath)
    ITR.Finish()
    base_path.setdata()
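For context, a minimal sketch of the collation idea that diffdir.collate2iters supplies above: walk two index-sorted streams in lockstep and yield (basis, diff) pairs, with None on whichever side has no entry. The helper below is illustrative, not duplicity's implementation.

def collate_sorted(it1, it2):
    # Pair up two sorted iterables, emitting (x, None) or (None, y)
    # when an element is present on only one side.
    it1, it2 = iter(it1), iter(it2)
    a, b = next(it1, None), next(it2, None)
    while a is not None or b is not None:
        if b is None or (a is not None and a < b):
            yield a, None
            a = next(it1, None)
        elif a is None or b < a:
            yield None, b
            b = next(it2, None)
        else:
            yield a, b
            a, b = next(it1, None), next(it2, None)

# list(collate_sorted("ace", "bce")) ==
# [('a', None), (None, 'b'), ('c', 'c'), ('e', 'e')]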
Example #2
 def delete(self):
     """
     Remove all files in set, both local and remote
     """
     rfn = self.get_filenames()
     rfn.reverse()
     try:
         self.backend.delete(rfn)
     except Exception:
         log.Debug(
             _("BackupSet.delete: missing %s") % [util.ufn(f) for f in rfn])
     for lfn in globals.archive_dir.listdir():
         pr = file_naming.parse(lfn)
         if (pr and pr.time == self.time
                 and pr.start_time == self.start_time
                 and pr.end_time == self.end_time):
             try:
                 globals.archive_dir.append(lfn).delete()
             except Exception:
                 log.Debug(
                     _("BackupSet.delete: missing %s") % util.ufn(lfn))
     util.release_lockfile()
Example #3
def process_local_dir(action, local_pathname):
    """Check local directory, set globals.local_path"""
    local_path = path.Path(path.Path(local_pathname).get_canonical())
    if action == "restore":
        if (local_path.exists() and not local_path.isemptydir()) and not globals.force:
            log.FatalError(
                _("Restore destination directory %s already " "exists.\nWill not overwrite.")
                % (util.ufn(local_path.name),),
                log.ErrorCode.restore_dir_exists,
            )
    elif action == "verify":
        if not local_path.exists():
            log.FatalError(
                _("Verify directory %s does not exist") % (util.ufn(local_path.name),),
                log.ErrorCode.verify_dir_doesnt_exist,
            )
    else:
        assert action == "full" or action == "inc"
        if not local_path.exists():
            log.FatalError(
                _("Backup source directory %s does not exist.") % (util.ufn(local_path.name),),
                log.ErrorCode.backup_dir_doesnt_exist,
            )

    globals.local_path = local_path
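The same destination check can be reproduced outside duplicity; a rough pathlib sketch, where check_restore_dest is a hypothetical name and an exception stands in for log.FatalError:

from pathlib import Path

def check_restore_dest(dest, force=False):
    # Refuse to restore over an existing, non-empty destination unless forced.
    p = Path(dest).resolve()
    non_empty = p.exists() and (not p.is_dir() or any(p.iterdir()))
    if non_empty and not force:
        raise SystemExit("Restore destination %s already exists.\n"
                         "Will not overwrite." % p)
    return p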
Example #4
    def set_from_stat(self):
        """Set the value of self.type, self.mode from self.stat"""
        if not self.stat:
            self.type = None
            return

        st_mode = self.stat.st_mode
        if stat.S_ISREG(st_mode):
            self.type = "reg"
        elif stat.S_ISDIR(st_mode):
            self.type = "dir"
        elif stat.S_ISLNK(st_mode):
            self.type = "sym"
        elif stat.S_ISFIFO(st_mode):
            self.type = "fifo"
        elif stat.S_ISSOCK(st_mode):
            raise PathException(util.ufn(self.get_relative_path()) +
                                u" is a socket, unsupported by tar")
        elif stat.S_ISCHR(st_mode):
            self.type = "chr"
        elif stat.S_ISBLK(st_mode):
            self.type = "blk"
        else:
            raise PathException("Unknown type")

        self.mode = stat.S_IMODE(st_mode)
        if self.type in ("chr", "blk"):
            try:
                self.devnums = (os.major(self.stat.st_rdev),
                                os.minor(self.stat.st_rdev))
            except Exception:
                log.Warn(_("Warning: %s invalid devnums (0x%X), treating as (0, 0).")
                         % (util.ufn(self.get_relative_path()), self.stat.st_rdev))
                self.devnums = (0, 0)
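The mode-to-type mapping can be exercised standalone; a sketch mirroring the branches above (file_type is a hypothetical helper, and sockets raise as in set_from_stat):

import os
import stat

_TESTS = [(stat.S_ISREG, "reg"), (stat.S_ISDIR, "dir"), (stat.S_ISLNK, "sym"),
          (stat.S_ISFIFO, "fifo"), (stat.S_ISCHR, "chr"), (stat.S_ISBLK, "blk")]

def file_type(path):
    st_mode = os.lstat(path).st_mode  # lstat so symlinks report as "sym"
    if stat.S_ISSOCK(st_mode):
        raise ValueError("%s is a socket, unsupported by tar" % path)
    for test, name in _TESTS:
        if test(st_mode):
            return name
    raise ValueError("Unknown type")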
Example #5
 def delete(self):
     """
     Remove all files in set, both local and remote
     """
     rfn = self.get_filenames()
     rfn.reverse()
     try:
         self.backend.delete(rfn)
     except Exception:
         log.Debug(_("BackupSet.delete: missing %s") % [util.ufn(f) for f in rfn])
     if self.action not in ["collection-status", "replicate"]:
         local_filename_list = globals.archive_dir_path.listdir()
     else:
         local_filename_list = []
     for lfn in local_filename_list:
         pr = file_naming.parse(lfn)
         if (pr and pr.time == self.time and
                 pr.start_time == self.start_time and
                 pr.end_time == self.end_time):
             try:
                 globals.archive_dir_path.append(lfn).delete()
             except Exception:
                 log.Debug(_("BackupSet.delete: missing %s") % util.ufn(lfn))
     util.release_lockfile()
Example #6
    def cleanup(self):
        """
        Cleanup any files created in the temporary directory (that
        have not been forgotten), and clean up the temporary directory
        itself.

        On failure they are logged, but this method will not raise an
        exception.
        """
        self.__lock.acquire()
        try:
            if self.__dir is not None:
                for file in self.__pending.keys():
                    try:
                        log.Debug(_("Removing still remembered temporary file %s") % util.ufn(file))
                        util.ignore_missing(os.unlink, file)
                    except Exception:
                        log.Info(_("Cleanup of temporary file %s failed") % util.ufn(file))
                try:
                    os.rmdir(self.__dir)
                except Exception:
                    log.Warn(_("Cleanup of temporary directory %s failed - this is probably a bug.") % util.ufn(self.__dir))
                self.__pending = None
                self.__dir = None
        finally:
            self.__lock.release()
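The acquire/try/finally/release dance above is what a with statement provides. A compact, illustrative equivalent of the same guarded cleanup (TempRegistry and its attributes are stand-ins, not duplicity's classes):

import os
import threading

class TempRegistry:
    def __init__(self, directory):
        self._dir = directory
        self._pending = set()
        self._lock = threading.Lock()

    def cleanup(self):
        with self._lock:  # same effect as acquire()/finally: release()
            for f in list(self._pending):
                try:
                    os.unlink(f)
                except OSError:
                    pass  # log-and-continue, as in the original
            self._pending.clear()
            try:
                os.rmdir(self._dir)
            except OSError:
                pass  # directory not empty or already gone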
Example #7
def patch_diff_tarfile(base_path, diff_tarfile, restrict_index=()):
    """Patch given Path object using delta tarfile (as in tarfile.TarFile)

    If restrict_index is set, ignore any deltas in diff_tarfile that
    don't start with restrict_index.

    """
    if base_path.exists():
        path_iter = selection.Select(base_path).set_iter()
    else:
        path_iter = empty_iter()  # probably untarring full backup

    diff_path_iter = difftar2path_iter(diff_tarfile)
    if restrict_index:
        diff_path_iter = filter_path_iter(diff_path_iter, restrict_index)
    collated = diffdir.collate2iters(path_iter, diff_path_iter)

    ITR = IterTreeReducer(PathPatcher, [base_path])
    for basis_path, diff_ropath in collated:
        if basis_path:
            log.Info(_("Patching %s") % (util.ufn(basis_path.get_relative_path())),
                     log.InfoCode.patch_file_patching,
                     util.escape(basis_path.get_relative_path()))
            ITR(basis_path.index, basis_path, diff_ropath)
        else:
            log.Info(_("Patching %s") % (util.ufn(diff_ropath.get_relative_path())),
                     log.InfoCode.patch_file_patching,
                     util.escape(diff_ropath.get_relative_path()))
            ITR(diff_ropath.index, basis_path, diff_ropath)
    ITR.Finish()
    base_path.setdata()
Example #8
def process_local_dir(action, local_pathname):
    """Check local directory, set globals.local_path"""
    local_path = path.Path(path.Path(local_pathname).get_canonical())
    if action == "restore":
        if (local_path.exists()
                and not local_path.isemptydir()) and not globals.force:
            log.FatalError(
                _("Restore destination directory %s already "
                  "exists.\nWill not overwrite.") %
                (util.ufn(local_path.name), ),
                log.ErrorCode.restore_dir_exists)
    elif action == "verify":
        if not local_path.exists():
            log.FatalError(
                _("Verify directory %s does not exist") %
                (util.ufn(local_path.name), ),
                log.ErrorCode.verify_dir_doesnt_exist)
    else:
        assert action == "full" or action == "inc"
        if not local_path.exists():
            log.FatalError(
                _("Backup source directory %s does not exist.") %
                (util.ufn(local_path.name), ),
                log.ErrorCode.backup_dir_doesnt_exist)

    globals.local_path = local_path
Example #9
 def delete(self):
     """
     Remove all files in set, both local and remote
     """
     rfn = self.get_filenames()
     rfn.reverse()
     try:
         self.backend.delete(rfn)
     except Exception:
         log.Debug(_("BackupSet.delete: missing %s") % [util.ufn(f) for f in rfn])
     if self.action not in ["collection-status"]:
         local_filename_list = globals.archive_dir.listdir()
     else:
         local_filename_list = []
     for lfn in local_filename_list:
         pr = file_naming.parse(lfn)
         if (pr and pr.time == self.time and
                 pr.start_time == self.start_time and
                 pr.end_time == self.end_time):
             try:
                 globals.archive_dir.append(lfn).delete()
             except Exception:
                 log.Debug(_("BackupSet.delete: missing %s") % util.ufn(lfn))
     util.release_lockfile()
Example #10
 def mkdir(self):
     """Make directory(s) at specified path"""
     log.Info(_("Making directory %s") % util.ufn(self.name))
     try:
         os.makedirs(self.name)
     except OSError:
         if (not globals.force):
             raise PathException("Error creating directory %s" % util.ufn(self.name), 7)
     self.setdata()
Example #11
 def get_remote_manifest(self):
     """
     Return manifest by reading remote manifest on backend
     """
     assert self.remote_manifest_name
     try:
         manifest_buffer = self.backend.get_data(self.remote_manifest_name)
     except GPGError as message:
         log.Error(_("Error processing remote manifest (%s): %s") %
                   (util.ufn(self.remote_manifest_name), util.uexc(message)))
         return None
     log.Info(_("Processing remote manifest %s (%s)") % (
         util.ufn(self.remote_manifest_name), len(manifest_buffer)))
     return manifest.Manifest().from_string(manifest_buffer)
Example #12
 def add_to_sets(filename):
     """
     Try adding filename to existing sets, or make new one
     """
     for set in sets:
         if set.add_filename(filename):
             log.Debug(_("File %s is part of known set") % (util.ufn(filename),))
             break
     else:
         log.Debug(_("File %s is not part of a known set; creating new set") % (util.ufn(filename),))
         new_set = BackupSet(self.backend, self.action)
         if new_set.add_filename(filename):
             sets.append(new_set)
         else:
             log.Debug(_("Ignoring file (rejected by backup set) '%s'") % util.ufn(filename))
Example #13
 def add_to_sets(filename):
     """
     Try adding filename to existing sets, or make new one
     """
     for set in sets:
         if set.add_filename(filename):
             log.Debug(_("File %s is part of known set") % (util.ufn(filename),))
             break
     else:
         log.Debug(_("File %s is not part of a known set; creating new set") % (util.ufn(filename),))
         new_set = BackupSet(self.backend)
         if new_set.add_filename(filename):
             sets.append(new_set)
         else:
             log.Debug(_("Ignoring file (rejected by backup set) '%s'") % util.ufn(filename))
Example #14
    def mktemp(self):
        """
        Return a unique filename suitable for use for a temporary
        file. The file is not created.

        Subsequent calls to this method are guaranteed to never return
        the same filename again. As a result, it is safe to use under
        concurrent conditions.

        NOTE: mkstemp() is greatly preferred.
        """
        filename = None

        self.__lock.acquire()
        try:
            self.__tempcount = self.__tempcount + 1
            suffix = "-%d" % (self.__tempcount,)
            filename = tempfile.mktemp(suffix, "mktemp-", self.__dir)

            log.Debug(_("Registering (mktemp) temporary file %s") % util.ufn(filename))
            self.__pending[filename] = None
        finally:
            self.__lock.release()

        return filename
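The reason mkstemp() is "greatly preferred": it creates and opens the file in a single atomic step, so another process can never claim the generated name first. A short standard-library sketch:

import os
import tempfile

fd, name = tempfile.mkstemp(suffix="-1", prefix="mkstemp-")
try:
    os.write(fd, b"payload")  # the file already exists and is ours alone
finally:
    os.close(fd)
    os.unlink(name)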
Example #15
    def get_fileobj_write(self, filename, parseresults=None, sizelist=None):
        """
        Return fileobj opened for writing, which will cause the file
        to be written to the backend on close().

        The file will be encoded as specified in parseresults (or as
        read from the filename), and stored in a temp file until it
        can be copied over and deleted.

        If sizelist is not None, it should be set to an empty list.
        The number of bytes will be inserted into the list.
        """
        if not parseresults:
            parseresults = file_naming.parse(filename)
            assert parseresults, u"Filename %s not correctly parsed" % util.ufn(
                filename)
        tdp = dup_temp.new_tempduppath(parseresults)

        def close_file_hook():
            """This is called when returned fileobj is closed"""
            self.put(tdp, filename)
            if sizelist is not None:
                tdp.setdata()
                sizelist.append(tdp.getsize())
            tdp.delete()

        fh = dup_temp.FileobjHooked(tdp.filtered_open("wb"))
        fh.addhook(close_file_hook)
        return fh
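A stripped-down illustration of the close-hook mechanism that dup_temp.FileobjHooked provides above; this class sketches the concept only and is not duplicity's implementation:

class HookedFile:
    """Wrap a file object and run registered callbacks after close()."""

    def __init__(self, fileobj):
        self._f = fileobj
        self._hooks = []

    def addhook(self, fn):
        self._hooks.append(fn)

    def write(self, data):
        return self._f.write(data)

    def close(self):
        self._f.close()
        for fn in self._hooks:
            fn()  # e.g. upload the finished temp file, then delete it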
Example #16
    def get_fileobj_write(self, filename, parseresults=None, sizelist=None):
        """
        Return fileobj opened for writing, which will cause the file
        to be written to the backend on close().

        The file will be encoded as specified in parseresults (or as
        read from the filename), and stored in a temp file until it
        can be copied over and deleted.

        If sizelist is not None, it should be set to an empty list.
        The number of bytes will be inserted into the list.
        """
        if not parseresults:
            parseresults = file_naming.parse(filename)
            assert parseresults, u"Filename %s not correctly parsed" % util.ufn(filename)
        tdp = dup_temp.new_tempduppath(parseresults)

        def close_file_hook():
            """This is called when returned fileobj is closed"""
            self.put(tdp, filename)
            if sizelist is not None:
                tdp.setdata()
                sizelist.append(tdp.getsize())
            tdp.delete()

        fh = dup_temp.FileobjHooked(tdp.filtered_open("wb"))
        fh.addhook(close_file_hook)
        return fh
Example #17
    def __init__(self, temproot=None):
        """
        Create a new TemporaryDirectory backed by a unique and
        securely created file system directory.

        temproot - The temp root directory, or None to use system
        default (recommended).
        """
        if temproot is None:
            if globals.temproot:
                temproot = globals.temproot
            else:
                global _initialSystemTempRoot
                temproot = _initialSystemTempRoot
        self.__dir = tempfile.mkdtemp("-tempdir", "duplicity-", temproot)

        log.Info(_("Using temporary directory %s") % util.ufn(self.__dir))

        # number of mktemp()/mkstemp() calls served so far
        self.__tempcount = 0
        # dict of paths pending deletion; use dict even though we are
        # not concerned with association, because it is unclear whether
        # sets are O(1), while dictionaries are.
        self.__pending = {}

        self.__lock = threading.Lock()  # protect private resources *AND* mktemp/mkstemp calls
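For comparison, the standard library's tempfile.TemporaryDirectory covers the basic create-and-remove lifecycle; duplicity keeps its own registry so it can log each removal and survive individual failures. A rough sketch:

import tempfile

with tempfile.TemporaryDirectory(prefix="duplicity-", suffix="-tempdir") as tmp:
    pass  # create temp files under tmp; the whole tree is removed on exit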
Example #18
 def can_fast_process(self, index, ropath):
     """Can fast process (no recursion) if ropath isn't a directory"""
     log.Info(_("Writing %s of type %s") %
              (util.ufn(ropath.get_relative_path()), ropath.type),
              log.InfoCode.patch_file_writing,
              "%s %s" % (util.escape(ropath.get_relative_path()), ropath.type))
     return not ropath.isdir()
Example #19
        def diryield(path):
            """Generate relevant files in directory path

            Returns (path, num) where num == 0 means path should be
            generated normally, num == 1 means the path is a directory
            and should be included iff something inside is included.

            """
            # todo: get around circular dependency issue by importing here
            from duplicity import robust  #@Reimport
            for filename in robust.listpath(path):
                new_path = robust.check_common_error(error_handler,
                                                     Path.append,
                                                     (path, filename))
                # make sure file is read accessible
                if (new_path and new_path.type in ["reg", "dir"]
                        and not os.access(new_path.name, os.R_OK)):
                    log.Warn(
                        _("Error accessing possibly locked file %s") %
                        util.ufn(new_path.name), log.WarningCode.cannot_read,
                        util.escape(new_path.name))
                    if diffdir.stats:
                        diffdir.stats.Errors += 1
                    new_path = None
                elif new_path:
                    s = self.Select(new_path)
                    if s == 1:
                        yield (new_path, 0)
                    elif s == 2 and new_path.isdir():
                        yield (new_path, 1)
Example #20
        def diryield(path):
            """Generate relevant files in directory path

            Returns (path, num) where num == 0 means path should be
            generated normally, num == 1 means the path is a directory
            and should be included iff something inside is included.

            """
            # todo: get around circular dependency issue by importing here
            from duplicity import robust  #@Reimport
            for filename in robust.listpath(path):
                new_path = robust.check_common_error(
                    error_handler, Path.append, (path, filename))
                # make sure file is read accessible
                if (new_path and new_path.type in ["reg", "dir"]
                    and not os.access(new_path.name, os.R_OK)):
                    log.Warn(_("Error accessing possibly locked file %s") % util.ufn(new_path.name),
                             log.WarningCode.cannot_read,
                             util.escape(new_path.name))
                    if diffdir.stats:
                        diffdir.stats.Errors += 1
                    new_path = None
                elif new_path:
                    s = self.Select(new_path)
                    if s == 1:
                        yield (new_path, 0)
                    elif s == 2 and new_path.isdir():
                        yield (new_path, 1)
Example #21
    def Select(self, path):
        """Run through the selection functions and return dominant val 0/1/2"""
        # Only used by diryield and tests. Internal.
        log.Debug("Selection: examining path %s" % util.ufn(path.name))
        if not self.selection_functions:
            log.Debug("Selection:     + no selection functions found. Including")
            return 1
        scan_pending = False
        for sf in self.selection_functions:
            result = sf(path)
            log.Debug("Selection:     result: %4s from function: %s" % (str(result), sf.name))
            if result == 2:
                # Selection function says that the path should be scanned for matching files, but keep going
                # through the selection functions looking for a real match (0 or 1).
                scan_pending = True
            elif result == 0 or result == 1:
                # A real match found, no need to try other functions.
                break

        if scan_pending and result != 1:
            # A selection function returned 2 and either no real match was
            # found or the highest-priority match was 0
            result = 2
        if result is None:
            result = 1

        if result == 0:
            log.Debug("Selection:     - excluding file")
        elif result == 1:
            log.Debug("Selection:     + including file")
        else:
            assert result == 2
            log.Debug("Selection:     ? scanning directory for matches")

        return result
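The dominance rule is easier to see in isolation. A toy harness (select and the lambdas are illustrative; real selection functions inspect paths): the first function returning 0 or 1 wins, and a 2 seen along the way survives only if nothing later settles on 1.

def select(path, selection_functions):
    result, scan_pending = None, False
    for sf in selection_functions:
        result = sf(path)
        if result == 2:
            scan_pending = True
        elif result in (0, 1):
            break
    if scan_pending and result != 1:
        result = 2
    return 1 if result is None else result  # default: include

# select("x", [lambda p: 2, lambda p: 0]) == 2  (scan outranks exclude)
# select("x", [lambda p: 2, lambda p: 1]) == 1  (a real include wins)
# select("x", []) == 1                          (no functions: include)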
Example #22
 def delete(self):
     """Remove this file"""
     log.Info(_("Deleting %s") % util.ufn(self.name))
     if self.isdir():
         util.ignore_missing(os.rmdir, self.name)
     else:
         util.ignore_missing(os.unlink, self.name)
     self.setdata()
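A sketch of the ignore_missing pattern assumed above: apply an os function and swallow only "no such file" errors, so deleting an already-absent path counts as success.

import errno

def ignore_missing(fn, filename):
    try:
        fn(filename)
    except OSError as exc:
        if exc.errno != errno.ENOENT:
            raise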
Example #23
 def error_handler(exc, path, filename):
     fullpath = os.path.join(path.name, filename)
     try:
         mode = os.stat(fullpath)[stat.ST_MODE]
         if stat.S_ISSOCK(mode):
             log.Info(_("Skipping socket %s") % util.ufn(fullpath),
                      log.InfoCode.skipping_socket,
                      util.escape(fullpath))
         else:
             log.Warn(_("Error initializing file %s") % util.ufn(fullpath),
                      log.WarningCode.cannot_iterate,
                      util.escape(fullpath))
     except OSError:
         log.Warn(_("Error accessing possibly locked file %s") % util.ufn(fullpath),
                  log.WarningCode.cannot_stat,
                  util.escape(fullpath))
     return None
Example #24
 def can_fast_process(self, index, ropath):
     """Can fast process (no recursion) if ropath isn't a directory"""
     log.Info(
         _("Writing %s of type %s") %
         (util.ufn(ropath.get_relative_path()), ropath.type),
         log.InfoCode.patch_file_writing,
         "%s %s" % (util.escape(ropath.get_relative_path()), ropath.type))
     return not ropath.isdir()
Example #25
    def get_backup_chains(self, filename_list):
        """
        Split given filename_list into chains

        Return value will be tuple (list of chains, list of sets, list
        of incomplete sets), where the list of sets will comprise sets
        not fitting into any chain, and the incomplete sets are sets
        missing files.
        """
        log.Debug(_("Extracting backup chains from list of files: %s")
                  % [util.ufn(f) for f in filename_list])
        # First put filenames in set form
        sets = []

        def add_to_sets(filename):
            """
            Try adding filename to existing sets, or make new one
            """
            for set in sets:
                if set.add_filename(filename):
                    log.Debug(_("File %s is part of known set") % (util.ufn(filename),))
                    break
            else:
                log.Debug(_("File %s is not part of a known set; creating new set") % (util.ufn(filename),))
                new_set = BackupSet(self.backend)
                if new_set.add_filename(filename):
                    sets.append(new_set)
                else:
                    log.Debug(_("Ignoring file (rejected by backup set) '%s'") % util.ufn(filename))

        for f in filename_list:
            add_to_sets(f)
        sets, incomplete_sets = self.get_sorted_sets(sets)

        chains, orphaned_sets = [], []

        def add_to_chains(set):
            """
            Try adding set to existing chains, or make new one
            """
            if set.type == "full":
                new_chain = BackupChain(self.backend)
                new_chain.set_full(set)
                chains.append(new_chain)
                log.Debug(_("Found backup chain %s") % (new_chain.short_desc()))
            else:
                assert set.type == "inc"
                for chain in chains:
                    if chain.add_inc(set):
                        log.Debug(_("Added set %s to pre-existing chain %s") % (set.get_timestr(),
                                                                                chain.short_desc()))
                        break
                else:
                    log.Debug(_("Found orphaned set %s") % (set.get_timestr(),))
                    orphaned_sets.append(set)
        for s in sets:
            add_to_chains(s)
        return (chains, orphaned_sets, incomplete_sets)
Example #26
    def get_backup_chains(self, filename_list):
        """
        Split given filename_list into chains

        Return value will be tuple (list of chains, list of sets, list
        of incomplete sets), where the list of sets will comprise sets
        not fitting into any chain, and the incomplete sets are sets
        missing files.
        """
        log.Debug(_("Extracting backup chains from list of files: %s")
                  % [util.ufn(f) for f in filename_list])
        # First put filenames in set form
        sets = []

        def add_to_sets(filename):
            """
            Try adding filename to existing sets, or make new one
            """
            for set in sets:
                if set.add_filename(filename):
                    log.Debug(_("File %s is part of known set") % (util.ufn(filename),))
                    break
            else:
                log.Debug(_("File %s is not part of a known set; creating new set") % (util.ufn(filename),))
                new_set = BackupSet(self.backend, self.action)
                if new_set.add_filename(filename):
                    sets.append(new_set)
                else:
                    log.Debug(_("Ignoring file (rejected by backup set) '%s'") % util.ufn(filename))

        for f in filename_list:
            add_to_sets(f)
        sets, incomplete_sets = self.get_sorted_sets(sets)

        chains, orphaned_sets = [], []

        def add_to_chains(set):
            """
            Try adding set to existing chains, or make new one
            """
            if set.type == "full":
                new_chain = BackupChain(self.backend)
                new_chain.set_full(set)
                chains.append(new_chain)
                log.Debug(_("Found backup chain %s") % (new_chain.short_desc()))
            else:
                assert set.type == "inc"
                for chain in chains:
                    if chain.add_inc(set):
                        log.Debug(_("Added set %s to pre-existing chain %s") % (set.get_timestr(),
                                                                                chain.short_desc()))
                        break
                else:
                    log.Debug(_("Found orphaned set %s") % (set.get_timestr(),))
                    orphaned_sets.append(set)
        for s in sets:
            add_to_chains(s)
        return (chains, orphaned_sets, incomplete_sets)
Example #27
 def copy_file(self, op, source, target, raise_errors=False):
     log.Info(_("Writing %s") % util.ufn(target.get_parse_name()))
     try:
         source.copy(target,
                     Gio.FileCopyFlags.OVERWRITE | Gio.FileCopyFlags.NOFOLLOW_SYMLINKS,
                     None, self.copy_progress, None)
     except Exception as e:
         self.handle_error(raise_errors, e, op, source.get_parse_name(),
                           target.get_parse_name())
Example #28
 def log_prev_error(self, index):
     """Call function if no pending exception"""
     if not index:
         index_str = "."
     else:
         index_str = os.path.join(*index)
     log.Warn(_("Skipping %s because of previous error") % util.ufn(index_str),
              log.WarningCode.process_skipped,
              util.escape(index_str))
Example #29
 def deltree(self):
     """Remove self by recursively deleting files under it"""
     from duplicity import selection  # todo: avoid circ. dep. issue
     log.Info(_("Deleting tree %s") % util.ufn(self.name))
     itr = IterTreeReducer(PathDeleter, [])
     for path in selection.Select(self).set_iter():
         itr(path.index, path)
     itr.Finish()
     self.setdata()
Example #30
 def incr_warnings(exc):
     """Warn if prefix is incorrect"""
     prefix_warnings[0] += 1
     if prefix_warnings[0] < 6:
         log.Warn(_("Warning: file specification '%s' in filelist %s\n"
                    "doesn't start with correct prefix %s.  Ignoring.") %
                  (exc, filelist_name, util.ufn(self.prefix)))
         if prefix_warnings[0] == 5:
             log.Warn(_("Future prefix errors will not be logged."))
Example #31
 def error_handler(exc, path, filename):
     fullpath = os.path.join(path.name, filename)
     try:
         mode = os.stat(fullpath)[stat.ST_MODE]
         if stat.S_ISSOCK(mode):
             log.Info(
                 _("Skipping socket %s") % util.ufn(fullpath),
                 log.InfoCode.skipping_socket, util.escape(fullpath))
         else:
             log.Warn(
                 _("Error initializing file %s") % util.ufn(fullpath),
                 log.WarningCode.cannot_iterate, util.escape(fullpath))
     except OSError:
         log.Warn(
             _("Error accessing possibly locked file %s") %
             util.ufn(fullpath), log.WarningCode.cannot_stat,
             util.escape(fullpath))
     return None
Example #32
 def read(self, length=-1):
     try:
         buf = self.infile.read(length)
     except IOError as ex:
         buf = ""
         log.Warn(_("Error %s getting delta for %s") % (str(ex), util.ufn(self.infile.name)))
     if stats:
         stats.SourceFileSize += len(buf)
     return buf
Example #33
 def deltree(self):
     """Remove self by recursively deleting files under it"""
     from duplicity import selection # todo: avoid circ. dep. issue
     log.Info(_("Deleting tree %s") % util.ufn(self.name))
     itr = IterTreeReducer(PathDeleter, [])
     for path in selection.Select(self).set_iter():
         itr(path.index, path)
     itr.Finish()
     self.setdata()
Example #34
def log_delta_path(delta_path, new_path=None, stats=None):
    """
    Look at delta path and log delta.  Add stats if new_path is set
    """
    if delta_path.difftype == "snapshot":
        if new_path and stats:
            stats.add_new_file(new_path)
        log.Info(_("A %s") %
                 (util.ufn(delta_path.get_relative_path())),
                 log.InfoCode.diff_file_new,
                 util.escape(delta_path.get_relative_path()))
    else:
        if new_path and stats:
            stats.add_changed_file(new_path)
        log.Info(_("M %s") %
                 (util.ufn(delta_path.get_relative_path())),
                 log.InfoCode.diff_file_changed,
                 util.escape(delta_path.get_relative_path()))
Example #35
 def incr_warnings(exc):
     """Warn if prefix is incorrect"""
     prefix_warnings[0] += 1
     if prefix_warnings[0] < 6:
         log.Warn(
             _("Warning: file specification '%s' in filelist %s\n"
               "doesn't start with correct prefix %s.  Ignoring.") %
             (exc, filelist_name, util.ufn(self.prefix)))
         if prefix_warnings[0] == 5:
             log.Warn(_("Future prefix errors will not be logged."))
Example #36
 def get(self, remote_filename, local_path):
     """Retrieve remote_filename and place in local_path"""
     if hasattr(self.backend, '_get'):
         self.backend._get(remote_filename, local_path)
         local_path.setdata()
         if not local_path.exists():
             raise BackendException(_("File %s not found locally after get "
                                      "from backend") % util.ufn(local_path.name))
     else:
         raise NotImplementedError()
Example #37
 def copy_file(self, op, source, target, raise_errors=False):
     log.Info(_("Writing %s") % util.ufn(target.get_parse_name()))
     try:
         source.copy(
             target, Gio.FileCopyFlags.OVERWRITE
             | Gio.FileCopyFlags.NOFOLLOW_SYMLINKS, None,
             self.copy_progress, None)
     except Exception as e:
         self.handle_error(raise_errors, e, op, source.get_parse_name(),
                           target.get_parse_name())
Example #38
 def next(self):
     try:
         res = Block(self.fp.read(self.get_read_size()))
     except Exception:
         log.FatalError(_("Failed to read %s: %s") %
                        (util.ufn(self.src.name), sys.exc_info()),
                        log.ErrorCode.generic)
     if not res.data:
         self.fp.close()
         raise StopIteration
     return res
Example #39
    def next(self):
        """Write next file, return filename"""
        if self.finished:
            raise StopIteration

        filename = "%s.%d" % (self.prefix, self.current_index)
        log.Info(_("Starting to write %s") % util.ufn(filename))
        outfp = open(filename, "wb")

        if not self.write_volume(outfp):
            # end of input
            self.finished = 1
            if self.current_index == 1:
                # special case first index
                log.Notice(_("One only volume required.\n"
                             "Renaming %s to %s") % (util.ufn(filename), util.ufn(self.prefix)))
                os.rename(filename, self.prefix)
                return self.prefix
        else:
            self.current_index += 1
        return filename
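A hypothetical driver for this Python 2 style iterator, collecting volume names until the writer signals the end of input:

def write_all_volumes(writer):
    names = []
    try:
        while True:
            names.append(writer.next())
    except StopIteration:
        return names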
Example #40
 def next(self):
     try:
         res = Block(self.fp.read(self.get_read_size()))
     except Exception:
         log.FatalError(
             _("Failed to read %s: %s") %
             (util.ufn(self.src.name), sys.exc_info()),
             log.ErrorCode.generic)
     if not res.data:
         self.fp.close()
         raise StopIteration
     return res
Example #41
 def on_error(self, exc, *args):
     """This is run on any exception in start/end-process"""
     self.caught_exception = 1
     if args and args[0] and isinstance(args[0], tuple):
         filename = os.path.join(*args[0])
     elif self.index:
         filename = os.path.join(*self.index)  # pylint: disable=not-an-iterable
     else:
         filename = "."
     log.Warn(_("Error '%s' processing %s") % (exc, util.ufn(filename)),
              log.WarningCode.cannot_process,
              util.escape(filename))
Example #42
 def get_temp_in_same_dir(self):
     """Return temp non existent path in same directory as self"""
     global _tmp_path_counter
     parent_dir = self.get_parent_dir()
     while 1:
         temp_path = parent_dir.append("duplicity_temp." +
                                       str(_tmp_path_counter))
         if not temp_path.type:
             return temp_path
         _tmp_path_counter += 1
         assert _tmp_path_counter < 10000, \
             u"Warning too many temp files created for " + util.ufn(self.name)
Example #43
 def get_temp_in_same_dir(self):
     """Return temp non existent path in same directory as self"""
     global _tmp_path_counter
     parent_dir = self.get_parent_dir()
     while 1:
         temp_path = parent_dir.append("duplicity_temp." +
                                       str(_tmp_path_counter))
         if not temp_path.type:
             return temp_path
         _tmp_path_counter += 1
         assert _tmp_path_counter < 10000, \
                u"Warning too many temp files created for " + util.ufn(self.name)
Example #44
def delta_iter_error_handler(exc, new_path, sig_path, sig_tar=None):
    """
    Called by get_delta_iter, report error in getting delta
    """
    if new_path:
        index_string = new_path.get_relative_path()
    elif sig_path:
        index_string = sig_path.get_relative_path()
    else:
        assert 0, "Both new and sig are None for some reason"
    log.Warn(_("Error %s getting delta for %s") % (str(exc), util.ufn(index_string)))
    return None
Example #45
def get_delta_iter(new_iter, sig_iter, sig_fileobj=None):
    """
    Generate delta iter from new Path iter and sig Path iter.

    For each delta path of regular file type, path.difftype will be
    set to "snapshot", "diff".  sig_iter will probably iterate ROPaths
    instead of Paths.

    If sig_fileobj is not None, will also write signatures to sig_fileobj.
    """
    collated = collate2iters(new_iter, sig_iter)
    if sig_fileobj:
        sigTarFile = util.make_tarfile("w", sig_fileobj)
    else:
        sigTarFile = None
    for new_path, sig_path in collated:
        log.Debug(
            _("Comparing %s and %s") %
            (new_path and util.uindex(new_path.index), sig_path
             and util.uindex(sig_path.index)))
        if not new_path or not new_path.type:
            # File doesn't exist (but ignore attempts to delete base dir;
            # old versions of duplicity could have written out the sigtar in
            # such a way as to fool us; LP: #929067)
            if sig_path and sig_path.exists() and sig_path.index != ():
                # but signature says it did
                log.Info(
                    _("D %s") % (util.ufn(sig_path.get_relative_path())),
                    log.InfoCode.diff_file_deleted,
                    util.escape(sig_path.get_relative_path()))
                if sigTarFile:
                    ti = ROPath(sig_path.index).get_tarinfo()
                    ti.name = "deleted/" + "/".join(sig_path.index)
                    sigTarFile.addfile(ti)
                stats.add_deleted_file(sig_path)
                yield ROPath(sig_path.index)
        elif not sig_path or new_path != sig_path:
            # Must calculate new signature and create delta
            delta_path = robust.check_common_error(
                delta_iter_error_handler, get_delta_path,
                (new_path, sig_path, sigTarFile))
            if delta_path:
                # log and collect stats
                log_delta_path(delta_path, new_path, stats)
                yield delta_path
            else:
                # if not, an error must have occurred
                stats.Errors += 1
        else:
            stats.add_unchanged_file(new_path)
    stats.close()
    if sigTarFile:
        sigTarFile.close()
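The "deleted/" entries written to the signature tar above are just zero-length members whose names record the removed index. A self-contained illustration (add_deleted_marker is a hypothetical helper):

import io
import tarfile

def add_deleted_marker(tar, index):
    ti = tarfile.TarInfo(name="deleted/" + "/".join(index))
    ti.size = 0  # marker only, no file content
    tar.addfile(ti)

buf = io.BytesIO()
with tarfile.open(fileobj=buf, mode="w") as tf:
    add_deleted_marker(tf, ("home", "user", "gone.txt"))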
Example #46
def set_archive_dir(dirstring):
    """Check archive dir and set global"""
    if not os.path.exists(dirstring):
        try:
            os.makedirs(dirstring)
        except Exception:
            pass
    archive_dir = path.Path(dirstring)
    if not archive_dir.isdir():
        log.FatalError(_("Specified archive directory '%s' does not exist, "
                         "or is not a directory") % (util.ufn(archive_dir.name),),
                       log.ErrorCode.bad_archive_dir)
    globals.archive_dir = archive_dir
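On Python 3 the try/except around makedirs can be replaced with exist_ok; a standalone sketch of the same check (ensure_archive_dir is a hypothetical name):

import os

def ensure_archive_dir(dirstring):
    os.makedirs(dirstring, exist_ok=True)  # raises if the path exists as a file
    if not os.path.isdir(dirstring):  # defensive re-check, as in the original
        raise NotADirectoryError(dirstring)
    return dirstring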
Example #47
    def init_from_tarinfo(self, tarinfo):
        """Set data from tarinfo object (part of tarfile module)"""
        # Set the type
        type = tarinfo.type
        if type == tarfile.REGTYPE or type == tarfile.AREGTYPE:
            self.type = "reg"
        elif type == tarfile.LNKTYPE:
            raise PathException("Hard links not supported yet")
        elif type == tarfile.SYMTYPE:
            self.type = "sym"
            self.symtext = tarinfo.linkname
        elif type == tarfile.CHRTYPE:
            self.type = "chr"
            self.devnums = (tarinfo.devmajor, tarinfo.devminor)
        elif type == tarfile.BLKTYPE:
            self.type = "blk"
            self.devnums = (tarinfo.devmajor, tarinfo.devminor)
        elif type == tarfile.DIRTYPE:
            self.type = "dir"
        elif type == tarfile.FIFOTYPE:
            self.type = "fifo"
        else:
            raise PathException("Unknown tarinfo type %s" % (type,))

        self.mode = tarinfo.mode
        self.stat = StatResult()

        """ Set user and group id 
        use numeric id if name lookup fails
        OR
        --numeric-owner is set 
        """
        try:
            if globals.numeric_owner:
                raise KeyError
            self.stat.st_uid = cached_ops.getpwnam(tarinfo.uname)[2]
        except KeyError:
            self.stat.st_uid = tarinfo.uid
        try:
            if globals.numeric_owner:
                raise KeyError
            self.stat.st_gid = cached_ops.getgrnam(tarinfo.gname)[2]
        except KeyError:
            self.stat.st_gid = tarinfo.gid

        self.stat.st_mtime = int(tarinfo.mtime)
        if self.stat.st_mtime < 0:
            log.Warn(_("Warning: %s has negative mtime, treating as 0.")
                     % (util.ufn(tarinfo.name)))
            self.stat.st_mtime = 0
        self.stat.st_size = tarinfo.size
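The name-lookup-with-numeric-fallback used for ownership above can be shown in isolation (Unix-only; resolve_uid is a hypothetical helper mirroring the uid branch):

import pwd

def resolve_uid(uname, uid, numeric_owner=False):
    if not numeric_owner:
        try:
            return pwd.getpwnam(uname).pw_uid
        except KeyError:
            pass  # unknown user name: fall back to the stored id
    return uid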
Example #48
def get_delta_iter(new_iter, sig_iter, sig_fileobj=None):
    """
    Generate delta iter from new Path iter and sig Path iter.

    For each delta path of regular file type, path.difftype will be
    set to "snapshot", "diff".  sig_iter will probably iterate ROPaths
    instead of Paths.

    If sig_fileobj is not None, will also write signatures to sig_fileobj.
    """
    collated = collate2iters(new_iter, sig_iter)
    if sig_fileobj:
        sigTarFile = util.make_tarfile("w", sig_fileobj)
    else:
        sigTarFile = None
    for new_path, sig_path in collated:
        log.Debug(_("Comparing %s and %s") % (new_path and util.uindex(new_path.index),
                                              sig_path and util.uindex(sig_path.index)))
        if not new_path or not new_path.type:
            # File doesn't exist (but ignore attempts to delete base dir;
            # old versions of duplicity could have written out the sigtar in
            # such a way as to fool us; LP: #929067)
            if sig_path and sig_path.exists() and sig_path.index != ():
                # but signature says it did
                log.Info(_("D %s") %
                         (util.ufn(sig_path.get_relative_path())),
                         log.InfoCode.diff_file_deleted,
                         util.escape(sig_path.get_relative_path()))
                if sigTarFile:
                    ti = ROPath(sig_path.index).get_tarinfo()
                    ti.name = "deleted/" + "/".join(sig_path.index)
                    sigTarFile.addfile(ti)
                stats.add_deleted_file(sig_path)
                yield ROPath(sig_path.index)
        elif not sig_path or new_path != sig_path:
            # Must calculate new signature and create delta
            delta_path = robust.check_common_error(delta_iter_error_handler,
                                                   get_delta_path,
                                                   (new_path, sig_path, sigTarFile))
            if delta_path:
                # log and collect stats
                log_delta_path(delta_path, new_path, stats)
                yield delta_path
            else:
                # if not, an error must have occurred
                stats.Errors += 1
        else:
            stats.add_unchanged_file(new_path)
    stats.close()
    if sigTarFile:
        sigTarFile.close()