示例#1
0
    def test_basic(self):
        """Verify that generated filenames parse back to the same fields"""
        dup_time.setprevtime(10)
        dup_time.setcurtime(20)

        file_naming.prepare_regex(force=True)

        # Incremental volume: round-trip all encoded fields
        inc_name = file_naming.get("inc", volume_number=23)
        log.Info(u"Inc filename: " + inc_name)
        parsed = file_naming.parse(inc_name)
        assert parsed and parsed.type == "inc", parsed
        assert parsed.start_time == 10
        assert parsed.end_time == 20
        assert parsed.volume_number == 23
        assert not parsed.partial

        # Full signature: carries a single time, no volume number
        fullsig_name = file_naming.get("full-sig")
        log.Info(u"Full sig filename: " + fullsig_name)
        parsed = file_naming.parse(fullsig_name)
        assert parsed.type == "full-sig"
        assert parsed.time == 20
        assert not parsed.partial

        # Incremental signature: carries the start/end time pair
        newsig_name = file_naming.get("new-sig")
        parsed = file_naming.parse(newsig_name)
        assert parsed.type == "new-sig"
        assert parsed.start_time == 10
        assert parsed.end_time == 20
        assert not parsed.partial
示例#2
0
    def test_more(self):
        """More file_parsing tests"""
        file_naming.prepare_regex(force=True)
        sig_prefix = globals.file_prefix + globals.file_prefix_signature

        # Short-format incremental signature name
        parsed = file_naming.parse(sig_prefix + "dns.h112bi.h14rg0.st.g")
        assert parsed, parsed
        assert parsed.type == "new-sig"
        assert parsed.end_time == 1029826800

        if not globals.short_filenames:
            # Long-format incremental signature name
            long_name = (sig_prefix +
                         "duplicity-new-signatures.2002-08-18T00:04:30-07:00.to.2002-08-20T00:00:00-07:00.sigtar.gpg")
            parsed = file_naming.parse(long_name)
            assert parsed, parsed
            assert parsed.type == "new-sig"
            assert parsed.end_time == 1029826800

        # Short-format full signature name
        parsed = file_naming.parse(sig_prefix + "dfs.h5dixs.st.g")
        assert parsed, parsed
        assert parsed.type == "full-sig"
        assert parsed.time == 1036954144, repr(parsed.time)
示例#3
0
    def test_partial(self):
        u"""Test addition of partial flag"""
        file_naming.prepare_regex(force=True)
        sig_prefix = config.file_prefix + config.file_prefix_signature

        # Short-format partial incremental signature (".p" marker)
        parsed = file_naming.parse(sig_prefix + b"dns.h112bi.h14rg0.st.p.g")
        assert parsed, parsed
        assert parsed.partial
        assert parsed.type == u"new-sig"
        assert parsed.end_time == 1029826800

        if not config.short_filenames:
            # Long-format partial incremental signature (".part" marker)
            long_name = (sig_prefix +
                         b"duplicity-new-signatures.2002-08-18T00:04:30-07:00.to.2002-08-20T00:00:00-07:00.sigtar.part.gpg")
            parsed = file_naming.parse(long_name)
            assert parsed, parsed
            assert parsed.partial
            assert parsed.type == u"new-sig"
            assert parsed.end_time == 1029826800

        # Short-format partial full signature
        parsed = file_naming.parse(sig_prefix + b"dfs.h5dixs.st.p.g")
        assert parsed, parsed
        assert parsed.partial
        assert parsed.type == u"full-sig"
        assert parsed.time == 1036954144, repr(parsed.time)
示例#4
0
    def test_basic(self):
        u"""Check get/parse cycle"""
        dup_time.setprevtime(10)
        dup_time.setcurtime(20)

        file_naming.prepare_regex(force=True)

        # Incremental volume round-trips every encoded field
        inc_name = file_naming.get(u"inc", volume_number=23)
        log.Info(u"Inc filename: " + util.fsdecode(inc_name))
        parsed = file_naming.parse(inc_name)
        assert parsed and parsed.type == u"inc", parsed
        assert parsed.start_time == 10
        assert parsed.end_time == 20
        assert parsed.volume_number == 23
        assert not parsed.partial

        # Full signature carries a single timestamp
        fullsig_name = file_naming.get(u"full-sig")
        log.Info(u"Full sig filename: " + util.fsdecode(fullsig_name))
        parsed = file_naming.parse(fullsig_name)
        assert parsed.type == u"full-sig"
        assert parsed.time == 20
        assert not parsed.partial

        # Incremental signature carries the start/end pair
        newsig_name = file_naming.get(u"new-sig")
        parsed = file_naming.parse(newsig_name)
        assert parsed.type == u"new-sig"
        assert parsed.start_time == 10
        assert parsed.end_time == 20
        assert not parsed.partial
示例#5
0
    def test_basic(self):
        """Check get/parse cycle"""
        dup_time.setprevtime(10)
        dup_time.setcurtime(20)

        # Incremental volume: all encoded fields must round-trip
        inc_name = file_naming.get("inc", volume_number=23)
        log.Info("Inc filename: " + inc_name)
        parsed = file_naming.parse(inc_name)
        assert parsed and parsed.type == "inc", parsed
        assert parsed.start_time == 10
        assert parsed.end_time == 20
        assert parsed.volume_number == 23
        assert not parsed.partial

        # Full signature: single timestamp only
        fullsig_name = file_naming.get("full-sig")
        log.Info("Full sig filename: " + fullsig_name)
        parsed = file_naming.parse(fullsig_name)
        assert parsed.type == "full-sig"
        assert parsed.time == 20
        assert not parsed.partial

        # Incremental signature: start/end pair
        newsig_name = file_naming.get("new-sig")
        parsed = file_naming.parse(newsig_name)
        assert parsed.type == "new-sig"
        assert parsed.start_time == 10
        assert parsed.end_time == 20
        assert not parsed.partial
示例#6
0
    def test_suffix(self):
        """Test suffix (encrypt/compressed) encoding and generation"""
        # Compressed manifest: suffix must record gzip + manifest flags
        manifest_name = file_naming.get("inc", manifest=1, gzipped=1)
        parsed = file_naming.parse(manifest_name)
        assert parsed and parsed.compressed == 1
        assert parsed.manifest

        # Encrypted volume: suffix must record encryption + volume number
        volume_name = file_naming.get("full", volume_number=23, encrypted=1)
        parsed = file_naming.parse(volume_name)
        assert parsed and parsed.encrypted == 1
        assert parsed.volume_number == 23
示例#7
0
    def test_suffix(self):
        """Test suffix (encrypt/compressed) encoding and generation"""
        # A gzipped manifest name must parse back with both flags set
        gzipped_name = file_naming.get("inc", manifest=1, gzipped=1)
        result = file_naming.parse(gzipped_name)
        assert result and result.compressed == 1
        assert result.manifest

        # An encrypted volume name must parse back with flag and number
        encrypted_name = file_naming.get("full", volume_number=23, encrypted=1)
        result = file_naming.parse(encrypted_name)
        assert result and result.encrypted == 1
        assert result.volume_number == 23
示例#8
0
    def get_signature_chains(self, local, filelist=None):
        u"""
        Find chains in archive_dir_path (if local is true) or backend

        Use filelist if given, otherwise regenerate.  Return value is
        pair (list of chains, list of signature paths not in any
        chains).
        """
        def get_filelist():
            # Filename source: explicit list, local cache, or backend
            if filelist is not None:
                return filelist
            elif local:
                # replicate keeps no local cache to scan
                if self.action != u"replicate":
                    return self.archive_dir_path.listdir()
                else:
                    return []
            else:
                return self.backend.list()

        def get_new_sigchain():
            u"""
            Return new empty signature chain
            """
            if local:
                return SignatureChain(True, self.archive_dir_path)
            else:
                return SignatureChain(False, self.backend)

        # Build initial chains from full sig filenames
        chains, new_sig_filenames = [], []
        for filename in get_filelist():
            pr = file_naming.parse(filename)
            if pr:
                if pr.type == u"full-sig":
                    # Each full signature starts its own chain
                    new_chain = get_new_sigchain()
                    assert new_chain.add_filename(filename, pr)
                    chains.append(new_chain)
                elif pr.type == u"new-sig":
                    new_sig_filenames.append(filename)

        # Try adding new signatures to existing chains
        orphaned_filenames = []
        # Sort incrementals by start time so each one can extend the
        # chain that ends where it begins
        new_sig_filenames.sort(
            key=lambda x: int(file_naming.parse(x).start_time))
        for sig_filename in new_sig_filenames:
            for chain in chains:
                if chain.add_filename(sig_filename):
                    break
            else:
                # No chain accepted this signature file
                orphaned_filenames.append(sig_filename)
        return (chains, orphaned_filenames)
示例#9
0
 def delete(self):
     """
     Remove all files in set, both local and remote.

     Deletion failures are logged and ignored so the rest of the set
     is still cleaned up.
     """
     rfn = self.get_filenames()
     rfn.reverse()  # delete newest files first
     try:
         self.backend.delete(rfn)
     except Exception:
         # Best effort: log the miss and continue with local cleanup
         log.Debug(_("BackupSet.delete: missing %s") % [util.ufn(f) for f in rfn])
     # collection-status and replicate keep no local cache to clean
     if self.action not in ["collection-status", "replicate"]:
         local_filename_list = globals.archive_dir_path.listdir()
     else:
         local_filename_list = []
     for lfn in local_filename_list:
         pr = file_naming.parse(lfn)
         # Remove cached files whose times match this backup set
         if (pr and pr.time == self.time and
                 pr.start_time == self.start_time and
                 pr.end_time == self.end_time):
             try:
                 globals.archive_dir_path.append(lfn).delete()
             except Exception:
                 # Bug fix: lfn is a single filename, not a list -- the old
                 # comprehension iterated over its characters
                 log.Debug(_("BackupSet.delete: missing %s") % util.ufn(lfn))
     util.release_lockfile()
示例#10
0
    def get_fileobj_write(self, filename,
                          parseresults = None,
                          sizelist = None):
        """
        Return fileobj opened for writing, which will cause the file
        to be written to the backend on close().

        The file will be encoded as specified in parseresults (or as
        read from the filename), and stored in a temp file until it
        can be copied over and deleted.

        If sizelist is not None, it should be set to an empty list.
        The number of bytes will be inserted into the list.
        """
        if not parseresults:
            # Derive encoding (compression/encryption) from the filename
            parseresults = file_naming.parse(filename)
            assert parseresults, "Filename %s not correctly parsed" % filename
        tdp = dup_temp.new_tempduppath(parseresults)

        def close_file_hook():
            """This is called when returned fileobj is closed"""
            # Upload the finished temp file, record its size if the
            # caller asked for it, then remove the temporary copy
            self.put(tdp, filename)
            if sizelist is not None:
                tdp.setdata()
                sizelist.append(tdp.getsize())
            tdp.delete()

        fh = dup_temp.FileobjHooked(tdp.filtered_open("wb"))
        fh.addhook(close_file_hook)
        return fh
示例#11
0
 def delete(self):
     """
     Remove all files in set, both local and remote.

     Deletion failures are logged and ignored so the rest of the set
     is still cleaned up.
     """
     rfn = self.get_filenames()
     rfn.reverse()  # delete newest files first
     try:
         self.backend.delete(rfn)
     except Exception:
         # Best effort: log the miss and continue with local cleanup
         log.Debug(
             _("BackupSet.delete: missing %s") % [util.ufn(f) for f in rfn])
     for lfn in globals.archive_dir.listdir():
         pr = file_naming.parse(lfn)
         # Remove cached files whose times match this backup set
         if (pr and pr.time == self.time
                 and pr.start_time == self.start_time
                 and pr.end_time == self.end_time):
             try:
                 globals.archive_dir.append(lfn).delete()
             except Exception:
                 # Bug fix: lfn is a single filename, not a list -- the
                 # old comprehension iterated over its characters
                 log.Debug(
                     _("BackupSet.delete: missing %s") % util.ufn(lfn))
     util.release_lockfile()
示例#12
0
def get_fileobj_duppath(dirpath, partname, permname, remname):
    """
    Return a file object open for writing, will write to filename

    Data will be processed and written to a temporary file.  When the
    return fileobject is closed, rename to final position.  filename
    must be a recognizable duplicity data file.
    """
    if not globals.restart:
        # Fresh run: write into a temp file, rename into place on close
        td = tempdir.TemporaryDirectory(dirpath.name)
        tdpname = td.mktemp()
        tdp = TempDupPath(tdpname, parseresults = file_naming.parse(partname))
        fh = FileobjHooked(tdp.filtered_open("wb"), tdp = tdp, dirpath = dirpath,
                           partname = partname, permname = permname, remname = remname)
    else:
        # Restart: append directly to the existing partial file
        dp = path.DupPath(dirpath.name, index = (partname,))
        fh = FileobjHooked(dp.filtered_open("ab"), tdp = None, dirpath = dirpath,
                           partname = partname, permname = permname, remname = remname)

    def rename_and_forget():
        # Move the finished temp file to its final name and stop
        # tracking it in the temporary directory
        tdp.rename(dirpath.append(partname))
        td.forget(tdpname)

    if not globals.restart:
        # The rename hook only applies to the temp-file branch above
        fh.addhook(rename_and_forget)

    return fh
示例#13
0
 def delete(self):
     """
     Remove all files in set, both local and remote.

     Deletion failures are logged and ignored so the rest of the set
     is still cleaned up.
     """
     rfn = self.get_filenames()
     rfn.reverse()  # delete newest files first
     try:
         self.backend.delete(rfn)
     except Exception:
         # Best effort: log the miss and continue with local cleanup
         log.Debug(_("BackupSet.delete: missing %s") % [util.ufn(f) for f in rfn])
     # collection-status keeps no local cache to clean
     if self.action not in ["collection-status"]:
         local_filename_list = globals.archive_dir.listdir()
     else:
         local_filename_list = []
     for lfn in local_filename_list:
         pr = file_naming.parse(lfn)
         # Remove cached files whose times match this backup set
         if (pr and pr.time == self.time and
                 pr.start_time == self.start_time and
                 pr.end_time == self.end_time):
             try:
                 globals.archive_dir.append(lfn).delete()
             except Exception:
                 # Bug fix: lfn is a single filename, not a list -- the old
                 # comprehension iterated over its characters
                 log.Debug(_("BackupSet.delete: missing %s") % util.ufn(lfn))
     util.release_lockfile()
示例#14
0
    def add_filename(self, filename, pr = None):
        """
        Add new sig filename to current chain.  Return true if fits

        @param filename: signature filename to add
        @param pr: optional pre-parsed result for filename
        """
        if not pr:
            pr = file_naming.parse(filename)
        if not pr:
            # Not a recognizable duplicity filename
            return None

        if self.fullsig:
            # Chain already started: accept only an incremental sig
            # that begins exactly where the chain currently ends
            if pr.type != "new-sig":
                return None
            if pr.start_time != self.end_time:
                return None
            self.inclist.append(filename)
            self.check_times([pr.end_time])
            self.end_time = pr.end_time
            return 1
        else:
            # Empty chain: first file must be the full signature
            if pr.type != "full-sig":
                return None
            self.fullsig = filename
            self.check_times([pr.time, pr.time])
            self.start_time, self.end_time = pr.time, pr.time
            return 1
示例#15
0
    def add_filename(self, filename):
        """
        Add a filename to given set.  Return true if it fits.

        The filename will match the given set if it has the right
        times and is of the right type.  The information will be set
        from the first filename given.

        @param filename: name of file to add
        @type filename: string
        """
        pr = file_naming.parse(filename)
        if not pr or not (pr.type == "full" or pr.type == "inc"):
            return False

        if not self.info_set:
            # First file defines the set's type and time span
            self.set_info(pr)
        else:
            # Subsequent files must agree with the recorded type/times
            if pr.type != self.type:
                return False
            if pr.time != self.time:
                return False
            if (pr.start_time != self.start_time
                    or pr.end_time != self.end_time):
                return False

        if pr.manifest:
            self.set_manifest(filename)
        else:
            assert pr.volume_number is not None
            # Bug fix: dict.has_key() was removed in Python 3; use "in"
            assert pr.volume_number not in self.volume_name_dict, \
                   (self.volume_name_dict, filename)
            self.volume_name_dict[pr.volume_number] = filename
        return True
示例#16
0
    def set_manifest(self, remote_filename):
        u"""
        Add local and remote manifest filenames to backup set
        """
        # A set has exactly one remote manifest; setting it twice is a bug
        assert not self.remote_manifest_name, \
            u"Cannot set filename of remote manifest to %s; already set to %s." % (
                remote_filename,
                self.remote_manifest_name,
            )
        self.remote_manifest_name = remote_filename

        # replicate keeps no local cache, so there is nothing to search
        if self.action != u"replicate":
            local_filename_list = config.archive_dir_path.listdir()
        else:
            local_filename_list = []
        for local_filename in local_filename_list:
            pr = file_naming.parse(local_filename)
            # Look for a cached manifest with matching type and time span
            if (pr and pr.manifest and pr.type == self.type
                    and pr.time == self.time
                    and pr.start_time == self.start_time
                    and pr.end_time == self.end_time):
                self.local_manifest_path = \
                    config.archive_dir_path.append(local_filename)

                self.set_files_changed()
                break
示例#17
0
 def delete(self):
     """
     Remove all files in set, both local and remote
     """
     rfn = self.get_filenames()
     rfn.reverse()  # delete newest files first
     try:
         self.backend.delete(rfn)
     except Exception:
         # Best effort: log the miss and continue with local cleanup
         log.Debug("BackupSet.delete: missing %s" % rfn)
         pass
     for lfn in globals.archive_dir.listdir():
         pr = file_naming.parse(lfn)
         # Remove cached files from this set, except new-sigs (below)
         if (pr and pr.time == self.time
                 and pr.start_time == self.start_time
                 and pr.end_time == self.end_time and pr.type != "new-sig"):
             # do not remove new sigs from the cache:
             # they aren't removed from the remote archive,
             # and subsequent backups will have to resync
             # which is bad if running non-interactive with encrypt-key
             try:
                 globals.archive_dir.append(lfn).delete()
             except Exception:
                 log.Debug("BackupSet.delete: missing %s" % lfn)
                 pass
示例#18
0
    def add_filename(self, filename, pr=None):
        """
        Add new sig filename to current chain.  Return true if fits
        """
        parsed = pr or file_naming.parse(filename)
        if not parsed:
            # Not a recognizable duplicity filename
            return None

        if not self.fullsig:
            # Empty chain: the first file must be the full signature
            if parsed.type != "full-sig":
                return None
            self.fullsig = filename
            self.check_times([parsed.time, parsed.time])
            self.start_time, self.end_time = parsed.time, parsed.time
            return 1

        # Chain already started: accept only an incremental signature
        # that begins exactly where the chain currently ends
        if parsed.type != "new-sig":
            return None
        if parsed.start_time != self.end_time:
            return None
        self.inclist.append(filename)
        self.check_times([parsed.end_time])
        self.end_time = parsed.end_time
        return 1
示例#19
0
def get_fileobj_duppath(dirpath, partname, permname, remname, overwrite=False):
    """
    Return a file object open for writing, will write to filename

    Data will be processed and written to a temporary file.  When the
    return fileobject is closed, rename to final position.  filename
    must be a recognizable duplicity data file.
    """
    if not globals.restart:
        # Fresh run: write into a temp file, rename into place on close
        td = tempdir.TemporaryDirectory(dirpath.name)
        tdpname = td.mktemp()
        tdp = TempDupPath(tdpname, parseresults=file_naming.parse(partname))
        fh = FileobjHooked(tdp.filtered_open("wb"), tdp=tdp, dirpath=dirpath,
                           partname=partname, permname=permname, remname=remname)
    else:
        # Restart: reopen the existing partial file in place
        dp = path.DupPath(dirpath.name, index=(partname,))
        mode = "ab"
        if overwrite:
            # Caller wants to discard any existing partial data
            mode = "wb"
        fh = FileobjHooked(dp.filtered_open(mode), tdp=None, dirpath=dirpath,
                           partname=partname, permname=permname, remname=remname)

    def rename_and_forget():
        # Move the finished temp file to its final name and stop
        # tracking it in the temporary directory
        tdp.rename(dirpath.append(partname))
        td.forget(tdpname)

    if not globals.restart:
        # The rename hook only applies to the temp-file branch above
        fh.addhook(rename_and_forget)

    return fh
示例#20
0
 def delete(self):
     u"""
     Remove all files in set, both local and remote.

     Deletion failures are logged and ignored so the rest of the set
     is still cleaned up.
     """
     rfn = self.get_filenames()
     rfn.reverse()  # delete newest files first
     try:
         self.backend.delete(rfn)
     except Exception:
         # Best effort: log the miss and continue with local cleanup
         log.Debug(
             _(u"BackupSet.delete: missing %s") %
             [util.fsdecode(f) for f in rfn])
     # replicate keeps no local cache to clean
     if self.action != u"replicate":
         local_filename_list = config.archive_dir_path.listdir()
     else:
         local_filename_list = []
     for lfn in local_filename_list:
         pr = file_naming.parse(lfn)
         # Remove cached files whose times match this backup set
         if (pr and pr.time == self.time
                 and pr.start_time == self.start_time
                 and pr.end_time == self.end_time):
             try:
                 config.archive_dir_path.append(lfn).delete()
             except Exception:
                 # Bug fix: lfn is a single filename, not a list -- the
                 # old comprehension iterated over its characters
                 log.Debug(
                     _(u"BackupSet.delete: missing %s") %
                     util.fsdecode(lfn))
     util.release_lockfile()
示例#21
0
    def get_fileobj_write(self, filename, parseresults=None, sizelist=None):
        """
        Return fileobj opened for writing, which will cause the file
        to be written to the backend on close().

        The file will be encoded as specified in parseresults (or as
        read from the filename), and stored in a temp file until it
        can be copied over and deleted.

        If sizelist is not None, it should be set to an empty list.
        The number of bytes will be inserted into the list.
        """
        pr = parseresults
        if not pr:
            # Derive the encoding from the filename itself
            pr = file_naming.parse(filename)
            assert pr, "Filename %s not correctly parsed" % filename
        tdp = dup_temp.new_tempduppath(pr)

        def on_close():
            """Upload the temp file and clean up when the fileobj closes"""
            self.put(tdp, filename)
            if sizelist is not None:
                tdp.setdata()
                sizelist.append(tdp.getsize())
            tdp.delete()

        hooked = dup_temp.FileobjHooked(tdp.filtered_open("wb"))
        hooked.addhook(on_close)
        return hooked
示例#22
0
    def add_filename(self, filename):
        """
        Add a filename to given set.  Return true if it fits.

        The filename will match the given set if it has the right
        times and is of the right type.  The information will be set
        from the first filename given.

        @param filename: name of file to add
        @type filename: string
        """
        pr = file_naming.parse(filename)
        if not pr or not (pr.type == "full" or pr.type == "inc"):
            return False

        if not self.info_set:
            # First file defines the set's type and time span
            self.set_info(pr)
        else:
            # Subsequent files must agree with the recorded type/times
            if pr.type != self.type:
                return False
            if pr.time != self.time:
                return False
            if (pr.start_time != self.start_time or
                pr.end_time != self.end_time):
                return False

        if pr.manifest:
            self.set_manifest(filename)
        else:
            assert pr.volume_number is not None
            # Bug fix: dict.has_key() was removed in Python 3; use "in"
            assert pr.volume_number not in self.volume_name_dict, \
                   (self.volume_name_dict, filename)
            self.volume_name_dict[pr.volume_number] = filename
        return True
示例#23
0
 def delete(self):
     """
     Remove all files in set, both local and remote
     """
     rfn = self.get_filenames()
     rfn.reverse()  # delete newest files first
     try:
         self.backend.delete(rfn)
     except Exception:
         # Best effort: log the miss and continue with local cleanup
         log.Debug("BackupSet.delete: missing %s" % rfn)
         pass
     for lfn in globals.archive_dir.listdir():
         pr = file_naming.parse(lfn)
         # Remove cached files from this set, except new-sigs (below)
         if (pr
             and pr.time == self.time
             and pr.start_time == self.start_time
             and pr.end_time == self.end_time
             and pr.type != "new-sig" ):
             # do not remove new sigs from the cache:
             # they aren't removed from the remote archive,
             # and subsequent backups will have to resync
             # which is bad if running non-interactive with encrypt-key
             try:
                 globals.archive_dir.append(lfn).delete()
             except Exception:
                 log.Debug("BackupSet.delete: missing %s" % lfn)
                 pass
示例#24
0
    def test_more(self):
        """More file_parsing tests"""
        # Short-format incremental signature name
        pr = file_naming.parse("dns.h112bi.h14rg0.st.g")
        assert pr, pr
        assert pr.type == "new-sig"
        # Bug fix: the "L" long-literal suffix is a SyntaxError in
        # Python 3; plain ints compare identically on Python 2 as well
        assert pr.end_time == 1029826800

        if not globals.short_filenames:
            # Long-format incremental signature name
            pr = file_naming.parse("duplicity-new-signatures.2002-08-18T00:04:30-07:00.to.2002-08-20T00:00:00-07:00.sigtar.gpg")
            assert pr, pr
            assert pr.type == "new-sig"
            assert pr.end_time == 1029826800

        # Short-format full signature name
        pr = file_naming.parse("dfs.h5dixs.st.g")
        assert pr, pr
        assert pr.type == "full-sig"
        assert pr.time == 1036954144, repr(pr.time)
示例#25
0
    def get_signature_chains(self, local, filelist=None):
        """
        Find chains in archive_dir (if local is true) or backend

        Use filelist if given, otherwise regenerate.  Return value is
        pair (list of chains, list of signature paths not in any
        chains).
        """
        def get_filelist():
            # Filename source: explicit list, local cache, or backend
            if filelist is not None:
                return filelist
            elif local:
                # collection-status keeps no local cache to scan
                if self.action not in ["collection-status"]:
                    return self.archive_dir.listdir()
                else:
                    return []
            else:
                return self.backend.list()

        def get_new_sigchain():
            """
            Return new empty signature chain
            """
            if local:
                return SignatureChain(True, self.archive_dir)
            else:
                return SignatureChain(False, self.backend)

        # Build initial chains from full sig filenames
        chains, new_sig_filenames = [], []
        for filename in get_filelist():
            pr = file_naming.parse(filename)
            if pr:
                if pr.type == "full-sig":
                    # Each full signature starts its own chain
                    new_chain = get_new_sigchain()
                    assert new_chain.add_filename(filename, pr)
                    chains.append(new_chain)
                elif pr.type == "new-sig":
                    new_sig_filenames.append(filename)

        # Try adding new signatures to existing chains, oldest first.
        # Bug fix: cmp-function sorting was removed in Python 3; the
        # key form is equivalent and also parses each filename once
        # per element instead of twice per comparison.
        orphaned_filenames = []
        new_sig_filenames.sort(
            key=lambda f: int(file_naming.parse(f).start_time))
        for sig_filename in new_sig_filenames:
            for chain in chains:
                if chain.add_filename(sig_filename):
                    break
            else:
                # No chain accepted this signature file
                orphaned_filenames.append(sig_filename)
        return (chains, orphaned_filenames)
示例#26
0
    def test_partial(self):
        """Test addition of partial flag"""
        file_naming.prepare_regex(force=True)
        # Short-format partial incremental signature (".p" marker)
        pr = file_naming.parse(globals.file_prefix + globals.file_prefix_signature + "dns.h112bi.h14rg0.st.p.g")
        assert pr, pr
        assert pr.partial
        assert pr.type == "new-sig"
        # Bug fix: the "L" long-literal suffix is a SyntaxError in
        # Python 3; plain ints compare identically on Python 2 as well
        assert pr.end_time == 1029826800

        if not globals.short_filenames:
            # Long-format partial incremental signature (".part" marker)
            pr = file_naming.parse(globals.file_prefix + globals.file_prefix_signature + "duplicity-new-signatures.2002-08-18T00:04:30-07:00.to.2002-08-20T00:00:00-07:00.sigtar.part.gpg")
            assert pr, pr
            assert pr.partial
            assert pr.type == "new-sig"
            assert pr.end_time == 1029826800

        # Short-format partial full signature
        pr = file_naming.parse(globals.file_prefix + globals.file_prefix_signature + "dfs.h5dixs.st.p.g")
        assert pr, pr
        assert pr.partial
        assert pr.type == "full-sig"
        assert pr.time == 1036954144, repr(pr.time)
示例#27
0
    def set_values(self, sig_chain_warning=1):
        """
        Set values from archive_dir and backend.

        Returns self for convenience.  If sig_chain_warning is set to None,
        do not warn about unnecessary sig chains.  This is because there may
        naturally be some unecessary ones after a full backup.
        """
        self.values_set = 1

        # get remote filename list
        backend_filename_list = self.backend.list()
        log.Debug(ngettext("%d file exists on backend",
                           "%d files exist on backend",
                           len(backend_filename_list)) %
                  len(backend_filename_list))

        # get local filename list (collection-status keeps no cache)
        if self.action not in ["collection-status"]:
            local_filename_list = self.archive_dir.listdir()
        else:
            local_filename_list = []
        log.Debug(ngettext("%d file exists in cache",
                           "%d files exist in cache",
                           len(local_filename_list)) %
                  len(local_filename_list))

        # check for partial backups (only ever present in the local cache)
        partials = []
        for local_filename in local_filename_list:
            pr = file_naming.parse(local_filename)
            if pr and pr.partial:
                partials.append(local_filename)

        # get various backup sets and chains
        (backup_chains, self.orphaned_backup_sets,
         self.incomplete_backup_sets) = \
            self.get_backup_chains(partials + backend_filename_list)
        backup_chains = self.get_sorted_chains(backup_chains)
        self.all_backup_chains = backup_chains

        assert len(backup_chains) == len(self.all_backup_chains), \
            "get_sorted_chains() did something more than re-ordering"

        # pair local/remote signature chains with the backup chains
        local_sig_chains, self.local_orphaned_sig_names = \
            self.get_signature_chains(True)
        remote_sig_chains, self.remote_orphaned_sig_names = \
            self.get_signature_chains(False, filelist=backend_filename_list)
        self.set_matched_chain_pair(local_sig_chains + remote_sig_chains,
                                    backup_chains)
        self.warn(sig_chain_warning)
        return self
示例#28
0
    def set_values(self, sig_chain_warning=1):
        """
        Set values from archive_dir_path and backend.

        Returns self for convenience.  If sig_chain_warning is set to None,
        do not warn about unnecessary sig chains.  This is because there may
        naturally be some unecessary ones after a full backup.
        """
        self.values_set = 1

        # get remote filename list
        backend_filename_list = self.backend.list()
        log.Debug(ngettext("%d file exists on backend",
                           "%d files exist on backend",
                           len(backend_filename_list)) %
                  len(backend_filename_list))

        # get local filename list (these actions keep no cache)
        if self.action not in ["collection-status", "replicate"]:
            local_filename_list = self.archive_dir_path.listdir()
        else:
            local_filename_list = []
        log.Debug(ngettext("%d file exists in cache",
                           "%d files exist in cache",
                           len(local_filename_list)) %
                  len(local_filename_list))

        # check for partial backups (only ever present in the local cache)
        partials = []
        for local_filename in local_filename_list:
            pr = file_naming.parse(local_filename)
            if pr and pr.partial:
                partials.append(local_filename)

        # get various backup sets and chains
        (backup_chains, self.orphaned_backup_sets,
         self.incomplete_backup_sets) = \
            self.get_backup_chains(partials + backend_filename_list)
        backup_chains = self.get_sorted_chains(backup_chains)
        self.all_backup_chains = backup_chains

        assert len(backup_chains) == len(self.all_backup_chains), \
            "get_sorted_chains() did something more than re-ordering"

        # pair local/remote signature chains with the backup chains
        local_sig_chains, self.local_orphaned_sig_names = \
            self.get_signature_chains(True)
        remote_sig_chains, self.remote_orphaned_sig_names = \
            self.get_signature_chains(False, filelist=backend_filename_list)
        self.set_matched_chain_pair(local_sig_chains + remote_sig_chains,
                                    backup_chains)
        self.warn(sig_chain_warning)
        return self
示例#29
0
    def test_partial(self):
        """Test addition of partial flag"""
        # Short-format partial incremental signature (".p" marker)
        pr = file_naming.parse("dns.h112bi.h14rg0.st.p.g")
        assert pr, pr
        assert pr.partial
        assert pr.type == "new-sig"
        # Bug fix: the "L" long-literal suffix is a SyntaxError in
        # Python 3; plain ints compare identically on Python 2 as well
        assert pr.end_time == 1029826800

        if not globals.short_filenames:
            # Long-format partial incremental signature (".part" marker)
            pr = file_naming.parse(
                "duplicity-new-signatures.2002-08-18T00:04:30-07:00.to.2002-08-20T00:00:00-07:00.sigtar.part.gpg"
            )
            assert pr, pr
            assert pr.partial
            assert pr.type == "new-sig"
            assert pr.end_time == 1029826800

        # Short-format partial full signature
        pr = file_naming.parse("dfs.h5dixs.st.p.g")
        assert pr, pr
        assert pr.partial
        assert pr.type == "full-sig"
        assert pr.time == 1036954144, repr(pr.time)
示例#30
0
 def to_final(self):
     """
     We are finished, rename to final, gzip if needed.

     Moves the partial file to its permanent name; a compressed
     target is gzipped into place, anything else is just renamed.
     NOTE: sys.maxint makes this Python 2 only.
     """
     partial = self.dirpath.append(self.partname)
     final = self.dirpath.append(self.permname)
     reader = SrcIter(partial)
     pr = file_naming.parse(self.permname)
     if not pr.compressed:
         # a plain rename is enough for uncompressed targets
         os.rename(partial.name, final.name)
     else:
         # gzip the partial into the final name, then drop the partial
         gpg.GzipWriteFile(reader, final.name, size=sys.maxint)
         os.unlink(partial.name)
示例#31
0
 def to_final(self):
     """
     We are finished, rename to final, gzip if needed.

     Moves the partial file (self.partname) to its permanent name
     (self.permname) in the same directory.  NOTE: sys.maxint makes
     this Python 2 only.
     """
     src = self.dirpath.append(self.partname)
     tgt = self.dirpath.append(self.permname)
     # iterator over src's contents, consumed only in the gzip branch
     src_iter = SrcIter(src)
     pr = file_naming.parse(self.permname)
     if pr.compressed:
         # gzip src into the final name, then drop the partial file
         gpg.GzipWriteFile(src_iter, tgt.name, size = sys.maxint)
         os.unlink(src.name)
     else:
         os.rename(src.name, tgt.name)
示例#32
0
    def get_fileobj_read(self, filename, parseresults=None):
        """
        Return fileobject opened for reading of filename on backend

        The file will be downloaded first into a temp file.  When the
        returned fileobj is closed, the temp file will be deleted.
        """
        pr = parseresults
        if not pr:
            pr = file_naming.parse(filename)
            assert pr, "Filename not correctly parsed"
        tmp_path = dup_temp.new_tempduppath(pr)
        self.get(filename, tmp_path)
        tmp_path.setdata()
        return tmp_path.filtered_open_with_delete("rb")
示例#33
0
    def get_filenames(self, time=None):
        u"""
        Return ordered list of filenames in set, up to a provided time

        The full signature (if any) comes first, followed by the
        incremental signatures; when *time* is given, increments whose
        end_time lies after it are dropped.
        """
        names = [self.fullsig] if self.fullsig else []

        increments = self.inclist
        if time:
            increments = [n for n in increments
                          if file_naming.parse(n).end_time <= time]

        names.extend(increments)
        return names
示例#34
0
    def get_filenames(self, time=None):
        """
        Return ordered list of filenames in set, up to a provided time

        The full signature (if present) leads the list; incrementals
        follow, filtered by end_time when *time* is given.
        """
        result = []
        if self.fullsig:
            result.append(self.fullsig)

        for name in self.inclist:
            # only filter when a cutoff time was supplied
            if time and file_naming.parse(name).end_time > time:
                continue
            result.append(name)

        return result
示例#35
0
    def add_filename(self, filename, pr=None):
        u"""
        Add a filename to given set.  Return true if it fits.

        The filename will match the given set if it has the right
        times and is of the right type.  The information will be set
        from the first filename given.

        @param filename: name of file to add
        @type filename: string

        @param pr: pre-computed result of file_naming.parse(filename)
        @type pr: Optional[ParseResults]
        """
        if not pr:
            pr = file_naming.parse(filename)
        # Only backup volumes/manifests ("full" or "inc") belong in a set.
        if not pr or not (pr.type == u"full" or pr.type == u"inc"):
            return False

        if not self.info_set:
            # First file seen: adopt its type/times as the set's identity.
            self.set_info(pr)
        else:
            # Later files must match the set's identity exactly.
            if pr.type != self.type:
                return False
            if pr.time != self.time:
                return False
            if (pr.start_time != self.start_time
                    or pr.end_time != self.end_time):
                return False
            if bool(pr.encrypted) != bool(self.encrypted):
                # NOTE: an encryption mismatch does not reject the file;
                # for a partial set the encrypted flag is upgraded instead
                # (other mismatches fall through and the file is accepted).
                if self.partial and pr.encrypted:
                    self.encrypted = pr.encrypted

        if pr.manifest:
            self.set_manifest(filename)
        else:
            # Non-manifest files are numbered volumes; duplicate volume
            # numbers in one set are fatal.
            assert pr.volume_number is not None
            assert pr.volume_number not in self.volume_name_dict, \
                u"""Volume %d is already in the volume list as "%s".
                "%s" has the same volume number.
                Please check your command line and retry.""" % (
                    pr.volume_number,
                    util.fsdecode(self.volume_name_dict[pr.volume_number]),
                    util.fsdecode(filename)
                )
            self.volume_name_dict[pr.volume_number] = filename

        return True
示例#36
0
    def __init__(self, base, index=(), parseresults=None):
        """
        DupPath initializer

        The actual filename (no directory) must be the single element
        of the index, unless parseresults is given.

        """
        if not parseresults:
            # derive the parse results from the filename itself
            assert len(index) == 1
            self.pr = file_naming.parse(index[0])
            assert self.pr, "must be a recognizable duplicity file"
        else:
            self.pr = parseresults

        Path.__init__(self, base, index)
示例#37
0
 def to_remote(self):
     """
     We have written the last checkpoint, now encrypt or compress
     and send a copy of it to the remote for final storage.

     The remote name (self.remname) decides the transformation:
     compressed -> gzip, encrypted -> GPG, otherwise a plain copy.
     NOTE: sys.maxint makes this Python 2 only.
     """
     pr = file_naming.parse(self.remname)
     src = self.dirpath.append(self.partname)
     tgt = self.dirpath.append(self.remname)
     src_iter = SrcIter(src)
     if pr.compressed:
         gpg.GzipWriteFile(src_iter, tgt.name, size = sys.maxint)
     elif pr.encrypted:
         gpg.GPGWriteFile(src_iter, tgt.name, globals.gpg_profile, size = sys.maxint)
     else:
         # NOTE(review): shell command built from file names; a name
         # containing a double quote would break/alter the command.
         # Prefer shutil.copyfile or subprocess with a list argv.
         os.system("cp -p \"%s\" \"%s\"" % (src.name, tgt.name))
     globals.backend.move(tgt) #@UndefinedVariable
示例#38
0
def restore_get_enc_fileobj(backend, filename, volume_info):
    """Return plaintext fileobj from encrypted filename on backend """
    parseresults = file_naming.parse(filename)
    # reuse a cached temp path for this filename if one already exists
    try:
        tdp = filename_tdp[filename]
    except KeyError:
        tdp = dup_temp.new_tempduppath(parseresults)
        filename_tdp[filename] = tdp

    backend.get(filename, tdp)
    if not restore_check_hash(volume_info, tdp):
        return None
    fileobj = tdp.filtered_open_with_delete("rb")
    if parseresults.encrypted and globals.gpg_profile.sign_key:
        restore_add_sig_check(fileobj)
    return fileobj
示例#39
0
 def to_remote(self):
     """
     We have written the last checkpoint, now encrypt or compress
     and send a copy of it to the remote for final storage.

     The remote name decides the transformation: compressed -> gzip,
     encrypted -> GPG, otherwise a plain file copy.
     """
     pr = file_naming.parse(self.remname)
     partial = self.dirpath.append(self.partname)
     remote = self.dirpath.append(self.remname)
     reader = SrcIter(partial)
     if pr.compressed:
         gpg.GzipWriteFile(reader, remote.name, size=sys.maxsize)
     elif pr.encrypted:
         gpg.GPGWriteFile(reader, remote.name, globals.gpg_profile, size=sys.maxsize)
     else:
         shutil.copyfile(partial.name, remote.name)
     globals.backend.move(remote)  # @UndefinedVariable
示例#40
0
 def add_to_sets(filename):
     u"""
     Try adding filename to existing sets, or make new one

     Closure: reads ``sets`` and ``self`` from the enclosing scope.
     """
     pr = file_naming.parse(filename)
     for set in sets:  # pylint: disable=redefined-builtin
         if set.add_filename(filename, pr):
             log.Debug(_(u"File %s is part of known set") % (util.fsdecode(filename),))
             break
     else:
         # for/else: no existing set accepted the file, so start a new set
         log.Debug(_(u"File %s is not part of a known set; creating new set") % (util.fsdecode(filename),))
         new_set = BackupSet(self.backend, self.action)
         if new_set.add_filename(filename, pr):
             sets.append(new_set)
         else:
             log.Debug(_(u"Ignoring file (rejected by backup set) '%s'") % util.fsdecode(filename))
示例#41
0
    def __init__(self, base, index = (), parseresults = None):
        """
        DupPath initializer

        The actual filename (no directory) must be the single element
        of the index, unless parseresults is given.

        @param base: base directory path
        @param index: tuple whose single element is the filename
        @param parseresults: pre-computed file_naming.parse() result;
            when omitted, the filename in index is parsed instead
        """
        if parseresults:
            self.pr = parseresults
        else:
            assert len(index) == 1
            self.pr = file_naming.parse(index[0])
            assert self.pr, "must be a recognizable duplicity file"

        Path.__init__(self, base, index)
示例#42
0
 def get_filenames(self):
     """
     Return sorted list of (remote) filenames of files in set

     Volume filenames are ordered by volume number; the remote
     manifest name is appended last unless it is only a local
     partial manifest.
     """
     assert self.info_set
     # sorted() works on both a Python 2 list and a Python 3 dict view;
     # the old keys().sort() breaks under Python 3 (views have no sort()).
     volume_num_list = sorted(self.volume_name_dict.keys())
     volume_filenames = [self.volume_name_dict[x] for x in volume_num_list]
     if self.remote_manifest_name:
         # For convenience of implementation for restart support, we treat
         # local partial manifests as this set's remote manifest.  But
         # when specifically asked for a list of remote filenames, we
         # should not include it.
         pr = file_naming.parse(self.remote_manifest_name)
         if pr and not pr.partial:
             volume_filenames.append(self.remote_manifest_name)
     return volume_filenames
示例#43
0
    def set_manifest(self, remote_filename):
        """
        Add local and remote manifest filenames to backup set

        Records *remote_filename* and scans the archive directory for a
        matching local manifest (same type and times).
        """
        assert not self.remote_manifest_name, (self.remote_manifest_name,
                                               remote_filename)
        self.remote_manifest_name = remote_filename

        for local_filename in globals.archive_dir.listdir():
            pr = file_naming.parse(local_filename)
            if not pr or not pr.manifest:
                continue
            if (pr.type == self.type and pr.time == self.time and
                    pr.start_time == self.start_time and
                    pr.end_time == self.end_time):
                self.local_manifest_path = \
                    globals.archive_dir.append(local_filename)
                break
示例#44
0
 def get_filenames(self):
     """
     Return sorted list of (remote) filenames of files in set

     Volume filenames are ordered by volume number; the remote
     manifest name is appended last unless it parses as a local
     partial manifest.
     """
     assert self.info_set
     # sorted() works on both a Python 2 list and a Python 3 dict view;
     # the old keys().sort() breaks under Python 3 (views have no sort()).
     volume_num_list = sorted(self.volume_name_dict.keys())
     volume_filenames = [self.volume_name_dict[x] for x in volume_num_list]
     if self.remote_manifest_name:
         # For convenience of implementation for restart support, we treat
         # local partial manifests as this set's remote manifest.  But
         # when specifically asked for a list of remote filenames, we
         # should not include it.
         pr = file_naming.parse(self.remote_manifest_name)
         if not pr or not pr.partial:
             volume_filenames.append(self.remote_manifest_name)
     return volume_filenames
示例#45
0
    def set_manifest(self, remote_filename):
        """
        Add local and remote manifest filenames to backup set

        Records *remote_filename* and scans the archive directory for
        a local manifest with matching type and times.
        """
        assert not self.remote_manifest_name, (self.remote_manifest_name,
                                               remote_filename)
        self.remote_manifest_name = remote_filename

        for local_filename in globals.archive_dir.listdir():
            pr = file_naming.parse(local_filename)
            # a local manifest matches when type and all times agree
            if (pr and pr.manifest
                and pr.type == self.type
                and pr.time == self.time
                and pr.start_time == self.start_time
                and pr.end_time == self.end_time):
                self.local_manifest_path = \
                              globals.archive_dir.append(local_filename)
                break
示例#46
0
 def delete(self):
     """
     Remove all files in set, both local and remote

     Deletion is best-effort: failures are logged at debug level and
     otherwise ignored so a partially-missing set can still be cleaned.
     """
     # delete remote files in reverse order (volumes before manifest)
     rfn = self.get_filenames()
     rfn.reverse()
     try:
         self.backend.delete(rfn)
     except Exception:
         # deliberate swallow: remote file may already be gone
         log.Debug("BackupSet.delete: missing %s" % rfn)
         pass
     # remove any cached local files whose times match this set
     for lfn in globals.archive_dir.listdir():
         pr = file_naming.parse(lfn)
         if pr and pr.time == self.time and pr.start_time == self.start_time and pr.end_time == self.end_time:
             try:
                 globals.archive_dir.append(lfn).delete()
             except Exception:
                 # deliberate swallow: local file may already be gone
                 log.Debug("BackupSet.delete: missing %s" % lfn)
                 pass
示例#47
0
 def to_remote(self):
     """
     We have written the last checkpoint, now encrypt or compress
     and send a copy of it to the remote for final storage.

     NOTE: sys.maxint and the comma except-clause make this
     Python 2 only.
     """
     pr = file_naming.parse(self.remname)
     src = self.dirpath.append(self.partname)
     tgt = self.dirpath.append(self.remname)
     src_iter = SrcIter(src)
     if pr.compressed:
         gpg.GzipWriteFile(src_iter, tgt.name, size = sys.maxint)
     elif pr.encrypted:
         gpg.GPGWriteFile(src_iter, tgt.name, globals.gpg_profile, size = sys.maxint)
     else:
         # NOTE(review): shell command built from file names; a name
         # containing a double quote would break/alter the command.
         os.system("cp -p \"%s\" \"%s\"" % (src.name, tgt.name))
     globals.backend.put(tgt) #@UndefinedVariable
     try:
         # local copy of the remote file is no longer needed
         util.ignore_missing(os.unlink, tgt.name)
     except Exception, e:
         log.Warn(_("Unable to delete %s: %s" % (tgt.name, str(e))))
示例#48
0
 def to_remote(self):
     u"""
     We have written the last checkpoint, now encrypt or compress
     and send a copy of it to the remote for final storage.

     The remote name decides the transformation: compressed -> gzip,
     encrypted -> GPG, otherwise a plain file copy.
     """
     log.Debug(u"TO_REMOTE")
     pr = file_naming.parse(self.remname)
     partial = self.dirpath.append(self.partname)
     remote = self.dirpath.append(self.remname)
     reader = SrcIter(partial)
     if pr.compressed:
         gpg.GzipWriteFile(reader, remote.name, size=sys.maxsize)
     elif pr.encrypted:
         gpg.GPGWriteFile(reader, remote.name, config.gpg_profile, size=sys.maxsize)
     else:
         shutil.copyfile(partial.name, remote.name)
     config.backend.move(remote)
示例#49
0
    def set_manifest(self, remote_filename):
        """
        Add local and remote manifest filenames to backup set

        Records *remote_filename*, then scans the cache for a local
        manifest with matching type and times.  The cache scan is
        skipped for the collection-status action.
        """
        assert not self.remote_manifest_name, (self.remote_manifest_name,
                                               remote_filename)
        self.remote_manifest_name = remote_filename

        # collection-status never needs the local manifest path
        if self.action not in ["collection-status"]:
            local_filename_list = globals.archive_dir.listdir()
        else:
            local_filename_list = []
        for local_filename in local_filename_list:
            pr = file_naming.parse(local_filename)
            # a local manifest matches when type and all times agree
            if (pr and pr.manifest and pr.type == self.type and
                    pr.time == self.time and
                    pr.start_time == self.start_time and
                    pr.end_time == self.end_time):
                self.local_manifest_path = \
                    globals.archive_dir.append(local_filename)
                break
示例#50
0
    def set_manifest(self, remote_filename):
        """
        Add local and remote manifest filenames to backup set

        Records *remote_filename*, then scans the cache for a local
        manifest with matching type and times.  The cache scan is
        skipped for the collection-status action.
        """
        assert not self.remote_manifest_name, (self.remote_manifest_name,
                                               remote_filename)
        self.remote_manifest_name = remote_filename

        # collection-status never needs the local manifest path
        if self.action not in ["collection-status"]:
            local_filename_list = globals.archive_dir.listdir()
        else:
            local_filename_list = []
        for local_filename in local_filename_list:
            pr = file_naming.parse(local_filename)
            # a local manifest matches when type and all times agree
            if (pr and pr.manifest and pr.type == self.type
                    and pr.time == self.time
                    and pr.start_time == self.start_time
                    and pr.end_time == self.end_time):
                self.local_manifest_path = \
                    globals.archive_dir.append(local_filename)
                break
示例#51
0
 def to_remote(self):
     """
     We have written the last checkpoint, now encrypt or compress
     and send a copy of it to the remote for final storage.

     NOTE: sys.maxint and the comma except-clause make this
     Python 2 only.
     """
     pr = file_naming.parse(self.remname)
     src = self.dirpath.append(self.partname)
     tgt = self.dirpath.append(self.remname)
     src_iter = SrcIter(src)
     if pr.compressed:
         gpg.GzipWriteFile(src_iter, tgt.name, size=sys.maxint)
     elif pr.encrypted:
         gpg.GPGWriteFile(src_iter,
                          tgt.name,
                          globals.gpg_profile,
                          size=sys.maxint)
     else:
         # NOTE(review): shell command built from file names; a name
         # containing a double quote would break/alter the command.
         os.system("cp -p \"%s\" \"%s\"" % (src.name, tgt.name))
     globals.backend.put(tgt)  #@UndefinedVariable
     try:
         # local copy of the remote file is no longer needed
         util.ignore_missing(os.unlink, tgt.name)
     except Exception, e:
         log.Warn(_("Unable to delete %s: %s" % (tgt.name, str(e))))
示例#52
0
 def by_start_time(a, b):
     """Compare two filenames by the start_time encoded in each name."""
     start_a = int(file_naming.parse(a).start_time)
     start_b = int(file_naming.parse(b).start_time)
     return start_a - start_b