Example #1
    def _delete(self, filename):
        # Store an indication of whether any delete succeeded
        passed = False

        stores = self._eligible_stores(filename)

        # since the backend operations will be retried, we can't
        # simply try to get from the store, if not found, move to the
        # next store (since each failure will be retried n times
        # before finally giving up).  So we need to get the list first
        # before we try to delete
        # ENHANCEME: maintain a cached list for each store
        for s in stores:
            flist = s.list()
            if filename in flist:
                s._do_delete(filename)
                passed = True
                # In stripe mode, only one store will have the file
                if self.__mode == 'stripe':
                    return
            else:
                log.Log(
                    _("MultiBackend: failed to delete %s from %s") %
                    (filename, s.backend.parsed_url.url_string), log.INFO)
        if not passed:
            log.Log(
                _("MultiBackend: failed to delete %s. Tried all backing stores and none succeeded"
                  ) % (filename), log.ERROR)
Example #2
    def __init__(self, parsed_url):
        u"""
        Authorize against the B2 API and set up needed variables
        """
        duplicity.backend.Backend.__init__(self, parsed_url)

        global DownloadDestLocalFile, FileVersionInfoFactory
        try:  # try to import the new b2sdk if available
            from b2sdk.api import B2Api
            from b2sdk.account_info import InMemoryAccountInfo
            from b2sdk.download_dest import DownloadDestLocalFile
            from b2sdk.exception import NonExistentBucket
            from b2sdk.file_version import FileVersionInfoFactory
        except ImportError as e:
            if u'b2sdk' in getattr(e, u'name', u'b2sdk'):
                raise
            try:  # fall back to import the old b2 client
                from b2.api import B2Api
                from b2.account_info import InMemoryAccountInfo
                from b2.download_dest import DownloadDestLocalFile
                from b2.exception import NonExistentBucket
                from b2.file_version import FileVersionInfoFactory
            except ImportError as e:
                if u'b2' in getattr(e, u'name', u'b2'):
                    raise
                raise BackendException(
                    u'B2 backend requires B2 Python SDK (pip install b2sdk)')

        self.service = B2Api(InMemoryAccountInfo())
        self.parsed_url.hostname = u'B2'

        account_id = parsed_url.username
        account_key = self.get_password()

        self.url_parts = [
            x for x in parsed_url.path.replace(u"@", u"/").split(u'/')
            if x != u''
        ]
        if self.url_parts:
            self.username = self.url_parts.pop(0)
            bucket_name = self.url_parts.pop(0)
        else:
            raise BackendException(u"B2 requires a bucket name")
        self.path = u"".join([url_part + u"/" for url_part in self.url_parts])
        self.service.authorize_account(u'production', account_id, account_key)

        log.Log(
            u"B2 Backend (path= %s, bucket= %s, minimum_part_size= %s)" %
            (self.path, bucket_name,
             self.service.account_info.get_minimum_part_size()), log.INFO)
        try:
            self.bucket = self.service.get_bucket_by_name(bucket_name)
            log.Log(u"Bucket found", log.INFO)
        except NonExistentBucket:
            try:
                log.Log(u"Bucket not found, creating one", log.INFO)
                self.bucket = self.service.create_bucket(
                    bucket_name, u'allPrivate')
            except Exception:
                raise FatalBackendException(u"Bucket cannot be created")
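The nested try/except above is a standard SDK fallback: prefer the new b2sdk package, fall back to the legacy b2 client, and re-raise when the import failure points at a broken installation rather than an absent SDK. A standalone sketch of that idea, with placeholder module names ("newsdk" and "oldsdk" are hypothetical, not real packages):

# Sketch of the fallback-import pattern; "newsdk" and "oldsdk" are
# hypothetical stand-ins for b2sdk and b2.
try:
    from newsdk import Client  # preferred, current SDK
except ImportError as e:
    failed = getattr(e, 'name', None)  # the module that failed to import
    if failed is not None and failed != 'newsdk':
        # newsdk is installed but one of its dependencies is broken;
        # surface the real error instead of silently falling back
        raise
    try:
        from oldsdk import Client  # legacy fallback
    except ImportError:
        raise RuntimeError('install an SDK first: pip install newsdk')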
Example #3
def addtotree(root, path, perm, size, mtime, uid, gid, type):
    if len(path) == 1:
        c = path[0]
        ec = pathencode(c)
        e = None
        for f in root.getchildren():
            if f.tag == ec:
                e = f
                break
        if e is None:
            e = SubElement(root, ec)
            log.Log("add " + c + "(" + ec + ") to " + str(root), 5)
        else:
            log.Log("found " + c + "(" + ec + ") in " + str(root), 7)
        e.set("perm", perm)
        e.set("size", -1)
        e.set("mtime", mtime)
        e.set("uid", uid)
        e.set("gid", gid)
        e.set("type", type)
        e.set("name", c)
    else:
        c = path[0]
        ec = pathencode(c)
        for f in root.getchildren():
            if f.tag == ec:
                log.Log(
                    "adding " + path[1] + " to " + c + "(" + ec + ") in " +
                    str(root), 7)
                addtotree(f, path[1:], perm, size, mtime, uid, gid, type)
                return
        f = SubElement(root, ec)
        log.Log("new " + c + "(" + ec + ") in " + str(root), 5)
        addtotree(f, path[1:], perm, size, mtime, uid, gid, type)
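addtotree builds up an ElementTree mirror of the backup's directory tree, creating one element per path component and recursing into existing children. A toy, self-contained version of the same walk (pathencode here is a trivial stand-in for the project's encoder):

from xml.etree.ElementTree import Element, SubElement, tostring

def pathencode(name):
    # stand-in: the real pathencode must yield a valid XML tag name
    return "n_" + name.replace(".", "_")

def add_path(root, path):
    # create or reuse one element per path component, like addtotree
    node = root
    for component in path:
        tag = pathencode(component)
        child = node.find(tag)
        if child is None:
            child = SubElement(node, tag)
            child.set("name", component)
        node = child
    return node

root = Element("root")
add_path(root, ["home", "user", "notes.txt"])
add_path(root, ["home", "user", "todo.txt"])
print(tostring(root).decode())  # nested <n_home><n_user>... structure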
Example #4
    def get_query_params(parsed_url):
        # Reparse so the query string is available
        reparsed_url = urlparse.urlparse(parsed_url.geturl())
        if len(reparsed_url.query) == 0:
            return dict()
        try:
            queryMultiDict = urlparse.parse_qs(reparsed_url.query,
                                               strict_parsing=True)
        except ValueError as e:
            log.Log(
                _("MultiBackend: Could not parse query string %s: %s ") %
                (reparsed_url.query, e), log.ERROR)
            raise BackendException('Could not parse query string')
        queryDict = dict()
        # Convert the multi-dict to a single dictionary
        # while checking to make sure that no unrecognized values are found
        for name, valueList in queryMultiDict.items():
            if len(valueList) != 1:
                log.Log(
                    _("MultiBackend: Invalid query string %s: more than one value for %s"
                      ) % (reparsed_url.query, name), log.ERROR)
                raise BackendException('Invalid query string')
            if name not in MultiBackend.__knownQueryParameters:
                log.Log(
                    _("MultiBackend: Invalid query string %s: unknown parameter %s"
                      ) % (reparsed_url.query, name), log.ERROR)
                raise BackendException('Invalid query string')

            queryDict[name] = valueList[0]
        return queryDict
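In miniature, this is what get_query_params does with a multi:// URL: re-parse, run parse_qs, then flatten and validate. The known-parameter set below is illustrative (the real set lives in MultiBackend.__knownQueryParameters):

from urllib.parse import urlparse, parse_qs

known = {"mode", "onfail"}  # illustrative stand-in for the known parameters

url = urlparse("multi:///etc/duplicity/multi.json?mode=mirror&onfail=abort")
multi = parse_qs(url.query, strict_parsing=True)

# flatten the multi-dict, rejecting repeated or unknown parameters
params = {}
for name, values in multi.items():
    if len(values) != 1:
        raise ValueError("more than one value for %s" % name)
    if name not in known:
        raise ValueError("unknown parameter %s" % name)
    params[name] = values[0]
print(params)  # {'mode': 'mirror', 'onfail': 'abort'}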
Example #5
    def _list(self):
        """
        List files on remote server
        """
        log.Log("Listing files", 9)
        endpoint = 'b2_list_file_names'
        url = self.formatted_url(endpoint)
        params = {
            'bucketId': self.bucket_id,
            'maxFileCount': 1000,
        }
        try:
            resp = self.get_or_post(url, params)
        except urllib2.HTTPError:
            return []

        files = [x['fileName'].split('/')[-1] for x in resp['files']
                 if os.path.dirname(x['fileName']) == self.path]

        next_file = resp['nextFileName']
        while next_file:
            log.Log("There are still files, getting next list", 9)
            params['startFileName'] = next_file
            try:
                resp = self.get_or_post(url, params)
            except urllib2.HTTPError:
                return files

            files += [x['fileName'].split('/')[-1] for x in resp['files']
                      if os.path.dirname(x['fileName']) == self.path]
            next_file = resp['nextFileName']

        return files
Example #6
    def _put(self, source_path, remote_filename):
        first = self.__write_cursor
        while True:
            store = self.__stores[self.__write_cursor]
            try:
                next_store = self.__write_cursor + 1
                if next_store > len(self.__stores) - 1:
                    next_store = 0
                log.Log(_("MultiBackend: _put: write to store #%s (%s)")
                        % (self.__write_cursor, store.backend.parsed_url.url_string),
                        log.DEBUG)
                store.put(source_path, remote_filename)
                self.__write_cursor = next_store
                break
            except Exception as e:
                log.Log(_("MultiBackend: failed to write to store #%s (%s), moving to #%s, Exception: %s")
                        % (self.__write_cursor, store.backend.parsed_url.url_string, next_store, e),
                        log.INFO)
                self.__write_cursor = next_store

                if (self.__write_cursor == first):
                    log.Log(_("MultiBackend: failed to write %s. Tried all backing stores and none succeeded")
                            % (source_path),
                            log.ERROR)
                    raise BackendException("failed to write")
Example #7
 def getattr(self, path):
     log.Log("getattr " + path, 5)
     st = DuplicityStat()
     p = path[1:].split(os.path.sep)
     if path == '/':
         st.st_mode = stat.S_IFDIR | 0o755
         st.st_nlink = 1 + len(self.dircache.keys())
         return st
     if len(p) == 1:
         if p[0] not in self.dircache:
             return -errno.ENOENT
         st.st_mode = stat.S_IFDIR | 0o755
         st.st_nlink = 2
         return st
     e = findpath(self.fillcache(p[0]), p[1:])
     if e is None:
         return -errno.ENOENT
     # perm's last token is an octal triple such as '755'; render each
     # digit as three binary bits and reparse the result in base 2
     mode = int(
         (3 * '{:03b}').format(*[int(x) for x in e.get("perm").split()[-1]]),
         base=2)
     if e.get("type") == 'dir':
         st.st_mode = stat.S_IFDIR | mode
         e.set("size", 0)
     else:
         st.st_mode = stat.S_IFREG | mode
     # size not recorded in the signature; read it from the file archive
     if int(self.filemode) == 0 and e.get("size") < 0:
         ds = [d[0] for d in self.date_types if date2str(d[0]) in p[0]]
         np = os.path.join(*p[1:])
         files = restore_get_patched_rop_iter(self.col_stats,
                                              date2num(ds[0]), tuple(p[1:]))
         for x in files[0]:
             lp = os.path.join(*[
                 y for y in (np + os.path.sep +
                             x.get_relative_path()).split(os.path.sep)
                 if y != '.'
             ])
             log.Log("looking at %s for %s" % (lp, np), 5)
             le = findpath(self.dircache[p[0]], lp.split(os.path.sep))
             if le is None:
                 log.Log("not found in dircache: " + str(le), 7)
                 continue
             if le.get("size") < 0:
                 le.set("size", x.getsize())
             if lp == np:
                 log.Log("found " + np, 5)
                 break
         for x in files[1]:
             x.close()
     elif e.get("size") < 0:
         e.set("size", 0)
     st.st_size = e.get("size")
     st.st_uid = e.get("uid")
     st.st_gid = e.get("gid")
     st.st_mtime = e.get("mtime")
     st.st_nlink = 1 + len(e.getchildren())
     return st
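The mode computation is the densest line in getattr: it takes the last token of the stored perm string (an octal triple such as '755'), renders each digit as three binary bits, and reparses the concatenation in base 2; the zero-padding matters so digits below 4 still contribute three bits. A worked example:

perm = "640"  # rw-r----- as an octal digit triple
bits = (3 * '{:03b}').format(*[int(x) for x in perm])
print(bits)                       # '110100000'
assert int(bits, base=2) == 0o640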
Example #8
    def __init__(self, parsed_url):
        duplicity.backend.Backend.__init__(self, parsed_url)

        # Init each of the wrapped stores
        #
        # config file is a json formatted collection of values, one for
        # each backend.  We will 'stripe' data across all the given stores:
        #
        #  'url'  - the URL used for the backend store
        #  'env' - an optional list of environment variable values to set
        #      during the initialization of the backend
        #
        # Example:
        #
        # [
        #  {
        #   "url": "abackend://[email protected]/backup",
        #   "env": [
        #     {
        #      "name" : "MYENV",
        #      "value" : "xyz"
        #     },
        #     {
        #      "name" : "FOO",
        #      "value" : "bar"
        #     }
        #    ]
        #  },
        #  {
        #   "url": "file:///path/to/dir"
        #  }
        # ]

        try:
            with open(parsed_url.path) as f:
                configs = json.load(f)
        except IOError as e:
            log.Log(_("MultiBackend: Could not load config file %s: %s ")
                    % (parsed_url.path, e),
                    log.ERROR)
            raise BackendException('Could not load config file')

        for config in configs:
            url = config['url']
            log.Log(_("MultiBackend: use store %s")
                    % (url),
                    log.INFO)
            if 'env' in config:
                for env in config['env']:
                    log.Log(_("MultiBackend: set env %s = %s")
                            % (env['name'], env['value']),
                            log.INFO)
                    os.environ[env['name']] = env['value']

            store = duplicity.backend.get_backend(url)
            self.__stores.append(store)
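Putting the documented format to work, a minimal two-store config and the constructor's handling of it look roughly like this (paths and URLs are made up):

import json
import os

# hypothetical config matching the documented format
config = [
    {"url": "file:///mnt/backup-a",
     "env": [{"name": "EXAMPLE_VAR", "value": "xyz"}]},
    {"url": "file:///mnt/backup-b"},
]
with open("/tmp/multi.json", "w") as f:
    json.dump(config, f)

# what __init__ does with each entry, in miniature
with open("/tmp/multi.json") as f:
    for entry in json.load(f):
        for env in entry.get("env", []):
            os.environ[env["name"]] = env["value"]
        print("would wrap backend for", entry["url"])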
Example #9
 def fillcache(self, p):
     if self.dircache[p] is None:
         log.Log("filling cache "+p, 5)
         for s in xrange(0, len(self.date_types)):
             if date2str(self.date_types[s][0]) in p:
                 log.Log("filling cache for "+str(s)+" "+p, 5)
                 t = date2num(self.date_types[s][0])
                 self.dircache[p] = getfiletree(
                     p,
                     self.col_stats.get_signature_chain_at_time(t).get_fileobjs(t))
                 break
     return self.dircache[p]
Example #10
 def _list(self):
     lists = []
     for s in self.__stores:
         flist = s.list()
         log.Log(
             _("MultiBackend: list from %s: %s") %
             (s.backend.parsed_url.url_string, flist), log.DEBUG)
         lists.append(flist)
     # combine the lists into a single flat list w/o duplicates via set:
     result = list({item for sublist in lists for item in sublist})
     log.Log(_("MultiBackend: combined list: %s") % (result), log.DEBUG)
     return result
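The closing set comprehension flattens the per-store lists and deduplicates in one pass, which matters in mirror mode where every store reports the same filenames. In miniature:

lists = [["a", "b"], ["b", "c"]]  # two stores sharing file "b"
result = list({item for sublist in lists for item in sublist})
assert sorted(result) == ["a", "b", "c"]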
Example #11
    def _put(self, source_path, remote_filename):
        # Store an indication of whether any of these passed
        passed = False

        # Eligible stores for this action
        stores = self._eligible_stores(remote_filename)

        # Mirror mode always starts at zero
        if self.__mode == 'mirror':
            self.__write_cursor = 0

        first = self.__write_cursor
        while True:
            store = stores[self.__write_cursor]
            try:
                next_store = self.__write_cursor + 1
                if next_store > len(stores) - 1:
                    next_store = 0
                log.Log(
                    _("MultiBackend: _put: write to store #%s (%s)") %
                    (self.__write_cursor, store.backend.parsed_url.url_string),
                    log.DEBUG)
                store.put(source_path, remote_filename)
                passed = True
                self.__write_cursor = next_store
                # No matter what, if we loop around, break this loop
                if next_store == 0:
                    break
                # If in stripe mode, don't continue to the next
                if self.__mode == 'stripe':
                    break
            except Exception as e:
                log.Log(
                    _("MultiBackend: failed to write to store #%s (%s), moving to #%s, Exception: %s"
                      ) % (self.__write_cursor,
                           store.backend.parsed_url.url_string, next_store, e),
                    log.INFO)
                self.__write_cursor = next_store

                # If we consider write failure as abort, abort
                if self.__onfail_mode == 'abort':
                    log.Log(
                        _("MultiBackend: failed to write %s. Aborting process."
                          ) % (source_path), log.ERROR)
                    raise BackendException("failed to write")

                # If we've looped around, and none of them passed, fail
                if (self.__write_cursor == first) and not passed:
                    log.Log(
                        _("MultiBackend: failed to write %s. Tried all backing stores and none succeeded"
                          ) % (source_path), log.ERROR)
                    raise BackendException("failed to write")
Example #12
    def __init__(self, parsed_url):
        u"""
        Authorize against the B2 API and set up needed variables
        """
        duplicity.backend.Backend.__init__(self, parsed_url)

        # Import B2 API
        try:
            global b2
            import b2
            global b2sdk
            import b2sdk
            import b2.api
            import b2.account_info
            import b2.download_dest
            import b2.file_version
        except ImportError:
            raise BackendException(
                u'B2 backend requires B2 Python APIs (pip install b2)')

        self.service = b2.api.B2Api(b2.account_info.InMemoryAccountInfo())
        self.parsed_url.hostname = u'B2'

        account_id = parsed_url.username
        account_key = self.get_password()

        self.url_parts = [
            x for x in parsed_url.path.replace(u"@", u"/").split(u'/')
            if x != u''
        ]
        if self.url_parts:
            self.username = self.url_parts.pop(0)
            bucket_name = self.url_parts.pop(0)
        else:
            raise BackendException(u"B2 requires a bucket name")
        self.path = u"".join([url_part + u"/" for url_part in self.url_parts])
        self.service.authorize_account(u'production', account_id, account_key)

        log.Log(
            u"B2 Backend (path= %s, bucket= %s, minimum_part_size= %s)" %
            (self.path, bucket_name,
             self.service.account_info.get_minimum_part_size()), log.INFO)
        try:
            self.bucket = self.service.get_bucket_by_name(bucket_name)
            log.Log(u"Bucket found", log.INFO)
        except b2.exception.NonExistentBucket:
            try:
                log.Log(u"Bucket not found, creating one", log.INFO)
                self.bucket = self.service.create_bucket(
                    bucket_name, u'allPrivate')
            except Exception:
                raise FatalBackendException(u"Bucket cannot be created")
Example #13
def findpath(root, path):
    if len(path) == 0:
        return root
    c = path[0]
    ec = pathencode(c)
    s = root.find(ec)
    if s is None:
        log.Log("node "+c+"("+ec+") in "+str(root)+" not found", 5)
        return None
    if len(path) == 1:
        log.Log("node "+c+"("+ec+") in "+str(root)+" found", 5)
        return s
    log.Log("search "+path[1]+" in "+c+"("+ec+") in "+str(root), 5)
    return findpath(s, path[1:])
Example #14
    def schedule_task(self, fn, params):
        """
        Schedule the given task (callable, typically function) for
        execution. Pass the given parameters to the function when
        calling it. Returns a callable which can optionally be used
        to wait for the task to complete, either by returning its
        return value or by propagating any exception raised by said
        task.

        This method may block or return immediately, depending on the
        configuration and state of the scheduler.

        This method may also raise an exception in order to trigger
        failures early, if the task (if run synchronously) or a previous
        task has already failed.

        NOTE: Pay particular attention to the scope in which this is
        called. In particular, since it will execute concurrently in
        the background, assuming fn is a closure, any variables used
        must be properly bound in the closure. This is the reason for
        the convenience feature of being able to give parameters to
        the call, to avoid having to wrap the call itself in a
        function in order to "fixate" variables in, for example, an
        enclosing loop.
        """
        assert fn is not None

        # Note: It is on purpose that we keep track of concurrency in
        # the front end and launch threads for each task, rather than
        # keep a pool of workers. The overhead is not relevant in the
        # situation this will be used, and it removes complexity in
        # terms of ensuring the scheduler is garbage collected/shut
        # down properly when no longer referenced/needed by calling
        # code.

        if self.__concurrency == 0:
            # special case this to not require any platform support for
            # threading at all
            log.Log("%s: %s" % (self.__class__.__name__,
                     _("running task synchronously (asynchronicity disabled)")),
                     log.DEBUG, log.InfoCode.synchronous_upload_begin)

            return self.__run_synchronously(fn, params)
        else:
            log.Log("%s: %s" % (self.__class__.__name__,
                     _("scheduling task for asynchronous execution")),
                    log.DEBUG,
                    log.InfoCode.asynchronous_upload_begin)

            return self.__run_asynchronously(fn, params)
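The NOTE in the docstring is the classic late-binding closure pitfall: closures created in a loop all see the loop variable's final value, which is exactly what passing params to schedule_task avoids. A runnable illustration of both sides:

# late binding: every lambda closes over the same `i`
late = [lambda: i for i in range(3)]
assert [f() for f in late] == [2, 2, 2]

# fixing the value at creation time, as schedule_task's params argument
# does for the scheduled fn
bound = [lambda i=i: i for i in range(3)]
assert [f() for f in bound] == [0, 1, 2]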
Example #15
 def __enter__(self):
     request = urllib2.Request(self.url, self.data, self.headers)
     self.file = urllib2.urlopen(request)
     log.Log(
         "Request of %s returned with status %s" %
         (self.url, self.file.code), 9)
     return self.file
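Only __enter__ appears here; for use in a with-statement the class presumably pairs it with an __exit__ that closes the response. A plausible counterpart, assuming self.file is the only resource to release (not the project's verified code):

 def __exit__(self, exc_type, exc_value, traceback):
     # close the response opened in __enter__; returning None lets any
     # exception from the with-block propagate
     self.file.close()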
Example #16
def import_backends():
    """
    Import files in the duplicity/backends directory where
    the filename ends in 'backend.py' and ignore the rest.

    @rtype: void
    @return: void
    """
    path = duplicity.backends.__path__[0]
    assert path.endswith("duplicity/backends"), duplicity.backends.__path__

    files = os.listdir(path)
    for fn in files:
        if fn.endswith("backend.py"):
            fn = fn[:-3]
            imp = "duplicity.backends.%s" % (fn, )
            # ignore gio as it is explicitly loaded in commandline.parse_cmdline_options()
            if fn == "giobackend": continue
            try:
                __import__(imp)
                res = "Succeeded"
                level = log.INFO
            except Exception:
                res = "Failed: " + str(sys.exc_info()[1])
                level = log.WARNING
            log.Log("Import of %s %s" % (imp, res), level)
        else:
            continue
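The discovery loop uses the low-level __import__ hook; written today the same scan is usually done with importlib. A hedged equivalent (directory and package names here are illustrative):

import importlib
import os

def import_plugins(path, package):
    # import every module under `path` whose name ends in 'backend.py',
    # mirroring the loop above with importlib instead of __import__
    for fn in sorted(os.listdir(path)):
        if not fn.endswith("backend.py"):
            continue
        name = "%s.%s" % (package, fn[:-3])
        try:
            importlib.import_module(name)
            print("Import of %s Succeeded" % name)
        except Exception as e:
            print("Import of %s Failed: %s" % (name, e))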
Example #17
    def _delete_list(self, filenames):
        # Store an indication of whether any delete succeeded
        passed = False

        stores = self.__stores

        # since the backend operations will be retried, we can't
        # simply try to get from the store, if not found, move to the
        # next store (since each failure will be retried n times
        # before finally giving up).  So we need to get the list first
        # before we try to delete
        # ENHANCEME: maintain a cached list for each store
        for s in stores:
            flist = s.list()
            cleaned = [f for f in filenames if f in flist]
            if not cleaned:
                continue
            if hasattr(s.backend, u'_delete_list'):
                s._do_delete_list(cleaned)
            elif hasattr(s.backend, u'_delete'):
                for filename in cleaned:
                    s._do_delete(filename)
            passed = True
            # In stripe mode, only one store will have the files
            if self.__mode == u'stripe':
                return
        if not passed:
            log.Log(
                _(u"MultiBackend: failed to delete %s. Tried all backing stores and none succeeded"
                  ) % (filenames), log.ERROR)
Example #18
 def _delete(self, filename):
     """
     Delete filename from remote server
     """
     log.Log("Delete: %s" % self.path + filename, log.INFO)
     file_version_info = self.file_info(self.path + filename)
     self.bucket.delete_file_version(file_version_info.id_, file_version_info.file_name)
Example #19
def import_backends():
    """
    Import files in the duplicity/backends directory where
    the filename ends in 'backend.py' and ignore the rest.

    @rtype: void
    @return: void
    """
    path = duplicity.backends.__path__[0]
    assert path.endswith("duplicity/backends"), duplicity.backends.__path__

    files = os.listdir(path)
    files.sort()
    for fn in files:
        if fn.endswith("backend.py"):
            fn = fn[:-3]
            imp = "duplicity.backends.%s" % (fn,)
            try:
                __import__(imp)
                res = "Succeeded"
            except Exception:
                res = "Failed: " + str(sys.exc_info()[1])
            log.Log(_("Import of %s %s") % (imp, res), log.INFO)
        else:
            continue
Example #20
def old_globbing_filelist_deprecation(opt):
    log.Log(_(
        "Warning: Option %s is pending deprecation and will be removed in a future release.\n"
        "--include-filelist and --exclude-filelist now accept globbing characters and should "
        "be used instead.") % opt,
            log.ERROR,
            force_print=True)
Example #21
 def _get(self, remote_filename, local_path):
     """
     Download remote_filename to local_path
     """
     log.Log("Get: %s -> %s" % (self.path + remote_filename, local_path.name), log.INFO)
     self.bucket.download_file_by_name(self.path + remote_filename,
                                       b2.download_dest.DownloadDestLocalFile(local_path.name))
Example #22
 def _query(self, filename):
     u"""
     Get size info of filename
     """
     log.Log(u"Query: %s" % self.path + util.fsdecode(filename), log.INFO)
     file_version_info = self.file_info(quote_plus(self.path + util.fsdecode(filename), u'/'))
     return {u'size': file_version_info.size
             if file_version_info is not None and file_version_info.size is not None else -1}
Example #23
 def _put(self, source_path, remote_filename):
     """
     Copy source_path to remote_filename
     """
     log.Log("Put: %s -> %s" % (source_path.name, self.path + remote_filename), log.INFO)
     self.bucket.upload_local_file(source_path.name, self.path + remote_filename,
                                   content_type='application/pgp-encrypted',
                                   progress_listener=B2ProgressListener())
Example #24
 def _query(self, filename):
     """
     Get size info of filename
     """
     log.Log("Query: %s" % self.path + filename, log.INFO)
     file_version_info = self.file_info(self.path + filename)
     return {'size': file_version_info.size
             if file_version_info is not None and file_version_info.size is not None else -1}
Example #25
 def _delete(self, filename):
     # since the backend operations will be retried, we can't
     # simply try to get from the store, if not found, move to the
     # next store (since each failure will be retried n times
     # before finally giving up).  So we need to get the list first
     # before we try to delete
     # ENHANCEME: maintain a cached list for each store
     for s in self.__stores:
         flist = s.list()
         if filename in flist:
             s._do_delete(filename)
             return
         log.Log(_("MultiBackend: failed to delete %s from %s")
                 % (filename, s.backend.parsed_url.url_string),
                 log.INFO)
     log.Log(_("MultiBackend: failed to delete %s. Tried all backing stores and none succeeded")
             % (filename),
             log.ERROR)
Example #26
 def _get(self, remote_filename, local_path):
     u"""
     Download remote_filename to local_path
     """
     log.Log(u"Get: %s -> %s" % (self.path + util.fsdecode(remote_filename),
                                 util.fsdecode(local_path.name)),
             log.INFO)
     self.bucket.download_file_by_name(quote_plus(self.path + util.fsdecode(remote_filename), u'/'),
                                       DownloadDestLocalFile(local_path.name))
Example #27
def stdin_deprecation(opt):
    # See https://bugs.launchpad.net/duplicity/+bug/1423367
    # In almost all Linux distros stdin is a file represented by /dev/stdin,
    # so --exclude-file=/dev/stdin will work as a substitute.
    log.Log(_("Warning: Option %s is pending deprecation and will be removed in a future release.\n"
              "On many GNU/Linux systems, stdin is represented by /dev/stdin and\n"
              "--include-filelist=/dev/stdin or --exclude-filelist=/dev/stdin could\n"
              "be used as a substitute.") % opt,
            log.ERROR, force_print=True)
Example #28
    def _query(self, filename):
        """
        Get size info of filename
        """
        log.Log("Querying file %s" % filename, 9)
        info = self.get_file_info(filename)
        if not info:
            return {'size': -1}

        return {'size': info['size']}
Example #29
 def _get(self, remote_filename, local_path):
     # since the backend operations will be retried, we can't
     # simply try to get from the store, if not found, move to the
     # next store (since each failure will be retried n times
     # before finally giving up).  So we need to get the list first
     # before we try to fetch
     # ENHANCEME: maintain a cached list for each store
     for s in self.__stores:
         flist = s.list()
         if remote_filename in flist:
             s.get(remote_filename, local_path)
             return
         log.Log(_("MultiBackend: failed to get %s to %s from %s")
                 % (remote_filename, local_path, s.backend.parsed_url.url_string),
                 log.INFO)
     log.Log(_("MultiBackend: failed to get %s. Tried all backing stores and none succeeded")
             % (remote_filename),
             log.ERROR)
     raise BackendException("failed to get")
Example #30
 def runduplicity(self):
     if self.url is None:
         return
     log.setup()
     log.setverbosity(int(self.debuglevel))
     if self.passphrasefd:
         self.passphrasefd = int(self.passphrasefd)
     if self.passwordfd:
         self.passwordfd = int(self.passwordfd)
     if self.url.find("file:/") != 0:
         get_backendpassphrase(self.passwordfd)
     opts = []
     for i in self.options:
         try:
             v = getattr(self, i.replace("-", ""))
             if v:
                 opts.append("--%s=%s" % (i, v))
         except AttributeError:
             pass
     for i in self.no_options:
         try:
             v = getattr(self, i.replace("-", ""))
             if v:
                 opts.append("--%s" % (i))
         except AttributeError:
             pass
     self.options = []
     parameter = ["list-current-files", "--ssh-askpass"] + opts + [self.url]
     log.Log("processing %s" % (" ".join(parameter)), 5)
     sys.argv = ["duplicity"] + parameter
     action = commandline.ProcessCommandLine(parameter)
     log.Log("running action %s" % (action), 5)
     globals.gpg_profile.passphrase = get_passphrase(self.passphrasefd)
     self.col_stats = collections.CollectionsStatus(
         globals.backend, globals.archive_dir,
         "collection-status").set_values()
     self.date_types = []
     for chain in self.col_stats.all_backup_chains:
         for s in chain.get_all_sets():
             self.date_types.append(
                 (datetime.fromtimestamp(s.get_time()), s.type))
     for s in self.date_types:
         self.dircache[date2str(s[0]) + '_' + s[1]] = None