Example #1
    def __init__(self, parsed_url):
        duplicity.backend.Backend.__init__(self, parsed_url)

        # we expect an error return, so go low-level and ignore it
        try:
            p = os.popen(u"ncftpls -v")
            fout = p.read()
            ret = p.close()
        except Exception:
            pass
        # the expected error is 8 in the high-byte and some output
        if ret != 0x0800 or not fout:
            log.FatalError(u"NcFTP not found:  Please install NcFTP version 3.1.9 or later",
                           log.ErrorCode.ftp_ncftp_missing)

        # version is the second word of the first line
        version = fout.split(u'\n')[0].split()[1]
        if version < u"3.1.9":
            log.FatalError(u"NcFTP too old:  Duplicity requires NcFTP version 3.1.9,"
                           u"3.2.1 or later.  Version 3.2.0 will not work properly.",
                           log.ErrorCode.ftp_ncftp_too_old)
        elif version == u"3.2.0":
            log.Warn(u"NcFTP (ncftpput) version 3.2.0 may fail with duplicity.\n"
                     u"see: http://www.ncftpd.com/ncftp/doc/changelog.html\n"
                     u"If you have trouble, please upgrade to 3.2.1 or later",
                     log.WarningCode.ftp_ncftp_v320)
        log.Notice(u"NcFTP version is %s" % version)

        self.parsed_url = parsed_url

        self.url_string = duplicity.backend.strip_auth_from_url(self.parsed_url)

        # strip ncftp+ prefix
        self.url_string = duplicity.backend.strip_prefix(self.url_string, u'ncftp')

        # This squelches the "file not found" result from ncftpls when
        # the ftp backend looks for a collection that does not exist.
        # version 3.2.2 has error code 5, 1280 is some legacy value
        self.popen_breaks[u'ncftpls'] = [5, 1280]

        # Use an explicit directory name.
        if self.url_string[-1] != u'/':
            self.url_string += u'/'

        self.password = self.get_password()

        if globals.ftp_connection == u'regular':
            self.conn_opt = u'-E'
        else:
            self.conn_opt = u'-F'

        self.tempfile, self.tempname = tempdir.default().mkstemp()
        os.write(self.tempfile, u"host %s\n" % self.parsed_url.hostname)
        os.write(self.tempfile, u"user %s\n" % self.parsed_url.username)
        os.write(self.tempfile, u"pass %s\n" % self.password)
        os.close(self.tempfile)
        self.flags = u"-f %s %s -t %s -o useCLNT=0,useHELP_SITE=0 " % \
            (self.tempname, self.conn_opt, globals.timeout)
        if parsed_url.port is not None and parsed_url.port != 21:
            self.flags += u" -P '%s'" % (parsed_url.port)
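A note on the ret != 0x0800 check above: os.popen(...).close() returns the child's wait status, or None when the command exits cleanly, and on POSIX the exit code sits in the high byte of that status, so ncftpls exiting with code 8 shows up as 0x0800. A minimal sketch of how such a status can be decoded; the helper name is made up for illustration and is not part of duplicity:

    import os

    def exit_code_from_popen_status(status):
        # os.popen(...).close() returns None for a clean exit (status 0);
        # otherwise it returns the wait status, exit code in the high byte.
        if status is None:
            return 0
        return (status & 0xff00) >> 8

    print(exit_code_from_popen_status(0x0800))  # -> 8, the error NcFTP is expected to return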
Example #2
    def get_and_set_token(self, email, password, hostname):
        """Acquire an Ubuntu One access token via OAuth with the Ubuntu SSO service.
        See https://one.ubuntu.com/developer/account_admin/auth/otherplatforms for details.
        """

        # Request new access token from the Ubuntu SSO service
        self.client.add_credentials(email, password)
        resp, content = self.client.request(
            'https://login.ubuntu.com/api/1.0/authentications?' +
            'ws.op=authenticate&token_name=Ubuntu%%20One%%20@%%20%s' %
            hostname)
        if resp.status != 200:
            log.FatalError(
                "Token request failed: Incorrect Ubuntu One credentials",
                log.ErrorCode.backend_permission_denied)
            self.client.clear_credentials()

        tokendata = loads(content)
        self.set_consumer(tokendata['consumer_key'],
                          tokendata['consumer_secret'])
        self.set_token(tokendata['token'], tokendata['token_secret'])

        # and finally tell Ubuntu One about the token
        resp, content = self.request(
            'https://one.ubuntu.com/oauth/sso-finished-so-get-tokens/')
        if resp.status != 200:
            log.FatalError("Ubuntu One token was not accepted: %s %s" %
                           (resp.status, resp.reason))

        return tokendata
Example #3
    def _put(self, source_path, remote_filename=None):
        """Transfer source_path to remote_filename"""
        if not remote_filename:
            remote_filename = source_path.get_filename()

        # WORKAROUND for acd_cli: cannot specify remote filename
        # Link tmp file to the desired remote filename locally and upload
        remote_path = urllib.unquote(self.parsed_url.path.replace('///', '/'))
        local_real_duplicity_file = os.path.join(
            os.path.dirname(source_path.name), remote_filename.rstrip())

        deleteFile = False
        if (source_path.name != local_real_duplicity_file):
            try:
                os.symlink(source_path.name, local_real_duplicity_file)
                deleteFile = True
            except IOError as e:
                log.FatalError("Unable to copy " + source_path.name + " to " +
                               local_real_duplicity_file)

        commandline = self.acd_cmd + " upload --force --overwrite '%s' '%s'" % \
            (local_real_duplicity_file, remote_path)

        try:
            l = self.subprocess_popen(commandline)
        finally:
            if (deleteFile):
                try:
                    os.remove(local_real_duplicity_file)
                except OSError as e:
                    log.FatalError("Unable to remove file %s" % e)
Example #4
    def check_manifests(self):
        """
        Make sure remote manifest is equal to local one
        """
        if not self.remote_manifest_name and not self.local_manifest_path:
            log.FatalError(_("Fatal Error: No manifests found for most recent backup"),
                           log.ErrorCode.no_manifests)
        assert self.remote_manifest_name, "if only one, should be remote"

        remote_manifest = self.get_remote_manifest()
        if self.local_manifest_path:
            local_manifest = self.get_local_manifest()
        if remote_manifest and self.local_manifest_path and local_manifest:
            if remote_manifest != local_manifest:
                log.FatalError(_("Fatal Error: Remote manifest does not match "
                                 "local one.  Either the remote backup set or "
                                 "the local archive directory has been corrupted."),
                               log.ErrorCode.mismatched_manifests)
        if not remote_manifest:
            if self.local_manifest_path:
                remote_manifest = local_manifest
            else:
                log.FatalError(_("Fatal Error: Neither remote nor local "
                                 "manifest is readable."),
                               log.ErrorCode.unreadable_manifests)
        remote_manifest.check_dirinfo()
Example #5
def process_local_dir(action, local_pathname):
    """Check local directory, set globals.local_path"""
    local_path = path.Path(path.Path(local_pathname).get_canonical())
    if action == "restore":
        if (local_path.exists()
                and not local_path.isemptydir()) and not globals.force:
            log.FatalError(
                _("Restore destination directory %s already "
                  "exists.\nWill not overwrite.") %
                (util.ufn(local_path.name), ),
                log.ErrorCode.restore_dir_exists)
    elif action == "verify":
        if not local_path.exists():
            log.FatalError(
                _("Verify directory %s does not exist") %
                (util.ufn(local_path.name), ),
                log.ErrorCode.verify_dir_doesnt_exist)
    else:
        assert action == "full" or action == "inc"
        if not local_path.exists():
            log.FatalError(
                _("Backup source directory %s does not exist.") %
                (util.ufn(local_path.name), ),
                log.ErrorCode.backup_dir_doesnt_exist)

    globals.local_path = local_path
Example #6
    def login(self):
        if not self.sess.is_linked():
            try:  # to login to the box
                self.sess.link()
            except rest.ErrorResponse as e:
                log.FatalError('dpbx Error: %s\n' % str(e), log.ErrorCode.dpbx_nologin)
            if not self.sess.is_linked():  # still not logged in
                log.FatalError("dpbx Cannot login: check your credentials",
                               log.ErrorCode.dpbx_nologin)
Example #7
        def wrapper(self, *args):
            if login_required and not self.sess.is_linked():
                log.FatalError("dpbx Cannot login: check your credentials",
                               log.ErrorCode.dpbx_nologin)
                return

            try:
                return f(self, *args)
            except TypeError as e:
                log_exception(e)
                log.FatalError('dpbx type error "%s"' % (e,), log.ErrorCode.backend_code_error)
Example #8
    def present_get_sf(self, filename, include):
        """Return selection function given by existence of a file in a directory"""
        assert include == 0 or include == 1

        def exclude_sel_func(path):
            # do not follow symbolic links when checking for file existence!
            # path.append creates a new path object, which in turn uses setdata
            # which in turn follows symbolic links...
            if path.issym():
                return None
            if path.append(filename).exists():
                return 0
            else:
                return None

        if include == 0:
            sel_func = exclude_sel_func
        else:
            log.FatalError(
                u"--include-if-present not implemented (would it make sense?).",
                log.ErrorCode.not_implemented)

        sel_func.exclude = not include
        sel_func.name = "Command-line %s filename: %s" % \
                        (include and "include-if-present" or "exclude-if-present", filename)
        return sel_func
Example #9
def set_sign_key(sign_key):
    """Set globals.sign_key assuming proper key given"""
    if not re.search("^(0x)?([0-9A-Fa-f]{8}|[0-9A-Fa-f]{16}|[0-9A-Fa-f]{40})$", sign_key):
        log.FatalError(_("Sign key should be an 8, 16 alt. 40 character hex string, like "
                         "'AA0E73D2'.\nReceived '%s' instead.") % (sign_key,),
                       log.ErrorCode.bad_sign_key)
    globals.gpg_profile.sign_key = sign_key
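For reference, the regular expression above accepts a key given as 8, 16, or 40 hexadecimal characters, with an optional 0x prefix. A small self-contained illustration (not part of duplicity):

    import re

    KEY_RE = re.compile("^(0x)?([0-9A-Fa-f]{8}|[0-9A-Fa-f]{16}|[0-9A-Fa-f]{40})$")

    assert KEY_RE.search("AA0E73D2")       # 8-character short key ID
    assert KEY_RE.search("0xAA0E73D2")     # same ID with the optional 0x prefix
    assert KEY_RE.search("A" * 40)         # 40-character full fingerprint
    assert not KEY_RE.search("AA0E73D")    # 7 characters: wrong length, rejected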
Example #10
 def add_filelist(o, s, filename, p):
     select_opts.append((util.fsdecode(s), util.fsdecode(filename)))
     try:
         select_files.append(io.open(filename, "rt", encoding="UTF-8"))
     except IOError:
         log.FatalError(_("Error opening file %s") % filename,
                        log.ErrorCode.cant_open_filelist)
Example #11
    def __init__(self, parsed_url):
        duplicity.backend.Backend.__init__(self, parsed_url)

        try:
            import pyrax
        except ImportError as e:
            raise BackendException("""\
Hubic backend requires the pyrax library available from Rackspace.
Exception: %s""" % str(e))

        # Inform Pyrax that we're talking to Hubic
        pyrax.set_setting(
            "identity_type",
            "duplicity.backends.pyrax_identity.hubic.HubicIdentity")

        CREDENTIALS_FILE = os.path.expanduser("~/.hubic_credentials")
        if os.path.exists(CREDENTIALS_FILE):
            try:
                pyrax.set_credential_file(CREDENTIALS_FILE)
            except Exception as e:
                log.FatalError(
                    "Connection failed, please check your credentials: %s %s" %
                    (e.__class__.__name__, util.uexc(e)),
                    log.ErrorCode.connection_failed)

        else:
            raise BackendException("No ~/.hubic_credentials file found.")

        container = parsed_url.path.lstrip('/')

        self.client_exc = pyrax.exceptions.ClientException
        self.nso_exc = pyrax.exceptions.NoSuchObject
        self.container = pyrax.cloudfiles.create_container(container)
Example #12
 def put(self, source_path, remote_filename=None):
     from boto.s3.connection import Location
     if globals.s3_european_buckets:
         if not globals.s3_use_new_style:
             log.FatalError("European bucket creation was requested, but not new-style "
                            "bucket addressing (--s3-use-new-style)",
                            log.ErrorCode.s3_bucket_not_style)
     #Network glitch may prevent first few attempts of creating/looking up a bucket
     for n in range(1, globals.num_retries+1):
         if self.bucket:
             break
         if n > 1:
             time.sleep(30)
         try:
             try:
                 self.bucket = self.conn.get_bucket(self.bucket_name, validate=True)
             except Exception, e:
                 if "NoSuchBucket" in str(e):
                     if globals.s3_european_buckets:
                         self.bucket = self.conn.create_bucket(self.bucket_name,
                                                               location=Location.EU)
                     else:
                         self.bucket = self.conn.create_bucket(self.bucket_name)
                 else:
                     raise e
         except Exception, e:
             log.Warn("Failed to create bucket (attempt #%d) '%s' failed (reason: %s: %s)"
                      "" % (n, self.bucket_name,
                            e.__class__.__name__,
                            str(e)))
             self.resetConnection()
Example #13
 def use_gio(*args):
     try:
         import duplicity.backends.giobackend
         backend.force_backend(duplicity.backends.giobackend.GIOBackend)
     except ImportError:
         log.FatalError(_("Unable to load gio module"),
                        log.ErrorCode.gio_not_available)
Example #14
    def file_by_name(self, filename):
        from pydrive.files import ApiRequestError
        if filename in self.id_cache:
            # It might since have been locally moved, renamed or deleted, so we
            # need to validate the entry.
            file_id = self.id_cache[filename]
            drive_file = self.drive.CreateFile({'id': file_id})
            try:
                if drive_file['title'] == filename and not drive_file['labels']['trashed']:
                    for parent in drive_file['parents']:
                        if parent['id'] == self.folder:
                            log.Info("PyDrive backend: found file '%s' with id %s in ID cache" % (filename, file_id))
                            return drive_file
            except ApiRequestError as error:
                # A 404 occurs if the ID is no longer valid
                if error.args[0].resp.status != 404:
                    raise
            # If we get here, the cache entry is invalid
            log.Info("PyDrive backend: invalidating '%s' (previously ID %s) from ID cache" % (filename, file_id))
            del self.id_cache[filename]

        # Not found in the cache, so use directory listing. This is less
        # reliable because there is no strong consistency.
        q = "title='%s' and '%s' in parents and trashed=false" % (filename, self.folder)
        fields = 'items(title,id,fileSize,downloadUrl,exportLinks),nextPageToken'
        flist = self.drive.ListFile({'q': q, 'fields': fields}).GetList()
        if len(flist) > 1:
            log.FatalError(_("PyDrive backend: multiple files called '%s'.") % (filename,))
        elif flist:
            file_id = flist[0]['id']
            self.id_cache[filename] = flist[0]['id']
            log.Info("PyDrive backend: found file '%s' with id %s on server, adding to cache" % (filename, file_id))
            return flist[0]
        log.Info("PyDrive backend: file '%s' not found in cache or on server" % (filename,))
        return None
Example #15
    def obtain_access_token(self):
        log.Info("dpbx: trying to obtain access token")
        for env_var in ['DPBX_APP_KEY', 'DPBX_APP_SECRET']:
            if env_var not in os.environ:
                raise BackendException(
                    'dpbx: %s environment variable not set' % env_var)

        app_key = os.environ['DPBX_APP_KEY']
        app_secret = os.environ['DPBX_APP_SECRET']

        if not sys.stdout.isatty() or not sys.stdin.isatty():
            log.FatalError(
                'dpbx error: cannot interact, but need human attention',
                log.ErrorCode.backend_command_error)

        auth_flow = DropboxOAuth2FlowNoRedirect(app_key, app_secret)
        log.Debug('dpbx,auth_flow.start()')
        authorize_url = auth_flow.start()
        print
        print '-' * 72
        print "1. Go to: " + authorize_url
        print "2. Click \"Allow\" (you might have to log in first)."
        print "3. Copy the authorization code."
        print '-' * 72
        auth_code = raw_input("Enter the authorization code here: ").strip()
        try:
            log.Debug('dpbx,auth_flow.finish(%s)' % auth_code)
            authresult = auth_flow.finish(auth_code)
        except Exception as e:
            raise BackendException('dpbx: Unable to obtain access token: %s' %
                                   e)
        log.Info("dpbx: Authentication successful")
        self.save_access_token(authresult.access_token)
Example #16
    def __init__(self, parsed_url):
        duplicity.backend.Backend.__init__(self, parsed_url)

        self.metadata_url = u'https://drive.amazonaws.com/drive/v1/'
        self.content_url = u'https://content-na.drive.amazonaws.com/cdproxy/'

        self.names_to_ids = {}
        self.backup_target_id = None
        self.backup_target = parsed_url.path.lstrip(u'/')

        if globals.volsize > (10 * 1024 * 1024 * 1024):
            # https://forums.developer.amazon.com/questions/22713/file-size-limits.html
            # https://forums.developer.amazon.com/questions/22038/support-for-chunked-transfer-encoding.html
            log.FatalError(
                u'Your --volsize is bigger than 10 GiB, which is the maximum '
                u'file size on Amazon Drive that does not require work arounds.'
            )

        try:
            global requests
            global OAuth2Session
            import requests
            from requests_oauthlib import OAuth2Session
        except ImportError:
            raise BackendException(
                u'Amazon Drive backend requires python-requests and '
                u'python-requests-oauthlib to be installed.\n\n'
                u'For Debian and derivates use:\n'
                u'  apt-get install python-requests python-requests-oauthlib\n'
                u'For Fedora and derivates use:\n'
                u'  yum install python-requests python-requests-oauthlib')

        self.initialize_oauth2_session()
        self.resolve_backup_target()
Example #17
def set_sign_key(sign_key):
    """Set globals.sign_key assuming proper key given"""
    if not len(sign_key) == 8 or not re.search("^[0-9A-F]*$", sign_key):
        log.FatalError(_("Sign key should be an 8 character hex string, like "
                         "'AA0E73D2'.\nReceived '%s' instead.") % (sign_key,),
                       log.ErrorCode.bad_sign_key)
    globals.gpg_profile.sign_key = sign_key
Example #18
    def __init__(self, parsed_url):
        duplicity.backend.Backend.__init__(self, parsed_url)
        self.parsed_url = parsed_url
        self.remote_path = self.parsed_url.path
        self.rclone_cmd = u"rclone"

        try:
            rc, o, e = self._subprocess_safe_popen(self.rclone_cmd + u" version")
        except Exception:
            log.FatalError(u"rclone not found: please install rclone", log.ErrorCode.backend_error)

        verb = log.getverbosity()
        if verb >= log.DEBUG:
            os.environ[u"RCLONE_LOG_LEVEL"] = u"DEBUG"
        elif verb >= log.INFO:
            os.environ[u"RCLONE_LOG_LEVEL"] = u"INFO"
        elif verb >= log.NOTICE:
            os.environ[u"RCLONE_LOG_LEVEL"] = u"NOTICE"
        elif verb >= log.ERROR:
            os.environ[u"RCLONE_LOG_LEVEL"] = u"ERROR"

        if parsed_url.path.startswith(u"//"):
            self.remote_path = self.remote_path[2:].replace(u":/", u":", 1)

        self.remote_path = util.fsdecode(self.remote_path)
Example #19
    def resolve_backup_target(self):
        u"""Resolve node id for remote backup target folder"""

        response = self.http_client.get(
            self.metadata_url + u'nodes?filters=kind:FOLDER AND isRoot:true')
        parent_node_id = response.json()[u'data'][0][u'id']

        for component in [x for x in self.backup_target.split(u'/') if x]:
            # There doesn't seem to be escaping support, so cut off filter
            # after first unsupported character
            query = re.search(u'^[A-Za-z0-9_-]*', component).group(0)
            if component != query:
                query = query + u'*'

            matches = self.read_all_pages(
                self.metadata_url + u'nodes?filters=kind:FOLDER AND name:%s '
                u'AND parents:%s' % (query, parent_node_id))
            candidates = [f for f in matches if f.get(u'name') == component]

            if len(candidates) >= 2:
                log.FatalError(
                    u'There are multiple folders with the same name '
                    u'below one parent.\nParentID: %s\nFolderName: '
                    u'%s' % (parent_node_id, component))
            elif len(candidates) == 1:
                parent_node_id = candidates[0][u'id']
            else:
                log.Debug(u'Folder %s does not exist yet. Creating.' %
                          component)
                parent_node_id = self.mkdir(parent_node_id, component)

        log.Debug(u"Backup target folder has id: %s" % parent_node_id)
        self.backup_target_id = parent_node_id
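Because the Amazon Drive filter syntax has no escaping (as the comment above notes), the loop only keeps the leading characters matching [A-Za-z0-9_-] for the name filter and appends * when anything was cut off; exact matching is then done client-side on the candidates list. A tiny illustration with a hypothetical folder name:

    import re

    component = u'my folder'                                     # hypothetical folder name
    query = re.search(u'^[A-Za-z0-9_-]*', component).group(0)    # -> u'my'
    if component != query:
        query = query + u'*'                                     # -> u'my*', used in the name: filter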
Example #20
    def delete(self, filename_list):
        """deletes all files in the list on the remote side. In scp mode unavoidable quoting issues
        will cause failures if filenames containing single quotes are encountered."""
        for fn in filename_list:
            # Try to delete each file several times before giving up completely.
            for n in range(1, globals.num_retries + 1):
                try:
                    if (globals.use_scp):
                        self.runremote("rm '%s/%s'" % (self.remote_dir, fn),
                                       False, "scp rm ")
                    else:
                        try:
                            self.sftp.remove(fn)
                        except Exception, e:
                            raise BackendException("sftp rm %s failed: %s" %
                                                   (fn, e))

                    # If we get here, we deleted this file successfully. Move on to the next one.
                    break
                except Exception, e:
                    if n == globals.num_retries:
                        log.FatalError(str(e), log.ErrorCode.backend_error)
                    else:
                        log.Warn(
                            "%s (Try %d of %d) Will retry in %d seconds." %
                            (e, n, globals.num_retries, self.retry_delay))
                        time.sleep(self.retry_delay)
Example #21
    def __init__(self, parsed_url):
        try:
            import pyrax
        except ImportError:
            raise BackendException("This backend requires the pyrax "
                                   "library available from Rackspace.")

        # Inform Pyrax that we're talking to Rackspace
        # per Jesus Monzon (gsusmonzon)
        pyrax.set_setting("identity_type", "rackspace")

        conn_kwargs = {}

        if not os.environ.has_key('CLOUDFILES_USERNAME'):
            raise BackendException('CLOUDFILES_USERNAME environment variable '
                                   'not set.')

        if not os.environ.has_key('CLOUDFILES_APIKEY'):
            raise BackendException('CLOUDFILES_APIKEY environment variable not set.')

        conn_kwargs['username'] = os.environ['CLOUDFILES_USERNAME']
        conn_kwargs['api_key'] = os.environ['CLOUDFILES_APIKEY']

        if os.environ.has_key('CLOUDFILES_REGION'):
            conn_kwargs['region'] = os.environ['CLOUDFILES_REGION']

        container = parsed_url.path.lstrip('/')

        try:
            pyrax.set_credentials(**conn_kwargs)
        except Exception, e:
            log.FatalError("Connection failed, please check your credentials: %s %s"
                           % (e.__class__.__name__, str(e)),
                           log.ErrorCode.connection_failed)
Example #22
    def __init__(self, parsed_url):
        try:
            from cloudfiles import Connection
            from cloudfiles.errors import ResponseError
            from cloudfiles import consts
        except ImportError:
            raise BackendException("This backend requires the cloudfiles "
                                   "library available from Rackspace.")

        self.resp_exc = ResponseError
        conn_kwargs = {}

        if not os.environ.has_key('CLOUDFILES_USERNAME'):
            raise BackendException('CLOUDFILES_USERNAME environment variable '
                                   'not set.')

        if not os.environ.has_key('CLOUDFILES_APIKEY'):
            raise BackendException('CLOUDFILES_APIKEY environment variable not set.')

        conn_kwargs['username'] = os.environ['CLOUDFILES_USERNAME']
        conn_kwargs['api_key'] = os.environ['CLOUDFILES_APIKEY']

        if os.environ.has_key('CLOUDFILES_AUTHURL'):
            conn_kwargs['authurl'] = os.environ['CLOUDFILES_AUTHURL']
        else:
            conn_kwargs['authurl'] = consts.default_authurl

        container = parsed_url.path.lstrip('/')

        try:
            conn = Connection(**conn_kwargs)
        except Exception, e:
            log.FatalError("Connection failed, please check your credentials: %s %s"
                           % (e.__class__.__name__, str(e)),
                           log.ErrorCode.connection_failed)
Example #23
 def use_gio(*args):
     try:
         import duplicity.backends.giobackend
         backend.force_backend(duplicity.backends.giobackend.GIOBackend)
     except ImportError:
         log.FatalError(
             _("Unable to load gio backend: %s") % str(sys.exc_info()[1]),
             log.ErrorCode.gio_not_available)
Example #24
 def done_with_mount(self, fileobj, result, loop):
     try:
         fileobj.mount_enclosing_volume_finish(result)
     except GLib.GError, e:
         # check for NOT_SUPPORTED because some schemas (e.g. file://) validly don't support mounting
         if e.code != Gio.IOErrorEnum.ALREADY_MOUNTED and e.code != Gio.IOErrorEnum.NOT_SUPPORTED:
             log.FatalError(_("Connection failed, please check your password: %s")
                            % str(e), log.ErrorCode.connection_failed)
Example #25
    def parse_catch_error(self, exc):
        u"""Deal with selection error exc"""
        # Internal, used by ParseArgs.
        if isinstance(exc, FilePrefixError):
            log.FatalError(_(u"""\
Fatal Error: The file specification
    %s
cannot match any files in the base directory
    %s
Useful file specifications begin with the base directory or some
pattern (such as '**') which matches the base directory.""") %
                           (exc, self.prefix), log.ErrorCode.file_prefix_error)
        elif isinstance(exc, GlobbingError):
            log.FatalError(_(u"Fatal Error while processing expression\n"
                             u"%s") % exc, log.ErrorCode.globbing_error)
        else:
            raise  # pylint: disable=misplaced-bare-raise
Example #26
    def parse_catch_error(self, exc):
        """Deal with selection error exc"""
        if isinstance(exc, FilePrefixError):
            log.FatalError(
                _("""Fatal Error: The file specification
    %s
cannot match any files in the base directory
    %s
Useful file specifications begin with the base directory or some
pattern (such as '**') which matches the base directory.""") %
                (exc, util.ufn(self.prefix)), log.ErrorCode.file_prefix_error)
        elif isinstance(exc, GlobbingError):
            log.FatalError(
                _("Fatal Error while processing expression\n"
                  "%s") % exc, log.ErrorCode.globbing_error)
        else:
            raise
Example #27
 def add_filelist(o, s, v, p):
     filename = v
     select_opts.append((s, filename))
     try:
         select_files.append(open(filename, "r"))
     except IOError:
         log.FatalError(_("Error opening file %s") % filename,
                        log.ErrorCode.cant_open_filelist)
Example #28
    def __init__(self, parsed_url):
        duplicity.backend.Backend.__init__(self, parsed_url)

        # we expect an output
        try:
            p = os.popen("lftp --version")
            fout = p.read()
            ret = p.close()
        except Exception:
            pass
        # there is no output if lftp not found
        if not fout:
            log.FatalError("LFTP not found:  Please install LFTP.",
                           log.ErrorCode.ftps_lftp_missing)

        # version is the second word of the second part of the first line
        version = fout.split('\n')[0].split(' | ')[1].split()[1]
        log.Notice("LFTP version is %s" % version)

        self.parsed_url = parsed_url

        self.url_string = duplicity.backend.strip_auth_from_url(
            self.parsed_url)

        # Use an explicit directory name.
        if self.url_string[-1] != '/':
            self.url_string += '/'

        self.password = self.get_password()

        if globals.ftp_connection == 'regular':
            self.conn_opt = 'off'
        else:
            self.conn_opt = 'on'

        if parsed_url.port != None and parsed_url.port != 21:
            self.portflag = " -p '%s'" % (parsed_url.port)
        else:
            self.portflag = ""

        self.tempfile, self.tempname = tempdir.default().mkstemp()
        os.write(self.tempfile, "set ftp:ssl-allow true\n")
        os.write(self.tempfile, "set ftp:ssl-protect-data true\n")
        os.write(self.tempfile, "set ftp:ssl-protect-list true\n")
        os.write(self.tempfile, "set net:timeout %s\n" % globals.timeout)
        os.write(self.tempfile,
                 "set net:max-retries %s\n" % globals.num_retries)
        os.write(self.tempfile, "set ftp:passive-mode %s\n" % self.conn_opt)
        os.write(self.tempfile,
                 "open %s %s\n" % (self.portflag, self.parsed_url.hostname))
        # allow .netrc auth by only setting user/pass when user was actually given
        if self.parsed_url.username:
            os.write(
                self.tempfile,
                "user %s %s\n" % (self.parsed_url.username, self.password))
        os.close(self.tempfile)

        self.flags = "-f %s" % self.tempname
Example #29
    def file_by_name(self, filename):
        from googleapiclient.errors import HttpError

        filename = util.fsdecode(filename)

        if filename in self.id_cache:
            # It might since have been locally moved, renamed or deleted, so we
            # need to validate the entry.
            file_id = self.id_cache[filename]
            try:
                drive_file = self.drive.files().get(
                    fileId=file_id,
                    fields=u'id,size,name,parents,trashed',
                    **self.shared_drive_flags_support).execute()
                if drive_file[
                        u'name'] == filename and not drive_file[u'trashed']:
                    for parent in drive_file[u'parents']:
                        if parent == self.folder:
                            log.Info(
                                u"GDrive backend: found file '%s' with id %s in ID cache"
                                % (filename, file_id))
                            return drive_file
            except HttpError as error:
                # A 404 occurs if the ID is no longer valid
                if error.resp.status != 404:
                    raise
            # If we get here, the cache entry is invalid
            log.Info(
                u"GDrive backend: invalidating '%s' (previously ID %s) from ID cache"
                % (filename, file_id))
            del self.id_cache[filename]

        # Not found in the cache, so use directory listing. This is less
        # reliable because there is no strong consistency.
        q = u"name = '%s' and '%s' in parents and trashed = false" % (
            filename, self.folder)
        results = self.drive.files().list(
            q=q,
            fields=u'files(name,id,size),nextPageToken',
            pageSize=2,
            **self.shared_drive_corpora,
            **self.shared_drive_id,
            **self.shared_drive_flags_include,
            **self.shared_drive_flags_support).execute()
        file_list = results.get(u'files', [])
        if len(file_list) > 1:
            log.FatalError(u"GDrive backend: multiple files called '%s'." %
                           (filename, ))
        elif len(file_list) > 0:
            file_id = file_list[0][u'id']
            self.id_cache[filename] = file_list[0][u'id']
            log.Info(u"GDrive backend: found file '%s' with id %s on server, "
                     u"adding to cache" % (filename, file_id))
            return file_list[0]

        log.Info(u"GDrive backend: file '%s' not found in cache or on server" %
                 (filename, ))
        return None
Example #30
class ACDBackend(duplicity.backend.Backend):
    """Connect to remote store using acd_cli"""
    acd_cmd = 'acd_cli'
    def __init__(self, parsed_url):
        duplicity.backend.Backend.__init__(self, parsed_url)

        # we expect an error return, so go low-level and ignore it
        try:
            p = os.popen(self.acd_cmd + " version")
            fout = p.read()
            ret = p.close()
        except Exception:
            pass
        # the expected error is 0
        if ret != None:
            log.FatalError(self.acd_cmd + " not found:  Please install acd_cli",
                           log.ErrorCode.backend_not_found)

        self.parsed_url = parsed_url
        self.url_string = duplicity.backend.strip_auth_from_url(self.parsed_url)

        # Use an explicit directory name.
        if self.url_string[-1] != '/':
            self.url_string += '/'

        self.subprocess_popen(self.acd_cmd + " sync")

    def _put(self, source_path, remote_filename = None):
        """Transfer source_path to remote_filename"""
        if not remote_filename:
            remote_filename = source_path.get_filename()

        # WORKAROUND for acd_cli: cannot specify remote filename
        # Link tmp file to the desired remote filename locally and upload
        remote_path = urllib.unquote(self.parsed_url.path.replace('///','/'))
        local_real_duplicity_file = os.path.join(os.path.dirname(source_path.name), remote_filename.rstrip())

        deleteFile = False
        if(source_path.name != local_real_duplicity_file):
            try:
                os.symlink(source_path.name, local_real_duplicity_file)
                deleteFile = True
            except IOError, e:
                log.FatalError("Unable to copy " + source_path.name + " to " + local_real_duplicity_file)

        commandline = self.acd_cmd + " upload --force --overwrite '%s' '%s'" % \
            (local_real_duplicity_file, remote_path)

        try:
            l = self.subprocess_popen(commandline)
        finally:
            if (deleteFile):
                try:
                    os.remove(local_real_duplicity_file)
                except OSError, e:
                    log.FatalError("Unable to remove file %s" % e)