Example #1
    def __init__(self, *args, **kwargs):
        try:
            global socket, ssl
            import socket
            import ssl
        except ImportError:
            raise FatalBackendException("Missing socket or ssl libraries.")

        httplib.HTTPSConnection.__init__(self, *args, **kwargs)

        self.cacert_file = globals.ssl_cacert_file
        cacert_candidates = [
            "~/.duplicity/cacert.pem", "~/duplicity_cacert.pem",
            "/etc/duplicity/cacert.pem"
        ]
        # if no cacert file was given search default locations
        if not self.cacert_file:
            for path in cacert_candidates:
                path = os.path.expanduser(path)
                if os.path.isfile(path):
                    self.cacert_file = path
                    break
        # still no cacert file, inform user
        if not self.cacert_file:
            raise FatalBackendException("""\
For certificate verification a cacert database file is needed in one of these locations: %s
Hints:
  Consult the man page, chapter 'SSL Certificate Verification'.
  Consider using the options --ssl-cacert-file, --ssl-no-check-certificate ."""
                                        % ", ".join(cacert_candidates))
        # check if file is accessible (libssl errors are not very detailed)
        if not os.access(self.cacert_file, os.R_OK):
            raise FatalBackendException(
                "Cacert database file '%s' is not readable." %
                self.cacert_file)
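
The interesting part of Example #1 is the cacert lookup order: an explicitly configured file wins, then the three default locations are probed in turn. A minimal standalone sketch of that lookup, independent of duplicity (find_cacert_file is an illustrative name):

import os

CACERT_CANDIDATES = [
    "~/.duplicity/cacert.pem", "~/duplicity_cacert.pem",
    "/etc/duplicity/cacert.pem"
]

def find_cacert_file(explicit_path=None):
    # an explicitly configured file always wins
    if explicit_path:
        return explicit_path
    # otherwise take the first default location that exists on disk
    for candidate in CACERT_CANDIDATES:
        candidate = os.path.expanduser(candidate)
        if os.path.isfile(candidate):
            return candidate
    return None
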
Example #2
    def __init__(self, *args, **kwargs):
        try:
            global socket, ssl
            import socket
            import ssl
        except ImportError:
            raise FatalBackendException(
                _("Missing socket or ssl python modules."))

        httplib.HTTPSConnection.__init__(self, *args, **kwargs)

        self.cacert_file = globals.ssl_cacert_file
        self.cacert_candidates = [
            "~/.duplicity/cacert.pem", "~/duplicity_cacert.pem",
            "/etc/duplicity/cacert.pem"
        ]
        # if no cacert file was given search default locations
        if not self.cacert_file:
            for path in self.cacert_candidates:
                path = os.path.expanduser(path)
                if os.path.isfile(path):
                    self.cacert_file = path
                    break

        # check if file is accessible (libssl errors are not very detailed)
        if self.cacert_file and not os.access(self.cacert_file, os.R_OK):
            raise FatalBackendException(
                _("Cacert database file '%s' is not readable.") %
                self.cacert_file)
Example #3
    def connect(self):
        # create new socket
        sock = socket.create_connection((self.host, self.port),
                                        self.timeout)
        if self._tunnel_host:
            self.sock = sock
            self.tunnel()

        # python 2.7.9+ supports default system certs now
        if u"create_default_context" in dir(ssl):
            context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH,
                                                 cafile=self.cacert_file,
                                                 capath=config.ssl_cacert_path)
            self.sock = context.wrap_socket(sock, server_hostname=self.host)
        # the legacy way needing a cert file
        else:
            if config.ssl_cacert_path:
                raise FatalBackendException(
                    _(u"Option '--ssl-cacert-path' is not supported "
                      u"with python 2.7.8 and below."))

            if not self.cacert_file:
                raise FatalBackendException(_(u"""\
For certificate verification with python 2.7.8 or earlier a cacert database
file is needed in one of these locations: %s
Hints:
Consult the man page, chapter 'SSL Certificate Verification'.
Consider using the options --ssl-cacert-file, --ssl-no-check-certificate .""") %
                                            u", ".join(self.cacert_candidates))

            # wrap the socket in ssl using verification
            self.sock = ssl.wrap_socket(sock,
                                        cert_reqs=ssl.CERT_REQUIRED,
                                        ca_certs=self.cacert_file,
                                        )
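
On Python 2.7.9+ (and any Python 3) the ssl.create_default_context branch of Example #3 does all the verification work: a default context enables CERT_REQUIRED and hostname checking out of the box. A self-contained sketch of just that branch (open_verified_socket is an illustrative name):

import socket
import ssl

def open_verified_socket(host, port, cafile=None, capath=None, timeout=10):
    # the default context verifies the peer certificate and the hostname
    context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH,
                                         cafile=cafile, capath=capath)
    sock = socket.create_connection((host, port), timeout)
    return context.wrap_socket(sock, server_hostname=host)
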
Example #4
    def request(self, method, path, data=None, redirected=0):
        u"""
        Wraps the connection.request method to retry once if authentication is
        required
        """
        self._close()  # or we get previous request's data or exception
        self.connect()

        quoted_path = urllib.parse.quote(path, u"/:~")

        if self.digest_challenge is not None:
            self.headers[u'Authorization'] = self.get_digest_authorization(
                path)

        log.Info(
            _(u"WebDAV %s %s request with headers: %s ") %
            (method, quoted_path, self.headers))
        log.Info(_(u"WebDAV data length: %s ") % len(str(data)))
        self.conn.request(method, quoted_path, data, self.headers)
        response = self.conn.getresponse()
        log.Info(
            _(u"WebDAV response status %s with reason '%s'.") %
            (response.status, response.reason))
        # resolve redirects and reset url on listing requests (they usually come before everything else)
        if response.status in [301, 302] and method == u'PROPFIND':
            redirect_url = response.getheader(u'location', None)
            response.close()
            if redirect_url:
                log.Notice(
                    _(u"WebDAV redirect to: %s ") %
                    urllib.parse.unquote(redirect_url))
                if redirected > 10:
                    raise FatalBackendException(
                        _(u"WebDAV redirected 10 times. Giving up."))
                self.parsed_url = duplicity.backend.ParsedUrl(redirect_url)
                self.directory = self.sanitize_path(self.parsed_url.path)
                return self.request(method, self.directory, data,
                                    redirected + 1)
            else:
                raise FatalBackendException(
                    _(u"WebDAV missing location header in redirect response."))
        elif response.status == 401:
            response.read()
            response.close()
            self.headers[u'Authorization'] = self.get_authorization(
                response, quoted_path)
            log.Info(_(u"WebDAV retry request with authentication headers."))
            log.Info(
                _(u"WebDAV %s %s request2 with headers: %s ") %
                (method, quoted_path, self.headers))
            log.Info(_(u"WebDAV data length: %s ") % len(str(data)))
            self.conn.request(method, quoted_path, data, self.headers)
            response = self.conn.getresponse()
            log.Info(
                _(u"WebDAV response2 status %s with reason '%s'.") %
                (response.status, response.reason))

        return response
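
Example #4 retries a request exactly once after a 401, recomputing the Authorization header in between, and follows PROPFIND redirects up to a limit of 10. The single-retry shape in isolation, assuming an http.client-style connection (conn and auth_for are illustrative stand-ins for the example's connection object and get_authorization method):

def request_with_auth_retry(conn, method, path, headers, auth_for):
    conn.request(method, path, None, headers)
    response = conn.getresponse()
    if response.status == 401:
        # drain and close the body so the connection can be reused
        response.read()
        response.close()
        headers["Authorization"] = auth_for(response, path)
        conn.request(method, path, None, headers)
        response = conn.getresponse()
    return response
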
Example #5
    def connect(self, forced=False):
        """
        Connect or re-connect to the server, updating self.conn.

        Reconnects on errors as a precaution; errors such as
        "[Errno 32] Broken pipe" or SSL errors can render the
        connection unusable.
        """
        if not forced and self.conn \
                and self.conn.host == self.parsed_url.hostname:
            return

        log.Info(
            _("WebDAV create connection on '%s'") % (self.parsed_url.hostname))
        self._close()
        # http schemes needed for redirect urls from servers
        if self.parsed_url.scheme in ['webdav', 'http']:
            self.conn = httplib.HTTPConnection(self.parsed_url.hostname,
                                               self.parsed_url.port)
        elif self.parsed_url.scheme in ['webdavs', 'https']:
            if globals.ssl_no_check_certificate:
                self.conn = httplib.HTTPSConnection(self.parsed_url.hostname,
                                                    self.parsed_url.port)
            else:
                self.conn = VerifiedHTTPSConnection(self.parsed_url.hostname,
                                                    self.parsed_url.port)
        else:
            raise FatalBackendException(
                _("WebDAV Unknown URI scheme: %s") % (self.parsed_url.scheme))
Example #6
    def __init__(self, parsed_url):
        u"""
        Authorize to B2 api and set up needed variables
        """
        duplicity.backend.Backend.__init__(self, parsed_url)

        global DownloadDestLocalFile, FileVersionInfoFactory
        try:  # try to import the new b2sdk if available
            from b2sdk.api import B2Api
            from b2sdk.account_info import InMemoryAccountInfo
            from b2sdk.download_dest import DownloadDestLocalFile
            from b2sdk.exception import NonExistentBucket
            from b2sdk.file_version import FileVersionInfoFactory
        except ImportError as e:
            # re-raise unless the import failed because b2sdk itself is missing
            if u'b2sdk' not in getattr(e, u'name', u'b2sdk'):
                raise
            try:  # fall back to import the old b2 client
                from b2.api import B2Api
                from b2.account_info import InMemoryAccountInfo
                from b2.download_dest import DownloadDestLocalFile
                from b2.exception import NonExistentBucket
                from b2.file_version import FileVersionInfoFactory
            except ImportError:
                # re-raise unless the old b2 client is missing as well
                if u'b2' not in getattr(e, u'name', u'b2'):
                    raise
                raise BackendException(
                    u'B2 backend requires B2 Python SDK (pip install b2sdk)')

        self.service = B2Api(InMemoryAccountInfo())
        # for prettier password prompt only
        self.parsed_url.hostname = u'B2'

        account_id = parsed_url.username
        account_key = self.get_password()

        self.url_parts = [
            x for x in parsed_url.path.replace(u"@", u"/").split(u'/')
            if x != u''
        ]
        if self.url_parts:
            self.username = self.url_parts.pop(0)
            bucket_name = self.url_parts.pop(0)
        else:
            raise BackendException(u"B2 requires a bucket name")
        self.path = u"".join([url_part + u"/" for url_part in self.url_parts])
        self.service.authorize_account(u'production', account_id, account_key)

        log.Log(
            u"B2 Backend (path= %s, bucket= %s, minimum_part_size= %s)" %
            (self.path, bucket_name,
             self.service.account_info.get_minimum_part_size()), log.INFO)
        try:
            self.bucket = self.service.get_bucket_by_name(bucket_name)
            log.Log(u"Bucket found", log.INFO)
        except NonExistentBucket:
            try:
                log.Log(u"Bucket not found, creating one", log.INFO)
                self.bucket = self.service.create_bucket(
                    bucket_name, u'allPrivate')
            except Exception:
                raise FatalBackendException(u"Bucket cannot be created")
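
Examples #6-#8 all parse a URL of the form b2://account_id@bucket[/folder/]: the account id arrives as parsed_url.username, and the path is re-split on '/' to yield a username, the bucket name, and an optional folder prefix. The same split as a standalone helper, with the short-list case guarded explicitly (split_b2_path is an illustrative name):

def split_b2_path(path):
    # path looks like "user@bucket/folder/sub" once the scheme is stripped
    parts = [p for p in path.replace("@", "/").split("/") if p != ""]
    if len(parts) < 2:
        raise ValueError("B2 requires a bucket name")
    username, bucket_name = parts[0], parts[1]
    folder = "".join(p + "/" for p in parts[2:])
    return username, bucket_name, folder
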
Example #7
    def __init__(self, parsed_url):
        """
        Authorize to B2 api and set up needed variables
        """
        duplicity.backend.Backend.__init__(self, parsed_url)

        # for prettier password prompt only
        self.parsed_url.hostname = 'B2'

        self.account_id = parsed_url.username
        account_key = self.get_password()

        self.url_parts = [
            x for x in parsed_url.path.replace("@", "/").split('/') if x != ''
        ]
        if self.url_parts:
            self.username = self.url_parts.pop(0)
            self.bucket_name = self.url_parts.pop(0)
        else:
            raise BackendException("B2 requires a bucket name")
        self.path = "/".join(self.url_parts)

        self.id_and_key = self.account_id + ":" + account_key
        self._authorize()

        try:
            self.find_or_create_bucket(self.bucket_name)
        except urllib2.HTTPError:
            raise FatalBackendException("Bucket cannot be created")
Example #8
    def __init__(self, parsed_url):
        u"""
        Authorize to B2 api and set up needed variables
        """
        duplicity.backend.Backend.__init__(self, parsed_url)

        # Import B2 API
        try:
            global b2
            import b2
            global b2sdk
            import b2sdk
            import b2.api
            import b2.account_info
            import b2.download_dest
            import b2.file_version
        except ImportError:
            raise BackendException(
                u'B2 backend requires B2 Python APIs (pip install b2)')

        self.service = b2.api.B2Api(b2.account_info.InMemoryAccountInfo())
        self.parsed_url.hostname = u'B2'

        account_id = parsed_url.username
        account_key = self.get_password()

        self.url_parts = [
            x for x in parsed_url.path.replace(u"@", u"/").split(u'/')
            if x != u''
        ]
        if self.url_parts:
            self.username = self.url_parts.pop(0)
            bucket_name = self.url_parts.pop(0)
        else:
            raise BackendException(u"B2 requires a bucket name")
        self.path = u"".join([url_part + u"/" for url_part in self.url_parts])
        self.service.authorize_account(u'production', account_id, account_key)

        log.Log(
            u"B2 Backend (path= %s, bucket= %s, minimum_part_size= %s)" %
            (self.path, bucket_name,
             self.service.account_info.get_minimum_part_size()), log.INFO)
        try:
            self.bucket = self.service.get_bucket_by_name(bucket_name)
            log.Log(u"Bucket found", log.INFO)
        except b2.exception.NonExistentBucket:
            try:
                log.Log(u"Bucket not found, creating one", log.INFO)
                self.bucket = self.service.create_bucket(
                    bucket_name, u'allPrivate')
            except Exception:
                raise FatalBackendException(u"Bucket cannot be created")
Example #9
    def _put(self, local_source_path, remote_filename):
        from boto3.s3.transfer import TransferConfig  # pylint: disable=import-error

        if not self.s3:
            self.reset_connection()

        remote_filename = util.fsdecode(remote_filename)
        key = self.key_prefix + remote_filename

        if config.s3_use_rrs:
            storage_class = u'REDUCED_REDUNDANCY'
        elif config.s3_use_ia:
            storage_class = u'STANDARD_IA'
        elif config.s3_use_onezone_ia:
            storage_class = u'ONEZONE_IA'
        elif config.s3_use_glacier and u"manifest" not in remote_filename:
            storage_class = u'GLACIER'
        elif config.s3_use_glacier_ir and u"manifest" not in remote_filename:
            storage_class = u'GLACIER_IR'
        elif config.s3_use_deep_archive and u"manifest" not in remote_filename:
            storage_class = u'DEEP_ARCHIVE'
        else:
            storage_class = u'STANDARD'
        extra_args = {u'StorageClass': storage_class}

        if config.s3_use_sse:
            extra_args[u'ServerSideEncryption'] = u'AES256'
        elif config.s3_use_sse_kms:
            if config.s3_kms_key_id is None:
                raise FatalBackendException(u"S3 SSE-KMS was requested, but no key id was "
                                            u"provided (--s3-kms-key-id is required)",
                                            code=log.ErrorCode.s3_kms_no_id)
            extra_args[u'ServerSideEncryption'] = u'aws:kms'
            extra_args[u'SSEKMSKeyId'] = config.s3_kms_key_id
            if config.s3_kms_grant:
                extra_args[u'GrantFullControl'] = config.s3_kms_grant

        transfer_config = TransferConfig(multipart_chunksize=config.s3_multipart_chunk_size,
                                         multipart_threshold=config.s3_multipart_chunk_size)

        # Should the tracker be scoped to the put or the backend?
        # The put seems right to me, but the results look a little more correct
        # scoped to the backend.  This brings up questions about knowing when
        # it's proper for it to be reset.
        # tracker = UploadProgressTracker() # Scope the tracker to the put()
        tracker = self.tracker

        log.Info(u"Uploading %s/%s to %s Storage" % (self.straight_url, remote_filename, storage_class))
        self.s3.Object(self.bucket.name, key).upload_file(local_source_path.uc_name,
                                                          Callback=tracker.progress_cb,
                                                          Config=transfer_config,
                                                          ExtraArgs=extra_args)
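
Example #9 hands chunking, threading and retries to boto3's managed transfer layer; TransferConfig only decides when an upload switches to multipart and how big the parts are. The same call shape stripped of duplicity's config plumbing, as a rough sketch (the bucket, key and 25 MiB chunk size are placeholders):

import boto3
from boto3.s3.transfer import TransferConfig

def upload_with_storage_class(local_path, bucket, key,
                              storage_class="STANDARD",
                              chunk_size=25 * 1024 * 1024):
    s3 = boto3.resource("s3")
    transfer_config = TransferConfig(multipart_chunksize=chunk_size,
                                     multipart_threshold=chunk_size)
    # ExtraArgs carries the storage class (and SSE settings, if any)
    s3.Object(bucket, key).upload_file(local_path,
                                       Config=transfer_config,
                                       ExtraArgs={"StorageClass": storage_class})
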
Example #10
    def _put(self, source_path, remote_filename):
        if globals.s3_european_buckets:
            if not globals.s3_use_new_style:
                raise FatalBackendException(
                    "European bucket creation was requested, but not new-style "
                    "bucket addressing (--s3-use-new-style)",
                    code=log.ErrorCode.s3_bucket_not_style)

        if self.bucket is None:
            try:
                self.bucket = self.conn.get_bucket(self.bucket_name)
            except Exception as e:
                if "NoSuchBucket" in str(e):
                    self.bucket = self.conn.create_bucket(
                        self.bucket_name, location=self.my_location)
                else:
                    raise

        key = self.bucket.new_key(self.key_prefix + remote_filename)

        if globals.s3_use_rrs:
            storage_class = 'REDUCED_REDUNDANCY'
        elif globals.s3_use_ia:
            storage_class = 'STANDARD_IA'
        elif globals.s3_use_onezone_ia:
            storage_class = 'ONEZONE_IA'
        else:
            storage_class = 'STANDARD'
        log.Info("Uploading %s/%s to %s Storage" %
                 (self.straight_url, remote_filename, storage_class))
        if globals.s3_use_sse:
            headers = {
                'Content-Type': 'application/octet-stream',
                'x-amz-storage-class': storage_class,
                'x-amz-server-side-encryption': 'AES256'
            }
        else:
            headers = {
                'Content-Type': 'application/octet-stream',
                'x-amz-storage-class': storage_class
            }

        upload_start = time.time()
        self.upload(source_path.name, key, headers)
        upload_end = time.time()
        total_s = abs(upload_end - upload_start) or 1  # prevent a zero value!
        rough_upload_speed = os.path.getsize(source_path.name) / total_s
        log.Debug("Uploaded %s/%s to %s Storage at roughly %f bytes/second" %
                  (self.straight_url, remote_filename, storage_class,
                   rough_upload_speed))
Example #11
    def __init__(self, parsed_url):
        """
        Authorize to B2 api and set up needed variables
        """
        duplicity.backend.Backend.__init__(self, parsed_url)

        # for prettier password prompt only
        self.parsed_url.hostname = 'B2'

        self.account_id = parsed_url.username
        account_key = self.get_password()

        self.url_parts = [
            x for x in parsed_url.path.replace("@", "/").split('/') if x != ''
        ]
        if self.url_parts:
            self.username = self.url_parts.pop(0)
            self.bucket_name = self.url_parts.pop(0)
        else:
            raise BackendException("B2 requires a bucket name")
        self.path = "/".join(self.url_parts)

        id_and_key = self.account_id + ":" + account_key
        basic_auth_string = 'Basic ' + base64.b64encode(id_and_key)
        headers = {'Authorization': basic_auth_string}

        request = urllib2.Request(
            'https://api.backblaze.com/b2api/v1/b2_authorize_account',
            headers=headers
        )

        response = urllib2.urlopen(request)
        response_data = json.loads(response.read())
        response.close()

        self.auth_token = response_data['authorizationToken']
        self.api_url = response_data['apiUrl']
        self.download_url = response_data['downloadUrl']

        try:
            self.find_or_create_bucket(self.bucket_name)
        except urllib2.HTTPError:
            raise FatalBackendException("Bucket cannot be created")
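
Example #11 is Python 2 code (urllib2, str-based b64encode). A rough Python 3 equivalent of the same b2_authorize_account call, standard library only (b2_authorize is an illustrative name; the endpoint URL is taken verbatim from the example):

import base64
import json
import urllib.request

def b2_authorize(account_id, account_key):
    id_and_key = ("%s:%s" % (account_id, account_key)).encode("ascii")
    auth = "Basic " + base64.b64encode(id_and_key).decode("ascii")
    request = urllib.request.Request(
        "https://api.backblaze.com/b2api/v1/b2_authorize_account",
        headers={"Authorization": auth})
    with urllib.request.urlopen(request) as response:
        data = json.loads(response.read().decode("utf-8"))
    return data["authorizationToken"], data["apiUrl"], data["downloadUrl"]
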
Example #12
    def reset_connection(self):
        import boto3  # pylint: disable=import-error
        import botocore  # pylint: disable=import-error
        from botocore.exceptions import ClientError  # pylint: disable=import-error

        self.bucket = None
        self.s3 = boto3.resource(u's3', region_name=config.s3_region_name, endpoint_url=config.s3_endpoint_url)

        try:
            self.s3.meta.client.head_bucket(Bucket=self.bucket_name)
        except botocore.exceptions.ClientError as bce:
            error_code = bce.response[u'Error'][u'Code']
            if error_code == u'404':
                raise FatalBackendException(u'S3 bucket "%s" does not exist' % self.bucket_name,
                                            code=log.ErrorCode.backend_not_found)
            else:
                raise

        self.bucket = self.s3.Bucket(self.bucket_name)  # only set if bucket is thought to exist.
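
head_bucket is the cheapest existence probe boto3 offers: it returns quietly on success and raises ClientError otherwise, with the HTTP status tucked into response['Error']['Code']. The same check reduced to a predicate (bucket_exists is an illustrative name):

import boto3
from botocore.exceptions import ClientError

def bucket_exists(bucket_name):
    s3 = boto3.resource("s3")
    try:
        s3.meta.client.head_bucket(Bucket=bucket_name)
        return True
    except ClientError as e:
        # a missing bucket surfaces as HTTP 404; anything else is a real error
        if e.response["Error"]["Code"] == "404":
            return False
        raise
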
Example #13
    def _put(self, source_path, remote_filename):
        remote_filename = util.fsdecode(remote_filename)

        if config.s3_european_buckets:
            if not config.s3_use_new_style:
                raise FatalBackendException(
                    u"European bucket creation was requested, but not new-style "
                    u"bucket addressing (--s3-use-new-style)",
                    code=log.ErrorCode.s3_bucket_not_style)

        if self.bucket is None:
            try:
                self.bucket = self.conn.get_bucket(self.bucket_name)
            except Exception as e:
                if u"NoSuchBucket" in str(e):
                    self.bucket = self.conn.create_bucket(
                        self.bucket_name, location=self.my_location)
                else:
                    raise

        key = self.bucket.new_key(self.key_prefix + remote_filename)

        if config.s3_use_rrs:
            storage_class = u'REDUCED_REDUNDANCY'
        elif config.s3_use_ia:
            storage_class = u'STANDARD_IA'
        elif config.s3_use_onezone_ia:
            storage_class = u'ONEZONE_IA'
        elif config.s3_use_glacier and u"manifest" not in remote_filename:
            storage_class = u'GLACIER'
        else:
            storage_class = u'STANDARD'
        log.Info(u"Uploading %s/%s to %s Storage" %
                 (self.straight_url, remote_filename, storage_class))
        if config.s3_use_sse:
            headers = {
                u'Content-Type': u'application/octet-stream',
                u'x-amz-storage-class': storage_class,
                u'x-amz-server-side-encryption': u'AES256'
            }
        elif config.s3_use_sse_kms:
            if config.s3_kms_key_id is None:
                raise FatalBackendException(
                    u"S3 SSE-KMS was requested, but no key id was provided "
                    u"(--s3-kms-key-id is required)",
                    code=log.ErrorCode.s3_kms_no_id)
            headers = {
                u'Content-Type': u'application/octet-stream',
                u'x-amz-storage-class': storage_class,
                u'x-amz-server-side-encryption': u'aws:kms',
                u'x-amz-server-side-encryption-aws-kms-key-id': config.s3_kms_key_id
            }
            if config.s3_kms_grant is not None:
                headers[u'x-amz-grant-full-control'] = config.s3_kms_grant
        else:
            headers = {
                u'Content-Type': u'application/octet-stream',
                u'x-amz-storage-class': storage_class
            }

        upload_start = time.time()
        self.upload(source_path.name, key, headers)
        upload_end = time.time()
        total_s = abs(upload_end - upload_start) or 1  # prevent a zero value!
        rough_upload_speed = os.path.getsize(source_path.name) / total_s
        log.Debug(u"Uploaded %s/%s to %s Storage at roughly %f bytes/second" %
                  (self.straight_url, remote_filename, storage_class,
                   rough_upload_speed))
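
The storage-class and encryption headers are the bulk of Example #13; the same selection factored into one helper, with keyword arguments mirroring the config flags in the example (sse_headers is an illustrative name):

def sse_headers(storage_class, use_sse=False, use_sse_kms=False,
                kms_key_id=None, kms_grant=None):
    headers = {
        "Content-Type": "application/octet-stream",
        "x-amz-storage-class": storage_class,
    }
    if use_sse:
        headers["x-amz-server-side-encryption"] = "AES256"
    elif use_sse_kms:
        if kms_key_id is None:
            raise ValueError("SSE-KMS requested but no key id given "
                             "(--s3-kms-key-id)")
        headers["x-amz-server-side-encryption"] = "aws:kms"
        headers["x-amz-server-side-encryption-aws-kms-key-id"] = kms_key_id
        if kms_grant is not None:
            headers["x-amz-grant-full-control"] = kms_grant
    return headers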