Example no. 1
    def __init__(self, name, access_key=None, secret_key=None,
                 security_token=None, profile_name=None):
        self.host = None
        self.port = None
        self.host_header = None
        self.access_key = access_key
        self.secret_key = secret_key
        self.security_token = security_token
        self.profile_name = profile_name
        self.name = name
        self.acl_class = self.AclClassMap[self.name]
        self.canned_acls = self.CannedAclsMap[self.name]
        self._credential_expiry_time = None

        # Load shared credentials file if it exists
        shared_path = os.path.join(expanduser('~'), '.' + name, 'credentials')
        self.shared_credentials = Config(do_load=False)
        if os.path.isfile(shared_path):
            self.shared_credentials.load_from_path(shared_path)

        self.get_credentials(access_key, secret_key, security_token, profile_name)
        self.configure_headers()
        self.configure_errors()

        # Allow config file to override default host and port.
        host_opt_name = '%s_host' % self.HostKeyMap[self.name]
        if config.has_option('Credentials', host_opt_name):
            self.host = config.get('Credentials', host_opt_name)
        port_opt_name = '%s_port' % self.HostKeyMap[self.name]
        if config.has_option('Credentials', port_opt_name):
            self.port = config.getint('Credentials', port_opt_name)
        host_header_opt_name = '%s_host_header' % self.HostKeyMap[self.name]
        if config.has_option('Credentials', host_header_opt_name):
            self.host_header = config.get('Credentials', host_header_opt_name)
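
The constructor above resolves a shared credentials file at ~/.<name>/credentials and then lets a [Credentials] config section override the default host and port through options named <key>_host, <key>_port, and <key>_host_header. A minimal standalone sketch of that lookup pattern using only the standard-library configparser; the provider name 'aws' and the host key 's3' are illustrative assumptions, not values taken from the snippet:

# Sketch of the shared-credentials path and the "[Credentials]" override
# lookup shown above. 'aws' and 's3' are assumed example values.
import configparser
import os
from os.path import expanduser

provider_name = 'aws'                          # hypothetical provider name
host_key = 's3'                                # hypothetical HostKeyMap value
shared_path = os.path.join(expanduser('~'), '.' + provider_name, 'credentials')

config = configparser.ConfigParser()
if os.path.isfile(shared_path):
    config.read(shared_path)

host = port = None
if config.has_section('Credentials'):
    if config.has_option('Credentials', '%s_host' % host_key):
        host = config.get('Credentials', '%s_host' % host_key)
    if config.has_option('Credentials', '%s_port' % host_key):
        port = config.getint('Credentials', '%s_port' % host_key)
print(host, port)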
Example no. 2
 def _populate_keys_from_metadata_server(self):
     # get_instance_metadata is imported here because of a circular
     # dependency.
     mssapi.log.debug("Retrieving credentials from metadata server.")
     from mssapi.utils import get_instance_metadata
     timeout = config.getfloat('Mssapi', 'metadata_service_timeout', 1.0)
     attempts = config.getint('Mssapi', 'metadata_service_num_attempts', 1)
     # The num_retries arg is actually the total number of attempts made,
     # so the config option is named *_num_attempts to make this more
     # clear to users.
     metadata = get_instance_metadata(
         timeout=timeout,
         num_retries=attempts,
         data='meta-data/iam/security-credentials/')
     if metadata:
         # I'm assuming there's only one role on the instance profile.
         security = list(metadata.values())[0]
         self._access_key = security['AccessKeyId']
         self._secret_key = self._convert_key_to_str(
             security['SecretAccessKey'])
         self._security_token = security['Token']
         expires_at = security['Expiration']
         self._credential_expiry_time = datetime.strptime(
             expires_at, "%Y-%m-%dT%H:%M:%SZ")
         mssapi.log.debug("Retrieved credentials will expire in %s at: %s",
                          self._credential_expiry_time - datetime.now(),
                          expires_at)
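
The metadata document returned above carries an ISO-8601 'Expiration' timestamp, which the code parses with datetime.strptime and then logs as a remaining lifetime. A small self-contained sketch of that parsing and time-left computation; the timestamp value here is made up, and, as in the code above, the naive UTC expiry is compared against the local clock:

# Sketch of the expiry handling used above; the Expiration value is made up.
from datetime import datetime

expires_at = '2030-01-01T12:00:00Z'                      # hypothetical value
credential_expiry_time = datetime.strptime(expires_at, '%Y-%m-%dT%H:%M:%SZ')
remaining = credential_expiry_time - datetime.now()      # a timedelta
print('credentials expire in %s at: %s' % (remaining, expires_at))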
Example no. 3
    def __init__(self,
                 name,
                 access_key=None,
                 secret_key=None,
                 security_token=None,
                 profile_name=None):
        self.host = None
        self.port = None
        self.host_header = None
        self.access_key = access_key
        self.secret_key = secret_key
        self.security_token = security_token
        self.profile_name = profile_name
        self.name = name
        self.acl_class = self.AclClassMap[self.name]
        self.canned_acls = self.CannedAclsMap[self.name]
        self._credential_expiry_time = None

        # Load shared credentials file if it exists
        shared_path = os.path.join(expanduser('~'), '.' + name, 'credentials')
        self.shared_credentials = Config(do_load=False)
        if os.path.isfile(shared_path):
            self.shared_credentials.load_from_path(shared_path)

        self.get_credentials(access_key, secret_key, security_token,
                             profile_name)
        self.configure_headers()
        self.configure_errors()

        # Allow config file to override default host and port.
        host_opt_name = '%s_host' % self.HostKeyMap[self.name]
        if config.has_option('Credentials', host_opt_name):
            self.host = config.get('Credentials', host_opt_name)
        port_opt_name = '%s_port' % self.HostKeyMap[self.name]
        if config.has_option('Credentials', port_opt_name):
            self.port = config.getint('Credentials', port_opt_name)
        host_header_opt_name = '%s_host_header' % self.HostKeyMap[self.name]
        if config.has_option('Credentials', host_header_opt_name):
            self.host_header = config.get('Credentials', host_header_opt_name)
Example no. 4
 def _populate_keys_from_metadata_server(self):
     # get_instance_metadata is imported here because of a circular
     # dependency.
     mssapi.log.debug("Retrieving credentials from metadata server.")
     from mssapi.utils import get_instance_metadata
     timeout = config.getfloat('Mssapi', 'metadata_service_timeout', 1.0)
     attempts = config.getint('Mssapi', 'metadata_service_num_attempts', 1)
     # The num_retries arg is actually the total number of attempts made,
     # so the config option is named *_num_attempts to make this more
     # clear to users.
     metadata = get_instance_metadata(
         timeout=timeout, num_retries=attempts,
         data='meta-data/iam/security-credentials/')
     if metadata:
         # I'm assuming there's only one role on the instance profile.
         security = list(metadata.values())[0]
         self._access_key = security['AccessKeyId']
         self._secret_key = self._convert_key_to_str(security['SecretAccessKey'])
         self._security_token = security['Token']
         expires_at = security['Expiration']
         self._credential_expiry_time = datetime.strptime(
             expires_at, "%Y-%m-%dT%H:%M:%SZ")
         mssapi.log.debug("Retrieved credentials will expire in %s at: %s",
                        self._credential_expiry_time - datetime.now(), expires_at)
Example no. 5
    def _mexe(self, request, sender=None, override_num_retries=None, retry_handler=None):
        """
        mexe - Multi-execute inside a loop, retrying multiple times to handle
               transient Internet errors by simply trying again.
               Also handles redirects.

        This code was inspired by the S3Utils classes posted to the mssapi-users
        Google group by Larry Bates.  Thanks!

        """
        mssapi.log.debug("Method: %s" % request.method)
        mssapi.log.debug("Path: %s" % request.path)
        mssapi.log.debug("Data: %s" % request.body)
        mssapi.log.debug("Headers: %s" % request.headers)
        mssapi.log.debug("Host: %s" % request.host)
        mssapi.log.debug("Port: %s" % request.port)
        mssapi.log.debug("Params: %s" % request.params)
        response = None
        body = None
        ex = None
        if override_num_retries is None:
            num_retries = config.getint("Mssapi", "num_retries", self.num_retries)
        else:
            num_retries = override_num_retries
        i = 0
        connection = self.get_http_connection(request.host, request.port, self.is_secure)

        # Convert body to bytes if needed
        if not isinstance(request.body, bytes) and hasattr(request.body, "encode"):
            request.body = request.body.encode("utf-8")

        while i <= num_retries:
            # Use binary exponential backoff to desynchronize client requests.
            next_sleep = min(random.random() * (2 ** i), mssapi.config.get("Mssapi", "max_retry_delay", 60))
            try:
                # we now re-sign each request before it is retried
                mssapi.log.debug("Token: %s" % self.provider.security_token)
                request.authorize(connection=self)
                # Only force header for non-s3 connections, because s3 uses
                # an older signing method + bucket resource URLs that include
                # the port info. All others should now be up to date and
                # not include the port.
                if "s3" not in self._required_auth_capability():
                    if not getattr(self, "anon", False):
                        if not request.headers.get("Host"):
                            self.set_host_header(request)
                mssapi.log.debug("Final headers: %s" % request.headers)
                request.start_time = datetime.now()
                if callable(sender):
                    response = sender(connection, request.method, request.path, request.body, request.headers)
                else:
                    connection.request(request.method, request.path, request.body, request.headers)
                    response = connection.getresponse()
                mssapi.log.debug("Response headers: %s" % response.getheaders())
                location = response.getheader("location")
                # -- gross hack --
                # http_client gets confused with chunked responses to HEAD requests
                # so I have to fake it out
                if request.method == "HEAD" and getattr(response, "chunked", False):
                    response.chunked = 0
                if callable(retry_handler):
                    status = retry_handler(response, i, next_sleep)
                    if status:
                        msg, i, next_sleep = status
                        if msg:
                            mssapi.log.debug(msg)
                        time.sleep(next_sleep)
                        continue
                if response.status in [500, 502, 503, 504]:
                    msg = "Received %d response.  " % response.status
                    msg += "Retrying in %3.1f seconds" % next_sleep
                    mssapi.log.debug(msg)
                    body = response.read()
                    if isinstance(body, bytes):
                        body = body.decode("utf-8")
                elif response.status < 300 or response.status >= 400 or not location:
                    # don't return connection to the pool if response contains
                    # Connection:close header, because the connection has been
                    # closed and default reconnect behavior may do something
                    # different than new_http_connection. Also, it's probably
                    # less efficient to try to reuse a closed connection.
                    conn_header_value = response.getheader("connection")
                    if conn_header_value == "close":
                        connection.close()
                    else:
                        self.put_http_connection(request.host, request.port, self.is_secure, connection)
                    if self.request_hook is not None:
                        self.request_hook.handle_request_data(request, response)
                    return response
                else:
                    scheme, request.host, request.path, params, query, fragment = urlparse(location)
                    if query:
                        request.path += "?" + query
                    # urlparse can return both host and port in netloc, so if
                    # that's the case we need to split them up properly
                    if ":" in request.host:
                        request.host, request.port = request.host.split(":", 1)
                    msg = "Redirecting: %s" % scheme + "://"
                    msg += request.host + request.path
                    mssapi.log.debug(msg)
                    connection = self.get_http_connection(request.host, request.port, scheme == "https")
                    response = None
                    continue
            except PleaseRetryException as e:
                mssapi.log.debug("encountered a retry exception: %s" % e)
                connection = self.new_http_connection(request.host, request.port, self.is_secure)
                response = e.response
                ex = e
            except self.http_exceptions as e:
                for unretryable in self.http_unretryable_exceptions:
                    if isinstance(e, unretryable):
                        mssapi.log.debug("encountered unretryable %s exception, re-raising" % e.__class__.__name__)
                        raise
                mssapi.log.debug("encountered %s exception, reconnecting" % e.__class__.__name__)
                connection = self.new_http_connection(request.host, request.port, self.is_secure)
                ex = e
            time.sleep(next_sleep)
            i += 1
        # If we made it here, it's because we have exhausted our retries
        # and still haven't succeeded.  So, if we have a response object,
        # use it to raise an exception.
        # Otherwise, raise the exception that must have already happened.
        if self.request_hook is not None:
            self.request_hook.handle_request_data(request, response, error=True)
        if response:
            raise MssapiServerError(response.status, response.reason, body)
        elif ex:
            raise ex
        else:
            msg = "Please report this exception as a Mssapi Issue!"
            raise MssapiClientError(msg)
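
Between attempts, the loop above sleeps for min(random.random() * 2**i, max_retry_delay), i.e. randomized binary exponential backoff capped at a configurable ceiling (60 seconds by default, per the config lookup in the code). A standalone sketch of how that delay envelope grows per attempt:

# Sketch of the capped, randomized exponential backoff used in _mexe above.
import random

max_retry_delay = 60                     # same default cap as the code above
for i in range(8):
    next_sleep = min(random.random() * (2 ** i), max_retry_delay)
    print('attempt %d: sleeping up to %.1f seconds' % (i, next_sleep))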
Example no. 6
    def __init__(
        self,
        host,
        aws_access_key_id=None,
        aws_secret_access_key=None,
        is_secure=True,
        port=None,
        proxy=None,
        proxy_port=None,
        proxy_user=None,
        proxy_pass=None,
        debug=0,
        https_connection_factory=None,
        path="/",
        provider="aws",
        security_token=None,
        suppress_consec_slashes=True,
        validate_certs=True,
        profile_name=None,
    ):
        """
        :type host: str
        :param host: The host to make the connection to

        :keyword str aws_access_key_id: Your AWS Access Key ID (provided by
            Amazon). If none is specified, the value in your
            ``AWS_ACCESS_KEY_ID`` environmental variable is used.
        :keyword str aws_secret_access_key: Your AWS Secret Access Key
            (provided by Amazon). If none is specified, the value in your
            ``AWS_SECRET_ACCESS_KEY`` environmental variable is used.
        :keyword str security_token: The security token associated with
            temporary credentials issued by STS.  Optional unless using
            temporary credentials.  If none is specified, the environment
            variable ``AWS_SECURITY_TOKEN`` is used if defined.

        :type is_secure: boolean
        :param is_secure: Whether the connection is over SSL

        :type https_connection_factory: list or tuple
        :param https_connection_factory: A pair of an HTTP connection
            factory and the exceptions to catch.  The factory should have
            a similar interface to L{http_client.HTTPSConnection}.

        :param str proxy: Address/hostname for a proxy server

        :type proxy_port: int
        :param proxy_port: The port to use when connecting over a proxy

        :type proxy_user: str
        :param proxy_user: The username to connect with on the proxy

        :type proxy_pass: str
        :param proxy_pass: The password to use when connecting over a proxy.

        :type port: int
        :param port: The port to use to connect

        :type suppress_consec_slashes: bool
        :param suppress_consec_slashes: If provided, controls whether
            consecutive slashes will be suppressed in key paths.

        :type validate_certs: bool
        :param validate_certs: Controls whether SSL certificates
            will be validated or not.  Defaults to True.

        :type profile_name: str
        :param profile_name: Override usual Credentials section in config
            file to use a named set of keys instead.
        """
        self.suppress_consec_slashes = suppress_consec_slashes
        self.num_retries = 6
        # Override passed-in is_secure setting if value was defined in config.
        if config.has_option("Mssapi", "is_secure"):
            is_secure = config.getboolean("Mssapi", "is_secure")
        self.is_secure = is_secure
        # Whether or not to validate server certificates.
        # The default is now to validate certificates.  This can be
        # overridden in the mssapi config file or by passing an
        # explicit validate_certs parameter to the class constructor.
        self.https_validate_certificates = config.getbool("Mssapi", "https_validate_certificates", validate_certs)
        if self.https_validate_certificates and not HAVE_HTTPS_CONNECTION:
            raise MssapiClientError(
                "SSL server certificate validation is enabled in mssapi "
                "configuration, but Python dependencies required to "
                "support this feature are not available. Certificate "
                "validation is only supported when running under Python "
                "2.6 or later."
            )
        certs_file = config.get_value("Mssapi", "ca_certificates_file", DEFAULT_CA_CERTS_FILE)
        if certs_file == "system":
            certs_file = None
        self.ca_certificates_file = certs_file
        if port:
            self.port = port
        else:
            self.port = PORTS_BY_SECURITY[is_secure]

        self.handle_proxy(proxy, proxy_port, proxy_user, proxy_pass)
        # define exceptions from http_client that we want to catch and retry
        self.http_exceptions = (http_client.HTTPException, socket.error, socket.gaierror, http_client.BadStatusLine)
        # define subclasses of the above that are not retryable.
        self.http_unretryable_exceptions = []
        if HAVE_HTTPS_CONNECTION:
            self.http_unretryable_exceptions.append(https_connection.InvalidCertificateException)

        # define values in socket exceptions we don't want to catch
        self.socket_exception_values = (errno.EINTR,)
        if https_connection_factory is not None:
            self.https_connection_factory = https_connection_factory[0]
            self.http_exceptions += https_connection_factory[1]
        else:
            self.https_connection_factory = None
        if is_secure:
            self.protocol = "https"
        else:
            self.protocol = "http"
        self.host = host
        self.path = path
        # If the value passed in for debug is not an integer, fall back to 0.
        if not isinstance(debug, six.integer_types):
            debug = 0
        self.debug = config.getint("Mssapi", "debug", debug)
        self.host_header = None

        # Timeout used to tell http_client how long to wait for socket timeouts.
        # Default is to leave timeout unchanged, which will in turn result in
        # the socket's default global timeout being used. To specify a
        # timeout, set http_socket_timeout in Mssapi config. Regardless,
        # timeouts will only be applied if Python is 2.6 or greater.
        self.http_connection_kwargs = {}
        if (sys.version_info[0], sys.version_info[1]) >= (2, 6):
            # If timeout isn't defined in mssapi config file, use 70 second
            # default as recommended by
            self.http_connection_kwargs["timeout"] = config.getint("Mssapi", "http_socket_timeout", 70)

        if isinstance(provider, Provider):
            # Allow overriding Provider
            self.provider = provider
        else:
            self._provider_type = provider
            self.provider = Provider(
                self._provider_type, aws_access_key_id, aws_secret_access_key, security_token, profile_name
            )

        # Allow config file to override default host, port, and host header.
        if self.provider.host:
            self.host = self.provider.host
        if self.provider.port:
            self.port = self.provider.port
        if self.provider.host_header:
            self.host_header = self.provider.host_header

        self._pool = ConnectionPool()
        self._connection = (self.host, self.port, self.is_secure)
        self._last_rs = None
        self._auth_handler = auth.get_auth_handler(host, config, self.provider, self._required_auth_capability())
        if getattr(self, "AuthServiceName", None) is not None:
            self.auth_service_name = self.AuthServiceName
        self.request_hook = None
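
When no explicit port is passed, the constructor above falls back to PORTS_BY_SECURITY[is_secure], a mapping that is referenced but not shown in this snippet. A plausible definition, assuming it simply maps the is_secure flag to the standard HTTP and HTTPS ports:

# Assumed shape of the PORTS_BY_SECURITY fallback referenced in __init__;
# the mapping itself is not part of the snippet above.
PORTS_BY_SECURITY = {True: 443, False: 80}

def default_port(port, is_secure):
    # Mirrors the "if port: ... else: PORTS_BY_SECURITY[is_secure]" branch.
    return port if port else PORTS_BY_SECURITY[is_secure]

assert default_port(None, True) == 443
assert default_port(8080, False) == 8080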
    def send_file(self, key, fp, headers, cb=None, num_cb=10, hash_algs=None):
        """
        Upload a file to a key into a bucket on GS, using GS resumable upload
        protocol.

        :type key: :class:`mssapi.s3.key.Key` or subclass
        :param key: The Key object to which data is to be uploaded

        :type fp: file-like object
        :param fp: The file pointer to upload

        :type headers: dict
        :param headers: The headers to pass along with the PUT request

        :type cb: function
        :param cb: a callback function that will be called to report progress on
            the upload.  The callback should accept two integer parameters, the
            first representing the number of bytes that have been successfully
            transmitted to GS, and the second representing the total number of
            bytes that need to be transmitted.

        :type num_cb: int
        :param num_cb: (optional) If a callback is specified with the cb
            parameter, this parameter determines the granularity of the callback
            by defining the maximum number of times the callback will be called
            during the file transfer. Providing a negative integer will cause
            your callback to be called with each buffer read.

        :type hash_algs: dictionary
        :param hash_algs: (optional) Dictionary mapping hash algorithm
            descriptions to corresponding state-ful hashing objects that
            implement update(), digest(), and copy() (e.g. hashlib.md5()).
            Defaults to {'md5': md5()}.

        Raises ResumableUploadException if a problem occurs during the transfer.
        """

        if not headers:
            headers = {}
        # If Content-Type header is present and set to None, remove it.
        # This is gsutil's way of asking mssapi to refrain from auto-generating
        # that header.
        CT = 'Content-Type'
        if CT in headers and headers[CT] is None:
            del headers[CT]

        headers['User-Agent'] = UserAgent

        # Determine file size different ways for case where fp is actually a
        # wrapper around a Key vs an actual file.
        if isinstance(fp, KeyFile):
            file_length = fp.getkey().size
        else:
            fp.seek(0, os.SEEK_END)
            file_length = fp.tell()
            fp.seek(0)
        debug = key.bucket.connection.debug

        # Compute the MD5 checksum on the fly.
        if hash_algs is None:
            hash_algs = {'md5': md5}
        self.digesters = dict(
            (alg, hash_algs[alg]()) for alg in hash_algs or {})

        # Use num-retries from constructor if one was provided; else check
        # for a value specified in the mssapi config file; else default to 6.
        if self.num_retries is None:
            self.num_retries = config.getint('Mssapi', 'num_retries', 6)
        self.progress_less_iterations = 0

        while True:  # Retry as long as we're making progress.
            server_had_bytes_before_attempt = self.server_has_bytes
            self.digesters_before_attempt = dict(
                (alg, self.digesters[alg].copy())
                for alg in self.digesters)
            try:
                # Save generation and metageneration in class state so caller
                # can find these values, for use in preconditions of future
                # operations on the uploaded object.
                (etag, self.generation, self.metageneration) = (
                    self._attempt_resumable_upload(key, fp, file_length,
                                                   headers, cb, num_cb))

                # Get the final digests for the uploaded content.
                for alg in self.digesters:
                    key.local_hashes[alg] = self.digesters[alg].digest()

                # Upload succeeded, so remove the tracker file (if we have one).
                self._remove_tracker_file()
                self._check_final_md5(key, etag)
                key.generation = self.generation
                if debug >= 1:
                    print('Resumable upload complete.')
                return
            except self.RETRYABLE_EXCEPTIONS as e:
                if debug >= 1:
                    print('Caught exception (%s)' % e.__repr__())
                if isinstance(e, IOError) and e.errno == errno.EPIPE:
                    # Broken pipe error causes httplib to immediately
                    # close the socket (http://bugs.python.org/issue5542),
                    # so we need to close the connection before we resume
                    # the upload (which will cause a new connection to be
                    # opened the next time an HTTP request is sent).
                    key.bucket.connection.connection.close()
            except ResumableUploadException as e:
                self.handle_resumable_upload_exception(e, debug)

            self.track_progress_less_iterations(server_had_bytes_before_attempt,
                                                True, debug)
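
The cb/num_cb contract documented in the send_file docstring is a plain callable taking two integers: bytes transmitted so far and total bytes to transmit. A minimal sketch of a callback fitting that contract; the handler, key, and file objects in the commented call are whatever the caller already holds and are shown only as a hypothetical usage:

# Sketch of a progress callback matching the documented cb(transmitted, total)
# contract; prints a percentage each time it is invoked.
def progress_cb(bytes_transmitted, total_bytes):
    if total_bytes > 0:
        pct = 100.0 * bytes_transmitted / total_bytes
        print('uploaded %d of %d bytes (%.1f%%)' % (bytes_transmitted, total_bytes, pct))

# Hypothetical call, assuming an upload handler, key, and open file already exist:
# handler.send_file(key, fp, headers={}, cb=progress_cb, num_cb=20)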
    def send_file(self, key, fp, headers, cb=None, num_cb=10, hash_algs=None):
        """
        Upload a file to a key into a bucket on GS, using GS resumable upload
        protocol.

        :type key: :class:`mssapi.s3.key.Key` or subclass
        :param key: The Key object to which data is to be uploaded

        :type fp: file-like object
        :param fp: The file pointer to upload

        :type headers: dict
        :param headers: The headers to pass along with the PUT request

        :type cb: function
        :param cb: a callback function that will be called to report progress on
            the upload.  The callback should accept two integer parameters, the
            first representing the number of bytes that have been successfully
            transmitted to GS, and the second representing the total number of
            bytes that need to be transmitted.

        :type num_cb: int
        :param num_cb: (optional) If a callback is specified with the cb
            parameter, this parameter determines the granularity of the callback
            by defining the maximum number of times the callback will be called
            during the file transfer. Providing a negative integer will cause
            your callback to be called with each buffer read.

        :type hash_algs: dictionary
        :param hash_algs: (optional) Dictionary mapping hash algorithm
            descriptions to corresponding state-ful hashing objects that
            implement update(), digest(), and copy() (e.g. hashlib.md5()).
            Defaults to {'md5': md5()}.

        Raises ResumableUploadException if a problem occurs during the transfer.
        """

        if not headers:
            headers = {}
        # If Content-Type header is present and set to None, remove it.
        # This is gsutil's way of asking mssapi to refrain from auto-generating
        # that header.
        CT = "Content-Type"
        if CT in headers and headers[CT] is None:
            del headers[CT]

        headers["User-Agent"] = UserAgent

        # Determine file size different ways for case where fp is actually a
        # wrapper around a Key vs an actual file.
        if isinstance(fp, KeyFile):
            file_length = fp.getkey().size
        else:
            fp.seek(0, os.SEEK_END)
            file_length = fp.tell()
            fp.seek(0)
        debug = key.bucket.connection.debug

        # Compute the MD5 checksum on the fly.
        if hash_algs is None:
            hash_algs = {"md5": md5}
        self.digesters = dict((alg, hash_algs[alg]()) for alg in hash_algs or {})

        # Use num-retries from constructor if one was provided; else check
        # for a value specified in the mssapi config file; else default to 6.
        if self.num_retries is None:
            self.num_retries = config.getint("Mssapi", "num_retries", 6)
        self.progress_less_iterations = 0

        while True:  # Retry as long as we're making progress.
            server_had_bytes_before_attempt = self.server_has_bytes
            self.digesters_before_attempt = dict((alg, self.digesters[alg].copy()) for alg in self.digesters)
            try:
                # Save generation and metageneration in class state so caller
                # can find these values, for use in preconditions of future
                # operations on the uploaded object.
                (etag, self.generation, self.metageneration) = self._attempt_resumable_upload(
                    key, fp, file_length, headers, cb, num_cb
                )

                # Get the final digests for the uploaded content.
                for alg in self.digesters:
                    key.local_hashes[alg] = self.digesters[alg].digest()

                # Upload succeeded, so remove the tracker file (if we have one).
                self._remove_tracker_file()
                self._check_final_md5(key, etag)
                key.generation = self.generation
                if debug >= 1:
                    print("Resumable upload complete.")
                return
            except self.RETRYABLE_EXCEPTIONS as e:
                if debug >= 1:
                    print("Caught exception (%s)" % e.__repr__())
                if isinstance(e, IOError) and e.errno == errno.EPIPE:
                    # Broken pipe error causes httplib to immediately
                    # close the socket (http://bugs.python.org/issue5542),
                    # so we need to close the connection before we resume
                    # the upload (which will cause a new connection to be
                    # opened the next time an HTTP request is sent).
                    key.bucket.connection.connection.close()
            except ResumableUploadException as e:
                self.handle_resumable_upload_exception(e, debug)

            self.track_progress_less_iterations(server_had_bytes_before_attempt, True, debug)
    def get_file(self, key, fp, headers, cb=None, num_cb=10, torrent=False,
                 version_id=None, hash_algs=None):
        """
        Retrieves a file from a Key.

        :type key: :class:`mssapi.s3.key.Key` or subclass
        :param key: The Key object from which data is to be downloaded
        
        :type fp: file
        :param fp: File pointer into which data should be downloaded
        
        :type headers: dict
        :param headers: Headers to send when retrieving the file
        
        :type cb: function
        :param cb: (optional) a callback function that will be called to report
             progress on the download.  The callback should accept two integer
             parameters, the first representing the number of bytes that have
             been successfully transmitted from the storage service and
             the second representing the total number of bytes that need
             to be transmitted.
        
        :type num_cb: int
        :param num_cb: (optional) If a callback is specified with the cb
             parameter this parameter determines the granularity of the callback
             by defining the maximum number of times the callback will be
             called during the file transfer.
             
        :type torrent: bool
        :param torrent: Flag for whether to get a torrent for the file

        :type version_id: string
        :param version_id: The version ID (optional)

        :type hash_algs: dictionary
        :param hash_algs: (optional) Dictionary of hash algorithms and
            corresponding hashing class that implements update() and digest().
            Defaults to {'md5': hashlib.md5}.

        Raises ResumableDownloadException if a problem occurs during
            the transfer.
        """

        debug = key.bucket.connection.debug
        if not headers:
            headers = {}

        # Use num-retries from constructor if one was provided; else check
        # for a value specified in the mssapi config file; else default to 6.
        if self.num_retries is None:
            self.num_retries = config.getint('Mssapi', 'num_retries', 6)
        progress_less_iterations = 0

        while True:  # Retry as long as we're making progress.
            had_file_bytes_before_attempt = get_cur_file_size(fp)
            try:
                self._attempt_resumable_download(key, fp, headers, cb, num_cb,
                                                 torrent, version_id, hash_algs)
                # Download succeeded, so remove the tracker file (if we have one).
                self._remove_tracker_file()
                # Previously, check_final_md5() was called here to validate the
                # downloaded file's checksum; however, to be consistent with
                # non-resumable downloads, this call was removed. Checksum
                # validation of file contents should be done by the caller.
                if debug >= 1:
                    print('Resumable download complete.')
                return
            except self.RETRYABLE_EXCEPTIONS as e:
                if debug >= 1:
                    print('Caught exception (%s)' % e.__repr__())
                if isinstance(e, IOError) and e.errno == errno.EPIPE:
                    # Broken pipe error causes httplib to immediately
                    # close the socket (http://bugs.python.org/issue5542),
                    # so we need to close and reopen the key before resuming
                    # the download.
                    if isinstance(key, GSKey):
                        key.get_file(fp, headers, cb, num_cb, torrent,
                                     version_id, override_num_retries=0,
                                     hash_algs=hash_algs)
                    else:
                        key.get_file(fp, headers, cb, num_cb, torrent,
                                     version_id, override_num_retries=0)
            except ResumableDownloadException as e:
                if (e.disposition ==
                    ResumableTransferDisposition.ABORT_CUR_PROCESS):
                    if debug >= 1:
                        print('Caught non-retryable ResumableDownloadException '
                              '(%s)' % e.message)
                    raise
                elif (e.disposition ==
                    ResumableTransferDisposition.ABORT):
                    if debug >= 1:
                        print('Caught non-retryable ResumableDownloadException '
                              '(%s); aborting and removing tracker file' %
                              e.message)
                    self._remove_tracker_file()
                    raise
                else:
                    if debug >= 1:
                        print('Caught ResumableDownloadException (%s) - will '
                              'retry' % e.message)

            # At this point we had a retryable failure; see if we made progress.
            if get_cur_file_size(fp) > had_file_bytes_before_attempt:
                progress_less_iterations = 0
            else:
                progress_less_iterations += 1

            if progress_less_iterations > self.num_retries:
                # Don't retry any longer in the current process.
                raise ResumableDownloadException(
                    'Too many resumable download attempts failed without '
                    'progress. You might try this download again later',
                    ResumableTransferDisposition.ABORT_CUR_PROCESS)

            # Close the key, in case a previous download died partway
            # through and left data in the underlying key HTTP buffer.
            # Do this within a try/except block in case the connection is
            # closed (since key.close() attempts to do a final read, in which
            # case this read attempt would get an IncompleteRead exception,
            # which we can safely ignore).
            try:
                key.close()
            except httplib.IncompleteRead:
                pass

            sleep_time_secs = 2**progress_less_iterations
            if debug >= 1:
                print('Got retryable failure (%d progress-less in a row).\n'
                      'Sleeping %d seconds before re-trying' %
                      (progress_less_iterations, sleep_time_secs))
            time.sleep(sleep_time_secs)
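
hash_algs, as used in both send_file and get_file above, maps algorithm names to hash constructors; the handler instantiates one stateful digester per entry and calls update() and digest() on it. A short sketch of that pattern with the documented {'md5': md5} default:

# Sketch of the hash_algs pattern used above: map names to constructors,
# build one stateful digester per algorithm, then update/digest as data arrives.
from hashlib import md5

hash_algs = {'md5': md5}                                   # documented default
digesters = dict((alg, hash_algs[alg]()) for alg in hash_algs)

for chunk in (b'first chunk', b'second chunk'):            # stand-in data
    for digester in digesters.values():
        digester.update(chunk)

local_hashes = dict((alg, d.digest()) for alg, d in digesters.items())
print(local_hashes['md5'].hex())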
    def get_file(self,
                 key,
                 fp,
                 headers,
                 cb=None,
                 num_cb=10,
                 torrent=False,
                 version_id=None,
                 hash_algs=None):
        """
        Retrieves a file from a Key.

        :type key: :class:`mssapi.s3.key.Key` or subclass
        :param key: The Key object from which data is to be downloaded
        
        :type fp: file
        :param fp: File pointer into which data should be downloaded
        
        :type headers: dict
        :param headers: Headers to send when retrieving the file
        
        :type cb: function
        :param cb: (optional) a callback function that will be called to report
             progress on the download.  The callback should accept two integer
             parameters, the first representing the number of bytes that have
             been successfully transmitted from the storage service and
             the second representing the total number of bytes that need
             to be transmitted.
        
        :type num_cb: int
        :param num_cb: (optional) If a callback is specified with the cb
             parameter this parameter determines the granularity of the callback
             by defining the maximum number of times the callback will be
             called during the file transfer.
             
        :type torrent: bool
        :param torrent: Flag for whether to get a torrent for the file

        :type version_id: string
        :param version_id: The version ID (optional)

        :type hash_algs: dictionary
        :param hash_algs: (optional) Dictionary of hash algorithms and
            corresponding hashing class that implements update() and digest().
            Defaults to {'md5': hashlib.md5}.

        Raises ResumableDownloadException if a problem occurs during
            the transfer.
        """

        debug = key.bucket.connection.debug
        if not headers:
            headers = {}

        # Use num-retries from constructor if one was provided; else check
        # for a value specified in the mssapi config file; else default to 6.
        if self.num_retries is None:
            self.num_retries = config.getint('Mssapi', 'num_retries', 6)
        progress_less_iterations = 0

        while True:  # Retry as long as we're making progress.
            had_file_bytes_before_attempt = get_cur_file_size(fp)
            try:
                self._attempt_resumable_download(key, fp, headers, cb, num_cb,
                                                 torrent, version_id,
                                                 hash_algs)
                # Download succeeded, so remove the tracker file (if we have one).
                self._remove_tracker_file()
                # Previously, check_final_md5() was called here to validate the
                # downloaded file's checksum; however, to be consistent with
                # non-resumable downloads, this call was removed. Checksum
                # validation of file contents should be done by the caller.
                if debug >= 1:
                    print('Resumable download complete.')
                return
            except self.RETRYABLE_EXCEPTIONS as e:
                if debug >= 1:
                    print('Caught exception (%s)' % e.__repr__())
                if isinstance(e, IOError) and e.errno == errno.EPIPE:
                    # Broken pipe error causes httplib to immediately
                    # close the socket (http://bugs.python.org/issue5542),
                    # so we need to close and reopen the key before resuming
                    # the download.
                    if isinstance(key, GSKey):
                        key.get_file(fp,
                                     headers,
                                     cb,
                                     num_cb,
                                     torrent,
                                     version_id,
                                     override_num_retries=0,
                                     hash_algs=hash_algs)
                    else:
                        key.get_file(fp,
                                     headers,
                                     cb,
                                     num_cb,
                                     torrent,
                                     version_id,
                                     override_num_retries=0)
            except ResumableDownloadException as e:
                if (e.disposition ==
                        ResumableTransferDisposition.ABORT_CUR_PROCESS):
                    if debug >= 1:
                        print(
                            'Caught non-retryable ResumableDownloadException '
                            '(%s)' % e.message)
                    raise
                elif (e.disposition == ResumableTransferDisposition.ABORT):
                    if debug >= 1:
                        print(
                            'Caught non-retryable ResumableDownloadException '
                            '(%s); aborting and removing tracker file' %
                            e.message)
                    self._remove_tracker_file()
                    raise
                else:
                    if debug >= 1:
                        print('Caught ResumableDownloadException (%s) - will '
                              'retry' % e.message)

            # At this point we had a retryable failure; see if we made progress.
            if get_cur_file_size(fp) > had_file_bytes_before_attempt:
                progress_less_iterations = 0
            else:
                progress_less_iterations += 1

            if progress_less_iterations > self.num_retries:
                # Don't retry any longer in the current process.
                raise ResumableDownloadException(
                    'Too many resumable download attempts failed without '
                    'progress. You might try this download again later',
                    ResumableTransferDisposition.ABORT_CUR_PROCESS)

            # Close the key, in case a previous download died partway
            # through and left data in the underlying key HTTP buffer.
            # Do this within a try/except block in case the connection is
            # closed (since key.close() attempts to do a final read, in which
            # case this read attempt would get an IncompleteRead exception,
            # which we can safely ignore).
            try:
                key.close()
            except httplib.IncompleteRead:
                pass

            sleep_time_secs = 2**progress_less_iterations
            if debug >= 1:
                print('Got retryable failure (%d progress-less in a row).\n'
                      'Sleeping %d seconds before re-trying' %
                      (progress_less_iterations, sleep_time_secs))
            time.sleep(sleep_time_secs)