Example #1
 def __init__(self, email, password):
     self.client = Client(
         email,
         password,
     )
     # set config variables
     self.combine_tmpdir_name_with_token = False
Example #2
    def __init__(
            self, email, password,
            max_retries_contract_negotiation=10,
            max_retries_upload_same_farmer=3):

        self.client = Client(email, password)
        self.shards_already_uploaded = 0
        self.max_retries_contract_negotiation = max_retries_contract_negotiation
        self.max_retries_upload_same_farmer = max_retries_upload_same_farmer
Example #3
class Uploader:
    """

    Attributes:
        client (:py:class:`storj.http.Client`): the Storj HTTP client.
        shards_already_uploaded (int): number of shards already uploaded.
        max_retries_contract_negotiation (int): maximum number of contract negotiation retries (default=10).
        max_retries_upload_same_farmer (int): maximum number of uploads retries to the same farmer (default=3).
    """

    __logger = logging.getLogger('%s.Uploader' % __name__)

    def __init__(self,
                 email,
                 password,
                 max_retries_contract_negotiation=10,
                 max_retries_upload_same_farmer=3):

        self.client = Client(email, password)
        self.shards_already_uploaded = 0
        self.max_retries_contract_negotiation = max_retries_contract_negotiation
        self.max_retries_upload_same_farmer = max_retries_upload_same_farmer

    def _calculate_hmac(self, base_string, key):
        """HMAC hash calculation and returning
        the results in dictionary collection.

        Args:
            base_string (): .
            key (): .
        """
        hmacs = dict()
        # --- MD5 ---
        hashed = hmac.new(key, base_string, hashlib.md5)
        hmac_md5 = hashed.digest().encode('base64').rstrip('\n')
        hmacs['MD5'] = hmac_md5
        # --- SHA-1 ---
        hashed = hmac.new(key, base_string, hashlib.sha1)
        hmac_sha1 = hashed.digest().encode('base64').rstrip('\n')
        hmacs['SHA-1'] = hmac_sha1
        # --- SHA-224 ---
        hashed = hmac.new(key, base_string, hashlib.sha224)
        hmac_sha224 = hashed.digest().encode('base64').rstrip('\n')
        hmacs['SHA-224'] = hmac_sha224
        # --- SHA-256 ---
        hashed = hmac.new(key, base_string, hashlib.sha256)
        hmac_sha256 = hashed.digest().encode('base64').rstrip('\n')
        hmacs['SHA-256'] = hmac_sha256
        # --- SHA-384 ---
        hashed = hmac.new(key, base_string, hashlib.sha384)
        hmac_sha384 = hashed.digest().encode('base64').rstrip('\n')
        hmacs['SHA-384'] = hmac_sha384
        # --- SHA-512 ---
        hashed = hmac.new(key, base_string, hashlib.sha512)
        hmac_sha512 = hashed.digest().encode('base64').rstrip('\n')
        hmacs['SHA-512'] = hmac_sha512
        return hmacs

    def _prepare_bucket_entry_hmac(self, shard_array):
        """

        Args:
            shard_array (): .
        """
        storj_keyring = model.Keyring()
        encryption_key = storj_keyring.get_encryption_key('test')
        current_hmac = ''

        for shard in shard_array:
            base64_decoded = '%s%s' % (base64.decodestring(
                shard.hash), current_hmac)
            current_hmac = self._calculate_hmac(base64_decoded, encryption_key)

        self.__logger.debug('current_hmac=%s' % current_hmac)

        return current_hmac

    def upload_shard(self, shard, chapters, frame,
                     file_name_ready_to_shard_upload, tmp_path):
        """

        Args:
            shard:
            chapters:
            frame:
            file_name_ready_to_shard_upload:
            tmp_path:
        """
        contract_negotiation_tries = 0
        exchange_report = model.ExchangeReport()

        while self.max_retries_contract_negotiation > \
                contract_negotiation_tries:
            contract_negotiation_tries += 1
            self.__logger.debug('Negotiating contract')
            self.__logger.debug('Trying to negotiate storage contract for \
shard at index %s...' % chapters)

            try:
                frame_content = self.client.frame_add_shard(shard, frame.id)

                farmerNodeID = frame_content['farmer']['nodeID']

                url = 'http://%s:%d/shards/%s?token=%s' % (
                    frame_content['farmer']['address'],
                    frame_content['farmer']['port'], frame_content['hash'],
                    frame_content['token'])
                self.__logger.debug('upload_shard url=%s', url)

                # begin recording exchange report
                # exchange_report = model.ExchangeReport()

                current_timestamp = int(time.time())

                exchange_report.exchangeStart = str(current_timestamp)
                exchange_report.farmerId = str(farmerNodeID)
                exchange_report.dataHash = str(shard.hash)

                farmer_tries = 0
                response = None

                while self.max_retries_upload_same_farmer > farmer_tries:
                    farmer_tries += 1

                    try:
                        self.__logger.debug(
                            'Upload shard at index %s to %s:%d attempt #%d',
                            shard.index, frame_content['farmer']['address'],
                            frame_content['farmer']['port'], farmer_tries)

                        mypath = os.path.join(
                            tmp_path, '%s-%s' %
                            (file_name_ready_to_shard_upload, chapters + 1))

                        with open(mypath, 'rb') as f:
                            response = requests.post(url,
                                                     data=self._read_in_chunks(
                                                         f,
                                                         shard_index=chapters),
                                                     timeout=1)

                        j = json.loads(str(response.content))

                        if j.get('result') == \
                                'The supplied token is not accepted':
                            raise SuppliedTokenNotAcceptedError()

                    except FarmerError as e:
                        self.__logger.error(e)
                        continue

                    except Exception as e:
                        self.__logger.error(e)
                        self.__logger.error('Shard upload error to %s:%d',
                                            frame_content['farmer']['address'],
                                            frame_content['farmer']['port'])
                        continue

                    self.shards_already_uploaded += 1
                    self.__logger.info('Shard uploaded successfully to %s:%d',
                                       frame_content['farmer']['address'],
                                       frame_content['farmer']['port'])

                    self.__logger.debug('%s shards, %s sent',
                                        self.all_shards_count,
                                        self.shards_already_uploaded)

                    if int(self.all_shards_count) <= \
                            int(self.shards_already_uploaded):
                        self.__logger.debug('finish upload')

                    break

                self.__logger.debug('response.content=%s', response.content)

                j = json.loads(str(response.content))
                if j.get('result') == 'The supplied token is not accepted':
                    raise SuppliedTokenNotAcceptedError()

            except BridgeError as e:
                self.__logger.error(e)

                # upload failed due to Storj Bridge failure
                self.__logger.debug('Exception raised while trying to \
negotiate contract')
                continue

            except Exception as e:
                # now send Exchange Report
                # upload failed probably while sending data to farmer
                self.__logger.error(e)
                self.__logger.error('Error occurred while trying to upload \
shard or negotiate contract. Retrying... ')
                self.__logger.error(
                    'Unhandled exception occurred while trying \
to upload shard or negotiate contract for shard at index %s. Retrying...',
                    chapters)

                current_timestamp = int(time.time())
                exchange_report.exchangeEnd = str(current_timestamp)
                exchange_report.exchangeResultCode = (exchange_report.FAILURE)
                exchange_report.exchangeResultMessage = \
                    (exchange_report.STORJ_REPORT_UPLOAD_ERROR)
                # Send exchange report
                # self.client.send_exchange_report(exchange_report)
                continue

            # uploaded with success
            current_timestamp = int(time.time())
            # prepare second half of exchange report
            exchange_report.exchangeEnd = str(current_timestamp)
            exchange_report.exchangeResultCode = exchange_report.SUCCESS
            exchange_report.exchangeResultMessage = \
                exchange_report.STORJ_REPORT_SHARD_UPLOADED

            self.__logger.info(
                'Shard %s successfully added and exchange \
report sent.', chapters + 1)
            # Send exchange report
            # self.client.send_exchange_report(exchange_report)
            break

    def _read_in_chunks(self,
                        file_object,
                        blocksize=4096,
                        chunks=-1,
                        shard_index=None):
        """Lazy function (generator) to read a file piece by piece.

        Default chunk size: 1k.

        Args:
            file_object (): .
            blocksize (): .
            chunks (): .
        """

        i = 0

        while chunks:
            data = file_object.read(blocksize)
            if not data:
                break
            yield data
            i += 1

            chunks -= 1

    def file_upload(self, bucket_id, file_path, tmp_file_path):
        """"""

        self.__logger.debug('Upload %s in bucket %d', file_path, bucket_id)
        self.__logger.debug('Temp folder %s', tmp_file_path)

        encryption_enabled = True

        bname = os.path.split(file_path)[1]  # File name

        file_mime_type = 'text/plain'

        # Encrypt file
        self.__logger.debug('Encrypting file...')

        file_crypto_tools = FileCrypto()

        # File name of encrypted file
        file_name_ready_to_shard_upload = '%s.encrypted' % bname
        # Path where to save the encrypted file in temp dir
        file_path_ready = os.path.join(tmp_file_path,
                                       file_name_ready_to_shard_upload)
        self.__logger.debug('file_path_ready: %s', file_path_ready)

        # Begin file encryption
        file_crypto_tools.encrypt_file('AES', file_path, file_path_ready,
                                       self.client.password)

        self.fileisdecrypted_str = ''

        file_size = os.stat(file_path).st_size
        self.__logger.debug('File encrypted')

        # Get the PUSH token from Storj Bridge
        self.__logger.debug('Get PUSH Token')

        push_token = None
        try:
            push_token = self.client.token_create(bucket_id, 'PUSH')
        except BridgeError as e:
            self.__logger.error(e)
            self.__logger.debug('PUSH token create exception')

        self.__logger.debug('PUSH Token ID %s', push_token.id)

        # Get a frame
        self.__logger.debug('Frame')
        frame = None

        try:
            frame = self.client.frame_create()
        except BridgeError as e:
            self.__logger.error(e)
            self.__logger.debug('Unhandled exception while creating file \
staging frame')

        self.__logger.debug('frame.id = %s', frame.id)

        # Now generate shards
        self.__logger.debug('Sharding started...')
        shards_manager = model.ShardManager(filepath=file_path_ready,
                                            tmp_path=tmp_file_path)
        self.all_shards_count = len(shards_manager.shards)

        self.__logger.debug('Sharding ended...')

        self.__logger.debug('There are %d shards', self.all_shards_count)

        # Upload the shards in parallel; each tuple matches upload_shard's
        # signature and is handled by the module-level helper foo (defined
        # outside this snippet).

        mp = Pool()
        mp.map(foo, [(self, shards_manager.shards[x], x, frame,
                      file_name_ready_to_shard_upload, tmp_file_path)
                     for x in range(len(shards_manager.shards))])

        # finish_upload
        self.__logger.debug('Generating HMAC...')

        hash_sha512_hmac_b64 = self._prepare_bucket_entry_hmac(
            shards_manager.shards)
        hash_sha512_hmac = hashlib.sha224(str(
            hash_sha512_hmac_b64['SHA-512'])).hexdigest()

        self.__logger.debug('Now upload file')
        data = {
            'x-token': push_token.id,
            'x-filesize': str(file_size),
            'frame': frame.id,
            'mimetype': file_mime_type,
            'filename': str(bname) + str(self.fileisdecrypted_str),
            'hmac': {
                'type': 'sha512',
                'value': hash_sha512_hmac
            },
        }

        self.__logger.debug('Finishing upload')
        self.__logger.debug('Adding file %s to bucket...', bname)

        success = False
        try:
            # Post an upload_file request
            response = self.client._request(
                method='POST',
                path='/buckets/%s/files' % bucket_id,
                headers={
                    'x-token': push_token.id,
                    'x-filesize': str(file_size),
                },
                json=data,
            )
            success = True

        except BridgeError as e:
            self.__logger.error(e)
            self.__logger.debug('Unhandled bridge exception')

        if success:
            self.__logger.debug('File uploaded successfully!')

        # Remove temp files
        try:
            # Remove shards
            file_shards = map(lambda i: '%s-%s' % (file_path_ready, i),
                              range(1, self.all_shards_count + 1))
            self.__logger.debug('Remove shards %s' % file_shards)
            map(os.remove, file_shards)
            # Remove encrypted file
            self.__logger.debug('Remove encrypted file %s' % file_path_ready)
            os.remove(file_path_ready)
        except OSError as e:
            self.__logger.error(e)
Example #4
 def __init__(self, email, password, timeout=None):
     self.client = Client(email, password, timeout=timeout)
     self.max_spooled = 10 * 1024 * 1024  # keep files up to 10MiB in memory
Example #5
class Downloader:
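    # Hypothetical usage sketch (illustrative credentials and IDs, not from
    # the project):
    #   downloader = Downloader('user@example.com', 'secret')
    #   downloader.download_begin('<bucket_id>', '<file_id>')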

    __logger = logging.getLogger('%s.Downloader' % __name__)

    def __init__(self, email, password, timeout=None):
        self.client = Client(email, password, timeout=timeout)
        self.max_spooled = 10 * 1024 * 1024  # keep files up to 10MiB in memory

    def _calculate_timeout(self, shard_size, mbps=0.5):
        """
        Calculate the timeout with respect to the minimum bandwidth accepted
        by the user (default: 5 Mbps).

        Args:
            shard_size: shard size in Byte
            mbps: upload throughtput. Default 500 kbps
        """
        if not self.client.timeout:
            self.client.timeout = int(shard_size * 8.0 / (1024**2 * mbps))
        self.__logger.info('Set timeout to %s seconds' % self.client.timeout)

    def get_file_pointers_count(self, bucket_id, file_id):
        frame_data = self.client.frame_get(self.file_frame.id)
        return len(frame_data.shards)

    def set_file_metadata(self, bucket_id, file_id):
        try:
            file_metadata = self.client.file_metadata(bucket_id, file_id)
            # Get file name
            self.filename_from_bridge = str(file_metadata.filename)
            self.__logger.debug('Filename from bridge: %s',
                                self.filename_from_bridge)
            # Get file frame
            self.file_frame = file_metadata.frame

        except StorjBridgeApiError as e:
            self.__logger.error(e)
            self.__logger.error('Error while resolving file metadata')

        except Exception as e:
            self.__logger.error(e)
            self.__logger.error(
                'Unhandled error while resolving file metadata')

    def download_begin(self, bucket_id, file_id):
        # Initialize environment
        self.set_file_metadata(bucket_id, file_id)
        # Get the number of shards
        self.all_shards_count = self.get_file_pointers_count(
            bucket_id, file_id)
        # Set the paths
        self.destination_file_path = os.path.expanduser('~')
        self.__logger.debug('destination path %s', self.destination_file_path)

        mp = ThreadPool()
        shards = None

        try:
            self.__logger.debug(
                'Resolving file pointers to download file with ID: %s ...',
                file_id)

            tries_get_file_pointers = 0

            while MAX_RETRIES_GET_FILE_POINTERS > tries_get_file_pointers:

                self.__logger.debug(
                    'Attempt number %s of getting a pointer to the file',
                    tries_get_file_pointers)
                tries_get_file_pointers += 1

                try:
                    # Get all the pointers to the shards
                    shard_pointers = self.client.file_pointers(
                        bucket_id,
                        file_id,
                        limit=str(self.all_shards_count),
                        skip='0')

                    self.__logger.debug('There are %s shard pointers: ',
                                        len(shard_pointers))

                    # Calculate timeout
                    self._calculate_timeout(shard_pointers[0]['size'], mbps=1)

                    # Upload shards thread pool
                    self.__logger.debug('Begin shards download process')
                    shards = mp.map(
                        lambda x: self.shard_download(x[1], x[0], bucket_id,
                                                      file_id),
                        enumerate(shard_pointers))

                except StorjBridgeApiError as e:
                    self.__logger.error(e)
                    self.__logger.error('Bridge error')
                    self.__logger.error(
                        'Error while resolving file pointers \
to download file with ID: %s ...', file_id)
                    self.__logger.error(e)
                    continue
                else:
                    break

        except StorjBridgeApiError as e:
            self.__logger.error(e)
            self.__logger.error('Outer Bridge error')
            self.__logger.error('Error while resolving file pointers to \
download file with ID: %s' % str(file_id))

        # All the shards have been downloaded
        self.__logger.debug(shards)
        if shards is not None:
            self.finish_download(shards)

    def finish_download(self, shards):
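        # Join the downloaded shard files into one stream; if the original
        # file was encrypted, decrypt the joined stream into destination_path.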
        self.__logger.debug('Finish download')
        fileisencrypted = '[DECRYPTED]' not in self.filename_from_bridge

        destination_path = os.path.join(self.destination_file_path,
                                        self.filename_from_bridge)
        self.__logger.debug('Destination path %s', destination_path)

        # Join shards
        sharding_tools = ShardingTools()
        self.__logger.debug('Joining shards...')

        try:
            if not fileisencrypted:
                with open(destination_path, 'wb') as destination_fp:
                    sharding_tools.join_shards(shards, destination_fp)

            else:
                with SpooledTemporaryFile(self.max_spooled, 'r+') as encrypted:
                    sharding_tools.join_shards(shards, encrypted)

                    # move file read pointer at beginning
                    encrypted.seek(0)

                    # decrypt file
                    self.__logger.debug('Decrypting file...')
                    file_crypto_tools = FileCrypto()

                    # Begin file decryption
                    with open(destination_path, 'wb') as destination_fp:
                        file_crypto_tools.decrypt_file_aes(
                            encrypted, destination_fp,
                            str(self.client.password))

            self.__logger.debug('Finish decryption')
            self.__logger.info('Download completed successfully!')

        except (OSError, IOError, EOFError) as e:
            self.__logger.error(e)

        finally:
            # delete temporary shards
            for shard in shards:
                shard.close()

        return True

    def retrieve_shard_file(self, url, shard_index):
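        # Stream the shard from the farmer URL into a SpooledTemporaryFile
        # (kept in memory up to self.max_spooled bytes), retrying the same
        # farmer a limited number of times.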
        farmer_tries = 0

        self.__logger.debug('Downloading shard at index %s from farmer: %s',
                            shard_index, url)

        tries_download_from_same_farmer = 0
        while MAX_RETRIES_DOWNLOAD_FROM_SAME_FARMER > \
                tries_download_from_same_farmer:

            tries_download_from_same_farmer += 1
            farmer_tries += 1

            try:
                # data is spooled in memory until the file size exceeds max_size
                shard = SpooledTemporaryFile(self.max_spooled, 'wb')

                # Request the shard
                r = requests.get(url, stream=True, timeout=self.client.timeout)
                if r.status_code != 200 and r.status_code != 304:
                    raise StorjFarmerError()

                # Write the file
                for chunk in r.iter_content(chunk_size=1024):
                    if chunk:  # filter out keep-alive new chunks
                        shard.write(chunk)

                # Everything ok
                # move file read pointer at beginning
                shard.seek(0)
                return shard

            except StorjFarmerError as e:
                self.__logger.error(e)
                # Update shard download state
                self.__logger.error('First try failed. Retrying... (%s)' %
                                    str(farmer_tries))

            except requests.exceptions.Timeout as ret:
                self.__logger.error('Request number %s for shard %s timed out. \
Took too long.' % (farmer_tries, shard_index))
                self.__logger.error(ret)

            except Exception as e:
                self.__logger.error(e)
                self.__logger.error('Unhandled error')
                self.__logger.error('Error occurred while downloading shard at '
                                    'index %s. Retrying... (%s)' %
                                    (shard_index, farmer_tries))

        self.__logger.error('Shard download at index %s failed' % shard_index)
        raise ClientError()

    def shard_download(self, pointer, shard_index, bucket_id, file_id):
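        # Build the farmer URL from the pointer and fetch the shard in a
        # worker thread so a timed-out download can be aborted and retried
        # with a fresh pointer.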
        self.__logger.debug('Beginning download process...')

        try:
            self.__logger.debug('Starting download threads...')
            self.__logger.debug('Downloading shard at index %s ...',
                                shard_index)

            url = 'http://{address}:{port}/shards/{hash}?token={token}'.format(
                address=pointer.get('farmer')['address'],
                port=str(pointer.get('farmer')['port']),
                hash=pointer['hash'],
                token=pointer['token'])
            self.__logger.debug(url)

            tp = ThreadPool(processes=1)
            async_result = tp.apply_async(
                self.retrieve_shard_file,
                (url, shard_index))  # tuple of args for foo
            shard = async_result.get(
                self.client.timeout)  # get the return value

            # shard = self.retrieve_shard_file(url, shard_index)
            self.__logger.debug('Shard downloaded')
            self.__logger.debug('Shard at index %s downloaded successfully',
                                shard_index)
            return shard

        except IOError as e:
            self.__logger.error('Perm error %s', e)
            if str(e) == str(13):
                self.__logger.error("""Error while saving or reading file or
                temporary file.
                Probably this is caused by insufficient permissions.
                Please check if you have permissions to write or
                read from selected directories.""")

        except TimeoutError:
            self.__logger.warning('Aborting shard %s download due to timeout' %
                                  shard_index)
            tp.terminate()
            self.__logger.warning('Try with a new pointer')
            new_pointer = self.client.file_pointers(
                bucket_id=bucket_id,
                file_id=file_id,
                limit='1',
                skip=str(shard_index),
                exclude=str([pointer['farmer']['nodeID']]))
            self.__logger.debug('Found new pointer')
            return self.shard_download(new_pointer[0], shard_index, bucket_id,
                                       file_id)

        except Exception as e:
            self.__logger.error(e)
            self.__logger.error('Unhandled')
Example #6
import sys
sys.path.append('.')

from datetime import datetime
from http import Client, Request, Date

client = Client(agent='my uber agent')

request = Request('HEAD', 'http://lumberjaph.net')
request.if_modified_since = datetime(2011, 12, 1, 0, 0)

response = client.request(request)

if response.is_success:
    print "yeah, success!"
    print "status: {status}".format(status=response.status)
    print "message: {message}".format(message=response.message)
    print "content length: {length}".format(length=response.content_length)
    print "last modified in epoch: {last_modified}".format(last_modified=Date.time2epoch(response.last_modified))
    print "last modified in string: {last_modified}".format(last_modified=response.header('Last-Modified'))
    if response.content_is_text:
        print response.content
else:
    print "oups! {status_line}".format(status_line=response.status_line)
Example #7
File: simple.py  Project: franckcuny/http
import sys
sys.path.append('.')

from datetime import datetime
from http import Client, Request, Date

client = Client(agent='my uber agent')

request = Request('HEAD', 'http://lumberjaph.net')
request.if_modified_since = datetime(2011, 12, 1, 0, 0)

response = client.request(request)

if response.is_success:
    print "yeah, success!"
    print "status: {status}".format(status=response.status)
    print "message: {message}".format(message=response.message)
    print "content length: {length}".format(length=response.content_length)
    print "last modified in epoch: {last_modified}".format(
        last_modified=Date.time2epoch(response.last_modified))
    print "last modified in string: {last_modified}".format(
        last_modified=response.header('Last-Modified'))
    if response.content_is_text:
        print response.content
else:
    print "oups! {status_line}".format(status_line=response.status_line)
Example #8
class Downloader:

    __logger = logging.getLogger('%s.Downloader' % __name__)

    def __init__(self, email, password):
        self.client = Client(
            email,
            password,
        )
        # set config variables
        self.combine_tmpdir_name_with_token = False

    def get_file_pointers_count(self, bucket_id, file_id):
        frame_data = self.client.frame_get(self.file_frame.id)
        return len(frame_data.shards)

    def set_file_metadata(self, bucket_id, file_id):
        try:
            file_metadata = self.client.file_metadata(bucket_id, file_id)
            # Get file name
            self.filename_from_bridge = str(file_metadata.filename)
            self.__logger.debug('Filename from bridge: %s',
                                self.filename_from_bridge)
            # Get file frame
            self.file_frame = file_metadata.frame

        except StorjBridgeApiError as e:
            self.__logger.error(e)
            self.__logger.error('Error while resolving file metadata')

        except Exception as e:
            self.__logger.error(e)
            self.__logger.error(
                'Unhandled error while resolving file metadata')

    def get_paths(self):
        # set default paths
        temp_dir = ""
        if platform == 'linux' or platform == 'linux2':
            # linux
            temp_dir = '/tmp'
        elif platform == 'darwin':
            # OS X
            temp_dir = '/tmp'
        elif platform == 'win32':
            # Windows
            temp_dir = 'C:/Windows/temp'
        home = os.path.expanduser('~')
        return temp_dir, home

    def download_begin(self, bucket_id, file_id):
        # Initialize environment
        self.set_file_metadata(bucket_id, file_id)
        # Get the number of shards
        self.all_shards_count = self.get_file_pointers_count(
            bucket_id, file_id)
        # Set the paths
        self.tmp_path, self.destination_file_path = self.get_paths()
        self.__logger.debug('temp path %s', self.tmp_path)
        self.__logger.debug('destination path %s', self.destination_file_path)

        mp = Pool()
        try:
            self.__logger.debug(
                'Resolving file pointers to download file with ID: %s ...',
                file_id)

            tries_get_file_pointers = 0

            while MAX_RETRIES_GET_FILE_POINTERS > tries_get_file_pointers:

                self.__logger.debug(
                    'Attempt number %s of getting a pointer to the file',
                    tries_get_file_pointers)
                tries_get_file_pointers += 1

                try:
                    # Get all the pointers to the shards
                    shard_pointers = self.client.file_pointers(
                        bucket_id,
                        file_id,
                        limit=str(self.all_shards_count),
                        skip='0')
                    self.__logger.debug('There are %s shard pointers: ',
                                        len(shard_pointers))

                    self.__logger.debug('Begin shards download process')
                    mp.map(foo, [(self, p, shard_pointers.index(p))
                                 for p in shard_pointers])
                except StorjBridgeApiError as e:
                    self.__logger.error(e)
                    self.__logger.error('Bridge error')
                    self.__logger.error(
                        'Error while resolving file pointers to download file with ID: %s ...',
                        file_id)
                    self.__logger.error(e)
                    continue
                else:
                    break

        except StorjBridgeApiError as e:
            self.__logger.error(e)
            self.__logger.error("Outern Bridge error")
            self.__logger.error("Error while resolving file pointers to \
                download file with ID: %s" % str(file_id))

        # All the shards have been downloaded
        self.finish_download()
        return

    def finish_download(self):
        self.__logger.debug('Finish download')
        fileisencrypted = '[DECRYPTED]' not in self.filename_from_bridge

        # Join shards
        sharding_tools = ShardingTools()
        self.__logger.debug('Joining shards...')

        actual_path = os.path.join(self.tmp_path, self.filename_from_bridge)
        destination_path = os.path.join(self.destination_file_path,
                                        self.filename_from_bridge)
        self.__logger.debug('Actual path %s', actual_path)
        self.__logger.debug('Destination path %s', destination_path)

        if fileisencrypted:
            sharding_tools.join_shards(actual_path, '-',
                                       '%s.encrypted' % actual_path)

        else:
            sharding_tools.join_shards(actual_path, '-', destination_path)

        if fileisencrypted:
            # decrypt file
            self.__logger.debug('Decrypting file...')
            file_crypto_tools = FileCrypto()
            # Begin file decryption
            file_crypto_tools.decrypt_file('AES', '%s.encrypted' % actual_path,
                                           destination_path,
                                           str(self.client.password))

        self.__logger.debug('Finish decryption')
        self.__logger.info('Download completed successfully!')

        # Remove temp files
        try:
            # Remove shards
            file_shards = map(lambda i: '%s-%s' % (actual_path, i),
                              range(self.all_shards_count))
            map(os.remove, file_shards)
            # Remove encrypted file
            os.remove('%s.encrypted' % actual_path)

        except OSError as e:
            self.__logger.error(e)

        return True

    def create_download_connection(self, url, path_to_save, shard_index):
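        # Stream the shard from the farmer URL to path_to_save, retrying the
        # same farmer up to MAX_RETRIES_DOWNLOAD_FROM_SAME_FARMER times.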
        farmer_tries = 0

        self.__logger.debug('Downloading shard at index %s from farmer: %s',
                            shard_index, url)

        tries_download_from_same_farmer = 0
        while MAX_RETRIES_DOWNLOAD_FROM_SAME_FARMER > \
                tries_download_from_same_farmer:

            tries_download_from_same_farmer += 1
            farmer_tries += 1

            try:
                r = requests.get(url)
                # Write the file
                with open(path_to_save, 'wb') as f:
                    for chunk in r.iter_content(chunk_size=1024):
                        if chunk:  # filter out keep-alive new chunks
                            f.write(chunk)
                if r.status_code != 200 and r.status_code != 304:
                    raise StorjFarmerError()

            except StorjFarmerError as e:
                self.__logger.error(e)
                self.__logger.error(
                    "First try failed. Retrying... (%s)" %
                    str(farmer_tries))  # update shard download state
                continue

            except Exception as e:
                self.__logger.error("Unhandled error")
                self.__logger.error("Error occured while downloading shard \
                    at index %s. Retrying... (%s)" %
                                    (shard_index, farmer_tries))
                self.__logger.error(e)
                continue
            else:
                break

    def shard_download(self, pointer, shard_index):
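        # Build the farmer URL from the pointer and download the shard to a
        # temporary file named '<filename>-<shard_index>' under tmp_path.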
        self.__logger.debug('Beginning download process...')

        try:
            self.__logger.debug('Starting download threads...')
            self.__logger.debug('Downloading shard at index %s ...',
                                shard_index)

            url = 'http://%s:%s/shards/%s?token=%s' % (
                pointer.get('farmer')['address'],
                str(pointer.get('farmer')['port']), pointer['hash'],
                pointer['token'])
            self.__logger.debug(url)

            file_temp_path = "%s-%s" % (os.path.join(
                self.tmp_path, self.filename_from_bridge), str(shard_index))
            if self.combine_tmpdir_name_with_token:
                file_temp_path = '%s-%s' % (os.path.join(
                    self.tmp_path, pointer['token'],
                    self.filename_from_bridge), str(shard_index))
            else:
                self.__logger.debug('Do not combine tmpdir and token')
            self.create_download_connection(url, file_temp_path, shard_index)

            self.__logger.debug('Shard downloaded')
            self.__logger.debug('Shard at index %s downloaded successfully',
                                shard_index)
            self.__logger.debug('%s saved', file_temp_path)

        except IOError as e:
            self.__logger.error('Perm error %s', e)
            if str(e) == str(13):
                self.__logger.error("""Error while saving or reading file or
                temporary file.
                Probably this is caused by insufficient permissions.
                Please check if you have permissions to write or
                read from selected directories.""")
        except Exception as e:
            self.__logger.error(e)
            self.__logger.error('Unhandled')