def test_readall(self):
    tstr = '1234567890'
    with TemporaryFile() as f:
        f.write(tstr)
        f.flush()
        f.seek(0)
        self.assertEqual(utils.readall(f, 5), tstr[:5])
        self.assertEqual(utils.readall(f, 10), tstr[5:])
        self.assertEqual(utils.readall(f, 1), '')
        self.assertRaises(IOError, utils.readall, f, 1, 0)
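# For reference, a minimal sketch of a readall() consistent with the test
# above (the real kamaki.clients.utils.readall may differ in detail): read
# up to `size` bytes, retrying short reads, returning whatever was read
# once a read attempt yields nothing; with zero retries (the
# readall(f, 1, 0) case) it raises IOError without reading at all.
def readall_sketch(openfile, size, retries=7):
    remains = size if size > 0 else 0
    buf = ''
    for i in xrange(retries):
        tmp = openfile.read(remains)
        if tmp:
            buf += tmp
            remains -= len(tmp)
            if remains > 0:
                continue
        return buf
    raise IOError('Failed to read %s bytes from file' % size)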
def upload_object_unchunked(
        self, obj, f,
        withHashFile=False,
        size=None,
        etag=None,
        content_encoding=None,
        content_disposition=None,
        content_type=None,
        sharing=None,
        public=None):
    """
    :param obj: (str) remote object path
    :param f: open file descriptor
    :param withHashFile: (bool)
    :param size: (int) size of data to upload
    :param etag: (str)
    :param content_encoding: (str)
    :param content_disposition: (str)
    :param content_type: (str)
    :param sharing: {'read':[user and/or grp names],
        'write':[usr and/or grp names]}
    :param public: (bool)
    :returns: (dict) created object metadata
    """
    self._assert_container()

    if withHashFile:
        data = f.read()
        try:
            import json
            data = json.dumps(json.loads(data))
        except ValueError:
            raise ClientError('"%s" is not JSON-formatted' % f.name, 1)
        except SyntaxError:
            msg = '"%s" is not a valid hashmap file' % f.name
            raise ClientError(msg, 1)
        f = StringIO(data)
    else:
        data = readall(f, size) if size else f.read()
    r = self.object_put(
        obj,
        data=data,
        etag=etag,
        content_encoding=content_encoding,
        content_disposition=content_disposition,
        content_type=content_type,
        permissions=sharing,
        public=public,
        success=201)
    return r.headers
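# A minimal usage sketch (hypothetical helper, not part of kamaki): assumes
# `client` is a Pithos client instance with account and container already
# configured. Passing size makes the method read exactly that many bytes
# via readall(); omitting it falls back to f.read().
def upload_example(client, local_path, remote_path):
    from os.path import getsize
    with open(local_path) as f:
        return client.upload_object_unchunked(
            remote_path, f,
            size=getsize(local_path),
            content_type='text/plain')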
def _calculate_blocks_for_upload(
        self, blocksize, blockhash, size, nblocks, hashes, hmap, fileobj,
        hash_cb=None):
    offset = 0
    if hash_cb:
        hash_gen = hash_cb(nblocks)
        hash_gen.next()

    for i in xrange(nblocks):
        block = readall(fileobj, min(blocksize, size - offset))
        bytes = len(block)
        if bytes <= 0:
            break
        hash = _pithos_hash(block, blockhash)
        hashes.append(hash)
        hmap[hash] = (offset, bytes)
        offset += bytes
        if hash_cb:
            hash_gen.next()
    msg = ('Failed to calculate uploading blocks: '
           'read bytes(%s) != requested size (%s)' % (offset, size))
    assert offset == size, msg
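# The hash_cb protocol above: hash_cb(nblocks) must return a generator that
# is advanced once before hashing starts and once per hashed block. A
# sketch of a compatible progress callback (hypothetical; kamaki's CLI
# supplies its own progress-bar generator):
def print_hash_progress(nblocks):
    for done in xrange(nblocks + 1):
        print '\rhashing: %s of %s blocks' % (done, nblocks),
        yield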
def _upload_missing_blocks(self, missing, hmap, fileobj, upload_gen=None):
    """upload missing blocks asynchronously"""
    self._init_thread_limit()

    flying = []
    failures = []
    for hash in missing:
        offset, bytes = hmap[hash]
        fileobj.seek(offset)
        data = readall(fileobj, bytes)
        r = self._put_block_async(data, hash)
        flying.append(r)
        unfinished = self._watch_thread_limit(flying)
        for thread in set(flying).difference(unfinished):
            if thread.exception:
                failures.append(thread)
                if isinstance(
                        thread.exception,
                        ClientError) and thread.exception.status == 502:
                    self.POOLSIZE = self._thread_limit
            elif thread.isAlive():
                flying.append(thread)
            elif upload_gen:
                try:
                    upload_gen.next()
                except:
                    pass
        flying = unfinished

    for thread in flying:
        thread.join()
        if thread.exception:
            failures.append(thread)
        elif upload_gen:
            try:
                upload_gen.next()
            except:
                pass

    return [failure.kwargs['hash'] for failure in failures]
def _hash_from_file(self, fp, start, size, blockhash):
    fp.seek(start)
    block = readall(fp, size)
    h = newhashlib(blockhash)
    h.update(block.strip('\x00'))
    return hexlify(h.digest())
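# Standalone illustration of the hashing above (illustrative values): the
# block is hashed with its NUL bytes stripped, mirroring _hash_from_file,
# so a partially filled block hashes the same with or without its padding.
# The blockhash name ('sha256' here) is an assumption; Pithos reports the
# actual algorithm in the container policy.
from binascii import hexlify
from hashlib import new as newhashlib

block = 'some block data' + '\x00' * 16
h = newhashlib('sha256')
h.update(block.strip('\x00'))
print hexlify(h.digest())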