def upload_image(self, image_id, parent_id, layer):
    json_obj = {'id': image_id}
    if parent_id:
        json_obj['parent'] = parent_id
    json_data = compat.json.dumps(json_obj)
    h = hashlib.sha256(json_data + '\n')
    h.update(layer)
    layer_checksum = 'sha256:{0}'.format(h.hexdigest())
    headers = {'X-Docker-Payload-Checksum': layer_checksum}
    resp = self.http_client.put('/v1/images/{0}/json'.format(image_id),
                                headers=headers,
                                data=json_data)
    self.assertEqual(resp.status_code, 200, resp.data)
    # Make sure I cannot download the image before the push is complete
    resp = self.http_client.get('/v1/images/{0}/json'.format(image_id))
    self.assertEqual(resp.status_code, 400, resp.data)
    layer_file = compat.StringIO(layer)
    resp = self.http_client.put('/v1/images/{0}/layer'.format(image_id),
                                input_stream=layer_file)
    layer_file.close()
    self.assertEqual(resp.status_code, 200, resp.data)
    self.set_image_checksum(image_id, layer_checksum)
    # Push done, test reading the image back
    resp = self.http_client.get('/v1/images/{0}/json'.format(image_id))
    self.assertEqual(resp.status_code, 200, resp.data)
    self.assertEqual(resp.headers.get('x-docker-size'), str(len(layer)))
    self.assertEqual(resp.headers['x-docker-checksum-payload'],
                     layer_checksum)
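For reference, the payload checksum sent in X-Docker-Payload-Checksum above is SHA-256 over the image JSON plus a trailing newline, followed by the raw layer bytes. A minimal standalone sketch of that scheme (the function name and sample values are illustrative, not part of the test suite):

import hashlib
import json

def payload_checksum(image_json, layer_bytes):
    # sha256(json + '\n' + layer), hex-encoded with a 'sha256:' prefix,
    # mirroring what upload_image computes above.
    h = hashlib.sha256(image_json.encode('utf-8') + b'\n')
    h.update(layer_bytes)
    return 'sha256:{0}'.format(h.hexdigest())

json_data = json.dumps({'id': 'deadbeef'})
print(payload_checksum(json_data, b'layer-bytes'))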
def stream_write(self, path, fp):
    # Naive version: buffer the entire stream in memory, then upload it
    # to S3 in a single PUT. Memory use grows with the layer size.
    try:
        buf = fp.read()
        io = compat.StringIO(buf)
        path = self._init_path(path)
        key = self.makeKey(path)
        key.set_contents_from_file(io)
        io.close()
    except IOError:
        # Re-raise, preserving the original traceback.
        raise
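For context, the same single-PUT pattern with boto 2 directly; a sketch only, with placeholder bucket and key names, assuming credentials are available to boto:

import boto
from io import BytesIO

conn = boto.connect_s3()  # reads credentials from the environment/boto config
bucket = conn.get_bucket('example-bucket')  # placeholder bucket name
key = bucket.new_key('registry/images/deadbeef/layer')  # placeholder key path
# Like the stream_write above, this buffers the whole payload in memory
# and sends it in a single PUT; acceptable for small blobs only.
key.set_contents_from_file(BytesIO(b'layer-bytes'))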
def stream_write(self, path, fp):
    # The minimum upload part size on S3 is 5 MB
    buffer_size = 5 * 1024 * 1024
    if self.buffer_size > buffer_size:
        buffer_size = self.buffer_size
    path = self._init_path(path)
    mp = self._boto_bucket.initiate_multipart_upload(
        path, encrypt_key=(self._config.s3_encrypt is True))
    num_part = 1
    try:
        # Read the stream in fixed-size chunks and upload each as one part,
        # so memory use stays bounded regardless of the layer size.
        while True:
            buf = fp.read(buffer_size)
            if not buf:
                break
            io = compat.StringIO(buf)
            mp.upload_part_from_file(io, num_part)
            num_part += 1
            io.close()
    except IOError:
        # Re-raise, preserving the original traceback.
        raise
    mp.complete_upload()
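A stripped-down sketch of the same multipart flow against a boto 2 bucket object (the helper name is hypothetical); unlike the driver method above, it also cancels the upload on failure so an aborted push does not leave orphaned parts behind:

from io import BytesIO

def multipart_upload(bucket, path, fp, part_size=5 * 1024 * 1024):
    # S3 rejects parts smaller than 5 MB, except for the final part.
    mp = bucket.initiate_multipart_upload(path)
    part_num = 1
    try:
        while True:
            buf = fp.read(part_size)
            if not buf:
                break
            mp.upload_part_from_file(BytesIO(buf), part_num)
            part_num += 1
    except Exception:
        mp.cancel_upload()  # drop already-uploaded parts instead of leaking them
        raise
    mp.complete_upload()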