import os
import mimetypes

from twisted.internet.defer import gatherResults
from twisted.python import log

from txaws.credentials import AWSCredentials
from txaws.service import AWSServiceRegion
# The module path below is an assumption; adjust it to wherever
# MultipartUploadsManager and ThroughputCounter live in your tree.
from txaws.s3.multipart import MultipartUploadsManager, ThroughputCounter


def start():
    # options and paths are expected to be set up by the script's
    # command-line parsing before start() runs.
    bucket = options.bucket
    if not bucket:
        return error("Must supply a bucket name!")
    creds = AWSCredentials(options.access_key, options.secret_key)
    region = AWSServiceRegion(creds=creds, region=options.region)
    throughput_counter = ThroughputCounter()
    uploader = MultipartUploadsManager(region=region,
                                       throughput_counter=throughput_counter)
    # Kick off one multipart upload per path and collect the Deferreds.
    finished = []
    for path in paths:
        fd = open(path, 'rb')
        object_name = os.path.basename(path)
        content_type = mimetypes.guess_type(path)[0]
        d = uploader.upload(fd, bucket, object_name,
                            content_type=content_type,
                            amz_headers={'acl': 'public-read'})
        finished.append(d)
    # Once every upload completes, report throughput, then shut down.
    gatherResults(finished).addCallback(
        show_stats, throughput_counter).addCallbacks(
        complete, log.err).addBoth(stop)
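The snippet leaves options, paths, error, show_stats, complete, and stop to the surrounding script. As a rough sketch of how that scaffolding might be wired up under the Twisted reactor (the helper bodies below are assumptions for illustration, not part of the original code):

import sys
from twisted.internet import reactor

def error(message):
    # Complain and arrange for the process to exit; start() returns
    # early, so no uploads are ever scheduled.
    sys.stderr.write(message + "\n")
    reactor.callWhenRunning(reactor.stop)

def show_stats(results, throughput_counter):
    # Placeholder: a real version would summarize throughput_counter.
    return results

def complete(results):
    sys.stdout.write("All uploads finished.\n")

def stop(result):
    # Chained via addBoth above: runs on success and failure alike.
    reactor.stop()
    return result

if __name__ == '__main__':
    reactor.callWhenRunning(start)
    reactor.run()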
def test_upload_creation(self):
    def check(task):
        # The manager should hand back a fully wired MultipartUpload task.
        self.assertIsInstance(task, MultipartUpload)
        self.assertEqual(task.bucket, "mybucket")
        self.assertEqual(task.object_name, "mykey")
        self.assertIdentical(task.fd, fd)
        self.assertEqual(task.metadata, {})
        self.assertIsInstance(task.counter, PartsTransferredCounter)
        self.assertIsInstance(task.client, S3Client)
        self.assertEqual(task.amz_headers, {'acl': 'public-read'})
        # The manager should have logged its progress as plain messages.
        self.assertTrue(self.log.buffer)
        verifyObject(IPartsGenerator, task.parts_generator)
        verifyObject(IPartHandler, task.part_handler)
        for entry in self.log.buffer:
            self.assertEqual(entry[0], 'msg')

    manager = MultipartUploadsManager(log=self.log)
    fd = StringIO("some data")
    d = manager.upload(fd, 'mybucket', 'mykey',
                       amz_headers={'acl': 'public-read'})
    d.addCallback(check)
    return d
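The assertions pin down what the log object handed to MultipartUploadsManager must look like: something whose buffer collects entries starting with 'msg' for each msg() call. A minimal stand-in satisfying just those assertions might look like this (inferred from the assertions above, not part of the actual test suite):

class FakeLog(object):
    """Just enough of a logger for the test: records every msg() call
    as a ('msg', args, kwargs) tuple in self.buffer."""

    def __init__(self):
        self.buffer = []

    def msg(self, *args, **kwargs):
        self.buffer.append(('msg', args, kwargs))

With that, setUp() would simply do self.log = FakeLog() before the test runs.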