def handle_uploaded_chunk(chunk, chunkinfo):
    """Upload one chunk of a client-side chunked upload to S3 via multipart upload.

    First chunk (``CHUNK_START == 0``) initiates the multipart upload; the
    last chunk (``CHUNK_END`` within ±1 byte of ``CHUNK_TOTAL``) completes it
    and records a ``Media`` row; anything else uploads a middle part.

    Parameters
    ----------
    chunk : file-like / BytesIO
        Raw chunk body, handed to ``upload_part_from_bytesio`` unchanged.
    chunkinfo : dict
        Upload bookkeeping from the client. Keys read here: ``NEWNAME``
        (target S3 key), ``CHUNK_START``/``CHUNK_END``/``CHUNK_TOTAL``
        (byte offsets as strings or ints), ``PART`` (multipart part number),
        ``UPLOAD_ID`` and — for every chunk after the first — ``MPUID``
        (the S3 multipart-upload id returned for the first chunk).

    Returns
    -------
    dict
        Progress payload for the caller; on the final chunk it also carries
        the completed object's ``uri``.
    """
    s3conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
    bucket = Bucket(s3conn, settings.AWS_STORAGE_BUCKET_NAME)
    key = Key(bucket)
    key.key = chunkinfo["NEWNAME"]

    # guess_type() returns None for unknown extensions; don't send a None
    # Content-Type header to S3 — fall back to the generic binary type.
    content_type = mimetypes.guess_type(key.key)[0] or "application/octet-stream"

    def _build_info(mpu_id, **extra):
        """Common progress payload returned after every chunk."""
        info = {
            "name": key.key,
            "size": chunkinfo["CHUNK_TOTAL"],
            "upload_id": chunkinfo["UPLOAD_ID"],
            "part": chunkinfo["PART"],
            "mpuid": mpu_id,
            "content_type": content_type,
        }
        info.update(extra)
        return info

    chunk_diff = int(chunkinfo["CHUNK_TOTAL"]) - int(chunkinfo["CHUNK_END"])

    # First chunk: start the multipart upload and hand its id back to the
    # client so later chunks can send it as MPUID.
    if int(chunkinfo["CHUNK_START"]) == 0:
        mpu = bucket.initiate_multipart_upload(
            key.key, {"Content-Type": content_type}, False, None, False, 'public-read'
        )
        upload_part_from_bytesio(mpu, chunk, chunkinfo["PART"])
        return _build_info(mpu.id)

    # Every later chunk re-attaches to the in-flight multipart upload.
    mpu = get_mpu(bucket, chunkinfo["MPUID"], key.key)
    upload_part_from_bytesio(mpu, chunk, chunkinfo["PART"])

    # Last chunk: clients may report CHUNK_END off by a byte, so forgive a
    # ±1-byte discrepancy against the declared total.
    if abs(chunk_diff) < 2:
        xml = mpu.to_xml()
        completed_mpu = bucket.complete_multipart_upload(mpu.key_name, mpu.id, xml)
        print("return value for completed_mpu: %s\n" % completed_mpu)
        try:
            # Best-effort bookkeeping row — the S3 upload itself already
            # succeeded, so a failure here must not fail the request.
            media_obj = Media()
            media_obj.item.name = key.key
            media_obj.save()
        except Exception as e:
            print(e)
        return _build_info(mpu.id, uri=completed_mpu.location)

    # Middle chunk: nothing left to do but report progress.
    return _build_info(mpu.id)