def upload_to_s3(local_path, key=None):
    """Upload a local file to the configured S3 bucket and make it public.

    Parameters:
        local_path: path of the file on disk to upload.
        key: S3 key name to store it under; defaults to the file's basename.

    Returns:
        The boto ``Key`` object for the uploaded object.
    """
    if key is None:
        key = os.path.basename(local_path)
    bucket = helper.get_bucket()
    k = Key(bucket)
    k.key = key
    # BUG FIX: metadata must be set *before* uploading -- boto sends
    # metadata with the PUT request, so calling set_metadata() after
    # set_contents_from_filename() never reached the stored object and
    # the Content-Type stayed wrong.
    k.set_metadata('Content-Type', helper.get_mimetype(k.key))
    k.set_contents_from_filename(local_path)
    k.set_canned_acl('public-read')
    return k
def _process_fullcopy(key):
    """Post-upload worker: fix the S3 object's Content-Type, download the
    original, transcode one variant per ffmpeg preset, upload each variant,
    and track progress in the ``Video`` table.

    Parameters:
        key: S3 key of the originally uploaded video.
    """
    # Copy the object onto itself so S3 rewrites its metadata with the
    # correct Content-Type (an in-place copy is how S3 metadata is updated).
    bucket = helper.get_bucket()
    k = bucket.lookup(key)
    k.copy(k.bucket, k.name, preserve_acl=True,
           metadata={'Content-Type': helper.get_mimetype(k.name)})

    orig_video = Video(key=key, status='downloading')
    db.add(orig_video)
    db.commit()

    url = helper.get_s3url(key)
    orig_path = download_url(url)
    orig_video.update(get_video_attrs(orig_path))
    orig_video.status = 'done'

    for preset in FFMPEG_PRESETS.iterkeys():
        # Transcode/Upload based on ffmpeg preset.  The preset key is
        # appended in place of the original extension -- presumably it
        # carries its own suffix/extension (TODO confirm against
        # FFMPEG_PRESETS' definition).
        iphone_path = os.path.splitext(orig_path)[0] + preset
        iphone_video = Video(key=os.path.basename(iphone_path),
                             status='transcoding')
        db.add(iphone_video)
        db.commit()
        try:
            make_iphone(orig_path, iphone_path, preset)
            iphone_video.update(get_video_attrs(iphone_path))
        except Exception:
            # BUG FIX: was a bare ``except:`` which also swallowed
            # KeyboardInterrupt/SystemExit; best-effort error status kept.
            iphone_video.status = 'transcoding error'
        else:
            iphone_video.status = 'uploading'
        db.commit()
        if iphone_video.status == 'uploading':
            upload_to_s3(iphone_path)
            iphone_video.status = 'done'
            db.commit()
        # BUG FIX: remove the transcoded file only if it exists -- after a
        # transcoding error ffmpeg may never have created the output, and
        # the unconditional remove raised OSError and aborted the loop.
        if os.path.exists(iphone_path):
            os.remove(iphone_path)
    os.remove(orig_path)
def upload_action(action):
    """Dispatch one step of the chunked S3 multipart-upload protocol.

    Parameters:
        action: protocol step name -- one of ``chunk_loaded``,
            ``get_all_signatures``, ``get_init_signature``,
            ``get_chunk_signature``, ``get_list_signature``,
            ``get_end_signature``, ``get_delete_signature``,
            ``upload_finished``.

    Returns:
        A JSON string with the requested signature(s), or ``""`` for
        ``chunk_loaded``.  Unknown actions (and ``upload_finished``) fall
        through to the final return with ``signature``/``date`` of None.
    """
    key = request.args.get("key")
    upload_id = request.args.get("upload_id")
    chunk = request.args.get("chunk")
    string = date = None
    mime_type = helper.get_mimetype(key)

    if action == "chunk_loaded":
        # Record that one chunk finished so an interrupted upload can resume.
        filename = request.args["filename"]
        filesize = request.args["filesize"]
        last_modified = request.args["last_modified"]
        chunk = int(request.args["chunk"])
        # BUG FIX: request.args values are strings; the original compared
        # str > int, which is always True on Python 2 (types compare by
        # name) and a TypeError on Python 3.  Compare numerically.
        if int(filesize) > CHUNK_SIZE:
            try:
                u = (
                    db.query(Upload)
                    .filter(
                        Upload.filename == filename,
                        Upload.filesize == filesize,
                        Upload.last_modified == last_modified
                    )
                    .first()
                )
                # assert-as-control-flow: a missing row jumps to the
                # AssertionError branch that creates the record.
                assert u
                chunks = set(map(int, u.chunks_uploaded.split(",")))
                chunks.add(chunk)
                u.chunks_uploaded = ",".join(map(str, chunks))
                db.commit()
            except AssertionError:
                u = Upload(
                    filename=filename,
                    filesize=filesize,
                    last_modified=last_modified,
                    chunks_uploaded=str(chunk),
                    key=key,
                    upload_id=upload_id,
                )
                db.add(u)
                db.commit()
        return ""

    if action == "get_all_signatures":
        # Pre-sign everything the client needs for the whole upload.
        date = _http_date()
        list_signature, _ = _action_list(key, upload_id, date)
        end_signature, _ = _action_end(key, upload_id, mime_type, date)
        # NOTE(review): delete_signature is computed but never returned --
        # looks like dead code, kept in case _action_delete has side
        # effects; confirm and drop.
        delete_signature, _ = _action_delete(key, upload_id, date)
        num_chunks = int(request.args["num_chunks"])
        chunk_signatures = dict(
            [(chunk, (_action_chunk(key, upload_id, chunk, mime_type, date)))
             for chunk in xrange(1, num_chunks + 1)]
        )
        return json.dumps(
            {
                "list_signature": [list_signature, date],
                "end_signature": [end_signature, date],
                "chunk_signatures": chunk_signatures,
            }
        )

    if action == "get_init_signature":
        filename = request.args["filename"]
        filesize = request.args["filesize"]
        last_modified = request.args["last_modified"]
        try:
            # "force" discards any resume record and starts fresh.
            assert "force" not in request.args
            u = (
                db.query(Upload)
                .filter(Upload.filename == filename,
                        Upload.filesize == filesize,
                        Upload.last_modified == last_modified)
                .first()
            )
            assert u
            # Resume: hand back the stored key/upload_id plus the chunks
            # already uploaded.
            string, date = _action_init(u.key)
            return json.dumps(
                {
                    "signature": string,
                    "date": date,
                    "key": u.key,
                    "upload_id": u.upload_id,
                    "chunks": map(int, u.chunks_uploaded.split(",")),
                }
            )
        except AssertionError:
            # Fresh upload: drop any stale resume record and sign anew.
            db.query(Upload).filter(
                Upload.filename == filename,
                Upload.filesize == filesize,
                Upload.last_modified == last_modified
            ).delete()
            db.commit()
            string, date = _action_init(key)
    elif action == "get_chunk_signature":
        string, date = _action_chunk(key, upload_id, chunk, mime_type)
    elif action == "get_list_signature":
        string, date = _action_list(key, upload_id)
    elif action == "get_end_signature":
        string, date = _action_end(key, upload_id, mime_type)
    elif action == "get_delete_signature":
        string, date = _action_delete(key, upload_id)
    elif action == "upload_finished":
        start_worker(key)

    return json.dumps({"signature": string, "date": date})