def delete(self, id):
    owner_id = request.cookies.get(app.config['COOKIE_OWNER_ID'])
    video = db_session.query(models.Video).filter_by(owner=owner_id, id=id).one_or_none()
    if not video:
        abort(404)
    try:
        if video.playlist:
            rm_f(video.playlist)
        if video.orig_file:
            rm_f(os.path.join(app.config['MOVIE_PATH'], video.orig_file))
        if app.config['STORAGE_BACKEND'] == 'S3':
            tasks.s3_delete.delay(video.id)
        else:
            viddir = f"{app.config['MOVIE_PATH']}/{video.id}"
            shutil.rmtree(viddir, ignore_errors=True)
    except Exception:  # a bare except would also swallow system-exiting exceptions
        abort(500)
    db_session.delete(video)
    db_session.commit()
    return "Video deleted", 204
def delete(self, video_id):
    owner_id = request.cookies.get(app.config['COOKIE_OWNER_ID'])
    video = db_session.query(models.Video).filter_by(
        owner=owner_id, id=video_id).one_or_none()
    if not video:
        abort(404, 'Video not found')
    if video.status in ['encoding', 'start-encoding']:
        abort(409, 'Cannot delete while encoding')
    try:
        if video.playlist:
            rm_f(video.playlist)
        if video.orig_file:
            rm_f(os.path.join(app.config['MOVIE_PATH'], video.orig_file))
        if app.config['STORAGE_BACKEND'] == 'S3':
            tasks.s3_delete.delay(video.id)
        else:
            viddir = f"{app.config['MOVIE_PATH']}/{video.id}"
            shutil.rmtree(viddir, ignore_errors=True)
    except Exception:  # a bare except would also swallow system-exiting exceptions
        abort(500)
    db_session.delete(video)
    db_session.commit()
    # note: most clients discard the body of a 204 response
    return {'message': 'Video deleted'}, 204
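# rm_f() is used throughout but not defined in this section. A minimal sketch
# of what it presumably does (remove a file, ignoring a missing one, like
# `rm -f`); the exact implementation is an assumption:
def rm_f(path):
    try:
        os.unlink(path)
    except FileNotFoundError:
        pass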
def post(self, video_id):
    owner_id = request.cookies.get(app.config['COOKIE_OWNER_ID'])
    video = db_session.query(models.Video).filter_by(
        owner=owner_id, id=video_id).one_or_none()
    if not video:
        return {'message': 'Video not found'}, 403

    resumableTotalChunks = request.form.get('resumableTotalChunks', type=int)
    resumableChunkNumber = request.form.get('resumableChunkNumber', default=1, type=int)
    resumableIdentifier = request.form.get('resumableIdentifier', default='error', type=str)
    resumableFilename = request.form.get('resumableFilename', default='error', type=str)
    resumableTotalSize = request.form.get('resumableTotalSize', default=0, type=int)
    resumableChunkSize = request.form.get('resumableChunkSize', default=0, type=int)
    if not resumableIdentifier or not resumableChunkNumber \
            or not resumableTotalSize or not resumableChunkSize:
        return {'message': 'Parameter error'}, 500

    target_file_name = target_name(video.id)
    chunk_data = request.files['file']

    if video.status in ['file-waiting', 'file-uploaded', 'ready', 'error']:
        video.status = 'file-uploading'
        video.upload_identifier = resumableIdentifier
        video.orig_file_name = resumableFilename
        video.orig_file = target_file_name.name
        db_session.commit()
    if video.upload_identifier != resumableIdentifier:
        return {'message': 'Different upload already in progress'}, 409

    # if the file on disk does not match this upload, start over
    try:
        if target_file_name.stat().st_size != resumableTotalSize \
                or video.orig_file_name != resumableFilename:
            rm_f(target_file_name)
    except FileNotFoundError:
        pass

    # preallocate a sparse file of the final size so chunks can be written
    # at their offsets in any order
    if not target_file_name.exists():
        with open(target_file_name, "wb") as target_file:
            target_file.truncate(resumableTotalSize)

    upload_complete = False
    with open(target_file_name, "r+b") as target_file:
        offset = (resumableChunkNumber - 1) * resumableChunkSize
        target_file.seek(offset, os.SEEK_SET)
        target_file.write(chunk_data.read())
        fh = target_file.fileno()
        # if the first hole is at EOF, every chunk has been written
        if os.lseek(fh, 0, os.SEEK_HOLE) == resumableTotalSize:
            upload_complete = True
            os.fsync(fh)
        last_chunk_offset = resumableTotalChunks * resumableChunkSize
        # once both the first and last chunks have landed, probe the metadata
        if resumableTotalSize >= resumableChunkSize:
            if (not is_hole(fh, resumableChunkSize - 1)
                    and resumableChunkNumber == resumableTotalChunks) or (
                        not is_hole(fh, resumableTotalSize - 1)
                        and resumableChunkNumber == 1):
                update_video_metadata(video, target_file_name)

    if upload_complete:
        if not is_video_file(target_file_name):
            video.status = 'error'
            video.status_message = 'File uploaded was not a video file. Please use a different file.'
            db_session.commit()
            return {'message': video.status_message}, 501
        update_video_metadata(video, target_file_name)
        video.status = 'file-uploaded'
        video.encoding_progress = 0
        # a re-upload invalidates any previously encoded renditions
        for encoded_file in video.encoded_files:
            rm_f(os.path.join(app.config['MOVIE_PATH'], video.id,
                              encoded_file.encoded_file_name))
            db_session.delete(encoded_file)
        db_session.commit()
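# target_name() and is_hole() are used above but not defined in this section.
# Minimal sketches under assumptions: originals are stored in MOVIE_PATH as
# '<video_id>_orig' (the layout the older handler below uses), and is_hole()
# reports whether the byte at an offset of the sparse file is still unwritten:
import pathlib

def target_name(video_id):
    return pathlib.Path(app.config['MOVIE_PATH']) / f'{video_id}_orig'

def is_hole(fd, offset):
    try:
        # SEEK_HOLE seeks to the nearest hole at or after offset; if that is
        # the offset itself, this byte has not been written yet. Note this
        # moves the descriptor's file position, which is harmless here since
        # the caller only probes after its write.
        return os.lseek(fd, offset, os.SEEK_HOLE) == offset
    except OSError:
        return False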
def resumable_post(video_id):
    owner_id = request.cookies.get(app.config['COOKIE_OWNER_ID'])
    video = db_session.query(models.Video).filter_by(
        owner=owner_id, id=video_id).one_or_none()
    if not video:
        abort(403, "Video not found")

    resumableTotalChunks = request.form.get('resumableTotalChunks', type=int)
    resumableChunkNumber = request.form.get('resumableChunkNumber', default=1, type=int)
    resumableIdentifier = request.form.get('resumableIdentifier', default='error', type=str)

    # get the chunk data
    chunk_data = request.files['file']

    # make our temp directory
    temp_dir = os.path.join(app.config['MOVIE_PATH'], video_id, 'tmp', resumableIdentifier)
    if not os.path.isdir(temp_dir):
        os.makedirs(temp_dir)

    # save the chunk data
    chunk_name = get_chunk_name('orig', resumableChunkNumber)
    chunk_file = os.path.join(temp_dir, chunk_name)
    chunk_data.save(chunk_file)
    app.logger.debug('Saved chunk: %s', chunk_file)

    # check if the upload is complete
    chunk_paths = [
        os.path.join(temp_dir, get_chunk_name('orig', x))
        for x in range(1, resumableTotalChunks + 1)
    ]
    upload_complete = all(os.path.exists(p) for p in chunk_paths)

    # combine all the chunks to create the final file
    if upload_complete:
        target_file_name = os.path.join(app.config['MOVIE_PATH'], f'{video_id}_orig')
        rm_f(target_file_name)
        with open(target_file_name, "ab") as target_file:
            for p in chunk_paths:
                with open(p, 'rb') as stored_chunk_file:
                    target_file.write(stored_chunk_file.read())
                os.unlink(p)
        os.rmdir(temp_dir)
        app.logger.debug('File saved to: %s', target_file_name)

        # probe the finished file to make sure it really is a video
        cmd = f'ffprobe -v quiet -show_streams -show_format -print_format json {target_file_name}'
        output = os.popen(cmd).read()
        # ffprobe prints nothing at all if it cannot read the file
        output = json.loads(output or '{}')
        app.logger.debug('ffprobe output: %s', output)
        if output == {}:
            os.unlink(target_file_name)
            return make_response(
                jsonify({"message": "File is not a video file"}), 400)

        vcodec = ""
        for stream in output['streams']:
            if stream['codec_type'] == 'video':
                vcodec = stream['codec_name']
        if vcodec == "":
            os.unlink(target_file_name)
            return make_response(
                jsonify({"message": "File is not a video file"}), 400)

        tasks.transcode.delay(target_file_name, output, video_id)
    return 'OK'
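# get_chunk_name() is assumed to produce a stable per-chunk filename so the
# completeness check above can enumerate every expected chunk. A minimal
# sketch; the exact naming scheme is a guess:
def get_chunk_name(prefix, chunk_number):
    # zero-padding keeps names unambiguous, e.g. 'orig_part_00000001'
    return f'{prefix}_part_{chunk_number:08d}'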
def transcode_video(video, task):
    status = 'error'
    output = ""
    outdir = f"{celery.conf.get('MOVIE_PATH')}/{video.id}"
    try:
        os.mkdir(outdir)
    except FileExistsError:
        pass
    master_playlist = f"{outdir}/playlist.mpd"
    rm_f(master_playlist)

    dash_size = 4
    dash_command = [
        'MP4Box', '-dash', f'{dash_size * 1000}', '-rap', '-frag-rap',
        '-min-buffer', '16000', '-profile', 'dashavc264:onDemand',
        '-mpd-title', video.title, '-out', master_playlist
    ]

    try:
        print("Reencoded file")

        # sort renditions by width / bitrate so the MPD lists them in order
        def sort_video(name):
            return int(name.split("_")[1])

        def sort_audio(name):
            return int(name.split("_")[1].split("k")[0])

        video_files = []
        encoded_files = db_session.query(models.EncodedFile).filter_by(
            video_id=video.id, track_type='video').all()
        for encoded_file in encoded_files:
            video_files.append(f'{outdir}/{encoded_file.encoded_file_name}')
        video_files.sort(key=sort_video)

        audio_files = []
        encoded_files = db_session.query(models.EncodedFile).filter_by(
            video_id=video.id, track_type='audio').all()
        for encoded_file in encoded_files:
            audio_files.append(f'{outdir}/{encoded_file.encoded_file_name}')
        audio_files.sort(key=sort_audio)

        dash_command.extend(video_files)
        dash_command.extend(audio_files)

        print(f'Executing: {" ".join(dash_command)}')
        output = subprocess.check_call(dash_command, stderr=subprocess.STDOUT)
        print("DASHed file")
        status = 'ready'
    except Exception as e:
        print(output)
        print(e)
        video.status = 'error'
        video.status_message = 'MP4Box failed'
        db_session.commit()

    if celery.conf.get('STORAGE_BACKEND') == "S3":
        print("Uploading to S3")
        nthreads = celery.conf.get('S3_UPLOAD_THREADS')
        g = glob.glob(f"{outdir}/*")
        splits = numpy.array_split(g, nthreads)
        threads = list()
        for index in range(nthreads):
            x = threading.Thread(target=s3_upload, args=(splits[index].copy(),))
            threads.append(x)
            x.start()
        for thread in threads:
            thread.join()
        print("Done uploading")

    video.playlist = f'{video.id}/playlist.mpd'
    video.encoding_progress = 100
    video.status = status
    db_session.commit()
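# s3_upload() is the thread target used here and in the transcode task below,
# but it is not defined in this section. A minimal sketch, assuming boto3 and
# an S3_BUCKET key in the Celery config; the '<video_id>/<file>' key layout
# mirrors the playlist path set above:
import boto3

def s3_upload(paths):
    s3 = boto3.client('s3')
    bucket = celery.conf.get('S3_BUCKET')
    for path in paths:
        # keep the last two path components as the object key,
        # e.g. '<video_id>/playlist.mpd'
        key = '/'.join(str(path).split('/')[-2:])
        s3.upload_file(str(path), bucket, key)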
def transcode(tmpfile, streaminfo, video_id):
    status = 'error'
    output = ""
    outdir = f"{celery.conf.get('MOVIE_PATH')}/{video_id}"
    shutil.rmtree(outdir, ignore_errors=True)
    os.mkdir(outdir)
    master_playlist = f"{outdir}/playlist.mpd"
    rm_f(master_playlist)

    vwidth = 0
    vheight = 0
    duration = 0
    vcodec = ""
    framerate = 24
    video_streamidx = -1
    audio_streamidx = -1
    has_audio = False

    video = db_session.query(models.Video).filter_by(id=video_id).one_or_none()

    # pick the video stream, preferring an English audio track
    duration = float(streaminfo['format']['duration'])
    for stream in streaminfo['streams']:
        if stream['codec_type'] == 'video':
            vcodec = stream['codec_name']
            vwidth = stream['width']
            vheight = stream['height']
            framerate = stream['r_frame_rate']
            video_streamidx = stream['index']
        if stream['codec_type'] == 'audio':
            has_audio = True
            if audio_streamidx == -1 and stream['tags']['language'] == 'und':
                audio_streamidx = stream['index']
            if stream['tags']['language'] == 'eng':
                audio_streamidx = stream['index']
    if video_streamidx == -1:
        video_streamidx = 0
    if audio_streamidx == -1 and has_audio:
        audio_streamidx = 1

    # r_frame_rate may be a plain number or a ratio like '24000/1001'
    try:
        framerate = round(float(framerate))
    except ValueError:
        x, y = framerate.split("/")
        framerate = round(int(x) / int(y))

    dash_size = 4
    keyint = framerate

    # cap output width at 1920, preserving aspect ratio
    if vwidth > 1920:
        vheight = int(vheight / (vwidth / 1920))
        vwidth = 1920

    audio_formats = []
    if has_audio:
        audio_formats = [
            {'rate': '64k', 'channels': '1'},
            {'rate': '128k', 'channels': '2'},
            {'rate': '196k', 'channels': '2'},
        ]

    video_profiles = [
        {'profile': 'main', 'preset': 'veryslow', 'crf': '22', 'maxrate': '600k', 'bufsize': '800k', 'width': 480},
        {'profile': 'main', 'preset': 'slow', 'crf': '22', 'maxrate': '900k', 'bufsize': '1200k', 'width': 640},
        {'profile': 'high', 'preset': 'slow', 'crf': '22', 'maxrate': '1200k', 'bufsize': '1500k', 'width': 960},
        {'profile': 'high', 'preset': 'slow', 'crf': '21', 'maxrate': '2000k', 'bufsize': '4000k', 'width': 1280},
        {'profile': 'high', 'preset': 'slow', 'crf': '21', 'maxrate': '4500k', 'bufsize': '8000k', 'width': 1920},
    ]

    # always produce the two lowest renditions, then ladder up from the source width
    video_formats = [
        {'profile': 'baseline', 'preset': 'veryslow', 'crf': '22', 'maxrate': '200k', 'bufsize': '300k', 'width': 320},
        {'profile': 'baseline', 'preset': 'veryslow', 'crf': '22', 'maxrate': '400k', 'bufsize': '500k', 'width': 320},
    ]
    sizes = [1, 1.5, 2, 3]
    for size in sizes:
        this_width = int(vwidth / size) + (int(vwidth / size) % 2)
        if this_width < video_profiles[0]['width']:
            continue  # was `next`, a no-op expression rather than loop control
        this_profile = None
        for idx in range(len(video_profiles)):
            if this_width == video_profiles[idx]['width']:
                this_profile = video_profiles[idx].copy()
                break
            if this_width > video_profiles[idx]['width'] \
                    and this_width < video_profiles[idx + 1]['width']:
                this_profile = video_profiles[idx + 1].copy()
                this_profile['width'] = this_width
                break
        if this_profile:
            video_formats.append(this_profile)
    print(video_formats)

    # ffmpeg reports progress over a unix socket
    tmpdir = tempfile.mkdtemp()
    socketfile = os.path.join(tmpdir, 'progress')
    sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    sock.bind(socketfile)
    sock.listen(1)

    transcode_command = [
        'ffmpeg', '-y', '-nostdin', '-i', f'{tmpfile}',
        '-progress', f'unix://{socketfile}', '-loglevel', '24'
    ]
    dash_command = [
        'MP4Box', '-dash', f'{dash_size * 1000}', '-rap', '-frag-rap',
        '-min-buffer', '16000', '-profile', 'dashavc264:onDemand',
        '-mpd-title', video.title, '-out', master_playlist
    ]

    tmpfiles = []
    for f in video_formats:
        filename = f'{outdir}/video_{f["width"]}_{f["maxrate"]}.mp4'
        transcode_command.extend([
            '-map', f'0:{video_streamidx}', '-c:v', 'libx264',
            '-x264-params', 'no-scenecut',
            '-profile:v', f['profile'], '-preset:v', f['preset'],
            '-tune:v', video.tune,
            '-keyint_min', f'{keyint}', '-g', f'{keyint}',
            '-sc_threshold', '0', '-bf', '1', '-b_strategy', '0',
            '-crf', f['crf'], '-maxrate', f'{f["maxrate"]}',
            '-bufsize', f'{f["bufsize"]}',
            '-filter', f'scale={f["width"]}:-2',
            '-map_chapters', '-1', filename
        ])
        dash_command.append(filename)
        tmpfiles.append(filename)

    for f in audio_formats:
        filename = f'{outdir}/audio_{f["rate"]}.mp4'
        transcode_command.extend([
            '-map', f'0:{audio_streamidx}', '-c:a', 'aac',
            '-b:a', f['rate'], '-ac', f['channels'],
            '-map_chapters', '-1', filename
        ])
        dash_command.append(filename)
        tmpfiles.append(filename)

    video.encoding_status = 'encoding'
    db_session.commit()

    ffmpeg = multiprocessing.Process(target=run_ffmpeg,
                                     args=(transcode_command, f'{tmpfile}.log'))
    ffmpeg.start()
    connection, client_address = sock.accept()

    percentage = 0
    speed = 0
    try:
        while True:
            data = connection.recv(1024)
            if not data:
                break
            string = data.decode('utf-8')
            for line in string.splitlines():
                # despite the name, out_time_ms is in microseconds
                if line.startswith('out_time_ms'):
                    progress = int(line.split('=')[1]) / 1000000
                    percentage = min((progress / duration) * 100, 100)
                if line.startswith('speed'):
                    speed = float(line.split('=')[1].strip().split('x')[0])
            video.encoding_progress = percentage
            video.encoding_speed = speed
            db_session.commit()
    finally:
        ffmpeg.terminate()
        connection.close()
        shutil.rmtree(tmpdir, ignore_errors=True)

    if percentage < 100:
        video.status = 'error'
        db_session.commit()

    try:
        print("Reencoded file")
        print(f'Executing: {" ".join(dash_command)}')
        output = subprocess.check_call(dash_command, stderr=subprocess.STDOUT)
        print("DASHed file")
        status = 'ready'
    except Exception as e:
        print(output)
        print(e)

    # the per-rendition files are folded into the DASH output; drop them
    for f in tmpfiles:
        os.unlink(f)

    if celery.conf.get('STORAGE_BACKEND') == "S3":
        print("Uploading to S3")
        nthreads = celery.conf.get('S3_UPLOAD_THREADS')
        g = glob.glob(f"{outdir}/*")
        splits = numpy.array_split(g, nthreads)
        threads = list()
        for index in range(nthreads):
            x = threading.Thread(target=s3_upload, args=(splits[index].copy(),))
            threads.append(x)
            x.start()
        for thread in threads:
            thread.join()
        shutil.rmtree(outdir, ignore_errors=True)
        print("Done uploading")

    video.playlist = f'{video_id}/playlist.mpd'
    video.width = vwidth
    video.height = vheight
    video.duration = duration
    video.encoding_status = status
    db_session.commit()
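# run_ffmpeg() is the multiprocessing.Process target above but is not shown in
# this section. A minimal sketch, assuming it simply executes the command and
# writes ffmpeg's output to the given log file (progress itself travels over
# the unix socket, not the log):
def run_ffmpeg(command, logfile):
    with open(logfile, 'wb') as log:
        subprocess.run(command, stdout=log, stderr=subprocess.STDOUT)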