def media_url(f, absolute=True):
    f = shard(f)
    cdn = _cfg("cdn")
    domain = _cfg("domain")
    base = _cfg("protocol") + "://" + domain if len(cdn) == 0 else cdn
    return '%s/%s' % (base, f) if absolute else '/%s' % f
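# Hedged usage sketch (not part of the module): media_url() falls back from the
# CDN to protocol://domain when the "cdn" setting is empty. The helper below
# reproduces only that fallback with stand-in values; shard() and _cfg() are
# assumed to behave as defined elsewhere in the codebase.
def _base_url(protocol, domain, cdn):
    return protocol + "://" + domain if len(cdn) == 0 else cdn

assert _base_url("https", "example.com", "") == "https://example.com"
assert _base_url("https", "example.com", "https://cdn.example.com") == "https://cdn.example.com"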
def _send_file(self, id):
    if ".." in id or id.startswith("/"):
        abort(403)
    if "." in id:
        if os.path.exists(os.path.join(_cfg("storage_folder"), id)):
            # These requests are handled by nginx if it's set up
            path = os.path.join(_cfg("storage_folder"), id)
            return send_file(path, as_attachment=True)
def cdn_url(path):
    request_domain = request.headers["Host"].strip()
    path = shard(path)
    if request_domain != _cfg("incoming_domain").strip():
        return "/" + path
    else:
        return "%s/%s" % (_cfg("protocol") + "://" + _cfg("domain") if _cfg("cdn") == '' else _cfg("cdn"), path)
def _template_params(f):
    # Default to None so the value is always defined, even when f.compression is unset
    compression = None
    if f.compression:
        compression = int(float(f.compression) * 100)
        if compression == 100 or f.status != "done":
            compression = None

    can_delete = None
    try:
        if request.cookies.get("hist-opt-out", "0") == "1":
            can_delete = check_password_hash(f.ip, get_ip())
    except:
        pass

    mimetype = f.mimetype
    processor = get_processor(f.processor)

    types = [mimetype]
    for f_ext in processor.outputs:
        types.append(get_mimetype(f_ext))

    if "do-not-send" in request.cookies:
        try:
            blacklist = json.loads(request.cookies["do-not-send"])
            for t in blacklist:
                if t in types:
                    types.remove(t)
        except:
            pass

    metadata = {}
    if f.metadata and f.metadata != "null":
        metadata = json.loads(f.metadata)

    subtitles = None
    if "subtitles" in metadata and "streams" in metadata["subtitles"]:
        for stream in metadata["subtitles"]["streams"]:
            if stream["type"] == "subtitle":
                subtitles = stream
                if subtitles["info"]["codec_name"] == "ssa":
                    subtitles["info"]["codec_name"] = "ass"
                subtitles["url"] = "/" + f.hash + "." + subtitles["info"]["codec_name"]
                break

    return {
        "filename": f.hash,
        "original": f.original,
        "video": normalise_processor(f.processor) == "video",
        "flags": f.flags.as_dict(),
        "metadata": metadata,
        "subtitles": subtitles,
        "has_subtitles": subtitles is not None,
        "compression": compression,
        "mimetype": mimetype,
        "can_delete": can_delete if can_delete is not None else "check",
        "fragment": "fragments/" + fragment(f.processor) + ".html",
        "types": types,
        "processor": f.processor,
        "protocol": _cfg("protocol"),
        "domain": _cfg("domain"),
    }
def _template_params(f):
    # Default to None so the value is always defined, even when f.compression is unset
    compression = None
    if f.compression:
        compression = int(float(f.compression) * 100)
        if compression == 100 or f.status != "done":
            compression = None

    can_delete = None
    try:
        if request.cookies.get('hist-opt-out', '0') == '1':
            can_delete = check_password_hash(f.ip, get_ip())
    except:
        pass

    mimetype = f.mimetype
    processor = get_processor(f.processor)

    types = [mimetype]
    for f_ext in processor.outputs:
        types.append(get_mimetype(f_ext))

    if 'do-not-send' in request.cookies:
        try:
            blacklist = json.loads(request.cookies['do-not-send'])
            for t in blacklist:
                if t in types:
                    types.remove(t)
        except:
            pass

    metadata = {}
    if f.metadata and f.metadata != 'null':
        metadata = json.loads(f.metadata)

    subtitles = None
    if 'subtitles' in metadata and 'streams' in metadata['subtitles']:
        for stream in metadata['subtitles']['streams']:
            if stream['type'] == 'subtitle':
                subtitles = stream
                if subtitles['info']['codec_name'] == 'ssa':
                    subtitles['info']['codec_name'] = 'ass'
                subtitles['url'] = '/' + f.hash + '.' + subtitles['info']['codec_name']
                break

    return {
        'filename': f.hash,
        'original': f.original,
        'video': normalise_processor(f.processor) == 'video',
        'flags': f.flags.as_dict(),
        'metadata': metadata,
        'subtitles': subtitles,
        'has_subtitles': subtitles is not None,
        'compression': compression,
        'mimetype': mimetype,
        'can_delete': can_delete if can_delete is not None else 'check',
        'fragment': 'fragments/' + fragment(f.processor) + '.html',
        'types': types,
        'processor': f.processor,
        'protocol': _cfg("protocol"),
        'domain': _cfg("domain"),
    }
def _template_params(f):
    # Default to None so the value is always defined, even when f.compression is unset
    compression = None
    if f.compression:
        compression = int(float(f.compression) * 100)
        if compression == 100 or f.status != "done":
            compression = None

    can_delete = None
    try:
        if request.cookies.get('hist-opt-out', '0') == '1':
            can_delete = check_password_hash(f.ip, get_ip())
    except:
        pass

    mimetype = f.mimetype
    processor = get_processor(f.processor)

    types = [mimetype]
    for f_ext in processor.outputs:
        types.append(get_mimetype(f_ext))

    if 'do-not-send' in request.cookies:
        try:
            blacklist = json.loads(request.cookies['do-not-send'])
            for t in blacklist:
                if t in types:
                    types.remove(t)
        except:
            pass

    metadata = {}
    if f.metadata and f.metadata != 'None':
        metadata = json.loads(f.metadata)

    return {
        'filename': f.hash,
        'original': f.original,
        'video': normalise_processor(f.processor) == 'video',
        'flags': f.flags.as_dict(),
        'metadata': metadata,
        'compression': compression,
        'mimetype': mimetype,
        'can_delete': can_delete if can_delete is not None else 'check',
        'fragment': 'fragments/' + fragment(f.processor) + '.html',
        'types': types,
        'processor': f.processor,
        'protocol': _cfg("protocol"),
        'domain': _cfg("domain"),
    }
def __init__(self, tmppath, f, extra):
    self.path = tmppath
    self.output = os.path.join(_cfg("storage_folder"), f.hash)
    self.extra = extra
    self.important = True
    self.f = f
def __init__(self, tmppath, f, processor_state):
    self.path = tmppath
    self.output = os.path.join(_cfg("storage_folder"), f.hash)
    self.processor_state = processor_state
    self.important = True
    self.f = f
def delete_file_storage(hash):
    try:
        for root, dirs, files in os.walk(_cfg("storage_folder")):
            for f in files:
                if f.startswith(hash):
                    try:
                        os.unlink(os.path.join(root, f))
                    except:
                        # It's fine if one or more files are missing - it means that
                        # the processing pipeline might not have got to them.
                        pass
    except:
        pass
def __init__(self, tmppath, f, processor_state, ignore_limit):
    self.path = tmppath
    self.output = os.path.join(_cfg("storage_folder"), f.hash)
    self.processor_state = processor_state
    self.ignore_limit = ignore_limit
    self.important = True
    self.f = f
def init(args):
    folder = _cfg("storage_folder")
    sharding_level = _cfgi("sharding")
    for i in range(64 ** sharding_level):
        try:
            os.mkdir(os.path.join(folder, int2base(i, 64)))
        except OSError as e:
            print(e)
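# Rough sketch of how the shard directory count scales with the "sharding"
# setting used by init(); the directory names themselves come from
# int2base(i, 64), whose alphabet is defined elsewhere and only assumed here.
for level in range(1, 4):
    print("sharding =", level, "->", 64 ** level, "directories")
# sharding = 1 -> 64 directories
# sharding = 2 -> 4096 directories
# sharding = 3 -> 262144 directories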
def _template_params(f):
    # Default to None so the value is always defined, even when f.compression is unset
    compression = None
    if f.compression:
        compression = int(float(f.compression) * 100)
        if compression == 100 or f.status != "done":
            compression = None

    can_delete = None
    try:
        if request.cookies.get('hist-opt-out', '0') == '1':
            can_delete = check_password_hash(f.ip, get_ip())
    except:
        pass

    mimetype = f.mimetype
    processor = get_processor(f.processor)

    types = [mimetype]
    for f_ext in processor.outputs:
        types.append(get_mimetype(f_ext))

    if 'do-not-send' in request.cookies:
        try:
            blacklist = json.loads(request.cookies['do-not-send'])
            for t in blacklist:
                if t in types:
                    types.remove(t)
        except:
            pass

    return {
        'filename': f.hash,
        'original': f.original,
        'video': mimetype in VIDEO_FORMATS,
        'loop': mimetype in LOOP_FORMATS,
        'autoplay': mimetype in AUTOPLAY_FORMATS,
        'compression': compression,
        'mimetype': mimetype,
        'can_delete': can_delete if can_delete is not None else 'check',
        'fragment': 'fragments/' + fragment(f.processor) + '.html',
        'types': types,
        'processor': f.processor,
        'protocol': _cfg("protocol"),
        'domain': _cfg("domain"),
    }
def prepare():
    if os.path.exists(app.static_folder):
        rmtree(app.static_folder)
    os.makedirs(app.static_folder)

    compiler = scss.Scss(scss_opts={'style': 'compressed'})

    # Compile styles (scss)
    d = os.walk('styles')
    for f in list(d)[0][2]:
        if extension(f) == "scss":
            with open(os.path.join('styles', f)) as r:
                output = compiler.compile(r.read())

            parts = f.rsplit('.')
            css = '.'.join(parts[:-1]) + ".css"

            with open(os.path.join(app.static_folder, css), "w") as w:
                w.write(output)
                w.flush()

    copy = ['images', 'scripts']
    preprocess = ['scripts/view.js', 'scripts/mediacrush.js']

    # Copy images, preprocess some JS files
    for folder in copy:
        for f in list(os.walk(folder))[0][2]:
            outputpath = os.path.join(app.static_folder, os.path.basename(f))
            inputpath = os.path.join(folder, f)
            if inputpath in preprocess:
                with open(inputpath) as r:
                    # Using Jinja here is overkill
                    output = r.read()
                    output = output.replace("{{ protocol }}", _cfg("protocol"))
                    output = output.replace("{{ domain }}", _cfg("domain"))
                with open(outputpath, "w") as w:
                    w.write(output)
                    w.flush()
            else:
                copyfile(inputpath, outputpath)
def get_maxsize():
    size = _cfg("max_file_size")
    symbols = ("B", "K", "M", "G", "T", "P", "E", "Z", "Y")
    letter = size[-1:].strip().upper()
    num = size[:-1]
    assert num.isdigit() and letter in symbols
    num = float(num)
    prefix = {symbols[0]: 1}
    for i, size in enumerate(symbols[1:]):
        prefix[size] = 1 << (i + 1) * 10
    return int(num * prefix[letter])
def get_maxsize():
    size = _cfg("max_file_size")
    symbols = ('B', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')
    letter = size[-1:].strip().upper()
    num = size[:-1]
    assert num.isdigit() and letter in symbols
    num = float(num)
    prefix = {symbols[0]: 1}
    for i, size in enumerate(symbols[1:]):
        prefix[size] = 1 << (i + 1) * 10
    return int(num * prefix[letter])
def static_file(self, p, ext):
    f = p + "." + ext
    if p.startswith("static/"):
        return self._send_file(f.split("/")[1], base=current_app.static_folder)
    elif p.startswith("download/"):
        f = "{}.{}".format(p.split(os.path.sep)[-1], ext)
        return self._send_file(
            f,
            as_attachment=True,
            base=os.path.join(current_app.root_path, os.pardir, _cfg("storage_folder")),
        )
    else:
        return self._send_file(
            f,
            base=os.path.join(current_app.root_path, os.pardir, _cfg("storage_folder")),
        )
def get_maxsize():
    size = _cfg("max_file_size")
    symbols = ('B', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')
    letter = size[-1:].strip().upper()
    num = size[:-1]
    assert num.isdigit() and letter in symbols
    num = float(num)
    prefix = {symbols[0]: 1}
    for i, size in enumerate(symbols[1:]):
        prefix[size] = 1 << (i + 1) * 10
    return int(num * prefix[letter])
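# Hedged sketch of the suffix arithmetic behind get_maxsize(): each step up the
# symbol list multiplies by 1024 (1 << 10). The "50M" and "2G" inputs below are
# illustrative only, not the project's actual max_file_size setting.
def _parse_size(size):
    symbols = ('B', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')
    letter = size[-1:].strip().upper()
    num = float(size[:-1])
    prefix = {sym: 1 << (i * 10) for i, sym in enumerate(symbols)}
    return int(num * prefix[letter])

assert _parse_size("50M") == 50 * (1 << 20)   # 52428800 bytes
assert _parse_size("2G") == 2 * (1 << 30)     # 2147483648 bytes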
def migrate(args):
    base = _cfg("storage_folder")
    for f in os.listdir(base):
        path = os.path.join(base, f)
        if os.path.isfile(path):
            newpath = os.path.join(base, shard(f))
            try:
                print("Moving " + path + " into " + newpath)
                os.rename(path, newpath)
            except:
                print("Move failed")
def _send_file(self, id, base=_cfg("storage_folder"), as_attachment=False):
    if ".." in id or id.startswith("/"):
        abort(403)
    if "." in id:
        path = os.path.join(base, id)
        if os.path.exists(path):
            return send_file(path, as_attachment=as_attachment, mimetype=get_mimetype(path))
        else:
            return abort(404)
    return False
def _send_file(self, id, base=_cfg("storage_folder")):
    if ".." in id or id.startswith("/"):
        abort(403)
    if "." in id:
        path = os.path.join(base, id)
        if os.path.exists(path):
            # These requests are handled by nginx if it's set up
            return send_file(path, as_attachment=True)
        else:
            return abort(404)
    return False
def migrate(args):
    base = _cfg("storage_folder")
    for f in os.listdir(base):
        path = os.path.join(base, f)
        if os.path.isfile(path):
            newpath = os.path.join(base, shard(f))
            try:
                print("Moving " + path + " into " + newpath)
                os.rename(path, newpath)
            except:
                print("Move failed")
def download(self, url):
    r = requests.get(url, stream=True)
    length = r.headers["content-length"]
    if not length.isdigit() or int(length) > get_maxsize():
        raise FileTooBig("The file was larger than " + _cfg("max_file_size"))

    for i, chunk in enumerate(r.iter_content(chunk_size=1024)):
        if i > get_maxsize() / 1024:
            # Evil servers may send more than Content-Length bytes
            # As of 54541a9, python-requests keeps reading indefinitely
            raise FileTooBig("The file was larger than " + _cfg("max_file_size"))
        self.f.write(chunk)
        self.f.flush()

    if r.status_code == 404:
        return False

    parsed_url = urlparse(url)
    self.filename = list(reversed(parsed_url.path.split("/")))[0]

    if "content-type" in r.headers:
        self.content_type = r.headers['content-type']
        ext = mimetypes.guess_extension(self.content_type)
        if ext:
            self.filename = self.filename + ext

    if "content-disposition" in r.headers:
        disposition = r.headers['content-disposition']
        parts = disposition.split(';')
        if len(parts) > 1:
            self.filename = parts[1].strip(' ')
            self.filename = self.filename[self.filename.find('=') + 1:].strip(' ')
            self.filename = ''.join(
                [c for c in self.filename if c.isalpha() or c == '.'])

    return True
def inject():
    if is_tor():
        cdn = _cfg("tor_domain")

    ads = True
    if "ad-opt-out" in request.cookies:
        ads = False
    if g.do_not_track:
        ads = False
    if not _cfg("project_wonderful_id"):
        ads = False

    return {
        "mobile": g.mobile,
        "ua_platform": request.user_agent.platform,
        "analytics_id": _cfg("google_analytics_id"),
        "analytics_domain": _cfg("google_analytics_domain"),
        "dwolla_id": _cfg("dwolla_id"),
        "coinbase_id": _cfg("coinbase_id"),
        "flattr_id": _cfg("flattr_id"),
        "dark_theme": "dark_theme" in request.cookies,
        "ads": ads,
        "ad_id": _cfg("project_wonderful_id"),
        "notice_text": notice_text,
        "notice_enabled": notice_enabled,
        "share": share,
        "render_media": render_media,
        "len": len,
        "str": str,
        "get_mimetype": get_mimetype,
        "cdn_url": cdn_url,
        "is_tor": is_tor(),
        "ip": get_ip(),
        "media_url": media_url,
        "root": _cfg("protocol") + "://" + _cfg("domain"),
        "random": random,
        "shard": shard,
        "max_file_size": get_maxsize(),
    }
def inject():
    if is_tor():
        cdn = _cfg("tor_domain")

    ads = True
    if 'ad-opt-out' in request.cookies:
        ads = False
    if g.do_not_track:
        ads = False
    if not _cfg("project_wonderful_id"):
        ads = False

    return {
        'mobile': g.mobile,
        'ua_platform': request.user_agent.platform,
        'analytics_id': _cfg("google_analytics_id"),
        'analytics_domain': _cfg("google_analytics_domain"),
        'dwolla_id': _cfg("dwolla_id"),
        'coinbase_id': _cfg("coinbase_id"),
        'flattr_id': _cfg("flattr_id"),
        'dark_theme': "dark_theme" in request.cookies,
        'ads': ads,
        'ad_id': _cfg("project_wonderful_id"),
        'notice_text': notice_text,
        'notice_enabled': notice_enabled,
        'share': share,
        'render_media': render_media,
        'len': len,
        'str': str,
        'get_mimetype': get_mimetype,
        'cdn_url': cdn_url,
        'is_tor': is_tor(),
        'ip': get_ip(),
        'media_url': media_url,
        'root': _cfg("protocol") + "://" + _cfg("domain"),
        'random': random,
        'shard': shard,
        'max_file_size': get_maxsize()
    }
def inject():
    mobile = request.user_agent.platform in ['android', 'iphone', 'ipad']
    return {
        'mobile': mobile,
        'analytics_id': _cfg("google_analytics_id"),
        'analytics_domain': _cfg("google_analytics_domain"),
        'dwolla_id': _cfg("dwolla_id"),
        'coinbase_id': _cfg("coinbase_id"),
        'flattr_id': _cfg("flattr_id"),
        'adsense_client': _cfg("adsense_client"),
        'adsense_slot': _cfg("adsense_slot")
    }
def inject():
    if is_tor():
        cdn = _cfg("tor_domain")

    ads = True
    if "ad-opt-out" in request.cookies:
        ads = False
    if g.do_not_track:
        ads = False
    if not _cfg("project_wonderful_id"):
        ads = False

    return {
        "mobile": g.mobile,
        "ua_platform": request.user_agent.platform,
        "analytics_id": _cfg("google_analytics_id"),
        "analytics_domain": _cfg("google_analytics_domain"),
        "dwolla_id": _cfg("dwolla_id"),
        "coinbase_id": _cfg("coinbase_id"),
        "flattr_id": _cfg("flattr_id"),
        "ads": ads,
        "ad_id": _cfg("project_wonderful_id"),
        "notice_text": notice_text,
        "notice_enabled": notice_enabled,
        "share": share,
        "render_media": render_media,
        "len": len,
        "str": str,
        "get_mimetype": get_mimetype,
        "cdn_url": cdn_url,
        "is_tor": is_tor(),
        "ip": get_ip(),
        "media_url": media_url,
        "root": _cfg("protocol") + "://" + _cfg("domain"),
        "random": random,
        "shard": shard,
        "max_file_size": get_maxsize(),
    }
def download(self, url):
    r = requests.get(url, stream=True)
    length = r.headers["content-length"]
    if not length.isdigit() or int(length) > get_maxsize():
        raise FileTooBig("The file was larger than " + _cfg("max_file_size"))

    for i, chunk in enumerate(r.iter_content(chunk_size=1024)):
        if i > get_maxsize() / 1024:
            # Evil servers may send more than Content-Length bytes
            # As of 54541a9, python-requests keeps reading indefinitely
            raise FileTooBig("The file was larger than " + _cfg("max_file_size"))
        self.f.write(chunk)
        self.f.flush()

    if r.status_code == 404:
        return False

    parsed_url = urlparse(url)
    self.filename = list(reversed(parsed_url.path.split("/")))[0]

    if "content-type" in r.headers:
        self.content_type = r.headers['content-type']
        ext = mimetypes.guess_extension(self.content_type)
        if ext:
            self.filename = self.filename + ext

    if "content-disposition" in r.headers:
        disposition = r.headers['content-disposition']
        parts = disposition.split(';')
        if len(parts) > 1:
            self.filename = parts[1].strip(' ')
            self.filename = self.filename[self.filename.find('=') + 1:].strip(' ')
            self.filename = ''.join([c for c in self.filename if c.isalpha() or c == '.'])

    return True
def inject():
    return {
        "mobile": g.mobile,
        "analytics_id": _cfg("google_analytics_id"),
        "analytics_domain": _cfg("google_analytics_domain"),
        "dwolla_id": _cfg("dwolla_id"),
        "coinbase_id": _cfg("coinbase_id"),
        "flattr_id": _cfg("flattr_id"),
        "adsense_client": _cfg("adsense_client"),
        "adsense_slot": _cfg("adsense_slot"),
        "dark_theme": "dark_theme" in request.cookies,
        "ads": not "ad-opt-out" in request.cookies,
        "share": share,
    }
def inject():
    return {
        'mobile': g.mobile,
        'analytics_id': _cfg("google_analytics_id"),
        'analytics_domain': _cfg("google_analytics_domain"),
        'dwolla_id': _cfg("dwolla_id"),
        'coinbase_id': _cfg("coinbase_id"),
        'flattr_id': _cfg("flattr_id"),
        'adsense_client': _cfg("adsense_client"),
        'adsense_slot': _cfg("adsense_slot"),
        'dark_theme': "dark_theme" in request.cookies,
        'ads': not "ad-opt-out" in request.cookies,
        'notice_text': notice_text,
        'notice_enabled': notice_enabled,
        'share': share,
        'render_media': render_media,
        'type_files': type_files,
        'len': len
    }
from flask import Flask, render_template, request, g
from flaskext.bcrypt import Bcrypt
import traceback

from mediacrush.views import UploadView, HookView, APIView, ImageView
from mediacrush.config import _cfg

app = Flask(__name__)
app.secret_key = _cfg("secret_key")
bcrypt = Bcrypt(app)

@app.before_request
def find_dnt():
    field = "Dnt"
    do_not_track = False
    if field in request.headers:
        do_not_track = True if request.headers[field] == "1" else False
    g.do_not_track = do_not_track

@app.errorhandler(404)
def not_found(e):
    return render_template("error.html", error="File not found."), 404

@app.errorhandler(Exception)
def exception_catch_all(e):
    traceback.print_exc()
    return render_template("error.html", error=repr(e)), 500

@app.context_processor
def inject():
    cdn = _cfg("cdn")
    if is_tor():
        cdn = _cfg("tor_domain")

    return {
        "mobile": g.mobile,
        "analytics_id": _cfg("google_analytics_id"),
        "analytics_domain": _cfg("google_analytics_domain"),
        "dwolla_id": _cfg("dwolla_id"),
        "coinbase_id": _cfg("coinbase_id"),
        "flattr_id": _cfg("flattr_id"),
        "adsense_client": _cfg("adsense_client"),
        "adsense_slot": _cfg("adsense_slot"),
        "dark_theme": "dark_theme" in request.cookies,
        "ads": not "ad-opt-out" in request.cookies,
        "notice_text": notice_text,
        "notice_enabled": notice_enabled,
        "share": share,
        "render_media": render_media,
        "len": len,
        "str": str,
        "get_mimetype": get_mimetype,
        "cdn": cdn,
        "is_tor": is_tor(),
        "ip": get_ip(),
        "media_url": media_url,
        "root": _cfg("protocol") + "://" + _cfg("domain"),
    }
def file_storage(f):
    return os.path.join(_cfg("storage_folder"), f)
def tor_redirect(path):
    if is_tor():
        return redirect(_cfg("tor_domain") + '/' + path)
    return redirect(path)
def prepare():
    path = tempfile.mkdtemp()
    compiler = scss.Scss(scss_opts={'style': 'compressed' if not app.debug else None})

    # Unsafe extension function used only here
    extension = lambda f: f.rsplit('.', 1)[1].lower()

    # Compile styles (scss)
    d = os.walk('styles')
    for f in list(d)[0][2]:
        if extension(f) == "scss":
            print("[scss] %s" % f)
            with open(os.path.join('styles', f)) as r:
                output = compiler.compile(r.read())

            parts = f.rsplit('.')
            css = '.'.join(parts[:-1]) + ".css"

            with open(os.path.join(path, css), "w") as w:
                w.write(output)
                w.flush()

    # Compile scripts (coffeescript)
    d = os.walk('scripts')
    preprocess = ['scripts/mediacrush.js']
    for f in list(d)[0][2]:
        outputpath = os.path.join(path, os.path.basename(f))
        inputpath = os.path.join('scripts', f)
        if extension(f) == "js":
            if inputpath in preprocess:
                with open(inputpath) as r:
                    output = r.read().decode("utf-8")
                    output = output.replace("{{ protocol }}", _cfg("protocol"))
                    output = output.replace("{{ domain }}", _cfg("domain"))
                with open(outputpath, "w") as w:
                    w.write(output.encode("utf-8"))
                    w.flush()
            else:
                copyfile(inputpath, outputpath)
        elif extension(f) == "manifest":
            with open(inputpath) as r:
                manifest = r.read().decode("utf-8").split('\n')
            javascript = ''
            for script in manifest:
                script = script.strip(' ')
                if script == '' or script.startswith('#'):
                    continue
                bare = False
                if script.startswith('bare: '):
                    bare = True
                    script = script[6:]
                print("[coffee] %s" % script)
                with open(os.path.join('scripts', script)) as r:
                    coffee = r.read()
                    if script.endswith('.js'):
                        javascript += coffee  # straight up copy
                    else:
                        javascript += coffeescript.compile(coffee, bare=bare)

            output = '.'.join(f.rsplit('.')[:-1]) + '.js'
            if not app.debug:
                javascript = minify(javascript)
            with open(os.path.join(path, output), "w") as w:
                w.write(javascript.encode("utf-8"))
                w.flush()

    if os.path.exists(app.static_folder):
        rmtree(app.static_folder)
    os.makedirs(app.static_folder)

    d = os.walk(path)
    for f in list(d)[0][2]:
        inputpath = os.path.join(path, os.path.basename(f))
        outputpath = os.path.join(app.static_folder, f)
        copyfile(inputpath, outputpath)

    d = os.walk('images')
    for f in list(d)[0][2]:
        outputpath = os.path.join(app.static_folder, os.path.basename(f))
        inputpath = os.path.join('images', f)
        copyfile(inputpath, outputpath)
def domain_url(path):
    if is_tor():
        return "%s/%s" % (_cfg("tor_domain"), path)
    return "%s://%s/%s" % (_cfg("protocol"), _cfg("domain"), path)
from mediacrush.config import _cfg

from celery.app import Celery
from celery.utils.log import get_task_logger
from celery import chord, signature

redis_connection = "redis://%s:%s/1" % (_cfg("redis-ip"), _cfg("redis-port"))

app = Celery(
    "proj",
    broker=redis_connection,
    backend=redis_connection,
    include=["mediacrush.tasks"],
)

# Optional configuration, see the application user guide.
app.conf.update(
    CELERY_ACCEPT_CONTENT=["json"],
    CELERY_TASK_SERIALIZER="json",
    CELERY_RESULT_SERIALIZER="json",
    CELERY_CHORD_PROPAGATES=False,
    CELERY_ROUTES={"mediacrush.tasks.process_file": {"queue": "priority"}},
)

if __name__ == "__main__":
    app.start()
def prepare():
    path = tempfile.mkdtemp()
    compiler = scss.Scss(scss_opts={'style': 'compressed' if not app.debug else None})

    # Compile styles (scss)
    d = os.walk('styles')
    for f in list(d)[0][2]:
        if extension(f) == "scss":
            print("[scss] %s" % f)
            with open(os.path.join('styles', f)) as r:
                output = compiler.compile(r.read())

            parts = f.rsplit('.')
            css = '.'.join(parts[:-1]) + ".css"

            with open(os.path.join(path, css), "w") as w:
                w.write(output)
                w.flush()

    # Compile scripts (coffeescript)
    d = os.walk('scripts')
    preprocess = ['scripts/mediacrush.js']
    for f in list(d)[0][2]:
        outputpath = os.path.join(path, os.path.basename(f))
        inputpath = os.path.join('scripts', f)
        if extension(f) == "js":
            if inputpath in preprocess:
                with open(inputpath, "rb") as r:
                    output = r.read().decode("utf-8")
                    output = output.replace("{{ protocol }}", _cfg("protocol"))
                    output = output.replace("{{ domain }}", _cfg("domain"))
                with open(outputpath, "wb") as w:
                    w.write(output.encode("utf-8"))
                    w.flush()
            else:
                copyfile(inputpath, outputpath)
        elif extension(f) == "manifest":
            with open(inputpath, "rb") as r:
                manifest = r.read().decode("utf-8").split('\n')
            javascript = ''
            for script in manifest:
                script = script.strip(' ')
                if script == '' or script.startswith('#'):
                    continue
                bare = False
                if script.startswith('bare: '):
                    bare = True
                    script = script[6:]
                print("[coffee] %s" % script)
                with open(os.path.join('scripts', script)) as r:
                    coffee = r.read()
                    if script.endswith('.js'):
                        javascript += coffee  # straight up copy
                    else:
                        javascript += coffeescript.compile(coffee, bare=bare)

            output = '.'.join(f.rsplit('.')[:-1]) + '.js'
            if not app.debug:
                # FIXME https://github.com/rspivak/slimit/issues/64
                if sys.version_info.major == 3:
                    sys.stderr.write("WARNING: Minifying is not supported on Python 3 yet\n")
                else:
                    javascript = minify(javascript)
            with open(os.path.join(path, output), "wb") as w:
                w.write(javascript.encode("utf-8"))
                w.flush()

    if os.path.exists(app.static_folder):
        rmtree(app.static_folder)
    os.makedirs(app.static_folder)

    d = os.walk(path)
    for f in list(d)[0][2]:
        inputpath = os.path.join(path, os.path.basename(f))
        outputpath = os.path.join(app.static_folder, f)
        copyfile(inputpath, outputpath)

    d = os.walk('images')
    for f in list(d)[0][2]:
        outputpath = os.path.join(app.static_folder, os.path.basename(f))
        inputpath = os.path.join('images', f)
        copyfile(inputpath, outputpath)
def _file_entry(f, mimetype=None):
    return {
        'type': mimetype if mimetype else get_mimetype(f),
        'file': media_url(f),
        'url': _cfg("cdn") + media_url(f)
    }
def get_ip():
    ip = request.remote_addr
    if (all_matching_cidrs(ip, [_cfg("trusted_proxies")]) != []) and "X-Forwarded-For" in request.headers:
        ip = request.headers.get("X-Forwarded-For")
    return ip
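# Hedged sketch of the trusted-proxy check in get_ip(), using netaddr's
# all_matching_cidrs(); "10.0.0.0/8" stands in for whatever trusted_proxies is
# configured to in a real deployment.
from netaddr import all_matching_cidrs

print(all_matching_cidrs("10.1.2.3", ["10.0.0.0/8"]))     # [IPNetwork('10.0.0.0/8')] -> proxy is trusted
print(all_matching_cidrs("203.0.113.9", ["10.0.0.0/8"]))  # [] -> untrusted, X-Forwarded-For is ignored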
def prepare():
    path = tempfile.mkdtemp()
    compiler = scss.Scss(scss_opts={'style': 'compressed' if not app.debug else None})

    # Compile styles (scss)
    d = os.walk('styles')
    for f in list(d)[0][2]:
        if extension(f) == "scss":
            print("[scss] %s" % f)
            with open(os.path.join('styles', f)) as r:
                output = compiler.compile(r.read())

            parts = f.rsplit('.')
            css = '.'.join(parts[:-1]) + ".css"

            with open(os.path.join(path, css), "w") as w:
                w.write(output)
                w.flush()

    # Compile scripts (coffeescript)
    d = os.walk('scripts')
    preprocess = ['scripts/mediacrush.js']
    for f in list(d)[0][2]:
        outputpath = os.path.join(path, os.path.basename(f))
        inputpath = os.path.join('scripts', f)
        if extension(f) == "js":
            if inputpath in preprocess:
                with open(inputpath, "rb") as r:
                    output = r.read().decode("utf-8")
                    output = output.replace("{{ protocol }}", _cfg("protocol"))
                    output = output.replace("{{ domain }}", _cfg("domain"))
                with open(outputpath, "wb") as w:
                    w.write(output.encode("utf-8"))
                    w.flush()
            else:
                copyfile(inputpath, outputpath)
        elif extension(f) == "manifest":
            with open(inputpath, "rb") as r:
                manifest = r.read().decode("utf-8").split('\n')
            javascript = ''
            for script in manifest:
                script = script.strip(' ')
                if script == '' or script.startswith('#'):
                    continue
                bare = False
                if script.startswith('bare: '):
                    bare = True
                    script = script[6:]
                print("[coffee] %s" % script)
                with open(os.path.join('scripts', script)) as r:
                    coffee = r.read()
                    if script.endswith('.js'):
                        javascript += coffee  # straight up copy
                    else:
                        javascript += coffeescript.compile(coffee, bare=bare)

            output = '.'.join(f.rsplit('.')[:-1]) + '.js'
            if not app.debug:
                # FIXME https://github.com/rspivak/slimit/issues/64
                if sys.version_info.major == 3:
                    sys.stderr.write("WARNING: Minifying is not supported on Python 3 yet\n")
                else:
                    javascript = minify(javascript)
            with open(os.path.join(path, output), "wb") as w:
                w.write(javascript.encode("utf-8"))
                w.flush()

    if os.path.exists(app.static_folder):
        rmtree(app.static_folder)
    os.makedirs(app.static_folder)

    d = os.walk(path)
    for f in list(d)[0][2]:
        inputpath = os.path.join(path, os.path.basename(f))
        outputpath = os.path.join(app.static_folder, f)
        copyfile(inputpath, outputpath)

    d = os.walk('images')
    for f in list(d)[0][2]:
        outputpath = os.path.join(app.static_folder, os.path.basename(f))
        inputpath = os.path.join('images', f)
        copyfile(inputpath, outputpath)
def compile_if_debug():
    if app.debug and _cfg("debug-static-recompile") == 'true':
        prepare()
def prepare():
    if os.path.exists(app.static_folder):
        rmtree(app.static_folder)
    os.makedirs(app.static_folder)

    compiler = scss.Scss(scss_opts={'style': 'compressed' if not app.debug else None})

    # Compile styles (scss)
    d = os.walk('styles')
    for f in list(d)[0][2]:
        if extension(f) == "scss":
            with open(os.path.join('styles', f)) as r:
                output = compiler.compile(r.read())

            parts = f.rsplit('.')
            css = '.'.join(parts[:-1]) + ".css"

            with open(os.path.join(app.static_folder, css), "w") as w:
                w.write(output)
                w.flush()

    # Compile scripts (coffeescript)
    d = os.walk('scripts')
    preprocess = ['scripts/mediacrush.js']
    for f in list(d)[0][2]:
        outputpath = os.path.join(app.static_folder, os.path.basename(f))
        inputpath = os.path.join('scripts', f)
        if extension(f) == "js":
            if inputpath in preprocess:
                with open(inputpath) as r:
                    output = r.read().decode("utf-8")
                    output = output.replace("{{ protocol }}", _cfg("protocol"))
                    output = output.replace("{{ domain }}", _cfg("domain"))
                with open(outputpath, "w") as w:
                    w.write(output.encode("utf-8"))
                    w.flush()
            else:
                copyfile(inputpath, outputpath)
        elif extension(f) == "manifest":
            with open(inputpath) as r:
                manifest = r.read().decode("utf-8").split('\n')
            javascript = ''
            for script in manifest:
                script = script.strip(' ')
                if script == '' or script.startswith('#'):
                    continue
                bare = False
                if script.startswith('bare: '):
                    bare = True
                    script = script[6:]
                with open(os.path.join('scripts', script)) as r:
                    coffee = r.read()
                    if script.endswith('.js'):
                        javascript += coffee  # straight up copy
                    else:
                        javascript += coffeescript.compile(coffee, bare=bare)

            output = '.'.join(f.rsplit('.')[:-1]) + '.js'
            if not app.debug:
                javascript = minify(javascript)
            with open(os.path.join(app.static_folder, output), "w") as w:
                w.write(javascript.encode("utf-8"))
                w.flush()

    d = os.walk('images')
    for f in list(d)[0][2]:
        outputpath = os.path.join(app.static_folder, os.path.basename(f))
        inputpath = os.path.join('images', f)
        copyfile(inputpath, outputpath)
from mediacrush.app import app
from mediacrush.config import _cfg, _cfgi

import os

app.static_folder = os.path.join(os.getcwd(), "static")

if __name__ == '__main__':
    app.run(host=_cfg("debug-host"), port=_cfgi('debug-port'), debug=True)
def mediacrushjs():
    v = render_template("mediacrush.js", host=_cfg("domain"))
    return Response(v, mimetype="application/javascript")
def _template_params(f):
    # Default to None so the value is always defined, even when f.compression is unset
    compression = None
    if f.compression:
        compression = int(float(f.compression) * 100)
        if compression == 100 or f.status != "done":
            compression = None

    can_delete = None
    try:
        if request.cookies.get('hist-opt-out', '0') == '1':
            can_delete = check_password_hash(f.ip, get_ip())
    except:
        pass

    mimetype = f.mimetype
    processor = get_processor(f.processor)

    types = [mimetype]
    for f_ext in processor.outputs:
        types.append(get_mimetype(f_ext))

    if 'do-not-send' in request.cookies:
        try:
            blacklist = json.loads(request.cookies['do-not-send'])
            for t in blacklist:
                if t in types:
                    types.remove(t)
        except:
            pass

    metadata = {}
    if f.metadata and f.metadata != 'null':
        try:
            metadata = json.loads(f.metadata)
        except:
            pass

    subtitles = None
    if 'subtitles' in metadata and 'streams' in metadata['subtitles']:
        for stream in metadata['subtitles']['streams']:
            if stream['type'] == 'subtitle':
                subtitles = stream
                if subtitles['info']['codec_name'] == 'ssa':
                    subtitles['info']['codec_name'] = 'ass'
                subtitles['url'] = '/' + f.hash + '.' + subtitles['info']['codec_name']
                break

    if f.description:
        f.description = slimdown.convert(f.description)

    return {
        'filename': f.hash,
        'original': f.original,
        'video': normalise_processor(f.processor) == 'video',
        'flags': f.flags.as_dict(),
        'metadata': metadata,
        'subtitles': subtitles,
        'has_subtitles': subtitles is not None,
        'compression': compression,
        'mimetype': mimetype,
        'can_delete': can_delete if can_delete is not None else 'check',
        'fragment': 'fragments/' + fragment(f.processor) + '.html',
        'types': types,
        'processor': f.processor,
        'protocol': _cfg("protocol"),
        'domain': _cfg("domain"),
        'file': f
    }
def sync(self):
    self._execute(copy)

    map_string = ''
    filter_string = 'scale=trunc(in_w/2)*2:trunc(in_h/2)*2'
    if self.processor_state['has_video']:
        self._execute("ffmpeg -y -i {0} -vframes 1 -map 0:v:0 {1}.jpg")
        map_string += ' -map 0:v:0'
    if self.processor_state['has_audio']:
        map_string += ' -map 0:a:0'
    if 'interlaced' in self.processor_state:
        print("WARNING: Detected interlacing on " + self.output)
        filter_string = 'yadif,' + filter_string

    self._execute("ffmpeg -y -i {0} -vcodec libx264 -acodec libfdk_aac -pix_fmt yuv420p -profile:v baseline -preset slower -crf 18 -vf "
                  + filter_string + map_string + " {1}.mp4")

    skip_webm = False
    for s in self.processor_state['streams']:
        if 'info' in s:
            if 'video_codec' in s['info'] and s['info']['video_codec'] == 'vp8':
                skip_webm = True
    if not skip_webm:
        self._execute("ffmpeg -y -i {0} -c:v libvpx -c:a libvorbis -pix_fmt yuv420p -quality good -b:v 5M -crf 5 -vf "
                      + filter_string + map_string + " {1}.webm")

    # Extract extra streams if present
    fonts = []
    extract_fonts = False
    if 'has_fonts' in self.processor_state and 'has_subtitles' in self.processor_state:
        if self.processor_state['has_fonts'] or self.processor_state['has_subtitles']:
            for stream in self.processor_state['streams']:
                if stream['type'] == 'font':
                    ext = _extension(stream["info"])
                    if ext in ['ttf', 'otf']:
                        # Note that ffmpeg returns a nonzero exit code when dumping attachments because there's technically no output file
                        # -dump_attachment is a mechanism completely removed from the rest of the ffmpeg workflow
                        self._execute("ffmpeg -y -dump_attachment:" + str(stream["index"])
                                      + ' {1}_attachment_' + str(len(fonts)) + '.' + ext + ' -i {0}',
                                      ignoreNonZero=True)
                        fonts.append(stream)
                elif stream['type'] == 'subtitle' and 'info' in stream:
                    extension = None
                    if stream['info']['codec_name'] == 'ssa':
                        extension = '.ass'
                        extract_fonts = True
                    elif stream['info']['codec_name'] == 'srt':
                        extension = '.srt'
                    elif stream['info']['codec_name'] == 'vtt':
                        extension = '.vtt'
                    if extension != None:
                        self._execute("ffmpeg -y -i {0} -map 0:s:0 {1}" + extension)
                        if extension == '.srt':
                            # convert to vtt
                            vtt = convert_to_vtt(os.path.join(_cfg("storage_folder"), '%s.srt' % self.f.hash))
                            with open(os.path.join(_cfg("storage_folder"), '%s.vtt' % self.f.hash), 'w') as f:
                                f.write(vtt)
                            os.remove(os.path.join(_cfg("storage_folder"), '%s.srt' % self.f.hash))

    if extract_fonts:
        # Examine font files and construct some CSS to import them
        css = ''
        i = 0
        for font in fonts:
            ext = _extension(font['info'])
            if not ext in ['ttf', 'otf']:
                continue
            command = Invocation('otfinfo --info {0}')
            command(os.path.join(_cfg("storage_folder"),
                                 '%s_attachment_%s.%s' % (self.f.hash, i, _extension(font['info']))))
            command.run()
            output = command.stdout[0].split('\n')
            family = None
            subfamily = None
            for line in output:
                if line.startswith('Family:'):
                    family = line[7:].strip(' \t')
                if line.startswith('Subfamily:'):
                    subfamily = line[10:].strip(' \t')
            css += '@font-face{font-family: "%s";' % family
            css += 'src:url("/%s_attachment_%s.%s");' % (self.f.hash, i, _extension(font['info']))
            if subfamily == 'SemiBold':
                css += 'font-weight: 600;'
            elif subfamily == 'Bold':
                css += 'font-weight: bold;'
            elif subfamily == 'Italic':
                css += 'font-style: italic;'
            css += '}'
            i += 1
        css_file = open(os.path.join(_cfg("storage_folder"), '%s_fonts.css' % self.f.hash), 'w')
        css_file.write(css)
        css_file.close()
def sync(self):
    self._execute(copy)

    map_string = ""
    filter_string = "scale=trunc(in_w/2)*2:trunc(in_h/2)*2"
    if self.processor_state["has_video"]:
        self._execute("ffmpeg -y -i {0} -vframes 1 -map 0:v:0 {1}.jpg")
        map_string += " -map 0:v:0"
    if self.processor_state["has_audio"]:
        map_string += " -map 0:a:0"
    if "interlaced" in self.processor_state:
        print("WARNING: Detected interlacing on " + self.output)
        filter_string = "yadif," + filter_string

    self._execute(
        "ffmpeg -y -i {0} -vcodec libx264 -acodec libfdk_aac -movflags faststart -pix_fmt yuv420p -profile:v baseline -level 3.0 -preset slower -crf 18 -vf "
        + filter_string + map_string + " {1}.mp4"
    )

    skip_webm = False
    for s in self.processor_state["streams"]:
        if "info" in s:
            if "video_codec" in s["info"] and s["info"]["video_codec"] == "vp8":
                skip_webm = True
    if not skip_webm:
        self._execute(
            "ffmpeg -y -i {0} -c:v libvpx -c:a libvorbis -q:a 5 -pix_fmt yuv420p -quality good -b:v 5M -crf 5 -vf "
            + filter_string + map_string + " {1}.webm"
        )

    # Extract extra streams if present
    fonts = []
    extract_fonts = False
    if "has_fonts" in self.processor_state and "has_subtitles" in self.processor_state:
        if self.processor_state["has_fonts"] or self.processor_state["has_subtitles"]:
            for stream in self.processor_state["streams"]:
                if stream["type"] == "font":
                    ext = _extension(stream["info"])
                    if ext in ["ttf", "otf"]:
                        # Note that ffmpeg returns a nonzero exit code when dumping attachments because there's technically no output file
                        # -dump_attachment is a mechanism completely removed from the rest of the ffmpeg workflow
                        self._execute(
                            "ffmpeg -y -dump_attachment:" + str(stream["index"])
                            + " {1}_attachment_" + str(len(fonts)) + "." + ext + " -i {0}",
                            ignoreNonZero=True,
                        )
                        fonts.append(stream)
                elif stream["type"] == "subtitle" and "info" in stream:
                    extension = None
                    if stream["info"]["codec_name"] == "ssa":
                        extension = ".ass"
                        extract_fonts = True
                    elif stream["info"]["codec_name"] == "srt":
                        extension = ".srt"
                    elif stream["info"]["codec_name"] == "vtt":
                        extension = ".vtt"
                    if extension != None:
                        self._execute("ffmpeg -y -i {0} -map 0:s:0 {1}" + extension)
                        if extension == ".srt":
                            # convert to vtt
                            vtt = convert_to_vtt(os.path.join(_cfg("storage_folder"), "%s.srt" % self.f.hash))
                            with open(os.path.join(_cfg("storage_folder"), "%s.vtt" % self.f.hash), "w") as f:
                                f.write(vtt)
                            os.remove(os.path.join(_cfg("storage_folder"), "%s.srt" % self.f.hash))

    if extract_fonts:
        # Examine font files and construct some CSS to import them
        css = ""
        i = 0
        for font in fonts:
            ext = _extension(font["info"])
            if not ext in ["ttf", "otf"]:
                continue
            command = Invocation("otfinfo --info {0}")
            command(os.path.join(
                _cfg("storage_folder"),
                "%s_attachment_%s.%s" % (self.f.hash, i, _extension(font["info"])),
            ))
            command.run()
            output = command.stdout[0].split("\n")
            family = None
            subfamily = None
            for line in output:
                if line.startswith("Family:"):
                    family = line[7:].strip(" \t")
                if line.startswith("Subfamily:"):
                    subfamily = line[10:].strip(" \t")
            css += '@font-face{font-family: "%s";' % family
            css += 'src:url("/%s_attachment_%s.%s");' % (self.f.hash, i, _extension(font["info"]))
            if subfamily == "SemiBold":
                css += "font-weight: 600;"
            elif subfamily == "Bold":
                css += "font-weight: bold;"
            elif subfamily == "Italic":
                css += "font-style: italic;"
            css += "}"
            i += 1
        css_file = open(os.path.join(_cfg("storage_folder"), "%s_fonts.css" % self.f.hash), "w")
        css_file.write(css)
        css_file.close()
    # Copy images, preprocess some JS files
    for folder in copy:
        for f in list(os.walk(folder))[0][2]:
            outputpath = os.path.join(app.static_folder, os.path.basename(f))
            inputpath = os.path.join(folder, f)
            if inputpath in preprocess:
                with open(inputpath) as r:
                    # Using Jinja here is overkill
                    output = r.read()
                    output = output.replace("{{ protocol }}", _cfg("protocol"))
                    output = output.replace("{{ domain }}", _cfg("domain"))
                with open(outputpath, "w") as w:
                    w.write(output)
                    w.flush()
            else:
                copyfile(inputpath, outputpath)

@app.before_first_request
def compile_first():
    prepare()

@app.before_request
def compile_if_debug():
    if app.debug:
        prepare()

if __name__ == '__main__':
    app.run(host=_cfg("debug-host"), port=_cfgi('debug-port'), debug=True)
def is_tor():
    return _cfg("tor_domain") and get_ip() == '127.0.0.1'
def sync(self):
    self._execute(copy)

    map_string = ''
    filter_string = 'scale=trunc(in_w/2)*2:trunc(in_h/2)*2'
    if self.processor_state['has_video']:
        self._execute("ffmpeg -y -i {0} -vframes 1 -map 0:v:0 {1}.jpg")
        map_string += ' -map 0:v:0'
    if self.processor_state['has_audio']:
        map_string += ' -map 0:a:0'
    if 'interlaced' in self.processor_state:
        print("WARNING: Detected interlacing on " + self.output)
        filter_string = 'yadif,' + filter_string

    self._execute(
        "ffmpeg -y -i {0} -vcodec libx264 -acodec libfdk_aac -movflags faststart -pix_fmt yuv420p -profile:v baseline -level 3.0 -preset slower -crf 18 -vf "
        + filter_string + map_string + " {1}.mp4")

    skip_webm = False
    for s in self.processor_state['streams']:
        if 'info' in s:
            if 'video_codec' in s['info'] and s['info']['video_codec'] == 'vp8':
                skip_webm = True
    if not skip_webm:
        self._execute(
            "ffmpeg -y -i {0} -c:v libvpx -c:a libvorbis -pix_fmt yuv420p -quality good -b:v 5M -crf 5 -vf "
            + filter_string + map_string + " {1}.webm")

    # Extract extra streams if present
    fonts = []
    extract_fonts = False
    if 'has_fonts' in self.processor_state and 'has_subtitles' in self.processor_state:
        if self.processor_state['has_fonts'] or self.processor_state['has_subtitles']:
            for stream in self.processor_state['streams']:
                if stream['type'] == 'font':
                    ext = _extension(stream["info"])
                    if ext in ['ttf', 'otf']:
                        # Note that ffmpeg returns a nonzero exit code when dumping attachments because there's technically no output file
                        # -dump_attachment is a mechanism completely removed from the rest of the ffmpeg workflow
                        self._execute(
                            "ffmpeg -y -dump_attachment:" + str(stream["index"])
                            + ' {1}_attachment_' + str(len(fonts)) + '.' + ext + ' -i {0}',
                            ignoreNonZero=True)
                        fonts.append(stream)
                elif stream['type'] == 'subtitle' and 'info' in stream:
                    extension = None
                    if stream['info']['codec_name'] == 'ssa':
                        extension = '.ass'
                        extract_fonts = True
                    elif stream['info']['codec_name'] == 'srt':
                        extension = '.srt'
                    elif stream['info']['codec_name'] == 'vtt':
                        extension = '.vtt'
                    if extension != None:
                        self._execute("ffmpeg -y -i {0} -map 0:s:0 {1}" + extension)
                        if extension == '.srt':
                            # convert to vtt
                            vtt = convert_to_vtt(
                                os.path.join(_cfg("storage_folder"), '%s.srt' % self.f.hash))
                            with open(
                                    os.path.join(_cfg("storage_folder"), '%s.vtt' % self.f.hash), 'w') as f:
                                f.write(vtt)
                            os.remove(
                                os.path.join(_cfg("storage_folder"), '%s.srt' % self.f.hash))

    if extract_fonts:
        # Examine font files and construct some CSS to import them
        css = ''
        i = 0
        for font in fonts:
            ext = _extension(font['info'])
            if not ext in ['ttf', 'otf']:
                continue
            command = Invocation('otfinfo --info {0}')
            command(os.path.join(
                _cfg("storage_folder"),
                '%s_attachment_%s.%s' % (self.f.hash, i, _extension(font['info']))))
            command.run()
            output = command.stdout[0].split('\n')
            family = None
            subfamily = None
            for line in output:
                if line.startswith('Family:'):
                    family = line[7:].strip(' \t')
                if line.startswith('Subfamily:'):
                    subfamily = line[10:].strip(' \t')
            css += '@font-face{font-family: "%s";' % family
            css += 'src:url("/%s_attachment_%s.%s");' % (self.f.hash, i, _extension(font['info']))
            if subfamily == 'SemiBold':
                css += 'font-weight: 600;'
            elif subfamily == 'Bold':
                css += 'font-weight: bold;'
            elif subfamily == 'Italic':
                css += 'font-style: italic;'
            css += '}'
            i += 1
        css_file = open(os.path.join(_cfg("storage_folder"), '%s_fonts.css' % self.f.hash), 'w')
        css_file.write(css)
        css_file.close()