def cg_download(target_path, method, timeout=10, verbose=True, **kwargs):
    start_time = time.monotonic()
    target_dir = os.path.dirname(os.path.abspath(target_path))
    cg_server = config.get("cg_server", "https://cg.immstudios.org")
    cg_site = config.get("cg_site", config["site_name"])

    if not os.path.isdir(target_dir):
        try:
            os.makedirs(target_dir)
        except Exception:
            logging.error(f"Unable to create output directory {target_dir}")
            return False

    url = f"{cg_server}/render/{cg_site}/{method}"
    try:
        response = requests.get(url, params=kwargs, timeout=timeout)
    except Exception:
        log_traceback("Unable to download CG item")
        return False

    if response.status_code != 200:
        logging.error(f"CG Download failed with code {response.status_code}")
        return False

    try:
        temp_path = target_path + ".creating"
        with open(temp_path, "wb") as f:
            f.write(response.content)
        os.rename(temp_path, target_path)
    except Exception:
        log_traceback(f"Unable to write CG item to {target_path}")
        return False

    if verbose:
        elapsed = time.monotonic() - start_time
        logging.info(f"CG {method} downloaded in {elapsed:.02f}s")
    return True
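# A minimal usage sketch, assuming the CG server exposes a "lower3" render
# method (hypothetical, not a documented endpoint): any extra keyword
# arguments are forwarded verbatim as query-string parameters of the request.
def render_lower_third_example():
    ok = cg_download(
        "/tmp/lower3.png",   # target file; parent directory is created if missing
        "lower3",            # hypothetical render method name
        timeout=20,
        text="Hello world",  # becomes ?text=Hello+world on the request
    )
    if ok:
        logging.info("Lower third rendered")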
class SiteContext(object):
    context = {
        "name": config["site_name"],
        "meta_types": meta_types,
        "language": config.get("language", "en"),
        "webtools": webtools,
        "css": config.get("hub_css", SITE_CSS),
        "js": config.get("hub_js", SITE_JS),
    }

    def __getitem__(self, key):
        if key in self.context:
            return self.context[key]
        return config[key]
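# Lookup sketch: keys known to the context dict resolve locally, anything
# else falls through to the site configuration.
#   SiteContext()["language"]   -> from the context dict
#   SiteContext()["site_name"]  -> config["site_name"] (fallback)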
def get_plugin_path(group=False):
    try:
        plugin_path = os.path.join(
            storages[int(config.get("plugin_storage", 1))].local_path,
            config.get("plugin_root", ".nx/scripts/v5"),
        )
    except Exception:
        log_traceback()
        return ""
    if group:
        plugin_path = os.path.join(plugin_path, group)
    if not os.path.exists(plugin_path):
        return ""
    return plugin_path
def listen_rabbit(self):
    try:
        import pika
    except ModuleNotFoundError:
        critical_error("'pika' module is not installed")
    host = config.get("rabbitmq_host", "rabbitmq")
    conparams = pika.ConnectionParameters(host=host)
    while True:
        try:
            connection = pika.BlockingConnection(conparams)
            channel = connection.channel()
            result = channel.queue_declare(
                queue=config["site_name"],
                arguments={"x-message-ttl": 1000},
            )
            queue_name = result.method.queue
            logging.info("Listening on", queue_name)
            channel.basic_consume(
                queue=queue_name,
                on_message_callback=lambda c, m, p, b: self.handle_data(b),
                auto_ack=True,
            )
            channel.start_consuming()
        except pika.exceptions.AMQPConnectionError:
            logging.error("RabbitMQ connection error", handlers=[])
        except Exception:
            log_traceback()
        time.sleep(2)
def proxy_path(self):
    if not self.id:
        return ""
    if not hasattr(self, "_proxy_path"):
        tpl = config.get("proxy_path", ".nx/proxy/{id1000:04d}/{id}.mp4")
        id1000 = int(self.id / 1000)
        self._proxy_path = tpl.format(id1000=id1000, **self.meta)
    return self._proxy_path
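# A worked expansion of the default template (a sketch; the real call also
# passes the full asset metadata as keyword arguments): asset id 12345 falls
# into thousands-bucket 12, so the path becomes ".nx/proxy/0012/12345.mp4".
def _proxy_path_example():
    tpl = ".nx/proxy/{id1000:04d}/{id}.mp4"
    asset_id = 12345
    return tpl.format(id1000=asset_id // 1000, id=asset_id)

assert _proxy_path_example() == ".nx/proxy/0012/12345.mp4"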
def send_mail(to, subject, body, **kwargs):
    if isinstance(to, str):
        to = [to]
    default_reply_address = config.get(
        "mail_from", f"Nebula <{config['site_name']}@nebulabroadcast.com>"
    )
    reply_address = kwargs.get("from", default_reply_address)
    smtp_host = config.get("smtp_host", "localhost")
    smtp_user = config.get("smtp_user", False)
    smtp_pass = config.get("smtp_pass", False)

    if isinstance(body, MIMEMultipart):
        msg = body
    else:
        msg = MIMEText(body)
    msg["Subject"] = subject
    msg["From"] = reply_address
    msg["To"] = ",".join(to)

    if config.get("smtp_ssl", False):
        # Implicit TLS uses port 465 by default
        smtp_port = config.get("smtp_port", 465)
        s = smtplib.SMTP_SSL(smtp_host, port=smtp_port)
    else:
        # Plain SMTP uses port 25 by default
        smtp_port = config.get("smtp_port", 25)
        s = smtplib.SMTP(smtp_host, port=smtp_port)
    if smtp_user and smtp_pass:
        s.login(smtp_user, smtp_pass)
    # `to` is already a list at this point, so it is passed as-is
    s.sendmail(reply_address, to, msg.as_string())
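# A usage sketch (addresses are hypothetical): `to` may be a single string or
# a list. Because "from" is a Python keyword, a custom sender has to be
# supplied via dict expansion rather than a keyword argument.
def _send_alert_example():
    send_mail(
        "ops@example.com",
        "Storage offline",
        "Storage 1 is not mounted. Please investigate.",
        **{"from": "Nebula <noreply@example.com>"},
    )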
def connect(self):
    if config.get("cache_mode", "memcached") == "redis":
        pass
    else:
        if not has_pylibmc:
            critical_error("'pylibmc' module is not installed")
        self.cstring = f"{self.host}:{self.port}"
        self.pool = False
        self.conn = pylibmc.Client([self.cstring])
def listen():
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    addr = config.get("seismic_addr", "224.168.1.1")
    port = int(config.get("seismic_port", 42005))

    try:
        firstoctet = int(addr.split(".")[0])
        is_multicast = firstoctet >= 224
    except ValueError:
        is_multicast = False

    if is_multicast:
        logging.info(f"Starting multicast listener {addr}:{port}")
        sock.bind(("0.0.0.0", port))
        sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 255)
        sock.setsockopt(
            socket.IPPROTO_IP,
            socket.IP_ADD_MEMBERSHIP,
            socket.inet_aton(addr) + socket.inet_aton("0.0.0.0"),
        )
    else:
        logging.info(f"Starting unicast listener {addr}:{port}")
        sock.bind((addr, port))

    sock.settimeout(1)
    while True:
        try:
            data, _ = sock.recvfrom(4092)
        except socket.error:
            continue
        try:
            message = SeismicMessage(json.loads(data.decode()))
        except Exception:
            continue
        if message.site_name != config["site_name"]:
            continue
        if message.method == "log":
            log_message(message)
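# A standalone sketch of the multicast test used above: the listener treats a
# first octet >= 224 as multicast (the multicast block proper is 224.0.0.0/4)
# and only then joins the group instead of binding the address directly.
def _is_multicast(addr: str) -> bool:
    try:
        return int(addr.split(".")[0]) >= 224
    except ValueError:
        return False

assert _is_multicast("224.168.1.1")
assert not _is_multicast("192.168.1.10")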
def load(self, key):
    if config.get("mc_thread_safe", False):
        return self.threaded_load(key)
    key = str(self.site + "-" + key)
    try:
        result = self.conn.get(key)
    except pylibmc.ConnectionError:
        self.connect()
        result = False
    except ValueError:
        result = False
    return result
def listen_udp(self):
    self.sock = socket.socket(
        socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP
    )
    self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    addr = config.get("seismic_addr", "224.168.1.1")
    port = config.get("seismic_port", 42005)

    try:
        firstoctet = int(addr.split(".")[0])
        is_multicast = firstoctet >= 224
    except ValueError:
        is_multicast = False

    if is_multicast:
        logging.info(f"Starting multicast listener {addr}:{port}")
        self.sock.bind(("0.0.0.0", int(port)))
        self.sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 255)
        self.sock.setsockopt(
            socket.IPPROTO_IP,
            socket.IP_ADD_MEMBERSHIP,
            socket.inet_aton(addr) + socket.inet_aton("0.0.0.0"),
        )
    else:
        logging.info(f"Starting unicast listener {addr}:{port}")
        self.sock.bind((addr, int(port)))

    self.sock.settimeout(1)
    while True:
        try:
            data, _ = self.sock.recvfrom(4092)
        except socket.error:
            continue
        self.handle_data(data)
def connect(self):
    host = config.get("rabbitmq_host", "rabbitmq")
    conparams = pika.ConnectionParameters(host=host)
    try:
        self.connection = pika.BlockingConnection(conparams)
    except Exception:
        self.connection = None
        logging.error(f"Unable to connect RabbitMQ broker at {host}", handlers=[])
        return False
    self.channel = self.connection.channel()
    self.channel.queue_declare(
        queue=config["site_name"], arguments={"x-message-ttl": 1000}
    )
    return True
def delete(self, key):
    if config.get("mc_thread_safe", False):
        return self.threaded_delete(key)
    key = self.site + "-" + key
    for i in range(MAX_RETRIES):
        try:
            self.conn.delete(key)
            break
        except Exception:
            log_traceback(f"Cache delete failed ({key})")
            time.sleep(0.3)
            self.connect()
    else:
        # for/else: reached only when the loop exhausts MAX_RETRIES
        # attempts without a successful delete (no break)
        critical_error("Memcache delete failed. This should never happen.")
    return True
def start_service(self, id_service, title, db=False):
    # With a list argv, subprocess passes arguments verbatim,
    # so the title must not be wrapped in literal quotes.
    proc_cmd = [
        os.path.join(config["nebula_root"], "manage.py"),
        "run",
        str(id_service),
        title,
    ]
    if config.get("daemon_mode"):
        proc_cmd.append("--daemon")
    logging.info(f"Starting service ID {id_service} ({title})")
    self.services[id_service] = [
        subprocess.Popen(proc_cmd, cwd=config["nebula_root"]),
        title,
    ]
def proxy_storage(self):
    return config.get("proxy_storage", 1)
def on_main(self):
    db = DB()
    self.existing = []
    start_time = time.time()
    db.query("SELECT meta FROM assets WHERE media_type=1 AND status=1")
    for (meta,) in db.fetchall():
        asset = Asset(meta=meta, db=db)
        file_path = asset.file_path
        self.existing.append(file_path)
    duration = time.time() - start_time
    if duration > 5 or config.get("debug_mode", False):
        logging.debug(f"Online assets loaded in {s2time(duration)}")

    start_time = time.time()
    for wf_settings in self.settings.findall("folder"):
        id_storage = int(wf_settings.attrib["id_storage"])
        rel_wf_path = wf_settings.attrib["path"]
        quarantine_time = int(wf_settings.attrib.get("quarantine_time", "10"))
        id_folder = int(wf_settings.attrib.get("id_folder", 12))

        storage_path = storages[id_storage].local_path
        watchfolder_path = os.path.join(storage_path, rel_wf_path)

        if not os.path.exists(watchfolder_path):
            logging.warning("Skipping non-existing watchfolder", watchfolder_path)
            continue

        i = 0
        for file_object in get_files(
            watchfolder_path,
            recursive=wf_settings.attrib.get("recursive", False),
            hidden=wf_settings.attrib.get("hidden", False),
            case_sensitive_exts=wf_settings.attrib.get("case_sensitive_exts", False),
        ):
            i += 1
            if i % 100 == 0 and config.get("debug_mode", False):
                logging.debug(f"{i} files scanned")

            if not file_object.size:
                continue

            full_path = file_object.path
            if full_path in self.existing:
                continue

            now = time.time()
            asset_path = full_path.replace(storage_path, "", 1).lstrip("/")
            ext = os.path.splitext(asset_path)[1].lstrip(".").lower()
            if ext not in FileTypes.exts():
                continue

            asset = asset_by_path(id_storage, asset_path, db=db)
            if asset:
                self.existing.append(full_path)
                continue

            base_name = get_base_name(asset_path)

            if quarantine_time and now - file_object.mtime < quarantine_time:
                logging.debug(f"{base_name} is too young. Skipping")
                continue

            asset = Asset(db=db)
            asset["content_type"] = FileTypes.by_ext(ext)
            asset["media_type"] = MediaType.FILE
            asset["id_storage"] = id_storage
            asset["path"] = asset_path
            asset["ctime"] = now
            asset["mtime"] = now
            asset["status"] = ObjectStatus.CREATING
            asset["id_folder"] = id_folder
            asset["title"] = base_name

            asset.load_sidecar_metadata()

            failed = False
            for post_script in wf_settings.findall("post"):
                try:
                    exec(post_script.text)
                except Exception:
                    log_traceback(f"Error executing post-script on {asset}")
                    failed = True

            if not failed:
                asset.save(set_mtime=False)

    duration = time.time() - start_time
    if duration > 60 or config.get("debug_mode", False):
        logging.debug(f"Watchfolders scanned in {s2time(duration)}")
def build(self, *args, **kwargs): self["name"] = "passreset" self["title"] = "Password reset" self["mode"] = "email-entry" # # REQUEST EMAIL # if "email" in kwargs: email = kwargs["email"].strip() if not re.match(EMAIL_REGEXP, email): self.context.message("Invalid e-mail address specified", "error") return db = DB() db.query( """ SELECT meta FROM users where LOWER(meta->>'email') = LOWER(%s) """, [email], ) try: user = User(meta=db.fetchall()[0][0], db=db) except IndexError: self.context.message("No such user", "error") return if time.time() - user.meta.get("pass_reset_time", 0) < 3600: self.context.message( "Only one password reset request per hour is allowed", "error" ) return token = get_guid() user["pass_reset_time"] = time.time() user["pass_reset_code"] = token mailvars = { "name": user["full_name"] or user["login"], "site_name": config["site_name"], "hub_url": config.get( "hub_url", f"https://{config['site_name']}.nbla.cloud" ), "token": token, } body = MAIL_BODY.format(**mailvars) try: send_mail(email, "Nebula password reset", body) except Exception: log_traceback() self.context.message( """Unable to send password reset email. Please contact your system administrator""", "error", ) return user.save() self["mode"] = "mail-sent" return # # GOT TOKEN # elif "token" in kwargs: token = kwargs["token"].strip() self["mode"] = False self["token"] = token if not re.match(GUID_REGEXP, token): self.context.message("Invalid token specified", "error") return db = DB() db.query( "SELECT meta FROM users WHERE meta->>'pass_reset_code' = %s", [token] ) try: user = User(meta=db.fetchall()[0][0], db=db) except IndexError: self.context.message("No such token", "error") return if user["pass_reset_time"] < time.time() - 3600: self.context.message("Token expired.", "error") self["mode"] = "email-entry" return pass1 = kwargs.get("pass1", False) pass2 = kwargs.get("pass2", False) if pass1 and pass2: if pass1 != pass2: self["mode"] = "pass-entry" self.context.message("Passwords don't match", "error") return if len(pass1) < 8: self["mode"] = "pass-entry" self.context.message( "The password is weak. Must be at least 8 characters", "error" ) return user.set_password(pass1) del user.meta["pass_reset_code"] del user.meta["pass_reset_time"] user.save() self["mode"] = "finished" return self["mode"] = "pass-entry" return
def build(self, *args, **kwargs):
    # Query params
    query = kwargs.get("q", "")
    order_key = kwargs.get("o", "id")
    order_trend = kwargs.get("ot", "desc")
    if order_trend != "asc":
        order_trend = "desc"

    try:
        id_view = int(kwargs["v"])
        view = config["views"][id_view]
    except (KeyError, ValueError):
        id_view = min(config["views"])
        view = config["views"][id_view]

    try:
        current_page = int(kwargs["p"])
    except (KeyError, ValueError, TypeError):
        current_page = 1

    # Build view
    assets = api_get(
        user=self["user"],
        id_view=id_view,
        fulltext=query or False,
        count=False,
        order=f"{order_key} {order_trend}",
        limit=RECORDS_PER_PAGE + 1,
        offset=(current_page - 1) * RECORDS_PER_PAGE,
    )

    if len(assets["data"]) > RECORDS_PER_PAGE:
        page_count = current_page + 1
    elif len(assets["data"]) == 0:
        page_count = max(1, current_page - 1)
    else:
        page_count = current_page
    if current_page > page_count:
        current_page = page_count

    def get_params(**override):
        data = copy.copy(kwargs)
        for key in override:
            if not override[key] and key in data:
                del data[key]
            else:
                data[key] = override[key]
        return "&".join([f"{k}={data[k]}" for k in data])

    self["show_jobs"] = config.get("hub_browser_jobs_column", True)
    self["name"] = "assets"
    self["title"] = config["views"][id_view]["title"]
    self["js"] = ["/static/js/assets.js"]
    self["id_view"] = id_view
    self["query"] = query
    self["current_page"] = current_page
    self["page_count"] = page_count
    self["columns"] = view["columns"]
    self["assets"] = [Asset(meta=meta) for meta in assets["data"]]
    self["order_key"] = order_key
    self["order_trend"] = order_trend
    self["get_params"] = get_params
    self["view_list"] = sorted(
        config["views"].keys(),
        key=lambda x: config["views"][x]["position"],
    )
def api_get(**kwargs):
    # db = kwargs.get("db", DB())
    user = kwargs.get("user", anonymous)
    object_type = kwargs.get("object_type", "asset")
    result_type = kwargs.get("result", False)
    result_format = kwargs.get("result_format", False)
    result_lang = kwargs.get("language", config.get("language", "en"))
    as_folder = kwargs.get("as_folder", None)

    if not user:
        return NebulaResponse(401)

    start_time = time.time()
    ObjectClass = {
        "asset": Asset,
        "item": Item,
        "bin": Bin,
        "event": Event,
        "user": User,
    }[object_type]

    result = {"message": "Incomplete query", "response": 500, "data": [], "count": 0}

    rformat = None
    if result_format:
        rformat = {"result": result_format, "language": result_lang}

    if isinstance(result_type, list):
        # Result is an array of rows, one value per requested key.
        # A "key@{json}" suffix overrides the rendering of that key.
        result_format = []
        for i, key in enumerate(result_type):
            form = key.split("@")
            if len(form) == 2:
                rf = json.loads(form[1] or "{}")
                if rformat:
                    rformat.update(rf)
                    result_format.append(rformat)
                else:
                    result_format.append(rf)
            else:
                if rformat:
                    result_format.append(rformat)
                else:
                    result_format.append(None)
            result_type[i] = form[0]

        for response, obj in get_objects(ObjectClass, **kwargs):
            result["count"] |= response["count"]
            row = []
            for key, form in zip(result_type, result_format):
                if form is None:
                    row.append(obj[key])
                else:
                    form = form or {}
                    row.append(obj.show(key, **form))
            result["data"].append(row)

    elif result_type == "form":
        for response, obj in get_objects(ObjectClass, **kwargs):
            result["count"] |= response["count"]
            if as_folder is not None:
                obj["id_folder"] = as_folder
            row = {
                "id": obj.id,
                "id_folder": obj["id_folder"],
                "qc/state": obj["qc/state"],
                "proxy_url": obj.proxy_url,
                "title": obj["title"],
                "duration": obj["duration"],
                "mark_in": obj["mark_in"],
                "mark_out": obj["mark_out"],
                "form": {},
            }
            for key, _ in config["folders"][obj["id_folder"]]["meta_set"]:
                row["form"][key] = obj.show(key, result="full")
            result["data"].append(row)

    elif result_type == "ids":
        # Result is an array of matching object IDs
        for response, obj in get_objects(ObjectClass, **kwargs):
            result["count"] |= response["count"]
            result["data"].append(obj.id)

    else:
        # Result is an array of asset metadata sets
        for response, obj in get_objects(ObjectClass, **kwargs):
            result["count"] |= response["count"]
            result["data"].append(obj.meta)

    result["count"] = min(result["count"], 10000)

    #
    # response
    #

    result["response"] = 200
    result["message"] = "{} {}s returned in {:.02f}s".format(
        len(result["data"]), object_type, time.time() - start_time
    )
    return result
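# A call sketch (field names are illustrative, not a documented contract):
# a list-valued `result` yields one row per object with the requested keys,
# and a "key@{json}" suffix overrides how that key is rendered, here
# requesting the full metadata representation of "duration".
def _browse_example(user):
    return api_get(
        user=user,
        id_view=1,
        fulltext="news",
        result=["id", "title", 'duration@{"result": "full"}'],
        limit=50,
    )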
def on_init(self):
    self.site_name = config["site_name"]
    self.queue = []
    self.last_message = 0

    #
    # Message relays
    #

    self.relays = []
    for relay in self.settings.findall("relay"):
        if relay is None or not relay.text:
            continue
        url = relay.text.rstrip("/")
        logging.info(f"Adding message relay: {url}")
        url += "/msg_publish?id=" + config["site_name"]
        self.relays.append(url)
    self.session = requests.Session()

    #
    # Logging
    #

    # Loki
    self.loki = None
    for loki in self.settings.findall("loki"):
        port = int(loki.attrib.get("port", 3100))
        self.loki = LokiLogger(loki.text, port)
        break

    # Log to file
    log_dir = self.settings.find("log_dir")
    if log_dir is None or not log_dir.text:
        self.log_dir = None
    else:
        self.log_dir = log_dir.text
        if not os.path.exists(self.log_dir):
            try:
                os.makedirs(self.log_dir)
            except Exception:
                log_traceback()
                self.log_dir = None
        if self.log_dir and not os.path.isdir(self.log_dir):
            logging.error(
                f"{self.log_dir} is not a directory. Logs will not be saved"
            )
            self.log_dir = None

    log_ttl = self.settings.find("log_ttl")
    if log_ttl is None or not log_ttl.text:
        self.log_ttl = None
    else:
        try:
            self.log_ttl = int(log_ttl.text)
        except ValueError:
            log_traceback()
            self.log_ttl = None

    #
    # Listener
    #

    if config.get("messaging") == "rabbitmq":
        listener = self.listen_rabbit
    else:
        listener = self.listen_udp

    listen_thread = threading.Thread(target=listener, daemon=True)
    listen_thread.start()

    process_thread = threading.Thread(target=self.process, daemon=True)
    process_thread.start()
def site_context_helper():
    return SiteContext()


def page_context_helper():
    return {}


def user_context_helper(meta):
    return User(meta=meta or {})


static_dir = config.get(
    "hub_static_dir",
    os.path.join(config["nebula_root"], "hub", "static"),
)

templates_dir = config.get(
    "hub_templates_dir",
    os.path.join(config["nebula_root"], "hub", "templates"),
)

default_sessions_dir = os.path.join("/tmp", config["site_name"] + "-sessions")

hub_config = {
    "host": config.get("hub_host", "0.0.0.0"),
    "port": config.get("hub_port", 8080),
    "static_dir": static_dir,
    "templates_dir": templates_dir,
    "login_helper": login_helper,
def configure(self):
    self.site = config["site_name"]
    self.host = config.get("cache_host", "localhost")
    self.port = config.get("cache_port", 11211)
    self.connect()
def __init__(self):
    self.addr = config.get("seismic_addr", "224.168.1.1")
    self.port = int(config.get("seismic_port", 42005))
    self.sock = socket.socket(
        socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP
    )
    self.sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 255)
def main(self):
    storages_conf = config.get("storages", "all")
    db = DB()
    db.query("SELECT id, settings FROM storages")
    for id_storage, storage_settings in db.fetchall():
        if type(storages_conf) == list and id_storage not in storages_conf:
            continue
        storage = Storage(id_storage, **storage_settings)
        if storage:
            storage_string = f"{config['site_name']}:{storage.id}"
            storage_ident_path = os.path.join(storage.local_path, ".nebula_root")
            if not (
                os.path.exists(storage_ident_path)
                and storage_string
                in [line.strip() for line in open(storage_ident_path).readlines()]
            ):
                try:
                    with open(storage_ident_path, "a") as f:
                        f.write(storage_string + "\n")
                except Exception:
                    if self.first_run:
                        logging.warning(f"{storage} is mounted, but read only")
                else:
                    if self.first_run:
                        logging.info(f"{storage} is mounted and root is writable")
            continue

        s, i, lcheck = storage_status.get(id_storage, [True, 2, 0])
        if not s and time.time() - lcheck < i:
            continue
        if s:
            logging.info(f"{storage} is not mounted. Mounting...")

        if not os.path.exists(storage.local_path):
            try:
                os.mkdir(storage.local_path)
            except Exception:
                if s:
                    logging.error(f"Unable to create mountpoint for {storage}")
                storage_status[id_storage] = [False, 240, time.time()]
                continue

        self.mount(storage)
        if ismount(storage.local_path):
            logging.goodnews(f"{storage} mounted successfully")
            if id_storage not in storage_status:
                storage_status[id_storage] = [True, 2, 0]
            storage_status[id_storage][0] = True
            storage_status[id_storage][1] = 2
        else:
            if s:
                logging.error(f"{storage} mounting failed")
            # Guard against a KeyError on the very first failed mount
            if id_storage not in storage_status:
                storage_status[id_storage] = [True, 2, 0]
            storage_status[id_storage][0] = False
            check_interval = storage_status[id_storage][1]
            storage_status[id_storage][1] = min(240, check_interval * 2)
            storage_status[id_storage][2] = time.time()
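# Backoff sketch: storage_status maps id_storage to [is_mounted,
# check_interval, last_check]. Each failed mount doubles the interval
# (2, 4, 8, ... capped at 240 seconds), so a dead storage is retried at
# most every four minutes while mounted storages are re-checked on every pass.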
def configure(self): if config.get("messaging") == "rabbitmq": self.sender = RabbitSender() else: self.sender = UDPSender()