def _proc(self, id_asset, db):
    """Start configured broker actions for a single asset.

    Each entry of ``self.conditions`` maps an action id to a tuple
    ``(cond_title, cond, priority)``.  When the condition matches, the
    corresponding action job is submitted via ``send_to``.  An action is
    started at most once per asset: the start is recorded in the asset
    metadata under ``broker/started/<id_action>``.
    """
    asset = Asset(id_asset, db=db)
    for id_action, (cond_title, cond, priority) in self.conditions.items():
        # Skip actions we have already started for this asset.
        if "broker/started/{}".format(id_action) in asset.meta:
            continue
        # SECURITY: `cond` is an arbitrary Python expression evaluated
        # with eval() against the local scope (it can reference `asset`).
        # This is only acceptable while conditions come from trusted,
        # admin-edited configuration — never from user input.
        if eval(cond):
            logging.info("{} matches action condition {}".format(asset, cond_title))
            res, msg = send_to(
                asset.id,
                id_action,
                settings={},
                id_user=0,
                priority=priority,
                restart_existing=False,
                db=db,
            )
            if success(res):
                logging.info(msg)
            else:
                logging.error(msg)
            # The action is marked as started even when submission failed,
            # so it is not retried automatically on the next pass.
            asset["broker/started/{}".format(id_action)] = 1
            asset.save()
def _proc(self, id_asset, db):
    """Run every registered analyzer on a single asset.

    The per-asset analysis bookkeeping lives in the "qc/analyses" meta
    field: a mapping of analyzer name to the analyzer version that last
    processed the asset (-1 means the analysis failed).  An analyzer is
    skipped when it already ran at its current (or newer) version, or
    when its failure was already recorded.
    """
    asset = Asset(id_asset, db=db)
    for analyzer in self.analyzers:
        qinfo = asset["qc/analyses"] or {}
        # "qc/analyses" may be stored as a JSON string; decode it first.
        # BUGFIX: the original tested `type(qinfo) in [str, unicode]`,
        # which raises NameError on Python 3 (`unicode` no longer exists).
        if isinstance(qinfo, str):
            qinfo = json.loads(qinfo)
        if analyzer.proc_name in qinfo and (
            qinfo[analyzer.proc_name] == -1
            or qinfo[analyzer.proc_name] >= analyzer.version
        ):
            continue
        # SECURITY: analyzer.condition is evaluated with eval(); safe only
        # while analyzer definitions come from trusted configuration.
        if eval(analyzer.condition):
            logging.info("Analyzing {} using '{}'".format(asset, analyzer.proc_name))
            a = analyzer(asset)

            # Reload the asset: it may have been changed by someone else
            # while the (potentially long) analysis was running.
            del asset
            asset = Asset(id_asset, db=db)

            # -1 records a failed run so it is not retried every pass.
            result = -1 if not a.status else analyzer.version
            qinfo = asset["qc/analyses"] or {}
            if isinstance(qinfo, str):
                qinfo = json.loads(qinfo)
            qinfo[analyzer.proc_name] = result
            asset["qc/analyses"] = qinfo

            # Copy the analyzer's (truthy) results into the asset metadata.
            for key in a.result:
                value = a.result[key]
                if value:
                    logging.debug("Set {} {} to {}".format(asset, key, value))
                    asset[key] = value
            asset.save()
            self.heartbeat()
def finalize(self):
    """Finish a conversion job: move the encoded file into place and
    create/update the target asset.

    Returns an error message string on failure, ``None`` on success
    (the original contract — callers treat a truthy return as an error).
    """
    new = None
    # Reload the source asset (it may have changed during encoding).
    asset = Asset(self.asset.id)

    if self.task.find("target").text == "new":
        id_storage = self.id_storage
        r = asset_by_path(id_storage, self.target_rel_path)
        if r:
            # An asset already exists at the target path: reuse it, but
            # drop stale technical metadata ("qc"/"fmt" namespaces).
            new = Asset(r)
            logging.info("Updating asset {!r}".format(new))
            keys = new.meta.keys()
            for key in keys:
                if key in meta_types and meta_types[key].namespace in ["qc", "fmt"]:
                    new[key] = ""
        else:
            logging.info("Creating new asset for {!r} conversion.".format(asset))
            new = Asset()
            new["media_type"] = FILE
            new["content_type"] = VIDEO
            new["version_of"] = asset.id
            new["id_storage"] = id_storage
            new["path"] = self.target_rel_path
            new["origin"] = "Video conversion"
            new["id_folder"] = asset["id_folder"]
            # Carry over descriptive metadata ("AIEB"/"m" namespaces).
            for key in asset.meta:
                if key in meta_types and meta_types[key].namespace in ["AIEB", "m"]:
                    new[key] = asset[key]
        new["status"] = CREATING
        # SECURITY: task <intra> scripts are executed as Python code.
        # Acceptable only while task templates are trusted configuration.
        for intra in self.task.findall("intra"):
            exec(intra.text)

    try:
        os.rename(self.temp_file_path, self.target_file_path)
    except OSError:
        # Narrowed from a bare `except:` — os.rename raises OSError.
        return "Unable to move output file to target destination"

    if new is not None:
        new.save()
    # SECURITY: task <post> scripts are executed as Python code (see above).
    for post in self.task.findall("post"):
        exec(post.text)
    # Save again: post-scripts may have modified the new asset.
    if new is not None:
        new.save()
    asset.save()
def _proc(self, id_asset, db):
    """Watch a single file-based asset on disk and keep its status and
    file metadata ("file/mtime", "file/size") in sync.

    Handles: missing/empty files (-> OFFLINE), changed files (-> re-probe
    and CREATING), RESET requests (reload sidecar metadata, re-probe),
    and promotion to ONLINE once the file has been stable for 15 seconds.
    """
    asset = Asset(id_asset, db=db)
    fname = asset.file_path

    # Ignore assets on storages that are not currently mounted.
    if asset["id_storage"] not in self.mounted_storages:
        return

    if not os.path.exists(fname):
        if asset["status"] in [ONLINE, RESET, CREATING]:
            logging.warning("Turning offline {} (File does not exist)".format(asset))
            asset["status"] = OFFLINE
            asset.save()
        return

    try:
        fmtime = int(os.path.getmtime(fname))
        fsize = int(os.path.getsize(fname))
    except Exception:
        # Narrowed from a bare `except:`; best-effort — just log and retry
        # on the next pass.
        log_traceback("Unable to get file attrs {}".format(asset))
        return

    if fsize == 0:
        if asset["status"] != OFFLINE:
            logging.warning("Turning offline {} (empty file)".format(asset))
            asset["status"] = OFFLINE
            asset.save()
        return

    if fmtime != asset["file/mtime"] or asset["status"] == RESET:
        try:
            f = open(fname, "rb")
        except OSError:
            # File exists but cannot be opened yet — still being written.
            logging.debug("{} creation in progress.".format(asset))
            return
        else:
            # Re-read the size from the open handle (more reliable while
            # the file may still be growing).
            f.seek(0, 2)
            fsize = f.tell()
            f.close()

        if asset["status"] == RESET:
            asset.load_sidecar_metadata()

        # Filesize must be changed to update metadata automatically.
        # It sucks, but an mtime-only condition doesn't always work.
        if fsize == asset["file/size"] and asset["status"] != RESET:
            # BUGFIX: this log string was broken across two physical lines
            # in the mangled source; rejoined into one literal.
            logging.debug("{} file mtime has been changed. Updating.".format(asset))
            asset["file/mtime"] = fmtime
            asset.save(set_mtime=False, notify=False)
        else:
            logging.info("Updating {}".format(asset))
            # Drop stale technical metadata before re-probing.
            keys = list(asset.meta.keys())
            for key in keys:
                if meta_types[key].namespace in ("fmt", "qc"):
                    del asset.meta[key]
            asset["file/size"] = fsize
            asset["file/mtime"] = fmtime

            #########################################
            ## PROBE
            for probe in probes:
                if probe.accepts(asset):
                    logging.debug("Probing {} using {}".format(asset, probe))
                    asset = probe.work(asset)
            ## PROBE
            #########################################

            if asset["status"] == RESET:
                asset["status"] = ONLINE
                logging.info("{} reset completed".format(asset))
            else:
                asset["status"] = CREATING
            asset.save()

    if asset["status"] == CREATING and asset["mtime"] + 15 > time.time():
        # Grace period: make sure the file stopped changing before we
        # declare it online.
        logging.debug("Waiting for {} completion assurance.".format(asset))
        asset.save(set_mtime=False, notify=False)
    elif asset["status"] in (CREATING, OFFLINE):
        # NOTE: logging.goodnews is a project-specific logger extension.
        logging.goodnews("Turning online {}".format(asset))
        asset["status"] = ONLINE
        asset.save()
        # Restart any previously failed jobs for this asset now that the
        # source file has been updated.
        db = DB()
        db.query(
            """UPDATE nx_jobs SET
                progress=-1,
                id_service=0,
                ctime=%s,
                stime=0,
                etime=0,
                id_user=0,
                message='Restarting after source update'
            WHERE id_object=%s AND id_action > 0 and progress IN (-2, -3)""",
            [time.time(), id_asset],
        )
        db.commit()
def on_main(self):
    """Scan all configured watchfolders and register new media files
    as CREATING assets.

    Files are skipped when they are already known (online asset or
    existing asset at the same path), have an unsupported extension, are
    empty, or are younger than the folder's quarantine time.
    """
    db = DB()
    self.existing = []

    # Preload paths of all online file assets for a fast membership test.
    start_time = time.time()
    db.query("SELECT meta FROM assets WHERE media_type=1 AND status=1")
    for (meta,) in db.fetchall():
        asset = Asset(meta=meta, db=db)
        file_path = asset.file_path
        self.existing.append(file_path)
    duration = time.time() - start_time
    if duration > 5 or config.get("debug_mode", False):
        logging.debug(f"Online assets loaded in {s2time(duration)}")

    start_time = time.time()
    for wf_settings in self.settings.findall("folder"):
        id_storage = int(wf_settings.attrib["id_storage"])
        rel_wf_path = wf_settings.attrib["path"]
        quarantine_time = int(wf_settings.attrib.get("quarantine_time", "10"))
        id_folder = int(wf_settings.attrib.get("id_folder", 12))
        storage_path = storages[id_storage].local_path
        watchfolder_path = os.path.join(storage_path, rel_wf_path)

        if not os.path.exists(watchfolder_path):
            # NOTE: multi-argument call relies on the project's custom
            # logging wrapper (cf. logging.goodnews elsewhere).
            logging.warning("Skipping non-existing watchfolder", watchfolder_path)
            continue

        i = 0
        for file_object in get_files(
            watchfolder_path,
            recursive=wf_settings.attrib.get("recursive", False),
            hidden=wf_settings.attrib.get("hidden", False),
            case_sensitive_exts=wf_settings.get("case_sensitive_exts", False),
        ):
            i += 1
            if i % 100 == 0 and config.get("debug_mode", False):
                logging.debug("{} files scanned".format(i))

            if not file_object.size:
                continue

            full_path = file_object.path
            if full_path in self.existing:
                continue

            now = time.time()
            asset_path = full_path.replace(storage_path, "", 1).lstrip("/")
            ext = os.path.splitext(asset_path)[1].lstrip(".").lower()
            if ext not in FileTypes.exts():
                continue

            # Already registered at this path? Cache it and move on.
            asset = asset_by_path(id_storage, asset_path, db=db)
            if asset:
                self.existing.append(full_path)
                continue

            base_name = get_base_name(asset_path)

            # Quarantine: skip files modified too recently (likely still
            # being copied in).
            if quarantine_time and now - file_object.mtime < quarantine_time:
                # BUGFIX: this f-string was broken across two physical
                # lines in the mangled source; rejoined into one literal.
                logging.debug(f"{base_name} is too young. Skipping")
                continue

            asset = Asset(db=db)
            asset["content_type"] = FileTypes.by_ext(ext)
            asset["media_type"] = MediaType.FILE
            asset["id_storage"] = id_storage
            asset["path"] = asset_path
            asset["ctime"] = now
            asset["mtime"] = now
            asset["status"] = ObjectStatus.CREATING
            asset["id_folder"] = id_folder
            asset["title"] = base_name

            asset.load_sidecar_metadata()

            # SECURITY: watchfolder <post> scripts are executed as Python
            # code; acceptable only while settings are trusted config.
            failed = False
            for post_script in wf_settings.findall("post"):
                try:
                    exec(post_script.text)
                except Exception:
                    log_traceback(f"Error executing post-script on {asset}")
                    failed = True

            if not failed:
                asset.save(set_mtime=False)

    duration = time.time() - start_time
    if duration > 60 or config.get("debug_mode", False):
        logging.debug(f"Watchfolders scanned in {s2time(duration)}")