def _proc(self, id_asset, db):
    """Check the source file of a single asset and update its status and metadata."""
    asset = Asset(id_asset, db=db)
    fname = asset.file_path

    # Skip assets stored on storages that are not currently mounted
    if asset["id_storage"] not in self.mounted_storages:
        return

    # Source file disappeared: take the asset offline
    if not os.path.exists(fname):
        if asset["status"] in [ONLINE, RESET, CREATING]:
            logging.warning(f"Turning offline {asset} (file does not exist)")
            asset["status"] = OFFLINE
            asset.save()
        return

    try:
        fmtime = int(os.path.getmtime(fname))
        fsize = int(os.path.getsize(fname))
    except Exception:
        log_traceback(f"Unable to get file attrs of {asset}")
        return

    # Empty file: treat the asset as offline
    if fsize == 0:
        if asset["status"] != OFFLINE:
            logging.warning(f"Turning offline {asset} (empty file)")
            asset["status"] = OFFLINE
            asset.save()
        return

    # The file changed on disk, or a metadata reset was requested
    if fmtime != asset["file/mtime"] or asset["status"] == RESET:
        try:
            f = open(fname, "rb")
        except Exception:
            # The file cannot be opened yet - most likely it is still being copied
            logging.debug(f"{asset} creation in progress.")
            return
        else:
            # Determine the current size by seeking to the end of the file
            f.seek(0, 2)
            fsize = f.tell()
            f.close()

        if asset["status"] == RESET:
            asset.load_sidecar_metadata()

        # Metadata is fully refreshed only when the file size changes;
        # relying on mtime alone proved unreliable.
        if fsize == asset["file/size"] and asset["status"] != RESET:
            logging.debug(f"{asset} file mtime has been changed. Updating.")
            asset["file/mtime"] = fmtime
            asset.save(set_mtime=False, notify=False)
        else:
            logging.info(f"Updating {asset}")

            # Drop format and QC metadata so the file can be re-probed from scratch
            keys = list(asset.meta.keys())
            for key in keys:
                if meta_types[key].namespace in ("fmt", "qc"):
                    del asset.meta[key]

            asset["file/size"] = fsize
            asset["file/mtime"] = fmtime

            #########################################
            ## PROBE

            for probe in probes:
                if probe.accepts(asset):
                    logging.debug(f"Probing {asset} using {probe}")
                    asset = probe.work(asset)

            ## PROBE
            #########################################

            if asset["status"] == RESET:
                asset["status"] = ONLINE
                logging.info(f"{asset} reset completed")
            else:
                asset["status"] = CREATING
            asset.save()

    # Newly created files get a short settle period before they are turned online
    if asset["status"] == CREATING and asset["mtime"] + 15 > time.time():
        logging.debug(f"Waiting for {asset} completion assurance.")
        asset.save(set_mtime=False, notify=False)
    elif asset["status"] in (CREATING, OFFLINE):
        logging.goodnews(f"Turning online {asset}")
        asset["status"] = ONLINE
        asset.save()

        # Restart jobs which previously failed on this asset
        db = DB()
        db.query(
            """
            UPDATE nx_jobs SET
                progress=-1,
                id_service=0,
                ctime=%s,
                stime=0,
                etime=0,
                id_user=0,
                message='Restarting after source update'
            WHERE id_object=%s AND id_action > 0 AND progress IN (-2, -3)
            """,
            [time.time(), id_asset],
        )
        db.commit()
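
# A probe in the `probes` list only needs the two methods used in _proc() above:
# accepts(asset) -> bool and work(asset) -> asset. A minimal sketch of that
# contract follows; the class name and the duration logic are hypothetical and
# not part of this module:
#
# class DurationProbe:
#     def accepts(self, asset):
#         return asset["media_type"] == MediaType.FILE
#
#     def work(self, asset):
#         asset["duration"] = get_duration(asset.file_path)  # hypothetical helper
#         return asset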
def on_main(self):
    """Scan configured watchfolders and create assets for new media files."""
    db = DB()

    # Collect file paths of assets that are already online
    self.existing = []
    start_time = time.time()
    db.query("SELECT meta FROM assets WHERE media_type=1 AND status=1")
    for (meta,) in db.fetchall():
        asset = Asset(meta=meta, db=db)
        file_path = asset.file_path
        self.existing.append(file_path)
    duration = time.time() - start_time
    if duration > 5 or config.get("debug_mode", False):
        logging.debug(f"Online assets loaded in {s2time(duration)}")

    start_time = time.time()
    for wf_settings in self.settings.findall("folder"):
        id_storage = int(wf_settings.attrib["id_storage"])
        rel_wf_path = wf_settings.attrib["path"]
        quarantine_time = int(wf_settings.attrib.get("quarantine_time", "10"))
        id_folder = int(wf_settings.attrib.get("id_folder", 12))

        storage_path = storages[id_storage].local_path
        watchfolder_path = os.path.join(storage_path, rel_wf_path)

        if not os.path.exists(watchfolder_path):
            logging.warning("Skipping non-existing watchfolder", watchfolder_path)
            continue

        i = 0
        for file_object in get_files(
            watchfolder_path,
            recursive=wf_settings.attrib.get("recursive", False),
            hidden=wf_settings.attrib.get("hidden", False),
            case_sensitive_exts=wf_settings.attrib.get("case_sensitive_exts", False),
        ):
            i += 1
            if i % 100 == 0 and config.get("debug_mode", False):
                logging.debug(f"{i} files scanned")

            if not file_object.size:
                continue

            full_path = file_object.path
            if full_path in self.existing:
                continue

            now = time.time()
            asset_path = full_path.replace(storage_path, "", 1).lstrip("/")

            # Only files with a known extension are considered
            ext = os.path.splitext(asset_path)[1].lstrip(".").lower()
            if ext not in FileTypes.exts():
                continue

            # Skip files that already have an asset record
            asset = asset_by_path(id_storage, asset_path, db=db)
            if asset:
                self.existing.append(full_path)
                continue

            base_name = get_base_name(asset_path)

            # Files younger than quarantine_time seconds may still be copying
            if quarantine_time and now - file_object.mtime < quarantine_time:
                logging.debug(f"{base_name} is too young. Skipping")
                continue

            asset = Asset(db=db)
            asset["content_type"] = FileTypes.by_ext(ext)
            asset["media_type"] = MediaType.FILE
            asset["id_storage"] = id_storage
            asset["path"] = asset_path
            asset["ctime"] = now
            asset["mtime"] = now
            asset["status"] = ObjectStatus.CREATING
            asset["id_folder"] = id_folder
            asset["title"] = base_name

            asset.load_sidecar_metadata()

            # Run per-folder post-processing scripts on the new asset
            failed = False
            for post_script in wf_settings.findall("post"):
                try:
                    exec(post_script.text)
                except Exception:
                    log_traceback(f"Error executing post-script on {asset}")
                    failed = True

            if not failed:
                asset.save(set_mtime=False)

    duration = time.time() - start_time
    if duration > 60 or config.get("debug_mode", False):
        logging.debug(f"Watchfolders scanned in {s2time(duration)}")
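
# Illustrative sketch of the kind of <folder> configuration on_main() expects in
# self.settings. The attribute and element names are the ones read above
# (id_storage, path, quarantine_time, id_folder, recursive, hidden,
# case_sensitive_exts and nested <post> scripts, which run with the new `asset`
# in scope); the concrete values and the surrounding <service> wrapper are
# hypothetical, not taken from this module:
#
# <service>
#     <folder
#         id_storage="1"
#         path="import.dir"
#         id_folder="12"
#         quarantine_time="10"
#     >
#         <post>asset["notes"] = "imported from watchfolder"</post>
#     </folder>
# </service>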