def __run(self):
    """Rip the disc image at ``self.__filename``, then clean up.

    Runs under the pool semaphore so only a limited number of rips are
    active at once.  Aborts early (leaving the uploaded file in place)
    whenever ``self.__thread_run`` has been cleared.
    """
    with self._pool_sema:
        self.__active = True
        self._get_udfInfo(
            File.location(self.__filename,
                          CONFIG["webui"]["uploadlocation"].value))
        if not self.__thread_run:
            return
        if self._disc["type"] == "audiocd":
            # Audio CD ripping is not implemented yet.
            # self._ripper = AudioCDLinux(self.get_device(), self._thread.getName(),
            #                             self._set_drive_status, self._thread_run)
            return
        # Video disc (dvd / bluray): register it, then rip with MakeMKV.
        self._add_video_disc_to_database(self.__filename)
        self._ripper = MakeMKV()
        if not self.__thread_run:
            return
        self._ripper.call(self._db_id)
        self._ripper = None
        # Rip finished: the ISO is no longer needed — clear the DB link
        # and remove the uploaded image.  (The original re-checked for
        # "audiocd" here, but that path already returned above.)
        RipperVideoInfo.do_update({
            "iso_file": ""
        }).where(RipperVideoInfo.id == self._db_id).execute()
        File.rm(
            File.location(self.__filename,
                          CONFIG["webui"]["uploadlocation"].value))
def call(self, db_id: int) -> List[int]:
    """Run the MakeMKV backup for the disc stored under *db_id*.

    The RipperVideoInfo row for *db_id* MUST already exist.  Returns the
    list of converter job ids created (one per ripped track), or an
    empty list when the row lookup yields nothing.

    :param db_id: primary key of the RipperVideoInfo row to rip.
    :returns: ids returned by ``__pass_single_to_converter`` for each track.
    """
    ids = []
    # NOTE(review): do_select().where(...) looks like a query object, which
    # would never be None — confirm do_select's semantics against the model.
    info = RipperVideoInfo.do_select().where(RipperVideoInfo.id == db_id)
    if info is None:
        return ids
    if info.iso_file:
        # Rip from the stored ISO rather than a physical drive.
        self._in_file = File.location(
            info.iso_file,
            CONFIG["ripper"]["locations"]["iso"].value,
        )
    disc_rip_info: Optional[DiscType] = None
    if info.rip_data is None:
        # No cached rip data: ask the disc API by uuid/label, and if that
        # also fails, probe the disc itself and persist what we learn.
        disc_rip_info = DiscAPI.find_disctype(info.uuid, info.label)
        if disc_rip_info is None:
            info.data_disc = self.makemkv_info_from_disc()
            info.save()
    else:
        disc_rip_info = make_disc_type(json.loads(info.rip_data))
    # Per-disc working directory, e.g. <ripping>/<db_id>/
    temp_dir = File.location(f"{CONFIG['ripper']['locations']['ripping'].value}{str(db_id)}/")
    if isinstance(disc_rip_info, DiscType):
        # Known disc layout: rip only the configured track types,
        # one MakeMKV backup call per wanted track.
        self._track_data = True
        for idx, track in enumerate(disc_rip_info.tracks):
            if isinstance(track, VideoTrackType):
                if track.track_type in CONFIG["ripper"]["videoripping"]["torip"].value:
                    self._makemkv_backup_from_disc(temp_dir, idx)
                    ids.append(
                        self.__pass_single_to_converter(
                            info.id,
                            idx,
                            # MakeMKV names tracks with zero-padded indexes.
                            temp_dir + str(idx).zfill(2) + ".mkv",
                            disc_rip_info.track_title(idx),
                            track.json(),
                        )
                    )
    elif disc_rip_info is None:
        # Unknown layout: back up the whole disc, then hand every
        # produced .mkv to the converter with a generic title.
        self._makemkv_backup_from_disc(temp_dir)
        for idx, path in enumerate(Path(temp_dir).rglob("*.mkv")):
            ids.append(
                self.__pass_single_to_converter(
                    info.id,
                    idx,
                    ("/" + "/".join(path.parts[1:])),
                    f"{info.label}: Track {idx}",
                    "{}",
                )
            )
    return ids
def file_path(self):
    """Return the absolute path to this library file on disk."""
    library_root = File.location(
        CONFIG["libraries"][self.library]["location"].value)
    return library_root + self.folder + self.filename
def __start_isos(cls):
    """Initialise the ISO ripper pool and queue any ISOs already present
    in the upload/ISO folder."""
    cls.__iso_pool_sema_count = CONFIG["ripper"]["iso"]["threadcount"].value
    cls.__iso_pool_sema = BoundedSemaphore(value=cls.__iso_pool_sema_count)
    # Pick up ISO images that arrived before this run started.
    iso_path = File.location(CONFIG["ripper"]["locations"]["iso"].value)
    for iso in Path(iso_path).rglob("*.iso"):
        # Rebuild an absolute-style path, then strip the base to get the
        # name relative to the ISO folder.
        relative_name = ("/" + "/".join(iso.parts[1:])).replace(iso_path, "")
        cls.iso_add(relative_name)
def start(cls):
    """Set up the user table and create the initial admin account.

    When no admin user exists, a password is generated, written to a
    file so the operator can read it, and stored (encrypted) with the
    new admin account.  Needs the SQL layer to be running.
    """
    User.table_setup()
    admins = User.do_select().where(User.is_admin == True)  # noqa E712
    if admins.count() != 0:
        return
    password = cls.generate_password()
    # NOTE(review): "adminpasssword" (triple "s") looks like a typo, but
    # other code may read this exact filename — confirm before renaming.
    with open(File.location("adminpasssword"), "w") as handle:
        handle.write(password)
    User.insert(username="******",
                password=cls.__password_encryption(password),
                is_admin=True).execute()
def _get_udfInfo(self, in_file: str):
    """Populate ``self._disc`` with metadata read from a UDF image.

    Runs ``udfinfo`` against *in_file*; on success ``self._disc`` holds
    the label, uuid and disc type ("bluray" for UDF revision 2.50,
    otherwise "dvd").  When ``udfinfo`` fails, the image is assumed to
    be an audio CD.

    :param in_file: image path, relative to the configured base location.
    """
    process = Popen([which("udfinfo"), File.location(in_file)], stdout=PIPE)
    # udfinfo emits "key=value" lines; drop the trailing empty split.
    output = process.communicate()[0].decode("utf-8").split("\n")[:-1]
    if process.returncode != 0:
        # Not a UDF image — treat as an audio CD and stop here.
        # (The original fell through and unconditionally overwrote this
        # dict, and used the key "disc_type" where the rest of the code
        # reads self._disc["type"].)
        self._disc = {"type": "audiocd"}
        return
    # Was named "list" in the original, shadowing the builtin.
    fields = {}
    for item in output:
        key, _, value = item.partition("=")
        fields[key] = value
    self._disc = {
        "label": fields["label"],
        "uuid": fields["uuid"],
        "type": "bluray" if fields["udfrev"] == "2.50" else "dvd",
    }
def __init__(
    self,
    folder: str,
    callable: Callable[[], None],
    recursive: bool = True,
    sleep_timer: int = 300,
):
    """Start a background thread that watches *folder* for changes.

    :param folder: directory (relative to the file-store base) to watch.
    :param callable: zero-argument callback invoked when the folder's
        contents change.  (Shadows the ``callable`` builtin, but the
        name is caller-visible so it is kept for compatibility.)
    :param recursive: whether subdirectories are watched too.
    :param sleep_timer: seconds to sleep between scans.
    """
    self.__folder = File.location(folder)
    self.__callable = callable
    # False marks "no count taken yet" (difference since last scan).
    self.__folder_count = False
    self.__recursive = recursive
    self.__sleep_timer = sleep_timer
    # Thread.setName() is deprecated since Python 3.10 — pass name=
    # to the constructor instead.
    self._thread = threading.Thread(
        target=self.run,
        name=f"Folder Watcher: {folder}",
        args=(),
    )
    self.__thread_run = True
    self._thread.start()
def scan_folder_base(cls, library: str):
    """Scan the library's folder and register any new files in the DB.

    Files whose extension is not in the library's configured extension
    list are skipped, as are files already present in LibraryFile.

    :param library: key of the library in CONFIG["libraries"].
    """
    root = File.location(CONFIG["libraries"][library]["location"].value)
    for path in Path(root).rglob("*"):
        ext = path.parts[-1].split(".")[-1]
        if ext not in CONFIG["libraries"][library]["extensions"].value:
            continue
        # BUG FIX: the original called path.joinpath().replace(name, ""),
        # but Path.replace() renames files on disk and takes a single
        # target argument (two args -> TypeError).  The intent was the
        # path string with the trailing filename stripped.
        folder = str(path)[:-len(path.name)]
        existing = LibraryFile.get_or_none(
            LibraryFile.folder == folder,
            LibraryFile.filename == path.name
        )
        if existing:
            continue
        new_file = LibraryFile()
        # Library tag is the first letter of the library key, uppercased.
        new_file.library = library[0].upper()
        new_file.folder = folder
        new_file.filename = path.name
        new_file.checksum = new_file.get_file_checksum()
        new_file.last_check = int(time.time())
        new_file.save()
def index(self, key: str = None):
    """Handle a non-multipart upload streamed in the request body.

    :param key: one-time upload key previously registered in PostUpload.
    :raises cherrypy.HTTPError: 403 when the key is missing or unknown.
    :returns: "OK" when the complete file arrived, "FAILED" otherwise.
    """
    if key is None:
        raise cherrypy.HTTPError(status=403)
    info = PostUpload.get_or_none(PostUpload.key == key)
    if info is None:
        raise cherrypy.HTTPError(status=403)
    filename = info.filename
    # Save under the filename registered with the upload key.  (The
    # original wrote every upload to the literal name "(unknown)", so
    # concurrent uploads clobbered each other.)
    upload_name = File.location(
        f"{CONFIG['webui']['uploadlocation'].value}{filename}")
    with open(upload_name, "wb") as file:
        shutil.copyfileobj(cherrypy.request.body, file)
    # Only accept the upload when the full declared size arrived.
    if os.path.getsize(upload_name) == info.filesize:
        system = info.system
        info.delete_instance()
        self.__call_next_system(filename, system)
        return "OK"
    return "FAILED"
def start(cls):
    """Configure and launch the CherryPy web server (idempotent)."""
    if cls.__running:
        return
    PostUpload.table_setup()

    def error_page(status, message, traceback, version) -> str:
        """Minimal plain-text error page."""
        return f"Error {status}"

    # Same handler for all client-error pages; "error_page.500" is
    # deliberately left at the CherryPy default.
    error_pages = {f"error_page.{code}": error_page
                   for code in (400, 401, 403, 404, 405)}
    cherrypy.config.update({
        **error_pages,
        "server.socket_host": CONFIG["webui"]["socket"].value,
        "server.socket_port": CONFIG["webui"]["port"].value,
        "server.threadPool": 10,
        "server.environment": "production",
        "server.max_request_body_size": 0,
        "server.socket_timeout": 60,
        "log.screen": False,
        "log.access_file": "",
        "log.error_file": "",
    })

    static_base = os.getcwd() + "/www/static"
    conf_www = {
        "/img": {
            "tools.staticdir.on": True,
            "tools.staticdir.dir": static_base + "/img/",
        },
        "/js": {
            "tools.staticdir.on": True,
            "tools.staticdir.dir": static_base + "/js/",
        },
        "/style.css": {
            "tools.staticfile.on": True,
            "tools.staticfile.filename": static_base + "/style.css",
        },
        "/upload": {
            "response.timeout": 3600
        },
    }
    conf_api = {
        "/": {
            "request.dispatch": cherrypy.dispatch.MethodDispatcher(),
            "tools.response_headers.on": True,
            "tools.response_headers.headers": [("Content-Type", "text/plain")],
            "tools.json_out.on": True,
        }
    }

    baseurl = CONFIG["webui"]["baseurl"].value
    cherrypy.tree.mount(Root(), baseurl, conf_www)
    cherrypy.tree.mount(API(), baseurl + "api/", conf_api)
    File.mkdir(File.location(CONFIG["webui"]["uploadlocation"].value))
    cherrypy.engine.start()
    cls.__running = True