async def setup_db():
    """
    Creates a database pool for PostgreSQL interaction.

    Reads connection settings from the [PostgreSQL] config section,
    stores the resulting pool on ``app.db_pool``, and logs an error if
    the ``users`` table is empty so the operator knows to create one.
    """
    # NOTE(review): os.environ.get returns a *string* when set, so any
    # non-empty value (even "false" or "0") is truthy here — confirm
    # deployments rely on the variable being unset/empty when not in Docker.
    if os.environ.get("IS_DOCKER", False):
        # In Docker, apply database migrations automatically on startup.
        await git.migrate()

    host = get_config_value("PostgreSQL", "host")
    port = get_config_value("PostgreSQL", "port")
    user = get_config_value("PostgreSQL", "user")
    password = get_config_value("PostgreSQL", "password")
    database = get_config_value("PostgreSQL", "database")

    loop = asyncio.get_event_loop()

    app.db_pool = await asyncpg.create_pool(
        host=host,
        port=port,
        user=user,
        password=password,
        database=database,
        loop=loop
    )

    async with app.db_pool.acquire() as con:
        users = await con.fetchval(""" SELECT COUNT(*) FROM users; """)
        if not users:
            # Not fatal: the app still starts, but nobody can log in.
            logger.error(
                "No existing users! Run `tsundoku --create-user` to create a new user."
            )
async def insert_user(username: str, password: str):
    """
    Inserts a new user row into the database.

    The plaintext password is hashed with the module-level hasher before
    being stored.
    """
    connection = await asyncpg.connect(
        host=get_config_value("PostgreSQL", "host"),
        port=get_config_value("PostgreSQL", "port"),
        user=get_config_value("PostgreSQL", "user"),
        password=get_config_value("PostgreSQL", "password"),
        database=get_config_value("PostgreSQL", "database"))

    hashed = hasher.hash(password)

    await connection.execute(
        """ INSERT INTO users (username, password_hash) VALUES ($1, $2); """,
        username,
        hashed)

    await connection.close()
def run(with_ui: bool = True):
    """
    Starts the web application, optionally registering the UX blueprint.
    """
    bind_host = get_config_value("Tsundoku", "host")
    bind_port = get_config_value("Tsundoku", "port")

    if with_ui:
        app.register_blueprint(ux_blueprint)

    auth.init_app(app)
    app.run(host=bind_host, port=bind_port, use_reloader=True)
def build_api_url(self) -> str:
    """
    Builds the URL to make requests to the Deluge WebAPI.

    Returns
    -------
    str
        The API's URL.
    """
    deluge_host = get_config_value("Deluge", "host")
    deluge_port = get_config_value("Deluge", "port")

    return "http://{}:{}/json".format(deluge_host, deluge_port)
def run() -> None:
    """
    Selects an event loop, attaches the logging queue, and starts the app.
    """
    host = get_config_value("Tsundoku", "host")
    port = get_config_value("Tsundoku", "port")

    loop: Union[asyncio.ProactorEventLoop, AbstractEventLoop]
    try:
        # ProactorEventLoop only exists on Windows; elsewhere the attribute
        # access raises AttributeError and we fall back below.
        loop = asyncio.ProactorEventLoop()
        asyncio.set_event_loop(loop)
    except AttributeError:
        loop = asyncio.get_event_loop()

    # NOTE(review): asyncio.Queue's `loop` argument was removed in
    # Python 3.10 — confirm the supported interpreter range for this code.
    app.logging_queue = Queue(loop=loop, maxsize=50)

    auth.init_app(app)
    app.run(host=host, port=port, use_reloader=True, loop=loop)
async def run(args: str) -> Tuple[str, Optional[bytes]]:
    """
    Executes a git command asynchronously through the shell.

    Parameters
    ----------
    args: str
        Arguments appended to the configured git executable path.

    Returns
    -------
    Tuple[str, Optional[bytes]]
        The decoded, stripped stdout of the command (empty string when
        the process could not be started), and the stderr bytes — always
        None here, since stderr is redirected into stdout.
    """
    git_loc = get_config_value("Tsundoku", "git_path")
    cmd = f"{git_loc} {args}"

    stdout: Optional[bytes] = None
    stderr: Optional[bytes] = None
    # Bug fix: output_text was previously only assigned inside the try
    # block, so an OSError (e.g. git binary missing) made the final
    # return raise NameError instead of returning gracefully.
    output_text = ""

    try:
        logger.debug(f"Git: Trying to execute `{cmd}` with shell")
        proc = await asyncio.subprocess.create_subprocess_shell(
            cmd,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.STDOUT,
        )
        stdout, stderr = await proc.communicate()
        output_text = stdout.strip().decode()
        logger.debug(f"Git: output: {output_text}")
    except OSError:
        logger.debug(f"Git: command failed: {cmd}")
    else:
        # Shell "command not found" messages end up in stdout because of
        # the stderr=STDOUT redirection above.
        if "not found" in output_text or "not recognized as an internal or external command" in output_text:
            logger.debug(f"Git: Unable to find executable with command {cmd}")
        elif "fatal: " in output_text or stderr:
            logger.error("Git: Returned bad info. Bad installation?")

    return output_text, stderr
def run(args: str):
    """
    Executes a git command synchronously through the shell.

    Returns a tuple of (decoded stdout, stderr); both are None when the
    process could not be started at all.
    """
    git_loc = get_config_value("Tsundoku", "git_path")
    cmd = f"{git_loc} {args}"

    output = None
    err = None

    try:
        logger.debug(f"Git: Trying to execute '{cmd}' with shell")
        proc = subprocess.Popen(
            cmd,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            shell=True,
        )
        output, err = proc.communicate()
        output = output.strip().decode()
        logger.debug(f"Git: output: {output}")
    except OSError:
        logger.debug(f"Git: command failed: {cmd}")
    else:
        missing_markers = (
            "not found",
            "not recognized as an internal or external command",
        )
        if any(marker in output for marker in missing_markers):
            logger.debug(f"Git: Unable to find executable with command {cmd}")
        elif "fatal: " in output or err:
            logger.error("Git: Returned bad info. Bad installation?")

    return output, err
async def update_check_needed() -> None:
    """
    Compares the time between now and the last update check. If it
    has been more than 1 day, check for an update.
    """
    if not get_config_value("Tsundoku", "do_update_checks"):
        return

    days_between_checks = get_config_value("Tsundoku", "check_every_n_days")
    due_at = app.last_update_check + datetime.timedelta(hours=24 * days_between_checks)

    if due_at < datetime.datetime.utcnow():
        await git.check_for_updates()
        app.last_update_check = datetime.datetime.utcnow()
def update_config(self) -> None:
    """
    Updates the configuration for the task.

    Re-reads ``polling_interval`` and ``fuzzy_match_cutoff`` from the
    [Tsundoku] config section, falling back to the documented defaults
    when a value cannot be parsed as an integer.
    """
    def _parse_int(value, name: str, default: int) -> int:
        # Config values may come back as strings; keep the default on
        # unparseable input rather than crashing the task.
        try:
            return int(value)
        except ValueError:
            logger.error(f"`{value}` is an invalid {name}, using default.")
            return default

    self.interval = _parse_int(
        get_config_value("Tsundoku", "polling_interval", 900),
        "polling interval", 900)
    self.fuzzy_match_cutoff = _parse_int(
        get_config_value("Tsundoku", "fuzzy_match_cutoff", 90),
        "fuzzy match cutoff", 90)
async def setup_db():
    """
    Creates a database pool for PostgreSQL interaction.
    """
    keys = ("host", "port", "user", "password", "database")
    conn_kwargs = {key: get_config_value("PostgreSQL", key) for key in keys}

    app.db_pool = await asyncpg.create_pool(
        loop=asyncio.get_event_loop(), **conn_kwargs)
def __init__(self, app_context: AppContext):
    """
    Stores the app context, grabs the running loop, and validates the
    configured polling interval.
    """
    self.app = app_context.app
    self.loop = asyncio.get_running_loop()

    # Tracks which RSS feed parser is currently being processed.
    self.current_parser = None

    raw_interval = get_config_value("Tsundoku", "polling_interval")
    try:
        self.interval = int(raw_interval)
    except ValueError:
        raise InvalidPollerInterval(f"'{raw_interval}' is an invalid polling interval.")
def __init__(self, session: aiohttp.ClientSession) -> None:
    """
    Instantiates the torrent client backend named in the configuration.
    """
    def _cfg(key):
        return get_config_value("TorrentClient", key)

    client = _cfg("client")
    host = _cfg("host")
    port = _cfg("port")
    secure = _cfg("secure")
    password = _cfg("password")

    self._client: Union[DelugeClient, qBittorrentClient]
    if client == "deluge":
        self._client = DelugeClient(
            session, host=host, port=port, secure=secure, auth=password)
    elif client == "qbittorrent":
        credentials = {"username": _cfg("username"), "password": password}
        self._client = qBittorrentClient(
            session, auth=credentials, host=host, port=port, secure=secure)
    else:
        # NOTE: self._client is left unset on this path; later attribute
        # access will raise AttributeError.
        logger.error("Invalid TorrentClient in Configuration")

    self.session = session
async def migrate():
    """
    Applies all pending yoyo migrations to the configured PostgreSQL
    database, creating the database first if it does not exist yet.
    """
    host = get_config_value("PostgreSQL", "host")
    port = get_config_value("PostgreSQL", "port")
    user = get_config_value("PostgreSQL", "user")
    db_password = get_config_value("PostgreSQL", "password")
    database = get_config_value("PostgreSQL", "database")

    try:
        # Probe connection: succeeds only if the target database exists.
        con = await asyncpg.connect(host=host, user=user, password=db_password,
                                    port=port, database=database)
    except asyncpg.InvalidCatalogNameError:
        # Database missing — connect to the always-present template1 and
        # create it, then retry the original connection.
        sys_con = await asyncpg.connect(host=host, user=user, password=db_password,
                                        port=port, database="template1")
        await sys_con.execute(f""" CREATE DATABASE "{database}" OWNER "{user}"; """)
        await sys_con.close()

        con = await asyncpg.connect(host=host, user=user, password=db_password,
                                    port=port, database=database)

    # The probe connection is only needed to verify/create the database;
    # yoyo manages its own connection via the backend URL below.
    await con.close()

    backend = get_backend(
        f"postgres://{user}:{db_password}@{host}:{port}/{database}")
    migrations = read_migrations("migrations")

    logger.info("Applying database migrations...")
    with backend.lock():
        backend.apply_migrations(backend.to_apply(migrations))
    logger.info("Database migrations applied.")
def update_config(self) -> None:
    """
    Updates the configuration for the task.

    Re-reads the complete-check interval, keeping the default of 15
    when the configured value is not a valid integer.
    """
    raw_value = get_config_value("Tsundoku", "complete_check_interval", 15)
    try:
        self.complete_check = int(raw_value)
    except ValueError:
        logger.error(
            f"`{raw_value}` is an invalid complete check interval, using default."
        )
        self.complete_check = 15
async def ensure_authorization(self):
    """
    Checks the current Deluge WebAPI session, logging in again with the
    configured password when the session has expired.

    Uses the aiohttp ClientSession directly (rather than the wrapped
    request helper) to avoid recursing back into this method.
    """
    headers = {
        "Accept": "application/json",
        "Content-Type": "application/json",
    }

    check_payload = {
        "id": self._request_counter,
        "method": "auth.check_session",
        "params": [],
    }
    check_response = await self.session.post(
        self.url,
        json=check_payload,
        headers=headers
    )
    session_info = await check_response.json(content_type=None)
    self._request_counter += 1

    result = session_info.get("result")
    if not result:
        password = get_config_value("Deluge", "password")
        login_payload = {
            "id": self._request_counter,
            "method": "auth.login",
            "params": [password],
        }
        login_response = await self.session.post(
            self.url,
            json=login_payload,
            headers=headers
        )
        login_info = await login_response.json(content_type=None)
        self._request_counter += 1

        error = login_info.get("error")
        if error:
            raise DelugeAuthorizationError(error["message"])

    return result
def get_injector(resources: List[str]) -> Any:
    """
    Builds a FluentLocalization instance for the given resource names,
    preferring the configured locale and falling back to English.
    """
    try:
        locale = get_config_value("Tsundoku", "locale")
    except KeyError:
        locale = "en"

    # "{locale}" is a literal placeholder that the Fluent loader expands.
    loader = FluentResourceLoader("l10n/{locale}")
    resource_files = [f"{r}.ftl" for r in resources]

    fluent = FluentLocalization([locale, "en"], resource_files, loader)
    # Convenience alias so templates can call fluent._(...).
    fluent._ = fluent.format_value

    return fluent
def _load_parsers():
    """
    Load all of the custom RSS parsers into the app.

    Raises ParserNotFound / ParserFailed / ParserMissingSetup /
    ParserMissingRequiredFunction when a configured parser module is
    missing or malformed.
    """
    app.rss_parsers = []

    # Every parser object must expose these attributes/functions.
    required_attrs = ("name", "url", "version", "get_show_name", "get_episode_number")

    for parser in get_config_value("Tsundoku", "parsers"):
        # Locate the parser module without importing it yet.
        spec = importlib.util.find_spec(parser)
        if spec is None:
            logger.error(f"Parser '{parser}' Not Found")
            raise exceptions.ParserNotFound(parser)

        lib = importlib.util.module_from_spec(spec)

        try:
            spec.loader.exec_module(lib)
        except Exception as e:
            logger.error(f"Parser '{parser}' Failed")
            raise exceptions.ParserFailed(parser, e) from e

        try:
            setup = getattr(lib, "setup")
        except AttributeError:
            logger.error(f"Parser '{parser}' Missing Setup Function")
            raise exceptions.ParserMissingSetup(parser)

        try:
            # Hand each parser a fresh app context at setup time.
            new_context = app.app_context()
            parser_object = setup(new_context.app)
            for func in required_attrs:
                if not hasattr(parser_object, func):
                    logger.error(
                        f"Parser '{parser}' missing attr/function '{func}'")
                    # NOTE(review): this raise is itself caught by the
                    # `except Exception` below and re-wrapped as
                    # ParserFailed — confirm that is intentional.
                    raise exceptions.ParserMissingRequiredFunction(
                        f"{parser}: missing attr/function '{func}'")
            app.rss_parsers.append(parser_object)
        except Exception as e:
            logger.error(f"Parser '{parser}' Failed: {e}")
            raise exceptions.ParserFailed(parser, e) from e

        logger.info(
            "Loaded Parser {0.name} v{0.version}".format(parser_object))
def setup_logging(app: Any) -> None:
    """
    Configures logging for the application: a stream handler, a file
    handler (tsundoku.log), and a websocket/socket handler for the UI.
    """
    formatter = logging.Formatter(
        "%(asctime)s [%(levelname)s] %(name)s: %(message)s")

    dictConfig({
        "version": 1,
        # Keep any loggers created before this call alive.
        "disable_existing_loggers": False,
        "formatters": {
            "default": {
                "format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s"
            }
        },
        "handlers": {
            "stream": {
                "class": "logging.StreamHandler",
                "formatter": "default"
            },
            "file": {
                "filename": "tsundoku.log",
                "class": "logging.FileHandler",
                "formatter": "default",
                "encoding": "utf-8"
            }
        },
        "loggers": {
            "tsundoku": {
                "handlers": ["stream", "file"],
                # Log level comes from config, defaulting to INFO.
                "level": get_config_value("Tsundoku", "log_level", default="info").upper(),
                "propagate": True
            }
        }
    })

    # Extra handler that forwards records to the app (e.g. for a web UI).
    handler = SocketHandler(app)
    handler.setFormatter(formatter)

    logger.addHandler(handler)
    logger.debug("Logging successfully configured")
async def load_parsers():
    """
    Load all of the custom RSS parsers into the app.

    Parser module names from config are resolved under the ``parsers``
    package. Raises ParserNotFound / ParserFailed / ParserMissingSetup /
    ParserMissingRequiredFunction for missing or malformed parsers.
    """
    parsers = [f"parsers.{p}" for p in get_config_value("Tsundoku", "parsers")]
    app.rss_parsers = []

    # Every parser object must provide these two functions.
    required_functions = ["get_show_name", "get_episode_number"]

    for parser in parsers:
        # Locate the parser module without importing it yet.
        spec = importlib.util.find_spec(parser)
        if spec is None:
            raise exceptions.ParserNotFound(parser)

        lib = importlib.util.module_from_spec(spec)

        try:
            spec.loader.exec_module(lib)
        except Exception as e:
            raise exceptions.ParserFailed(parser, e) from e

        try:
            setup = getattr(lib, "setup")
        except AttributeError:
            raise exceptions.ParserMissingSetup(parser)

        try:
            # Hand each parser a fresh app context at setup time.
            new_context = app.app_context()
            parser_object = setup(new_context.app)
            for func in required_functions:
                if not hasattr(parser_object, func):
                    # NOTE(review): caught by the `except Exception` below
                    # and re-wrapped as ParserFailed — confirm intentional.
                    raise exceptions.ParserMissingRequiredFunction(
                        f"{parser}: missing {func}")
            app.rss_parsers.append(parser_object)
        except Exception as e:
            raise exceptions.ParserFailed(parser, e) from e
async def insert_locale() -> dict:
    """Expose the user's configured locale to Jinja2 templates."""
    return {"LOCALE": get_config_value("Tsundoku", "locale", default="en")}
async def backport_psql() -> None:
    """
    One-time backport: copies data out of an existing PostgreSQL database
    into the SQLite store, after applying the PostgreSQL-era migrations.

    A no-op when asyncpg is unavailable or the backport has already run
    (detected by the presence of yoyo migration rows in SQLite).
    """
    if not HAS_ASYNCPG:
        return

    # `acquire()` here yields the SQLite connection — presumably the
    # app's own helper; verify against its definition.
    async with acquire() as con:
        try:
            await con.execute(""" SELECT * FROM _yoyo_migration; """)
            rows = await con.fetchall()
        except OperationalError:
            # Table missing: the backport has never run.
            rows = []

    if rows:
        # Migration bookkeeping already present — backport done.
        return

    host = get_config_value("PostgreSQL", "host")
    port = get_config_value("PostgreSQL", "port")
    user = get_config_value("PostgreSQL", "user")
    db_password = get_config_value("PostgreSQL", "password")
    database = get_config_value("PostgreSQL", "database")

    try:
        con = await asyncpg.connect(host=host, user=user, password=db_password,
                                    port=port, database=database)
    except asyncpg.InvalidCatalogNameError:
        # Database missing — create it via the always-present template1.
        sys_con = await asyncpg.connect(host=host, user=user, password=db_password,
                                        port=port, database="template1")
        await sys_con.execute(f""" CREATE DATABASE "{database}" OWNER "{user}"; """)
        await sys_con.close()

        con = await asyncpg.connect(host=host, user=user, password=db_password,
                                    port=port, database=database)

    backend = get_backend(
        f"postgres://{user}:{db_password}@{host}:{port}/{database}")
    migrations = read_migrations("migrations")

    # Items [0:14] appear to be the PostgreSQL-era migrations and item 14
    # the first SQLite migration — TODO confirm against the migrations dir.
    first_sqlite = migrations.items[14:][0]
    migrations.items = migrations.items[:14]

    # Apply the PostgreSQL migrations so all tables below exist.
    with backend.lock():
        backend.apply_migrations(backend.to_apply(migrations))

    migrations.items = [first_sqlite]

    # `fp` is presumably the module-level SQLite file path — verify.
    backend = get_backend(f"sqlite:///{fp}")
    # Apply the first SQLite migration to create the destination schema.
    with backend.lock():
        backend.apply_migrations(backend.to_apply(migrations))

    # Dump every table we intend to carry over.
    users = await con.fetch(""" SELECT id, username, password_hash, created_at, api_key::TEXT FROM users; """)
    shows = await con.fetch(""" SELECT id, title, desired_format, desired_folder, season, episode_offset, created_at FROM shows; """)
    show_entry = await con.fetch(""" SELECT id, show_id, episode, current_state, torrent_hash, file_path, last_update FROM show_entry; """)
    kitsu_info = await con.fetch(""" SELECT show_id, kitsu_id, cached_poster_url, show_status, slug, last_updated FROM kitsu_info; """)
    webhook_base = await con.fetch(""" SELECT id, name, base_service, base_url, content_fmt FROM webhook_base; """)
    webhook = await con.fetch(""" SELECT show_id, base FROM webhook; """)
    webhook_trigger = await con.fetch(""" SELECT show_id, base, trigger FROM webhook_trigger; """)

    await con.close()

    # Re-insert everything into SQLite using named-parameter batches.
    async with acquire() as sqlite:
        await sqlite.executemany(
            """ INSERT INTO users VALUES (:id, :username, :password_hash, :created_at, :api_key); """,
            [dict(user) for user in users])
        await sqlite.executemany(
            """ INSERT INTO shows VALUES (:id, :title, :desired_format, :desired_folder, :season, :episode_offset, :created_at); """,
            [dict(show) for show in shows])
        await sqlite.executemany(
            """ INSERT INTO show_entry VALUES (:id, :show_id, :episode, :current_state, :torrent_hash, :file_path, :last_update); """,
            [dict(entry) for entry in show_entry])
        await sqlite.executemany(
            """ INSERT INTO kitsu_info VALUES (:show_id, :kitsu_id, :cached_poster_url, :show_status, :slug, :last_updated); """,
            [dict(info) for info in kitsu_info])
        await sqlite.executemany(
            """ INSERT INTO webhook_base VALUES (:id, :name, :base_service, :base_url, :content_fmt); """,
            [dict(wh_base) for wh_base in webhook_base])
        await sqlite.executemany(
            """ INSERT INTO webhook VALUES (:show_id, :base); """,
            [dict(wh) for wh in webhook])
        await sqlite.executemany(
            """ INSERT INTO webhook_trigger VALUES (:show_id, :base, :trigger); """,
            [dict(trigger) for trigger in webhook_trigger])
and starts it. """ async def bg_task(): app.downloader = Downloader(app.app_context()) await app.downloader.start() asyncio.create_task(bg_task()) @app.route("/") async def index(): return "placeholder" @app.after_serving async def cleanup(): """ Closes the database pool and the aiohttp ClientSession on script closure. """ await app.db_pool.close() await app.session.close() host = get_config_value("Tsundoku", "host") port = get_config_value("Tsundoku", "port") def run(): app.run(host=host, port=port)