def _add_list_entries(self, list_name, list_json): """ Helper method to add all list entries into a medialist (watching/paused/ptw) Params: list_name: the name that the list appears to be from Anilist ("Watching") list_json: anilist's raw api response (json format) {'data':'MediaListCollection'} Returns: A list with populated Anilist Media entries (a list of dicts) """ try: entries = list() media_lists = list_json['data']['MediaListCollection']['lists'] for media_list in media_lists: if list_name.lower() == media_list['name'].lower(): for entry in media_list['entries']: entries.append(entry['media']) return entries except: LoggingUtils.warning( "Kishi was unable to process list entries for {}".format( list_name), color=LoggingUtils.YELLOW) raise Exception()
def _kitsu_basic_search(self, title):
    """
    This is a quick Kitsu search implementation from the Hitsu 2A module.

    Params:
        title - the title of the show (in provided request) to search for

    Returns: Kitsu's JSON response

    Raises: Exception if Kitsu is unreachable or its response isn't valid JSON.
    """
    title_lower = title.lower()
    request_url = requests.utils.requote_uri(KITSU_API_URL + title_lower)
    LoggingUtils.debug("Created Kitsu url - {}".format(request_url))
    try:
        kitsu_res = requests.get(request_url)
    except requests.exceptions.RequestException as e:
        # Narrowed from a bare except: only network-level failures belong here.
        # The original also re-logged JSON parse failures with this message,
        # which was misleading when the request itself had succeeded.
        LoggingUtils.error("There was an error when attempting to contact Kitsu", color=LoggingUtils.RED)
        raise Exception() from e
    try:
        return kitsu_res.json()
    except ValueError as e:
        # requests raises ValueError (JSONDecodeError) on malformed bodies
        LoggingUtils.warning("Kitsu response did not properly parse into JSON", color=LoggingUtils.YELLOW)
        raise Exception() from e
def _load_audio_info(cls, pi, info):
    """
    Method to load proper Japanese audio stream data into ProbeInfo
    Preferred order of audio tracks: FLAC >> MP3 >> AAC. All else is no good
    TODO: Handle Commentary streams
    """
    # If there's only one audio stream, load that - regardless of what it is
    # If there are multiple tracks, load in order of preference, but if no tags - don't load any
    streams = info['streams']
    audio_streams = [s for s in streams if s['codec_type'].lower() == "audio"]

    if len(audio_streams) == 1:
        # There is only one stream, pull its data
        stream = audio_streams[0]
        pi._audio_stream_index = int(stream['index'])
        pi._audio_codec_name = str(stream['codec_name']).lower()
        pi._audio_codec_long_name = stream['codec_long_name']
        if 'tags' in stream and 'language' in stream['tags']:
            pi._audio_tags_language = str(stream['tags']['language']).lower()
    else:
        for stream in audio_streams:
            # Only tagged Japanese streams participate in the preference pick
            if 'tags' not in stream or 'language' not in stream['tags']:
                continue
            lang = str(stream['tags']['language']).lower()
            if lang != "jpn" and lang != "jp":
                continue
            codec = stream['codec_name']
            if codec == "flac":
                # FLAC always wins
                cls._set_audio_info(pi, stream)
            elif codec == "mp3":
                # MP3 only loses to FLAC
                if pi._audio_codec_name != "flac":
                    cls._set_audio_info(pi, stream)
            elif codec == "aac":
                # BUGFIX: AAC must not replace an already-selected MP3.
                # The original grouped mp3/aac into one branch guarded only by
                # "!= flac", so a later AAC track overrode an earlier MP3,
                # violating the documented FLAC >> MP3 >> AAC preference.
                if pi._audio_codec_name not in ("flac", "mp3"):
                    cls._set_audio_info(pi, stream)
            else:
                # Unknown codec: only used if nothing preferred was found yet
                if pi._audio_codec_name not in ("flac", "mp3", "aac"):
                    cls._set_audio_info(pi, stream)

        # All streams iterated but none were Japanese - this means the streams aren't tagged properly.
        # In this case, do not set anything - This will be a signal that the audio streams aren't reliable
        # However, for now, we should just set the first stream anyways and log a warning
        if not pi.audio_codec_name:
            LoggingUtils.warning("Unable to determine which audio stream to index, using first available...", color=LoggingUtils.RED)
            if audio_streams:
                first = audio_streams[0]
                pi._audio_stream_index = int(first['index'])
                pi._audio_codec_name = str(first['codec_name']).lower()
                pi._audio_codec_long_name = first['codec_long_name']
                if 'tags' in first and 'language' in first['tags']:
                    pi._audio_tags_language = str(first['tags']['language']).lower()
        else:
            LoggingUtils.debug("Audio stream index was set in multiselect, proceeding...")
def _kishi_list(self, user):
    """
    Helper method to get all of a user's anime list.

    Params:
        user: String, username of Anilist user

    Returns:
        A tuple of three lists.
        The first list is all the Watching
        The second list is all the PTW
        The third list is all the Paused

    Throws an exception if anything goes wrong. This should be caught by any method using this.
    """
    watching = []
    paused = []
    ptw = []
    # Anilist API is much nicer to play with.
    try:
        # Make the request to Anilist, and pass in the userName as the user query
        anilist_res = requests.post(
            self._ANILIST_API_URL,
            json={
                'query': self._ANILIST_USER,
                'variables': {'userName': user}
            })
        if anilist_res.status_code != 200:
            LoggingUtils.debug(
                "Anilist returned a bad status code when attempting to get {}'s lists"
                .format(user))
            raise Exception()
        try:
            anilist_res_json = anilist_res.json()
        except ValueError:
            # requests raises ValueError (JSONDecodeError) on malformed bodies
            LoggingUtils.debug(
                "Anilist returned a response that was not parseable into JSON")
            raise Exception()
        watching = self._add_list_entries("Watching", anilist_res_json)
        paused = self._add_list_entries("Paused", anilist_res_json)
        ptw = self._add_list_entries("Planning", anilist_res_json)
    except Exception as e:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt propagate;
        # chaining keeps the root cause visible in the traceback.
        LoggingUtils.warning(
            "Kishi was unable to properly contact Anilist",
            color=LoggingUtils.YELLOW)
        raise Exception() from e
    return (watching, paused, ptw)
def destroy_temp_folder(cls) -> None:
    """Best-effort removal of the temp folder at cls.TEMP_PATH, then reset the path."""
    if not cls.TEMP_PATH:
        LoggingUtils.debug("No temp folder exists, doing nothing", color=LoggingUtils.GREEN)
        return
    try:
        shutil.rmtree(cls.TEMP_PATH)
        LoggingUtils.debug("Cleared the temp folder path at {}".format(cls.TEMP_PATH), color=LoggingUtils.GREEN)
        cls.TEMP_PATH = ""
    except OSError:
        # Narrowed from a bare except: shutil.rmtree failures surface as OSError.
        # Best-effort cleanup - log and keep TEMP_PATH so a retry stays possible.
        LoggingUtils.warning("Was unable to clear the temp folder path at {}".format(cls.TEMP_PATH), color=LoggingUtils.RED)
def is_user_watching_id(self, user, show_id):
    """
    Determines whether or not an Anilist user is watching a show
    Checks by show ID

    Params:
        user: username to look up
        show_id: id of the show to look up

    Returns: a boolean - True if watching, False if not
    """
    try:
        show_id = int(show_id)  # Get the int equivalent value of the ID
    except (ValueError, TypeError):
        # Narrowed from a bare except: only conversion failures are expected.
        LoggingUtils.warning(
            "Kishi ID search requires an input that can be converted to an int. Returning FALSE",
            color=LoggingUtils.YELLOW)
        return False
    try:
        watching, paused, ptw = self._kishi_list(user)
        # Search each list in turn; the list name is only used for logging.
        for list_name, shows in (("watching", watching),
                                 ("paused", paused),
                                 ("planning", ptw)):
            for show in shows:
                if show_id == show['id']:
                    LoggingUtils.debug("Found show ID {} in {}".format(
                        show_id, list_name))
                    return True
        LoggingUtils.debug("Didn't find a match for {}".format(show_id))
        return False
    except Exception:
        # If any errors are encountered, return True (default assumption)
        LoggingUtils.warning(
            "An error was encountered while contacting Anilist. Defaulting to TRUE",
            color=LoggingUtils.YELLOW)
        return True
def destroy_temp_file(cls) -> None:
    """Best-effort removal of the rclone temp file at cls.TEMP_PATH, then reset the path."""
    if not cls.TEMP_PATH:
        # BUGFIX: log message typo corrected ("tempf ile" -> "temp file")
        LoggingUtils.debug("No rclone temp file exists, doing nothing", color=LoggingUtils.GREEN)
        return
    try:
        os.remove(cls.TEMP_PATH)
        LoggingUtils.debug("Cleared the rclone temp file at " + cls.TEMP_PATH, color=LoggingUtils.GREEN)
        cls.TEMP_PATH = ""
    except OSError:
        # Narrowed from a bare except: os.remove failures surface as OSError.
        LoggingUtils.warning("Unable to clear the rclone temp file", color=LoggingUtils.RED)
def send(json_dict: str, urls: List[Dict[str, str]]) -> bool:
    """
    Sends notifications. Returns bool if all successful
    Assumes the passed in string is already in JSON dict form.

    Params:
        json_dict: the payload to POST (already JSON-shaped)
        urls: list of {'url': ..., 'auth': ...} targets

    Returns: True only if every target accepted the request (2xx success codes).
    """
    # BUGFIX: the original always returned True regardless of outcome,
    # contradicting the documented "Returns bool if all successful".
    all_successful = True
    for url in urls:
        LoggingUtils.info("Sending request to url {}".format(url['url']))
        headers = dict()
        headers['Content-Type'] = "application/json"
        if 'auth' in url and url['auth']:
            # Set both spellings - some receivers match the header case-sensitively
            headers['Authorization'] = url['auth']
            headers['authorization'] = url['auth']
        try:
            res = requests.post(url['url'],
                                json=json_dict,
                                headers=headers,
                                timeout=5)
            if res.status_code == 200:
                LoggingUtils.info(
                    "Successfully sent with return of 200 OK")
            elif res.status_code == 201:
                LoggingUtils.info(
                    "Successfully sent with return of 201 Created")
            elif res.status_code == 202:
                LoggingUtils.info(
                    "Successfully sent with return of 202 Accepted")
            else:
                LoggingUtils.info(
                    "Sent request with a return of {}".format(
                        res.status_code))
                all_successful = False
        except requests.exceptions.ConnectionError:
            LoggingUtils.warning(
                "Connection error occured while sending to {}".format(
                    url['url']), color=LoggingUtils.RED)
            all_successful = False
        except requests.exceptions.MissingSchema:
            LoggingUtils.warning("Missing http/https schema for {}".format(
                url['url']), color=LoggingUtils.RED)
            all_successful = False
        except requests.exceptions.Timeout:
            LoggingUtils.warning(
                "Timeout occured while sending to {}".format(url['url']),
                color=LoggingUtils.RED)
            all_successful = False
        except Exception:
            # Narrowed from a bare except so KeyboardInterrupt/SystemExit propagate
            LoggingUtils.warning(
                "Unknown error occured while sending to {}".format(
                    url['url']), color=LoggingUtils.RED)
            all_successful = False
    return all_successful
def _run(command: Iterable[str], error_message: str) -> int:
    """
    A wrapping over subprocess.run().
    This sets the stderr, stdout, and handles converting SIGINT signals to quit jobs.

    Params:
        command: the argv sequence to execute
        error_message: message used for the FFmpegError raised on failure

    Returns: 0 (the return code) on success.
    Raises: WorkerCancelledError on rc 255 (user kill); FFmpegError on any other nonzero rc.
    """
    # BUGFIX: materialize the iterable exactly once. The original passed
    # `command` to ' '.join() and then to subprocess.run(); a generator
    # argument would be exhausted by the join and run an empty command.
    command = list(command)
    LoggingUtils.debug("Running command {}".format(' '.join(command)))
    result = subprocess.run(command,
                            stderr=subprocess.PIPE,
                            stdout=subprocess.PIPE)
    if result.returncode == 255:
        # 255 is how the killed ffmpeg process surfaces a SIGINT here
        LoggingUtils.warning(
            "User killed running ffmpeg process, canceling operation and adding to failed job queue.",
            color=LoggingUtils.RED)
        raise WorkerCancelledError(helper=True)
    elif result.returncode != 0:
        raise FFmpegError(error_message, result.stderr.decode('utf-8'))
    else:
        return result.returncode  # == 0
def send(cls, job: Job, hisha: HishaInfo, webhooks: List[str]):
    """
    Send the generated embed to every Discord webhook.

    Params:
        job: the Job the notification is about
        hisha: show info used to build the embed
        webhooks: list of Discord webhook URLs

    Delivery is best-effort per webhook - one failure doesn't stop the rest.
    """
    embed = cls._generate_embed(job, hisha, webhooks)
    LoggingUtils.info("Sending out Discord webhook notifications", color=LoggingUtils.LMAGENTA)
    for webhook in webhooks:
        try:
            requests.post(webhook, json=embed, timeout=5)
        except Exception:
            # Narrowed from a bare except so process-exit signals propagate;
            # everything else is logged and the loop continues.
            LoggingUtils.warning(
                "There was an error when sending out a Discord webhook to: {}"
                .format(webhook), color=LoggingUtils.YELLOW)
    LoggingUtils.info("Done sending out webhook notifications", color=LoggingUtils.GREEN)
    return
def is_user_watching_names(self, user, show_name):
    """
    Determines whether or not an Anilist user is watching a show
    Checks by show name

    Params:
        user: username to look up
        show_name: name of the show to look up. this should already be the anilist name.

    Returns: a boolean - True if watching, False if not
    """
    try:
        watching, paused, ptw = self._kishi_list(user)
        # Search each list in turn; the list name is only used for logging.
        for list_name, shows in (("watching", watching),
                                 ("paused", paused),
                                 ("planning", ptw)):
            for show in shows:
                for title in show['title'].values():
                    if self._check_equality_regex(title, show_name):
                        LoggingUtils.debug("Matched {} to {} in {}".format(
                            title, show_name, list_name))
                        return True
        LoggingUtils.debug("Didn't find a match for {}".format(show_name))
        return False
    except Exception:
        # If any errors are encountered, return True (default assumption)
        LoggingUtils.warning(
            "An error was encountered while contacting Anilist. Defaulting to TRUE",
            color=LoggingUtils.YELLOW)
        return True
def _load_subtitle_info(cls, pi, info):
    """
    Select subtitle stream indices from ffprobe stream data and store them on ProbeInfo.

    Params:
        pi - the ProbeInfo-like object being populated (writes _subtitle_main_index
             and possibly _subtitle_extra_index)
        info - ffprobe output dict; only info['streams'] is read

    A lone subtitle stream is used unconditionally. With multiple streams, English
    is picked by language tag or title, and an OP/ED/Signs track becomes the extra
    index; if nothing matched, the first subtitle stream is used as a fallback.
    """
    streams = info['streams']
    subtitle_stream_count = 0
    # First pass: count subtitle streams to choose between the two strategies below
    for stream in streams:
        if stream['codec_type'].lower() == "subtitle":
            subtitle_stream_count += 1
    if subtitle_stream_count == 1:
        # Exactly one subtitle stream - take it regardless of tags
        for stream in streams:
            if stream['codec_type'].lower() == "subtitle":
                pi._subtitle_main_index = int(stream['index'])
                break
    else:
        # Multiple subtitle streams - pick by tags.
        # NOTE(review): the elif means a stream carrying BOTH 'language' and
        # 'title' tags is only ever judged by its language tag; the title-based
        # OP/ED/Signs detection is skipped for such streams - confirm intended.
        for stream in streams:
            if stream['codec_type'].lower() == "subtitle":
                if 'tags' in stream:
                    if 'language' in stream['tags']:
                        language = stream['tags']['language']
                        # Substring match: catches "eng", "english", etc.
                        if "eng" in language.lower():
                            LoggingUtils.debug("Set main subtitle index based on language detection")
                            pi._subtitle_main_index = int(stream['index'])
                    elif 'title' in stream['tags']:
                        title = stream['tags']['title']
                        if title.lower() == "english":
                            LoggingUtils.debug("Set main subtitle index based on title detection")
                            pi._subtitle_main_index = int(stream['index'])
                        # Set the OP/ED/Signs track
                        # NOTE(review): "op"/"ed" are substring checks and can
                        # match unrelated words in a title - confirm acceptable.
                        elif ("op" in title.lower() and "ed" in title.lower()) or "sign" in title.lower():
                            LoggingUtils.debug("Set secondary subtitle index based on title detection")
                            pi._subtitle_extra_index = int(stream['index'])
        # If no subs were selected, default to the first available
        if pi.subtitle_main_index < 1:
            LoggingUtils.warning("Unable to determine which subtitle stream to index, using first available...", color=LoggingUtils.RED)
            for stream in streams:
                if stream['codec_type'].lower() == "subtitle":
                    pi._subtitle_main_index = int(stream['index'])
                    break
        else:
            LoggingUtils.debug("Subtitle stream was detected in multiselected, proceeding...")
def _get_main_studio_info(self, studios):
    """
    Goes through the studio edges and returns the main (studio name, siteurl)

    Params:
        studios - The studios body from the Anilist GraphQL json response

    Returns: A tuple (studio name: str, site url: str), or ("Unknown", ANILIST_URL)
             if no main studio is found or the response is malformed.
             (Docstring fixed: the code never returned None.)
    """
    try:
        edges = studios['edges']
        for edge in edges:
            LoggingUtils.debug("Checking edge {}".format(edge['node']['name']))
            if edge['isMain']:
                LoggingUtils.debug("Found main studio edge, returning tuple")
                node = edge['node']
                return (node['name'], node['siteUrl'])
        # If a main studio isn't found, fall back to the default tuple
        LoggingUtils.debug("Didn't find any main studio edge, returning default")
        return ("Unknown", ANILIST_URL)
    except (KeyError, TypeError):
        # Narrowed from a bare except: only malformed response shapes are
        # expected; the deliberate best-effort default is preserved.
        LoggingUtils.warning("Didn't find any main studio edge, returning default", color=LoggingUtils.YELLOW)
        return ("Unknown", ANILIST_URL)
def _anilist(self, query, search, status):
    """
    This helper method handles making requests to Anilist
    Returns the response in JSON form

    Params:
        query: The type of query (single, page) to request for
        search: the name of the show to search for
        status: the status of the show to search for

    Raises: Exception on connection failure, bad HTTP status, or unparseable JSON.
    """
    try:
        # Make request to Anilist and substitute the variables properly
        ani = requests.post(self._API_URL,
                            json={'query': query,
                                  'variables': {
                                      'search': search,
                                      'status': status}
                                  })
        if ani.status_code != 200:
            # Added the color kwarg for consistency with the file's other warnings
            LoggingUtils.warning("Anilist returned a bad HTTP code when attempting to connect",
                                 color=LoggingUtils.YELLOW)
            raise Exception()
        try:
            # Try to get the response as a JSON object
            ani_json = ani.json()
        except ValueError:
            # requests raises ValueError (JSONDecodeError) on malformed bodies
            LoggingUtils.warning("Anilist response did not properly parse into JSON",
                                 color=LoggingUtils.YELLOW)
            raise Exception()
        # Return the data provided by the request response
        return ani_json['data']
    except Exception as e:
        # Narrowed from a bare except; chaining keeps the root cause visible.
        LoggingUtils.error("There was an error when attempting to contact Anilist",
                           color=LoggingUtils.RED)
        raise Exception() from e
def _check_episode_exists(job: Job, rclone_config: str, source: str) -> bool:
    """
    Given a source, check if Job file exists in source.

    Params:
        job: the Job whose show/episode we're looking for
        rclone_config: path to the temporary rclone config file
        source: the rclone remote/path prefix to search

    Returns: True if the episode is listed under source, False otherwise
             (including any error while listing - best-effort).
    """
    LoggingUtils.debug("Checking for episode in source: {}".format(source))
    try:
        # Call rclone to check whether or not something exists
        command = [BinUtils.rclone]
        LoggingUtils.debug("Using temporary rclone file at " + rclone_config,
                           color=LoggingUtils.YELLOW)
        command.append("--config={}".format(rclone_config))
        # NOTE(review): assumes `source` already ends with a path separator -
        # confirm with callers, otherwise show folders could be mis-joined.
        command.extend(["lsjson", "-R", source + job.show])
        # NOTE(review): an exception *instance* is passed where _run appears to
        # expect an error-message argument - confirm Rclone._run's contract.
        response = Rclone._run(
            command,
            RcloneLSJsonError("Rclone failed to run lsjson", "", job.show,
                              job.episode))
        # Get the episode list in that folder
        episode_list = json.loads(response.stdout.decode('utf-8'))
        # Check if our job episode exists
        for episode in episode_list:
            if episode['Name'] == job.episode:
                LoggingUtils.debug(
                    "Found a match for episode in source {}".format(source))
                return True
        LoggingUtils.debug(
            "Didn't find a match for episode in source {}".format(source))
        return False
    except Exception:
        # Narrowed from a bare except; this path is usually hit if the source
        # provided doesn't exist, and the deliberate False fallback is kept.
        LoggingUtils.warning(
            "An error occured while checking source {}, does it exist?".
            format(source))
        return False
# Queue names come from the command line; drop empty arguments.
qs = sys.argv[1:]
qs = [q for q in qs if q]

LoggingUtils.info("*** Listening on {}...".format(', '.join(qs)),
                  color=LoggingUtils.LGREEN)

# Outer loop reconnects the worker after expected timeouts; fatal conditions
# exit the process explicitly.
while True:
    with Connection():
        try:
            redis_conn = Redis(host=WorkerConf.redis_host,
                               port=WorkerConf.redis_port,
                               password=WorkerConf.redis_password,
                               socket_keepalive=True,
                               socket_timeout=180,
                               health_check_interval=60)
            w = Worker(qs, connection=redis_conn, name=WORKER_NAME)
            w.work()
        except RedisConnectionError:
            # (unused `as rce` binding removed)
            LoggingUtils.critical(
                "Lost connection to Redis instance, shutting down.",
                color=LoggingUtils.LRED)
            sys.exit()
        except WorkerCancelledError:
            LoggingUtils.warning("Worker killed externally, shutting down...")
            sys.exit()
        except TimeoutError:
            # We expect a timeout error to occur as this forces the worker to reregister
            # Silently handle and let the loop continue
            pass