Example #1
    def _load_audio_info(cls, pi, info):
        """
        Method to load proper Japanese audio stream data into ProbeInfo

        Preferred order of audio tracks:
        FLAC >> MP3 >> AAC. All else is no good
        TODO: Handle Commentary streams
        """
        # If there's only one audio stream, load that - regardless of what it is
        # If there are multiple tracks, load in order of preference, but if no tags - don't load any
        streams = info['streams']
        audio_stream_count = 0

        for stream in streams:
            if stream['codec_type'].lower() == "audio":
                audio_stream_count += 1

        if audio_stream_count == 1:
            # There is only one stream, pull its data
            for stream in streams:
                if stream['codec_type'].lower() == "audio":
                    pi._audio_stream_index = int(stream['index'])
                    pi._audio_codec_name = str(stream['codec_name']).lower()
                    pi._audio_codec_long_name = stream['codec_long_name']

                    if 'tags' in stream and 'language' in stream['tags']:
                        pi._audio_tags_language = str(stream['tags']['language']).lower()
                    break
        else:
            for stream in streams:
                if stream['codec_type'].lower() == "audio":
                    if 'tags' in stream:
                        if 'language' in stream['tags']:
                            lang = str(stream['tags']['language']).lower()
                            if lang == "jpn" or lang == "jp":
                                if stream['codec_name'] == "flac":
                                    cls._set_audio_info(pi, stream)
                                elif stream['codec_name'] == "mp3" or stream['codec_name'] == "aac":
                                    if pi._audio_codec_name != "flac":
                                        cls._set_audio_info(pi, stream)
                                else:
                                    if pi._audio_codec_name != "flac" and pi._audio_codec_name != "mp3" and pi._audio_codec_name != "aac":
                                        cls._set_audio_info(pi, stream)
            # All streams iterated but none were Japanese - this means the streams aren't tagged properly.
            # In this case, do not set anything - This will be a signal that the audio streams aren't reliable

            # However, for now, we should just set the first stream anyways and log a warning
            if not pi.audio_codec_name:
                LoggingUtils.warning("Unable to determine which audio stream to index, using first available...", color=LoggingUtils.RED)
                for stream in streams:
                    if stream['codec_type'].lower() == "audio":
                        pi._audio_stream_index = int(stream['index'])
                        pi._audio_codec_name = str(stream['codec_name']).lower()
                        pi._audio_codec_long_name = stream['codec_long_name']

                        if 'tags' in stream and 'language' in stream['tags']:
                            pi._audio_tags_language = str(stream['tags']['language']).lower()
                        break
            else:
                LoggingUtils.debug("Audio stream index was set in multiselect, proceeding...")
Example #2
    def upload(job: Job, destinations: List[str], upload_file: str,
               rclone_config: str, flags: str) -> bool:
        """
        Upload the completed new hardsub file into the rclone destinations
        Returns a boolean based on success

        job: Job to do! This is the job of the HARDSUB file
        destinations: list of rclone destinations (e.g., EncoderConf.uploading_destinations)
        upload_file: Path to the file to be uploaded
        rclone_config: Path to the rclone config file
        flags: rclone flags

        This method will upload the file and include its show name:
        e.g., 'temp.mp4' --> destination/show/episode.mp4
        """
        for dest in destinations:
            LoggingUtils.debug("Uploading to {}".format(dest))
            rclone_dest = PathUtils.clean_directory_path(
                dest) + PathUtils.clean_directory_path(job.show) + job.episode
            command = [BinUtils.rclone]

            LoggingUtils.debug("Using temporary rclone file at " +
                               rclone_config,
                               color=LoggingUtils.YELLOW)
            command.extend(["--config={}".format(rclone_config)])

            command.extend(["copyto", upload_file, rclone_dest])
            command.extend(flags.split())
            Rclone._run(
                command,
                RcloneUploadError(
                    "An error occurred when rclone was uploading a file", "",
                    job.episode, dest))

        # Rclone._run raises on failure, so reaching this point means every upload succeeded
        return True
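A possible call site, shown only as a sketch: the Job constructor arguments mirror create_job_for_hardsub further down, while the destination strings, config path, and flags are made-up values.

    # Hypothetical usage of Rclone.upload
    job = Job("Some Show", "Some Show - 01.mp4", 123456789, "hardsub")
    Rclone.upload(job,
                  ["remote1:Premiered/", "remote2:backup/"],  # assumed destinations
                  "/tmp/workdir/temp.mp4",
                  "/tmp/tmpabc123.conf",  # path to a temporary rclone.conf
                  "--transfers 4")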
Example #3
    def _add_list_entries(self, list_name, list_json):
        """
        Helper method to add all list entries into a medialist (watching/paused/ptw)
        Params:
            list_name: the name that the list appears to be from Anilist ("Watching")
            list_json: anilist's raw api response (json format) {'data':'MediaListCollection'}

        Returns: A list with populated Anilist Media entries (a list of dicts)
        """
        try:
            entries = list()

            media_lists = list_json['data']['MediaListCollection']['lists']
            for media_list in media_lists:
                if list_name.lower() == media_list['name'].lower():
                    for entry in media_list['entries']:
                        entries.append(entry['media'])

            return entries

        except:
            LoggingUtils.warning(
                "Kishi was unable to process list entries for {}".format(
                    list_name),
                color=LoggingUtils.YELLOW)
            raise Exception()
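For reference, a minimal sketch of the list_json shape this helper walks (a Kishi instance is assumed); the field names follow the data -> MediaListCollection -> lists path parsed above, but the concrete values are invented.

example_list_json = {
    "data": {
        "MediaListCollection": {
            "lists": [
                {"name": "Watching",
                 "entries": [{"media": {"id": 1, "title": {"romaji": "Some Show"}}}]},
                {"name": "Paused", "entries": []},
            ]
        }
    }
}
# kishi._add_list_entries("Watching", example_list_json)
# -> [{"id": 1, "title": {"romaji": "Some Show"}}]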
Example #4
    def _download_episode(job: Job, source: str, tempfolder: str,
                          rclone_config: str, flags: str) -> str:
        """Helper to download file from remote to local temp"""
        # Note 1: The file will always be downloaded as "temp" in the temp folder
        rclone_src_file = source + PathUtils.clean_directory_path(
            job.show) + job.episode
        rclone_dest_file = tempfolder + "temp"
        LoggingUtils.debug("Sourcing file from \"{}\"".format(rclone_src_file))
        LoggingUtils.debug(
            "Downloading to temp file at \"{}\"".format(rclone_dest_file),
            color=LoggingUtils.YELLOW)

        LoggingUtils.debug("Beginning download...", color=LoggingUtils.YELLOW)
        command = [BinUtils.rclone]

        LoggingUtils.debug("Using temporary rclone file at " + rclone_config,
                           color=LoggingUtils.YELLOW)
        command.extend(["--config={}".format(rclone_config)])

        command.extend(["copyto", rclone_src_file, rclone_dest_file])
        command.extend(flags.split())

        Rclone._run(
            command,
            RcloneDownloadError(
                "Unable to copy episode file to local folder using rclone", "",
                job.show, job.episode))

        LoggingUtils.debug("Download complete.", color=LoggingUtils.GREEN)
        return rclone_dest_file
Example #5
    def download(job: Job, sources: List[str], tempfolder: str,
                 rclone_config: str, flags: str) -> str:
        """
        Download the provided episode from sources
        Returns the path of the downloaded file

        job: Job to do!
        sources: list of rclone sources (EncoderConf.downloading_sources)
        tempfolder: Path of the temporary folder
        rclone_config: Path to the rclone config file
        flags: rclone flags
        """

        # Step 1: Download the episode if possible
        dl_source = None
        for source in sources:
            if Rclone._check_episode_exists(job, rclone_config, source):
                LoggingUtils.debug(
                    "Found episode in source {}, downloading...".format(
                        source),
                    color=LoggingUtils.GREEN)
                dl_source = source
                break
        # If the file was not found in any sources, raise an error to signal failure
        if not dl_source:
            raise RcloneDownloadNotFoundError(
                "No sources contained the episode, cancelling operation", "",
                job.show, job.episode)

        # Download the episode
        tempfolder = PathUtils.clean_directory_path(tempfolder)
        episode_src_file = Rclone._download_episode(job, dl_source, tempfolder,
                                                    rclone_config, flags)

        return episode_src_file
Example #6
def handle_redis_connection_error(error):
    LoggingUtils.critical("It appears that Redis is down.")
    response = {
        "success": False,
        "error": {
            "type": "Redis Connection",
            "message": "Redis connection error has occured."
        }
    }
    return jsonify(response), 500
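This handler presumably gets wired up as a Flask error handler for Redis connection failures. A sketch of how that registration might look, assuming Flask and the redis-py ConnectionError class; the actual wiring in the project may differ.

from flask import Flask, jsonify
from redis.exceptions import ConnectionError as RedisConnectionError

app = Flask(__name__)
# Route Redis connection failures to the JSON handler above
app.register_error_handler(RedisConnectionError, handle_redis_connection_error)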
Example #7
 def _clean_string(self, str1):
     """
     Cleans a string of potentially problematic characters
     """
     try:
         clean_str = str1.replace('"', '')
         LoggingUtils.debug("Cleaned {} to {}".format(str1, clean_str))
         return clean_str
     except:
         LoggingUtils.debug("Cleaner was not provided a valid title, returning None")
         return None
Example #8
 def get_temp_file(cls, rcs: RcloneConfigStore) -> str:
     """Create (if needed) and return a temporary rclone.conf written from rcs.content"""
     if not cls.TEMP_PATH:
         LoggingUtils.debug("No rclone temp file detected, creating one.",
                            color=LoggingUtils.YELLOW)
         _, cls.TEMP_PATH = tempfile.mkstemp(suffix=".conf")
         with open(cls.TEMP_PATH, 'w') as rconf:
             rconf.write(rcs.content)
         LoggingUtils.debug("Created a temporary file for rclone.conf at " +
                            cls.TEMP_PATH,
                            color=LoggingUtils.GREEN)
     return cls.TEMP_PATH
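get_temp_file only reads rcs.content, so the RcloneConfigStore it receives can be very small. A hypothetical sketch of such a container (the real class in the project may carry more fields):

from dataclasses import dataclass

@dataclass
class RcloneConfigStore:
    """Hypothetical holder for the raw text of an rclone.conf file"""
    content: str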
Example #9
    def get_temp_folder(cls) -> str:
        """
        Returns the path to the temp folder.
        Creates the temp folder if it does not exist.
        """

        if not cls.TEMP_PATH:
            LoggingUtils.debug("No temp folder path detected, creating one.", color=LoggingUtils.YELLOW)
            cls.TEMP_PATH = tempfile.mkdtemp()
            LoggingUtils.debug("Created a temp folder path at {}".format(cls.TEMP_PATH), color=LoggingUtils.GREEN)

        return cls.TEMP_PATH
Example #10
def notify():

    LoggingUtils.info("Received a request for notify")

    authorized = RequestAuthorizer.authorize(request.headers)
    # Check authorization
    if not authorized:
        LoggingUtils.debug("Returning 401 http status code",
                           color=LoggingUtils.YELLOW)
        return "Unauthorized request", 401

    job = JobGenerator.create_from_json(request.get_json())
    # Create a job instance
    if not job:
        LoggingUtils.debug("Returning 400 http status code",
                           color=LoggingUtils.YELLOW)
        return "Malformed request", 400

    # Enqueue job
    notify_queue.enqueue(notify_worker,
                         args=(job,
                               NotifierConf.create_notifier_config_store()),
                         job_timeout=JOB_TIMEOUT,
                         result_ttl=RESULT_TTL,
                         failure_ttl=FAILURE_TTL,
                         job_id=_create_job_id(job.episode, "notify"))
    LoggingUtils.info("Enqueued a new notify job to the 'notify' queue",
                      color=LoggingUtils.CYAN)

    return "Request accepted", 200
Example #11
    def _akari_fetch_retry(self, user, listurl, listname, times=5):
        """
        Jikan.moe is susceptible to randomly failing. This method allows us to try multiple times before really "failing"

        Params: See self._akari_list

        Returns: See self._akari_list if successful, or raises an Exception() otherwise
        """

        for i in range(times):
            try:
                LoggingUtils.debug(
                    "Attempt #{} to contact Jikan.moe for {}{}{}".format(
                        i + 1, CYAN, listname, ENDC))
                anime = self._akari_list(user, listurl, listname)
                LoggingUtils.debug("Attempt #{} {}succeeded{}".format(
                    i + 1, GREEN, ENDC))
                return anime
            except:
                # Sleep 5 seconds, and then try again
                LoggingUtils.debug(
                    "Attempt #{} {}failed{}, sleeping 5 seconds and trying again..."
                    .format(i + 1, RED, ENDC))
                time.sleep(5)

        # If this point is reached, there have been too many errors. Raise an exception
        LoggingUtils.error("Akari was unable to contact Jikan.moe")
        raise Exception()
Example #12
def encode_lp():

    LoggingUtils.info("Received a request under the low priority encode route")

    authorized = RequestAuthorizer.authorize(request.headers)
    # Check authorization
    if not authorized:
        LoggingUtils.debug("Returning 401 http status code",
                           color=LoggingUtils.YELLOW)
        return "Unauthorized request", 401

    job = JobGenerator.create_from_json(request.get_json())
    # Create a job instance
    if not job:
        LoggingUtils.debug("Returning 400 http status code",
                           color=LoggingUtils.YELLOW)
        return "Malformed request", 400

    # Enqueue job
    encode_lp_queue.enqueue(encode_worker,
                            args=(job, RcloneConf.get_config(),
                                  EncoderConf.create_encoder_config_store()),
                            job_timeout=JOB_TIMEOUT,
                            result_ttl=RESULT_TTL,
                            failure_ttl=FAILURE_TTL,
                            job_id=_create_job_id(job.episode, "encode"))
    LoggingUtils.info("Enqueued a new encoder job to the 'encode' queue",
                      color=LoggingUtils.CYAN)

    return "Request accepted", 200
Example #13
    def _check_equality_regex(self, str1, str2):
        """
        Checks for equality of two strings without considering punctuation
        Returns a boolean to indicate equality
        """

        LoggingUtils.debug("Comparing {} and {} without punctuation".format(
            str1, str2))

        try:
            re_str1 = re.sub(r'[^\w\s]', '', str1)
            re_str2 = re.sub(r'[^\w\s]', '', str2)
            return bool(re_str1 == re_str2)
        except:
            return False
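A quick illustration of the comparison above, using made-up titles:

    # Punctuation is stripped but whitespace is kept, so:
    #   _check_equality_regex("Gabriel DropOut!", "Gabriel DropOut")  -> True
    #   _check_equality_regex("Re:Zero", "ReZero")                    -> True
    #   _check_equality_regex("Re:Zero", "Re Zero")                   -> False (the space survives)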
Example #14
    def _sig_handler(sig, frame):
        LoggingUtils.critical(
            "SIG command {} detected, killing all running rclone processes...".
            format(sig),
            color=LoggingUtils.LRED)

        current_proc = psutil.Process()
        children = current_proc.children(recursive=True)

        for child in children:
            LoggingUtils.debug(
                "Killing child rclone process with PID {}".format(child.pid))
            kill(child.pid, signal.SIGTERM)

        raise WorkerCancelledError(helper=True)
Example #15
 def _run(command: Iterable[str], error_message: str) -> int:
     """
     A wrapper around subprocess.run(). It captures stdout and stderr and converts
     a user-interrupted ffmpeg run (return code 255) into a cancelled job.
     """
     LoggingUtils.debug("Running command {}".format(' '.join(command)))
     result = subprocess.run(command,
                             stderr=subprocess.PIPE,
                             stdout=subprocess.PIPE)
     if result.returncode == 255:
         LoggingUtils.warning(
             "User killed running ffmpeg process, canceling operation and adding to failed job queue.",
             color=LoggingUtils.RED)
         raise WorkerCancelledError(helper=True)
     elif result.returncode != 0:
         raise FFmpegError(error_message, result.stderr.decode('utf-8'))
     else:
         return result.returncode  # == 0
Example #16
    def is_user_watching_names(self, user, show_name):
        """
        Determines whether or not an Anilist user is watching a show
        Checks by show name

        Params:
            user: username to look up
            show_name: name of the show to look up. this should already be the anilist name.

        Returns: a boolean - True if watching, False if not
        """
        try:
            watching, paused, ptw = self._kishi_list(user)

            for show in watching:
                for title in show['title'].values():
                    if self._check_equality_regex(title, show_name):
                        LoggingUtils.debug("Matched {} to {} in {}".format(
                            title, show_name, "watching"))
                        return True

            for show in paused:
                for title in show['title'].values():
                    if self._check_equality_regex(title, show_name):
                        LoggingUtils.debug("Matched {} to {} in {}".format(
                            title, show_name, "paused"))
                        return True

            for show in ptw:
                for title in show['title'].values():
                    if self._check_equality_regex(title, show_name):
                        LoggingUtils.debug("Matched {} to {} in {}".format(
                            title, show_name, "planning"))
                        return True

            LoggingUtils.debug("Didn't find a match for {}".format(show_name))
            return False

        except:
            # If any errors are encountered, return True (default assumption)
            LoggingUtils.warning(
                "An error was encountered while contacting Anilist. Defaulting to TRUE",
                color=LoggingUtils.YELLOW)
            return True
Example #17
    def create_job_for_hardsub(src_job: Job, hardsub_file: str) -> Job:
        """Create a new Job describing the hardsubbed .mp4 produced from src_job's episode"""
        episode_name = src_job.episode.replace(".mkv", ".mp4")
        episode_filesize = getsize(hardsub_file)
        episode_sub = "hardsub"

        LoggingUtils.debug(
            "Creating new Job instance with the following info:",
            color=LoggingUtils.MAGENTA)
        LoggingUtils.debug("Job.show: {}".format(src_job.show),
                           color=LoggingUtils.MAGENTA)
        LoggingUtils.debug("Job.episode: {}".format(episode_name),
                           color=LoggingUtils.MAGENTA)
        LoggingUtils.debug("Job.filesize: {}".format(episode_filesize),
                           color=LoggingUtils.MAGENTA)
        LoggingUtils.debug("Job.sub: {}".format(episode_sub),
                           color=LoggingUtils.MAGENTA)

        return Job(src_job.show, episode_name, episode_filesize, episode_sub)
Example #18
    def _kitsu_basic_search(self, title):
        """
        This is a quick Kitsu search implementation from the Hitsu 2A module.

        Params:
            title - the title of the show (in provided request) to search for

        Returns: Kitsu's JSON response
        """
        title_lower = title.lower()
        request_url = requests.utils.requote_uri(KITSU_API_URL + title_lower)
        LoggingUtils.debug("Created Kitsu url - {}".format(request_url))

        try:
            kitsu_res = requests.get(request_url)

            try:
                kitsu_res_json = kitsu_res.json()
            except:
                LoggingUtils.warning("Kitsu response did not properly parse into JSON", color=LoggingUtils.YELLOW)
                raise Exception()

            return kitsu_res_json

        except:
            LoggingUtils.error("There was an error when attempting to contact Kitsu", color=LoggingUtils.RED)
            raise Exception()
Example #19
    def _load_subtitle_info(cls, pi, info):
        """
        Method to load the main (and optional extra) subtitle stream data into ProbeInfo.
        Prefers English-tagged or English-titled tracks; OP/ED/signs tracks go to the extra index.
        """
        streams = info['streams']
        subtitle_stream_count = 0
        for stream in streams:
            if stream['codec_type'].lower() == "subtitle":
                subtitle_stream_count += 1
        
        if subtitle_stream_count == 1:
            for stream in streams:
                if stream['codec_type'].lower() == "subtitle":
                    pi._subtitle_main_index = int(stream['index'])
                    break
        else:
            for stream in streams:
                if stream['codec_type'].lower() == "subtitle":
                    if 'tags' in stream:
                        if 'language' in stream['tags']:
                            language = stream['tags']['language']
                            if "eng" in language.lower():
                                LoggingUtils.debug("Set main subtitle index based on language detection")
                                pi._subtitle_main_index = int(stream['index'])
                        elif 'title' in stream['tags']:
                            title = stream['tags']['title']
                            if title.lower() == "english":
                                LoggingUtils.debug("Set main subtitle index based on title detection")
                                pi._subtitle_main_index = int(stream['index'])
                            # Set the OP/ED/Signs track
                            elif ("op" in title.lower() and "ed" in title.lower()) or "sign" in title.lower():
                                LoggingUtils.debug("Set secondary subtitle index based on title detection")
                                pi._subtitle_extra_index = int(stream['index'])
            # If no subs were selected, default to the first available
            if pi.subtitle_main_index < 1:
                LoggingUtils.warning("Unable to determine which subtitle stream to index, using first available...", color=LoggingUtils.RED)
                for stream in streams:
                    if stream['codec_type'].lower() == "subtitle":
                        pi._subtitle_main_index = int(stream['index'])
                        break
            else:
                LoggingUtils.debug("Subtitle stream was detected in multiselected, proceeding...")
Example #20
    def add_font(job: Job, src_file: str, tempfolder: str) -> bool:
        """Adds the OpenSans-Semibold.ttf font file"""
        info = Haikan.scan(src_file)
        binary = FFmpeg._binary(info)
        dest_temp_file = PathUtils.clean_directory_path(
            tempfolder) + "temp2.mkv"
        LoggingUtils.debug(
            "Creating new fonted file at {}".format(dest_temp_file))

        command = [binary, "-i", src_file
                   ]  # The base command, this is akin to "ffmpeg -i temp.mkv"
        command.extend([
            "-attach", AssetUtils.opensans_semibold
        ])  # The attach command, "-attach OpenSans-Semibold.ttf"
        command.extend([
            "-metadata:s:{}".format(info.streams),
            "mimetype=application/x-truetype-font"
        ])  # Add metadata
        command.extend(
            ["-c:a", "copy", "-c:v", "copy", "-c:s", "copy", dest_temp_file])

        FFmpeg._run(
            command,
            "Error occurred while ffmpeg was attaching font to the episode")

        # Replace the original file with this
        LoggingUtils.debug(
            "Successfully added font to file, now replacing original...",
            color=LoggingUtils.GREEN)
        shutil.move(dest_temp_file, src_file)
        LoggingUtils.debug(
            "Successfully replaced original file with fonted episode",
            color=LoggingUtils.GREEN)
        # FFmpeg._run raises on failure, so reaching this point means the font was attached
        return True
Example #21
    def _run(command: Iterable[str], run_error: RcloneError) -> subprocess.CompletedProcess:
        """
        Helper method to run commands. Rclone doesn't properly catch SIGINT signals,
        so we need to actually catch it on our own to kill rclone processes.
        Because of this, we need to temporarily change the sigint handler and then revert it for RQ.
        """
        rq_sigint_handler = signal.getsignal(signal.SIGINT)
        rq_sigterm_handler = signal.getsignal(signal.SIGTERM)

        signal.signal(signal.SIGINT, Rclone._sig_handler)
        signal.signal(signal.SIGTERM, Rclone._sig_handler)

        LoggingUtils.debug("Running command {}".format(' '.join(command)))
        try:
            response = subprocess.run(command,
                                      stdout=subprocess.PIPE,
                                      stderr=subprocess.PIPE,
                                      timeout=3600)
        except subprocess.TimeoutExpired as timeout_error:
            # 'response' is never assigned when the timeout fires, so use the
            # output captured on the exception instead
            raise RcloneRunError(
                "Timeout expired when running rclone command {}".format(
                    ' '.join(command)),
                timeout_error.stdout.decode('utf-8') if timeout_error.stdout else '',
                '', '')
        except WorkerCancelledError:
            # Cleanly pass on the Worker Cancelled Error
            raise WorkerCancelledError()
        except:
            raise RcloneRunError(
                "Some kind of error occurred while running rclone command {}".format(
                    ' '.join(command)), '', '', '')

        if response.returncode != 0:
            raise run_error

        signal.signal(signal.SIGINT, rq_sigint_handler)
        signal.signal(signal.SIGTERM, rq_sigterm_handler)

        return response
Example #22
    def _check_episode_exists(job: Job, rclone_config: str,
                              source: str) -> bool:
        """Given a source, check if Job file exists in source"""
        LoggingUtils.debug("Checking for episode in source: {}".format(source))
        try:
            # Call rclone to check whether or not something exists
            command = [BinUtils.rclone]

            LoggingUtils.debug("Using temporary rclone file at " +
                               rclone_config,
                               color=LoggingUtils.YELLOW)
            command.extend(["--config={}".format(rclone_config)])

            command.extend(["lsjson", "-R", source + job.show])
            response = Rclone._run(
                command,
                RcloneLSJsonError("Rclone failed to run lsjson", "", job.show,
                                  job.episode))

            # Get the episode list in that folder
            episode_list = json.loads(response.stdout.decode('utf-8'))
            # Check if our job episode exists
            for episode in episode_list:
                if episode['Name'] == job.episode:
                    LoggingUtils.debug(
                        "Found a match for episode in source {}".format(
                            source))
                    return True
            LoggingUtils.debug(
                "Didn't find a match for episode in source {}".format(source))
            return False
        # This except block is usually hit if the source provided doesn't exist.
        except:
            LoggingUtils.warning(
                "An error occured while checking source {}, does it exist?".
                format(source))
            return False
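For context, rclone lsjson -R prints a JSON array of file entries, and the loop above only looks at each entry's "Name" field. A trimmed, hypothetical example of what json.loads(response.stdout) would yield:

    # [
    #   {"Path": "Some Show - 01.mkv", "Name": "Some Show - 01.mkv",
    #    "Size": 1462341234, "MimeType": "video/x-matroska", "IsDir": false},
    #   {"Path": "Subfolder/Some Show - 02.mkv", "Name": "Some Show - 02.mkv",
    #    "Size": 1398210532, "MimeType": "video/x-matroska", "IsDir": false}
    # ]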
Example #23
    def _kishi_list(self, user):
        """
        Helper method to get all of a user's anime list.

        Params:
            user: String, username of Anilist user

        Returns: A tuple of three lists.
            The first list is all the Watching
            The second list is all the PTW
            The third list is all the Paused

        Throws an exception if anything goes wrong. This should be caught by any method using this.
        """

        watching = list()
        paused = list()
        ptw = list()

        # Anilist API is much nicer to play with.
        try:
            # Make the request to Anilist, and pass in the userName as the user query
            anilist_res = requests.post(self._ANILIST_API_URL,
                                        json={
                                            'query': self._ANILIST_USER,
                                            'variables': {
                                                'userName': user
                                            }
                                        })

            if anilist_res.status_code != 200:
                LoggingUtils.debug(
                    "Anilist returned a bad status code when attempting to get {}'s lists"
                    .format(user))
                raise Exception()

            try:
                anilist_res_json = anilist_res.json()
            except:
                LoggingUtils.debug(
                    "Anilist returned a response that was not parseable into JSON"
                )
                raise Exception()

            watching = self._add_list_entries("Watching", anilist_res_json)
            paused = self._add_list_entries("Paused", anilist_res_json)
            ptw = self._add_list_entries("Planning", anilist_res_json)

        except:
            LoggingUtils.warning(
                "Kishi was unable to properly contact Anilist",
                color=LoggingUtils.YELLOW)
            raise Exception()

        return (watching, paused, ptw)
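The self._ANILIST_USER query string is not shown in these examples. A sketch of a GraphQL query that would produce the data.MediaListCollection.lists[].entries[].media shape that _add_list_entries expects, based on Anilist's public schema; the exact fields the project requests are an assumption:

_ANILIST_API_URL = "https://graphql.anilist.co"
_ANILIST_USER = """
query ($userName: String) {
    MediaListCollection(userName: $userName, type: ANIME) {
        lists {
            name
            entries {
                media {
                    id
                    title { romaji english native }
                }
            }
        }
    }
}
"""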
Example #24
    def destroy_temp_folder(cls) -> None:
        """Delete the temp folder created by get_temp_folder, if one exists"""
        if not cls.TEMP_PATH:
            LoggingUtils.debug("No temp folder exists, doing nothing", color=LoggingUtils.GREEN)
            return

        try:
            shutil.rmtree(cls.TEMP_PATH)
            LoggingUtils.debug("Cleared the temp folder path at {}".format(cls.TEMP_PATH), color=LoggingUtils.GREEN)
            cls.TEMP_PATH = str()
        except:
            LoggingUtils.warning("Was unable to clear the temp folder path at {}".format(cls.TEMP_PATH), color=LoggingUtils.RED)
Example #25
    def search(self, show):
        """
        Searches for a show and returns its information from Anilist
        """
        airing = self._single_search(show, "RELEASING")
        if airing:
            LoggingUtils.info("Creating HishaInfo for {} in RELEASING".format(show))
            return self._create_hisha_info(airing, show)

        finished = self._page_search(show, "FINISHED")
        if finished:
            LoggingUtils.info("Creating HishaInfo for {} in FINISHED".format(show))
            return self._create_hisha_info(finished, show)

        not_yet_released = self._single_search(show, "NOT_YET_RELEASED")
        if not_yet_released:
            LoggingUtils.info("Creating HishaInfo for {} in NOT_YET_RELEASED".format(show))
            return self._create_hisha_info(not_yet_released, show)

        # None of the three found a result, so create a dummy Hisha object and return it
        LoggingUtils.info("Creating HishaInfo for {} with default values".format(show))
        return self._create_hisha_info(None, show)
Example #26
    def destroy_temp_file(cls) -> None:

        if not cls.TEMP_PATH:
            LoggingUtils.debug("No rclone tempf ile exists, doing nothing",
                               color=LoggingUtils.GREEN)
            return

        try:
            os.remove(cls.TEMP_PATH)
            LoggingUtils.debug("Cleared the rclone temp file at " +
                               cls.TEMP_PATH,
                               color=LoggingUtils.GREEN)
            cls.TEMP_PATH = str()
        except:
            LoggingUtils.warning("Unable to clear the rclone temp file",
                                 color=LoggingUtils.RED)
Example #27
    def _get_main_studio_info(self, studios):
        """
        Goes through the studio edges and returns the main (studio name, siteurl)

        Params:
            studios - The studios body from the Anilist GraphQL json response

        Returns: A tuple (studio name: str, site url: str), or ("Unknown", ANILIST_URL) if no main studio is found
        """
        try:
            edges = studios['edges']
            for edge in edges:
                LoggingUtils.debug("Checking edge {}".format(edge['node']['name']))
                if edge['isMain']:
                    LoggingUtils.debug("Found main studio edge, returning tuple")
                    node = edge['node']
                    return (node['name'], node['siteUrl'])
            # If a main studio isn't found, return the default
            LoggingUtils.debug("Didn't find any main studio edge, returning default")
            return ("Unknown", ANILIST_URL)
        except:
            LoggingUtils.warning("Didn't find any main studio edge, returning default", color=LoggingUtils.YELLOW)
            return ("Unknown", ANILIST_URL)
Example #28
    def send(cls, job: Job, hisha: HishaInfo, webhooks: List[str]):

        embed = cls._generate_embed(job, hisha, webhooks)

        LoggingUtils.info("Sending out Discord webhook notifications",
                          color=LoggingUtils.LMAGENTA)
        for webhook in webhooks:
            try:
                requests.post(webhook, json=embed, timeout=5)
            except:
                LoggingUtils.warning(
                    "There was an error when sending out a Discord webhook to: {}"
                    .format(webhook),
                    color=LoggingUtils.YELLOW)
        LoggingUtils.info("Done sending out webhook notifications",
                          color=LoggingUtils.GREEN)

        return
Example #29
    def is_user_watching_id(self, user, malID, times=5):
        """
        Is a user watching this show or not?

        Params:
            user: username to lookup
            malID: malID to match against
            times: number of retry attempts when contacting Jikan.moe

        Returns: True if the show was found in the user's list, False if not
        """
        LoggingUtils.debug("{}Now finding{} if \"{}\" is in {}'s list".format(
            CYAN, ENDC, malID, user))
        anime_list = self.akari_list(user, times)
        for show in anime_list:
            if str(show['mal_id']) == str(malID):
                LoggingUtils.debug("\"{}\" was found in {}'s list".format(
                    malID, user))
                return True

        LoggingUtils.debug("\"{}\" was not found in {}'s list".format(
            malID, user))
        return False
Example #30
    def is_user_watching_names(self, user, show_name, times=5):
        """
        Is a user watching this show or not?

        Params:
            user: username to lookup
            show_name: show name to match against
            times: number of retry attempts when contacting Jikan.moe

        Returns: True if the show was found in the list, False if not
        """
        LoggingUtils.debug("{}Now finding{} if \"{}\" is in {}'s list".format(
            CYAN, ENDC, show_name, user))
        anime_list = self.akari_list(user, times)
        for show in anime_list:
            if show['title'] == show_name:
                LoggingUtils.debug("\"{}\" was found in {}'s list".format(
                    show_name, user))
                return True

        LoggingUtils.debug("\"{}\" was not found in {}'s list".format(
            show_name, user))
        return False