Example #1
def _clean_episode_name(unclean_name): 
    """
    Clean the episode name for new usage.
    Parameter unclean_name should only be the file name, no paths.
    """
    info = anitopy.parse(unclean_name)

    new_name = info['anime_title']

    if 'anime_season' in info:
        Ayumi.debug('Found anime_season "{}"'.format(info['anime_season']))
        new_name = new_name + " S" + str(info['anime_season'])

    if 'episode_number' in info:
        Ayumi.debug('Found episode_number "{}"'.format(info['episode_number']))
        new_name = new_name + " - " + str(info['episode_number'])

    if 'video_resolution' in info:
        Ayumi.debug('Found video_resolution "{}"'.format(info['video_resolution']))
        new_name = new_name + " [{}]".format(info['video_resolution'])

    if 'other' in info and 'uncensored' in info['other'].lower():
        Ayumi.debug('Detected this episode is uncensored, adding "(Uncensored)" to the title.')
        new_name += " (Uncensored)"

    _, ext = os.path.splitext(unclean_name)
    new_name += ext

    Ayumi.debug('Complete new file name: {}'.format(new_name))
    return new_name
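For context, a minimal standalone sketch of the anitopy parsing this helper leans on; the filename is made up and the exact keys anitopy returns depend on what it can detect, so treat the values as illustrative.

import anitopy

info = anitopy.parse("[SubGroup] My Show - 05 [1080p].mkv")  # hypothetical filename
print(info.get('anime_title'))       # e.g. "My Show"
print(info.get('episode_number'))    # e.g. "05"
print(info.get('video_resolution'))  # e.g. "1080p"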
Example #2
def _check_exists(config: str, source: str, job: Job) -> bool:
    """
    Checks if the job exists under the source.
    Note: does not validate the exact path, just that the job exists somewhere in:
    source/(...probably job.show)/job.episode
    """
    try:
        response = _run([
            "rclone", "--config={}".format(config), "lsjson", "-R",
            "{}/{}/".format(source, job.show)
        ])
        episode_list = loads(response.stdout.decode('utf-8'))
        for episode in episode_list:
            Ayumi.debug("Checking {} against episode {}".format(
                job.episode, episode['Name']))
            if episode['Name'] == job.episode:
                Ayumi.info("Found episode {} in {}".format(
                    job.episode, source))
                return True
        Ayumi.info("Didn't find episode {} in {}".format(job.episode, source))
        return False
    except:
        # Typically hit if the source doesn't exist.
        Ayumi.warning(
            "Error occured while checking source {} - does it exist?".format(
                source))
        return False
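The _run helper is not shown here; a hedged standalone sketch of the same rclone call using subprocess directly (the config path and remote are placeholders):

import subprocess
from json import loads

config = "/path/to/rclone.conf"          # placeholder
remote_dir = "my-remote:Anime/My Show/"  # placeholder

# `rclone lsjson -R` prints a JSON array of entries, each carrying a "Name" field.
result = subprocess.run(
    ["rclone", "--config={}".format(config), "lsjson", "-R", remote_dir],
    capture_output=True, check=True)
for entry in loads(result.stdout.decode('utf-8')):
    print(entry['Name'])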
Example #3
def find_closest_title(title):
    """
    Finds the closest title from Anilist for Airing and Not_Yet_Released shows
    """
    now = datetime.datetime.now()
    date_next_month = int((now + datetime.timedelta(weeks=4)).strftime("%Y%m%d"))
    date_last_month = int((now - datetime.timedelta(weeks=4)).strftime("%Y%m%d"))
    shows = list()
    heap = list()

    shows.extend(_get_shows("RELEASING"))
    shows.extend(_get_shows("NOT_YET_RELEASED", start_date=date_next_month))
    shows.extend(_get_shows("FINISHED", end_date=date_last_month))

    for show in shows:
        ratio = _similarity(title, show)
        Ayumi.debug('Matched "{}" against "{}" for a ratio of {}'.format(title, show, ratio))
        heapq.heappush(heap, (ratio, show))

    top_5 = heapq.nlargest(5, heap)
    Ayumi.info("Displaying (up to) top 5 matches of {} results:".format(len(heap)), color=Ayumi.LBLUE)
    for top in top_5:
        Ayumi.info("{}: {}".format(top[1], top[0]), color=Ayumi.LBLUE)

    if top_5:
        Ayumi.info('Returning top match: {}'.format(top_5[0][1]), color=Ayumi.LGREEN)
        return top_5[0][1]
    else:
        Ayumi.warning("No shows were fetched by Naomi, returning None", color=Ayumi.LYELLOW)
        return None
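The _similarity and _get_shows helpers are not shown; a rough self-contained sketch of the same top-N idea, using difflib as a stand-in scorer (the real scorer may normalise titles differently):

import heapq
from difflib import SequenceMatcher

def similarity(a, b):
    # Stand-in for _similarity; ratios will differ from the real helper.
    return SequenceMatcher(None, a.lower(), b.lower()).ratio()

candidates = ["Show Alpha", "Show Alpha Season 2", "Unrelated Title"]  # made-up titles
heap = [(similarity("show alpha", show), show) for show in candidates]
for ratio, show in heapq.nlargest(5, heap):
    print(ratio, show)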
Example #4
def _get_kitsu_id(title):
    """Gets Kitsu's ID for a show." Returns -1 if not found."""
    try:
        kitsu_id = _kitsu_basic_search(title)['data'][0]['id']
        return int(kitsu_id)
    except:
        Ayumi.debug("Error occured when fetching Kitsu ID, returning -1.")
        return -1
Example #5
def _parse_shows_map(show_title) -> Tuple[str, str]:
    if "->" in show_title:
        original_title, override_title = show_title.split(" -> ")
        Ayumi.debug(
            "Found '->' in title, mapping (unstripped) {} to {}".format(original_title, override_title))
        return (_strip_title(original_title), override_title)
    else:
        return (_strip_title(show_title), show_title)
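Assuming the helper above and the _strip_title function it calls are in scope, usage might look like this; the titles are made up:

print(_parse_shows_map("Broadcast Title -> Preferred Title"))
# -> (stripped "Broadcast Title", "Preferred Title")
print(_parse_shows_map("Plain Title"))
# -> (stripped "Plain Title", "Plain Title")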
Example #6
def _load_history() -> List[str]:
    try:
        with open("data.json", "r") as data:
            history = json.load(data)
        Ayumi.debug("Loaded history: {}".format(history))
    except:
        history = []
        Ayumi.debug("No history loaded - using empty list")
    return history
Example #7
def download(job: Job, sources: List[str], tempfolder: str, config: str,
             flags: str) -> str:
    """
    Download the provided episode from sources.
    Returns the path of the downloaded file, or None if no source had it.
    job: Job to do!
    sources: list of rclone sources (EncoderConf.downloading_sources)
    tempfolder: Path of the temporary folder
    config: Path to the rclone config file
    flags: rclone flags
    """
    for source in sources:
        Ayumi.debug("Checking for existence from source: {}".format(source))
        if _check_exists(config, source, job):
            Ayumi.info(
                "Now downloading episode from source: {}".format(source))

            src_file = "{}/{}/{}".format(_clean(source), job.show, job.episode)
            Ayumi.debug("Sourcing from rclone path: {}".format(src_file))
            dest_file = "{}/{}".format(_clean(tempfolder), "temp")
            Ayumi.debug("Downloading to destination: {}".format(dest_file))

            command = [
                "rclone", "--config={}".format(config), "copyto", src_file,
                dest_file
            ]
            command.extend(flags.split())

            Ayumi.debug("Now running command: {}".format(" ".join(command)))
            Ayumi.info("Now starting download.", color=Ayumi.LCYAN)

            try:
                _run(command)
            except ShikyouResponseException:
                Ayumi.error(
                    "Rclone command returned a bad return code, contact the developer.",
                    color=Ayumi.LRED)
                raise ShikyouResponseException()
            except ShikyouTimeoutException:
                Ayumi.error("Rclone command timed out, are your sources okay?",
                            color=Ayumi.LRED)
                raise ShikyouTimeoutException()

            Ayumi.info("Completed downloading files.", color=Ayumi.LGREEN)
            return dest_file
        else:
            Ayumi.debug(
                "Requested episode doesn't exist under source {}".format(
                    source))

    Ayumi.warning("No download sources contained the file.")
    return None
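A hedged sketch of how this might be called, assuming the project's metsuke.Job takes (show, episode, filesize, sub) positionally as in Example #25 and that the download helper above is importable; the remotes, config path, and flags are placeholders.

job = metsuke.Job("My Show", "My Show - 05 [1080p].mkv", 123456789, "SOFTSUB")
temp_ep = download(job,
                   sources=["remote-a:Premiered", "remote-b:Premiered"],
                   tempfolder="/tmp/shikyou",
                   config="/path/to/rclone.conf",
                   flags="--transfers=4")
if temp_ep is None:
    print("No download source contained the episode.")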
Example #8
def _clean_title(title):
    """
    Removes potentially problematic characters.
    """
    try:
        clean_str = title.replace('"', '')
        Ayumi.debug("Cleaned {} to {}.".format(title, clean_str))
        return clean_str
    except:
        Ayumi.debug(
            "Cleaner wasn't provided a valid title ({}), returning None.".
            format(title))
        return None
Example #9
def _fetch_retry(user, listurl, listname, times=5):
    """
    Jikan.moe is susceptible to randomly failing. This method allows us to try multiple times before really "failing"

    Params: See fetch_list()

    Returns: See fetch_list() if successful, or raises an Exception() otherwise
    """
    for i in range(times):
        try:
            Ayumi.debug("Attempt #{} to contact Jikan.moe for {}".format(
                i + 1, listname))
            anime = _fetch_list(user, listurl, listname)
            Ayumi.debug("Attempt #{} succeeded".format(i + 1))
            return anime
        except:
            # Sleep 5 seconds, and then try again
            Ayumi.debug(
                "Attempt #{} failed, sleeping 5 seconds and trying again...".
                format(i + 1))
            sleep(5)

    # If this point is reached, then there have been too many errors. Raise an exception
    Ayumi.debug("Akari was unable to contact Jikan.moe")
    raise Exception()
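A generic, self-contained version of the retry-and-sleep pattern used above, with the fetch callable and timings left as parameters:

from time import sleep

def fetch_with_retry(fetch, times=5, wait=5):
    # Call `fetch` up to `times` times, sleeping `wait` seconds between failures.
    for attempt in range(1, times + 1):
        try:
            return fetch()
        except Exception:
            print("Attempt #{} failed, retrying in {}s...".format(attempt, wait))
            sleep(wait)
    raise Exception("All {} attempts failed".format(times))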
Example #10
def _query_request(query, search, status) -> dict:
    """
    Makes requests to Anilist, returns response in JSON.

    Query: One of the Queries objects.
    Search: Name of show to search for.
    Status: Status of the show to search for.
    """
    try:
        Ayumi.debug(
            "Making request to {}, searching for {} under status {}".format(
                ANILIST_API_URL, search, status))
        ani_res = requests.post(ANILIST_API_URL,
                                json={
                                    'query': query,
                                    'variables': {
                                        'search': search,
                                        'status': status
                                    }
                                },
                                timeout=10)

        if ani_res.status_code != 200:
            Ayumi.warning(
                "Anilist returned unaccepted HTTP code {} upon request.".
                format(ani_res.status_code),
                color=Ayumi.LRED)
            raise Exception()

        # Get request response as JSON object.
        try:
            ani_json = ani_res.json()
            return ani_json['data']
        except:
            Ayumi.warning("Anilist returned a non-JSON response.",
                          color=Ayumi.LRED)
            raise Exception()

    except requests.exceptions.Timeout:
        Ayumi.warning("Request to Anilist timed out.", color=Ayumi.LRED)
        raise Exception()
    except requests.exceptions.ConnectionError:
        Ayumi.warning("Unable to contact Anilist, maybe it's down?",
                      color=Ayumi.LRED)
        raise Exception()
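The Queries objects are not shown; a hedged standalone sketch of posting a minimal GraphQL query to Anilist with requests (the query text is illustrative, not the one used above):

import requests

ANILIST_API_URL = "https://graphql.anilist.co"
QUERY = '''
query ($search: String, $status: MediaStatus) {
    Media(search: $search, status: $status, type: ANIME) {
        id
        title { romaji english native }
    }
}
'''

ani_res = requests.post(ANILIST_API_URL,
                        json={'query': QUERY,
                              'variables': {'search': "My Show", 'status': "RELEASING"}},
                        timeout=10)
print(ani_res.json()['data'])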
Example #11
def is_user_watching_id(user, malID, times=5):
    """
    Is a user watching this show or not?

    Params:
        user: username to lookup
        malID: malID to match against
    """
    Ayumi.debug("Now finding if \"{}\" is in {}'s list".format(malID, user))
    anime_list = akari_list(user, times)
    for show in anime_list:
        if str(show['mal_id']) == str(malID):
            Ayumi.info("\"{}\" was found in {}'s list".format(malID, user),
                       color=Ayumi.LGREEN)
            return True

    Ayumi.info("\"{}\" was not found in {}'s list".format(malID, user),
               color=Ayumi.LYELLOW)
    return False
Example #12
def is_user_watching_id(user, show_id):
    """
    Determines whether or not an Anilist user is watching a show
    Checks by show ID

    Params:
        user: username to look up
        show_id: ID of the show to look up

    Returns: a boolean - True if watching, False if not
    """

    try:
        show_id = int(show_id)  # Get the int equivalent value of the ID
    except:
        # Why would you not pass an integer in?
        Ayumi.critical(
            "Kishi ID search requires an input that can be converted to an int. Returning FALSE"
        )
        return False

    try:

        watching, paused, ptw = _kishi_list(user)

        for show in watching:
            if show_id == show['id']:
                Ayumi.debug("Found show ID {} in {}".format(
                    show_id, "watching"))
                return True

        for show in paused:
            if show_id == show['id']:
                Ayumi.debug("Found show ID {} in {}".format(show_id, "paused"))
                return True

        for show in ptw:
            if show_id == show['id']:
                Ayumi.debug("Found show ID {} in {}".format(
                    show_id, "planning"))
                return True

        Ayumi.debug("Didn't find a match for {}".format(show_id))
        return False

    except:
        # If any errors are encountered, return True (default assumption)
        Ayumi.warning(
            "An error was encountered while contacting Anilist. Defaulting to TRUE"
        )
        return True
Example #13
def _generate_new_filename(dl_file):
    info = anitopy.parse(dl_file)
    new_dl_file = info['anime_title']
    if 'anime_season' in info:
        Ayumi.debug('Found anime_season "{}"'.format(info['anime_season']))
        new_dl_file = new_dl_file + " S" + str(info['anime_season'])
    if 'episode_number' in info:
        Ayumi.debug('Found episode_number "{}"'.format(info['episode_number']))
        new_dl_file = new_dl_file + " - " + str(info['episode_number'])
    if 'video_resolution' in info:
        Ayumi.debug('Found video_resolution "{}"'.format(
            info['video_resolution']))
        new_dl_file = new_dl_file + " [{}]".format(info['video_resolution'])
    if 'other' in info and 'uncensored' in info['other'].lower():
        Ayumi.debug(
            'Detected this episode is uncensored, adding "(Uncensored)" to the title.')
        new_dl_file += " (Uncensored)"
    _, ext = os.path.splitext(dl_file)
    new_dl_file += ext
    Ayumi.debug('returning new_dl_file: {}'.format(new_dl_file))
    return new_dl_file
Example #14
def _get_main_studio_info(studios):
    """
    Goes through the studio edges and returns the main (studio name, siteurl)

    Params:
        studios - The studios body from the Anilist GraphQL json response

    Returns: A tuple (studio name: str, site url: str), or None if not found
    """
    try:
        edges = studios['edges']
        for edge in edges:
            Ayumi.debug("Checking edge {}".format(edge['node']['name']))
            if edge['isMain']:
                Ayumi.debug("Found main studio edge, returning tuple")
                node = edge['node']
                return (node['name'], node['siteUrl'])
        # If a main studio isn't found, return None
        Ayumi.debug("Didn't find any main studio edge, returning None")
        return (None, None)
    except Exception as e:
        Ayumi.warning(
            "Didn't find any main studio edge due to error, returning None")
        Ayumi.warning(e)
        return (None, None)
Example #15
def is_user_watching_names(user, show_name, times=5):
    """
    Is a user watching this show or not?

    Params:
        user: username to lookup
        show_name: show name to match against

    Returns True if the show was found in the list, False if not
    """
    Ayumi.debug("Now finding if \"{}\" is in {}'s list".format(
        show_name, user))
    anime_list = akari_list(user, times)
    for show in anime_list:
        if show['title'] == show_name:
            Ayumi.info("\"{}\" was found in {}'s list".format(show_name, user),
                       color=Ayumi.LGREEN)
            return True

    Ayumi.info("\"{}\" was not found in {}'s list".format(show_name, user),
               color=Ayumi.LYELLOW)
    return False
Example #16
def upload(job: Job, destinations: List[str], upload_file: str, config: str,
           flags: str) -> None:
    """
    Upload the completed new hardsub file into the rclone destinations.
    Raises on rclone failure; returns None.
    job: Job to do! This is the job of the HARDSUB file
    destinations: list of rclone destinations (e.g., EncoderConf.uploading_destinations)
    upload_file: Path to the file to be uploaded
    config: Path to the rclone config file
    flags: rclone flags
    This method will upload the file and include its show name:
    e.g., 'temp.mp4' --> destination/show/episode.mp4
    """

    for dest in destinations:

        rclone_dest = "{}/{}/{}".format(_clean(dest), job.show, job.episode)
        command = [
            "rclone", "--config={}".format(config), "copyto", upload_file,
            rclone_dest
        ]
        command.extend(flags.split())

        Ayumi.debug("Now running command: {}".format(" ".join(command)))
        Ayumi.info("Now uploading file to {}".format(dest))

        try:
            _run(command)
        except ShikyouResponseException:
            Ayumi.error(
                "Rclone command returned a bad return code, contact the developer.",
                color=Ayumi.LRED)
            raise ShikyouResponseException()
        except ShikyouTimeoutException:
            Ayumi.error("Rclone command timed out, are your sources okay?",
                        color=Ayumi.LRED)
            raise ShikyouTimeoutException()

    Ayumi.info("Completed uploading files.", color=Ayumi.LGREEN)
Example #17
def _kitsu_basic_search(title):
    """Quick Kitsu implementation"""
    title_lower = title.lower()
    request_url = requests.utils.requote_uri(KITSU_API_URL + title_lower)
    Ayumi.debug("Created Kitsu request URL: {}".format(request_url))

    try:
        kitsu_res = requests.get(request_url, timeout=10)

        try:
            kitsu_json = kitsu_res.json()
            return kitsu_json
        except:
            Ayumi.warning("Kitsu did not return a valid JSON response.",
                          color=Ayumi.RED)
            raise Exception()
    except requests.exceptions.Timeout:
        Ayumi.warning("Kitsu request timed out.", color=Ayumi.LRED)
        raise Exception()
    except requests.exceptions.ConnectionError:
        Ayumi.warning("Unable to contact Kitsu, maybe it's down?",
                      color=Ayumi.LRED)
        raise Exception()
Example #18
def is_user_watching_names(user, show_name):
    """
    Determines whether or not an Anilist user is watching a show
    Checks by show name

    Params:
        user: username to look up
        show_name: name of the show to look up. this should already be the anilist name.

    Returns: a boolean - True if watching, False if not
    """
    try:
        watching, paused, ptw = _kishi_list(user)

        for show in watching:
            for title in show['title'].values():
                if _check_equality(title, show_name):
                    Ayumi.debug("Matched {} to {} in {}".format(
                        title, show_name, "watching"))
                    return True

        for show in paused:
            for title in show['title'].values():
                if _check_equality(title, show_name):
                    Ayumi.debug("Matched {} to {} in {}".format(
                        title, show_name, "paused"))
                    return True

        for show in ptw:
            for title in show['title'].values():
                if _check_equality(title, show_name):
                    Ayumi.debug("Matched {} to {} in {}".format(
                        title, show_name, "planning"))
                    return True

        Ayumi.debug("Didn't find a match for {}".format(show_name))
        return False

    except:
        # If any errors are encountered, return True (default assumption)
        Ayumi.warning(
            "An error was encountered while contacting Anilist. Defaulting to TRUE"
        )
        return True
Example #19
def _load_shows_map() -> Dict[str, str]:
    shows_map = settings.get(
        'ACQUISITION_DOWNLOAD_BITTORRENT_SHOWS_MAP', [])
    Ayumi.debug("Fetched shows map from config: {}".format(shows_map))

    show_name_map = dict()
    Ayumi.debug("Creating a stripped title to override title mapping...")

    for show in shows_map:
        stripped_title, override_title = _parse_shows_map(show)
        show_name_map[stripped_title] = override_title
        Ayumi.debug("Mapped {} to key {}.".format(
            override_title, stripped_title))
    return show_name_map
Example #20
def rss(last_guid=None):

    try:
        with rabbitpy.Connection(
                'amqp://{username}:{password}@{host}:{port}/{vhost}'.format(
                    username=settings.get_fresh('RABBITMQ_USERNAME'),
                    password=settings.get_fresh('RABBITMQ_PASSWORD'),
                    host=settings.get_fresh('RABBITMQ_HOST'),
                    port=settings.get_fresh('RABBITMQ_PORT'),
                    vhost=settings.get_fresh('RABBITMQ_VHOST'))) as conn:
            with conn.channel() as channel:

                Ayumi.set_rabbitpy_channel(channel)
                channel.enable_publisher_confirms()

                while True:

                    Ayumi.info("Now starting feed fetch.", color=Ayumi.LCYAN)

                    feed = feedparser.parse(
                        settings.get('ACQUISITION_RSS_FEED_URL', None))
                    accepted_shows = _load_accepted_shows()
                    Ayumi.debug(
                        "Loaded accepted shows map: {}".format(accepted_shows))
                    history = _load_history()
                    new_history = list()

                    for entry in feed.entries:

                        # Fetch data first
                        title, link, guid = entry.title, entry.link, entry.guid
                        Ayumi.debug(
                            'Encountered RSS item with title "{}", and guid "{}"'
                            .format(title, guid))

                        # If feed item with last GUID encountered, do not process any further
                        if guid == last_guid:
                            Ayumi.debug(
                                "Encountered RSS item with last_guid {} matching argument, breaking and writing history."
                                .format(last_guid),
                                color=Ayumi.YELLOW)
                            break

                        # Check the title data
                        # Use the parsed title to match user provided titles.
                        parsed_title = anitopy.parse(title)['anime_title']
                        if _strip_title(parsed_title) not in accepted_shows:
                            Ayumi.info(
                                'Feed item with title "{}" (show title: "{}") is not in accepted shows, skipping.'
                                .format(title, parsed_title))
                        else:
                            if guid in history:
                                # This item has been previously processed, skip it.
                                Ayumi.info(
                                    'Feed item with title "{}" (show title: "{}") has already been processed, skipping.'
                                    .format(title, parsed_title),
                                    color=Ayumi.GREEN)
                            else:
                                # A new feeditem! Let us process it.
                                Ayumi.info(
                                    'Feed item with title "{}" (show title: "{}") is in accepted shows, processing.'
                                    .format(title, parsed_title),
                                    color=Ayumi.YELLOW)
                                message = rabbitpy.Message(
                                    channel,
                                    json.dumps({
                                        "title":
                                        title,
                                        "link":
                                        link,
                                        "guid":
                                        guid,
                                        "show_title":
                                        accepted_shows[_strip_title(
                                            parsed_title)]
                                    }))
                                acquisition_rss_exchange_name = settings.get(
                                    'ACQUISITION_RSS_EXCHANGE')
                                while not message.publish(
                                        acquisition_rss_exchange_name,
                                        mandatory=True):
                                    Ayumi.warning(
                                        'Failed to publish feed item with title "{}" to exchange "{}", retrying in 60s...'
                                        .format(title,
                                                acquisition_rss_exchange_name),
                                        color=Ayumi.RED)
                                    sleep(60)
                                Ayumi.info(
                                    'Published feed item with title "{}" to exchange "{}".'
                                    .format(
                                        title,
                                        acquisition_rss_exchange_name,
                                    ),
                                    color=Ayumi.LGREEN)

                            # Keep all processed items in the new history - entries drop off automatically once they expire out of the RSS feed
                            Ayumi.debug(
                                'Appending item "{}" with title "{}" (show title: "{}") to new_history for write.'
                                .format(guid, title, parsed_title),
                                color=Ayumi.YELLOW)
                            new_history.append(guid)

                    _write_history(new_history)

                    # Sleep till the next iteration
                    sleep_duration = settings.get(
                        'ACQUISITION_RSS_SLEEP_INTERVAL',
                        _DEFAULT_SLEEP_INTERVAL)
                    Ayumi.info(
                        "Now sleeping {} seconds.".format(sleep_duration),
                        color=Ayumi.LCYAN)
                    sleep(sleep_duration)

    except rabbitpy.exceptions.AMQPConnectionForced:
        Ayumi.rabbitpy_channel = None
        Ayumi.critical(
            "Operator manually closed RabbitMQ connection, shutting down.",
            color=Ayumi.RED)
        # Use return for now because in some cases, calling exit() may invoke the retry() header.
        return
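A minimal feedparser loop showing the fields the worker above reads; the feed URL is a placeholder, and whether an entry exposes a guid depends on the feed, so the normalised `id` field is used here:

import feedparser

feed = feedparser.parse("https://example.com/rss.xml")  # placeholder URL
for entry in feed.entries:
    print(entry.title, entry.link, entry.get('id'))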
Example #21
def _write_history(new_history) -> None:
    with open('data.json', 'w') as data:
        json.dump(new_history, data, indent=4)
    Ayumi.debug(
        "Wrote new_history to data.json. Contents: {}".format(new_history),
        color=Ayumi.YELLOW)
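Together with _load_history in Example #6, this is a plain JSON round trip; a self-contained equivalent with made-up GUIDs:

import json

new_history = ["guid-1", "guid-2"]  # hypothetical GUIDs

with open("data.json", "w") as data:
    json.dump(new_history, data, indent=4)

with open("data.json", "r") as data:
    print(json.load(data))  # ["guid-1", "guid-2"]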
Example #22
def _page_search(search, status):
    """
    Searches for a show using the page query.
    Params:
        search - show to search for
        status - status to filter under.

    Returns: Show data if it's found, or None.
    """
    try:
        info = _query_request(PAGE_QUERY, search, status)['Page']['media']
    except:
        Ayumi.debug("No data provided for {} in {}, returning None.".format(
            search, status))
        return None

    for show in info:
        # Check if any of the titles match
        Ayumi.debug("Checking for matches in media titles...")
        for title in show['title'].values():
            if _check_equality(search, title):
                Ayumi.debug("Matched {} to {}, returning.".format(
                    search, title))
                return show
            else:
                Ayumi.debug("Did not match {} to {}.".format(search, title))

        # Check if any of the synonyms match
        Ayumi.debug("Checking for matches in media synonyms...")
        for title in show['synonyms']:
            if _check_equality(search, title):
                Ayumi.debug("Matched {} to {}, returning.".format(
                    search, title))
                return show
            else:
                Ayumi.debug("Did not match {} to {}.".format(search, title))

    # No matches, return None
    Ayumi.debug("Didn't find any matches for {} in {}, returning None.".format(
        search, status))
    return None
Example #23
def bittorrent():
    try:
        with rabbitpy.Connection('amqp://{username}:{password}@{host}:{port}/{vhost}'.format(
            username=settings.get('RABBITMQ_USERNAME'),
            password=settings.get('RABBITMQ_PASSWORD'),
            host=settings.get('RABBITMQ_HOST'),
            port=settings.get('RABBITMQ_PORT'),
            vhost=settings.get('RABBITMQ_VHOST')
        )) as conn:
            with conn.channel() as channel:

                Ayumi.set_rabbitpy_channel(channel)
                channel.enable_publisher_confirms()

                queue_name = settings.get('ACQUISITION_BITTORRENT_QUEUE')
                Ayumi.debug("Connecting to queue: {}".format(queue_name))
                queue = rabbitpy.Queue(channel, queue_name)
                queue.declare(passive=True)

                Ayumi.info('Now listening for messages on queue: {}...'.format(
                    queue_name), color=Ayumi.LYELLOW)

                for message in queue.consume(prefetch=1):

                    Ayumi.info(
                        "Received new message, starting...", color=Ayumi.CYAN)

                    feeditem_preprocess = _load_amqp_message_body(message)
                    Ayumi.debug('Loaded message raw: "{}"'.format(
                        feeditem_preprocess))
                    if not feeditem_preprocess or not metsuke.validate_feeditem(feeditem_preprocess):
                        Ayumi.error('Invalid message received, rejecting. Output: "{}"'.format(
                            feeditem_preprocess), color=Ayumi.RED)
                        message.reject()
                        continue

                    # Load initial data
                    feeditem: metsuke.FeedItem = metsuke.generate_feeditem(
                        feeditem_preprocess)
                    shows_map = _load_shows_map()
                    overload_title = feeditem.show_title
                    Ayumi.info(
                        'Setting overload title: "{}"'.format(overload_title))
                    # If there is a central override, use it instead.
                    stripped_parsed_title = _strip_title(anitopy.parse(feeditem.title)['anime_title'])
                    if stripped_parsed_title in shows_map:
                        central_overload_title = shows_map[stripped_parsed_title]
                        Ayumi.info('Overwriting overload title with central overload title: "{}"'.format(
                            central_overload_title))
                        overload_title = central_overload_title

                    with tempfile.TemporaryDirectory() as temp_dir:

                        Ayumi.debug(
                            'Created temporary directory under path: "{}"'.format(temp_dir))

                        # Download the episode
                        try:
                            res = subprocess.run(
                                [
                                    "aria2c",
                                    "--seed-time=0",
                                    "--rpc-save-upload-metadata=false",
                                    "--bt-save-metadata=false",
                                    "--dir={}".format(temp_dir),
                                    feeditem.link
                                ]
                            )
                            if res.returncode != 0:
                                Ayumi.warning(
                                    "Aria2 did not return a 0 exit code, assuming download errored and nacking.", color=Ayumi.RED)
                                message.nack()
                                continue
                        except subprocess.TimeoutExpired:
                            Ayumi.warning(
                                "Download via aria2c timed out - nacking.", color=Ayumi.RED)
                            message.nack()
                            continue

                        # Rename it
                        potential_files = [f for f in os.listdir(
                            temp_dir) if f.endswith(".mkv")]
                        Ayumi.debug(
                            "Loaded potential files: {}".format(potential_files))
                        if len(potential_files) != 1:
                            Ayumi.warning(
                                "Found more than one .mkv file, rejecting this job.", color=Ayumi.RED)
                            message.reject()
                            continue
                        dl_file = potential_files[0]
                        Ayumi.info('Found file: "{}"'.format(dl_file))
                        dl_file_path = os.path.abspath(
                            '{}/{}'.format(_clean_title(temp_dir), potential_files[0]))
                        Ayumi.debug(
                            'dl_file_path: "{}"'.format(dl_file_path))

                        # Remove unneeded files
                        # TODO: THIS IS A HOTFIX, CHANGE LOGIC IN B2
                        bad_files = [f for f in os.listdir(
                            temp_dir) if not f.endswith(".mkv")]
                        Ayumi.debug("Found bad files: {}".format(bad_files))
                        for bf in bad_files:
                            try:
                                Ayumi.debug("Removing bad file: {}".format(bf))
                                os.remove(
                                    '{}/{}'.format(_clean_title(temp_dir), bf))
                            except:
                                Ayumi.debug("Removing bad tree: {}".format(bf))
                                shutil.rmtree(
                                    '{}/{}'.format(_clean_title(temp_dir), bf))

                        # Move the file to proper layout with updated name
                        dl_file_new_name = _generate_new_filename(dl_file)
                        Ayumi.info('Generated new episode name: "{}"'.format(
                            dl_file_new_name))
                        dl_file_new_dir = "{}/{}".format(
                            temp_dir, overload_title)
                        Ayumi.debug(
                            'dl_file_new_dir: "{}"'.format(dl_file_new_dir))
                        dl_file_new_path = "{}/{}".format(
                            dl_file_new_dir, dl_file_new_name)
                        Ayumi.debug(
                            'dl_file_new_path: "{}"'.format(
                                dl_file_new_path))
                        Ayumi.debug('Moving "{}" to "{}"'.format(
                            dl_file_path, dl_file_new_path))
                        os.mkdir(dl_file_new_dir)
                        shutil.move(dl_file_path, dl_file_new_path)

                        # Upload the file to rclone destination
                        with tempfile.NamedTemporaryFile(suffix=".conf", mode="w+b") as rconf:
                            rconf.write(str.encode(
                                settings.get("RCLONE_CONFIG_FILE")))
                            rconf.flush()
                            Ayumi.debug(
                                'Created temporary rclone file under path: "{}"'.format(rconf.name))
                            rclone_dest = _clean_title(settings.get(
                                "ACQUISITION_BITTORRENT_RCLONE_DEST"))
                            rclone_flags = settings.get("RCLONE_FLAGS", "")
                            command = [
                                "rclone", "--config={}".format(rconf.name), "copy", temp_dir, rclone_dest]
                            command.extend(rclone_flags.split())
                            Ayumi.debug(
                                'Rclone command to be run: "{}"'.format(command))

                            try:
                                Ayumi.info(
                                    'Now uploading new blob to: "{}"'.format(rclone_dest))
                                rclone_res = subprocess.run(
                                    command, timeout=3600)
                                if rclone_res.returncode != 0:
                                    Ayumi.warning('Rclone returned non-zero code of {}, nacking.'.format(
                                        rclone_res.returncode), color=Ayumi.LRED)
                                    message.nack()
                                    continue
                            except subprocess.TimeoutExpired:
                                Ayumi.warning(
                                    'Rclone upload timed out, nacking.', color=Ayumi.LRED)
                                message.nack()
                                continue

                        # Fetch information on the file to create a job
                        new_message = rabbitpy.Message(channel, dumps(
                            {
                                "show": overload_title,
                                "episode": dl_file_new_name,
                                "filesize": int(os.path.getsize(dl_file_new_path)),
                                "sub": "SOFTSUB"
                            }
                        ))
                        acquisition_bittorrent_exchange_name = settings.get(
                            'ACQUISITION_BITTORRENT_EXCHANGE')
                        Ayumi.info('Sending to exchange: "{}"'.format(
                            acquisition_bittorrent_exchange_name), color=Ayumi.CYAN)
                        while not new_message.publish(acquisition_bittorrent_exchange_name, mandatory=True):
                            Ayumi.warning(
                                "Failed to publish feed item, trying again in 60 seconds")
                            sleep(60)
                        Ayumi.info("Published feed item with title: " +
                                   overload_title, color=Ayumi.LGREEN)

                    message.ack()

    except rabbitpy.exceptions.AMQPConnectionForced:
        Ayumi.warning(
            "Operator manually closed RabbitMQ connection, shutting down.", color=Ayumi.LYELLOW)
        return
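A hedged standalone sketch of the aria2c download step and the .mkv selection that follows it; the torrent link is a placeholder and the flag set is trimmed to the essentials:

import os
import subprocess
import tempfile

torrent_link = "https://example.com/episode.torrent"  # placeholder

with tempfile.TemporaryDirectory() as temp_dir:
    subprocess.run(
        ["aria2c", "--seed-time=0", "--bt-save-metadata=false",
         "--dir={}".format(temp_dir), torrent_link],
        check=True)
    mkv_files = [f for f in os.listdir(temp_dir) if f.endswith(".mkv")]
    print(mkv_files)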
Example #24
def _get_shows(status: str, **kwargs):
    """
    Fetch all show titles with the given status (airing, recently finished, or soon to air).
    The caller runs string similarity against these titles to find the most likely match.
    """

    shows = list()
    page = 1
    has_next_page = True

    while has_next_page:

        Ayumi.info("Now requesting shows from page {} of status {}...".format(page, status), color=Ayumi.CYAN)

        variables = {
            'page': page,
            'status': status
        }

        k_year = kwargs.get('year')
        if k_year:
            Ayumi.debug("Set seasonYear argument to {}".format(k_year), color=Ayumi.CYAN)
            variables['seasonYear'] = int(k_year)

        k_sd = kwargs.get('start_date')
        if k_sd:
            Ayumi.debug("Set startDate_lesser argument to {}".format(k_sd), color=Ayumi.CYAN)
            variables['startDate'] = int(k_sd)
        
        k_ed = kwargs.get('end_date')
        if k_ed:
            Ayumi.debug("Set endDate_greater argument to {}".format(k_ed), color=Ayumi.CYAN)
            variables['endDate'] = int(k_ed)

        try:
            ani_res = requests.post(
                ANILIST_API_URL,
                json={
                    'query': QUERY,
                    'variables': variables
                })
        except requests.exceptions.ConnectionError:
            Ayumi.warning("Unable to contact Anilist, the site or your connection may be down.", color=Ayumi.LRED)
            return shows

        if ani_res.status_code != 200:
            Ayumi.warning("Anilist returned unaccepted HTTP code {} upon request.".format(ani_res.status_code), color=Ayumi.LRED)
            return shows

        try:
            ani_json = ani_res.json()['data']['Page']
            has_next_page = ani_json['pageInfo']['hasNextPage']
            page += 1

            for media in ani_json['media']:
                for media_title in media['title'].values():
                    if media_title:
                        Ayumi.debug("Adding show {} to show list".format(media_title))
                        shows.append(media_title)

        except:
            Ayumi.warning("Unable to parse JSON response from Anilist.", color=Ayumi.LRED)
            return shows

    return shows
Example #25
def consume():
    try:
        with rabbitpy.Connection('amqp://{username}:{password}@{host}:{port}/{vhost}'.format(
            username=settings.get('RABBITMQ_USERNAME'),
            password=settings.get('RABBITMQ_PASSWORD'),
            host=settings.get('RABBITMQ_HOST'),
            port=settings.get('RABBITMQ_PORT'),
            vhost=settings.get('RABBITMQ_VHOST')
        )) as conn:
            with conn.channel() as channel:

                Ayumi.set_rabbitpy_channel(channel)

                queue = rabbitpy.Queue(channel, settings.get('DISTRIBUTORS_RCLONE_QUEUE'))
                queue.declare(passive=True)

                Ayumi.info("Now listening for messages from AMQP provider.", color=Ayumi.YELLOW)

                for message in queue.consume(prefetch=1):
                    try:
                        job = json.loads(message.body.decode('utf-8'))
                    except json.JSONDecodeError:
                        Ayumi.warning("Received a job that is invalid json, not processing.", color=Ayumi.LRED)
                        message.reject()
                        continue

                    Ayumi.info("Received a new job: {}".format(json.dumps(job)), color=Ayumi.CYAN)
                    if metsuke.validate(job):
                        Ayumi.debug("Loaded show: {}".format(job['show']))
                        Ayumi.debug("Loaded episode: {}".format(job['episode']))
                        Ayumi.debug("Loaded filesize: {}".format(job['filesize']))
                        Ayumi.debug("Loaded sub type: {}".format(job['sub']))

                        metsuke_job = metsuke.Job(
                            job['show'], job['episode'], job['filesize'], job['sub'])

                        with tempfile.NamedTemporaryFile(suffix=".conf", mode="w+b") as rconf, tempfile.TemporaryDirectory() as tempdir:
                            Ayumi.debug("Opening context managed rclone config file under path: {}.".format(rconf.name))
                            Ayumi.debug("Opening context managed rclone temporary directory under path: {}".format(tempdir))
                            rconf.write(str.encode(settings.get("RCLONE_CONFIG_FILE")))
                            rconf.flush()  # YOU MUST FLUSH THE FILE SO RCLONE CAN READ IT!
                            Ayumi.debug("Configurations written to temporary file. Size is {} bytes.".format(rconf.tell()))

                            dl_sources = None
                            up_dests = None
                            if job['sub'].lower() == "softsub":
                                dl_sources = settings.get("DISTRIBUTORS_RCLONE_SOFTSUB_DOWNLOAD")
                                up_dests = settings.get("DISTRIBUTORS_RCLONE_SOFTSUB_UPLOAD")
                            elif job['sub'].lower() == "hardsub":
                                dl_sources = settings.get("DISTRIBUTORS_RCLONE_HARDSUB_DOWNLOAD")
                                up_dests = settings.get("DISTRIBUTORS_RCLONE_HARDSUB_UPLOAD")

                            Ayumi.debug("Fresh fetched download sources as: {}".format(" ".join(dl_sources)))
                            Ayumi.debug("Fresh fetched upload sources as: {}".format(" ".join(up_dests)))

                            try:
                                temp_ep = shikyou.download(metsuke_job, dl_sources, tempdir, rconf.name, settings.get("RCLONE_FLAGS", ""))
                                if temp_ep:
                                    shikyou.upload(metsuke_job, up_dests, temp_ep, rconf.name, settings.get("RCLONE_FLAGS", ""))
                                else:
                                    Ayumi.warning("Unable to find requested job in any sources, nacking...", color=Ayumi.RED)
                                    message.nack()
                                    continue
                            except shikyou.ShikyouResponseException:
                                Ayumi.critical("Rclone threw an unexpected response code, rejecting.", color=Ayumi.RED)
                                message.reject()
                                continue
                            except shikyou.ShikyouTimeoutException:
                                Ayumi.warning("Rclone timed out whilhe executing, nacking.", color=Ayumi.RED)
                                message.nack()
                                continue

                        Ayumi.debug("Closed context managed rclone config file.")
                        Ayumi.debug("Closed context managed temporary directory.")

                    else:
                        Ayumi.warning("Received a job that Metsuke was not able to validate.", color=Ayumi.LRED)
                        Ayumi.warning(json.dumps(job), color=Ayumi.LRED)

                    Ayumi.info("Completed processing this message for {}".format(job['episode']), color=Ayumi.LGREEN)
                    message.ack()

    except rabbitpy.exceptions.AMQPConnectionForced:

        Ayumi.rabbitpy_channel = None
        Ayumi.critical("Operator manually closed RabbitMQ connection, shutting down.", color=Ayumi.RED)
        # Use return for now because in some cases, calling exit() may invoke the retry() header.
        return
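The flush() call above matters because rclone reads the config from disk while the NamedTemporaryFile is still open; a minimal self-contained illustration (the config body is a made-up local remote):

import subprocess
import tempfile

config_text = "[my-remote]\ntype = local\n"  # illustrative config body

with tempfile.NamedTemporaryFile(suffix=".conf", mode="w+b") as rconf:
    rconf.write(config_text.encode())
    rconf.flush()  # without this, rclone may see an empty file
    subprocess.run(["rclone", "--config={}".format(rconf.name), "listremotes"])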
Example #26
def _fetch_list(user, listurl, listname):
    """
    Helper method to get a user's anime list.
    Doesn't handle errors, just raises them. Needs to be called by a wrapper.

    Params:
        User: Username of MAL user
        listurl: URL to use to fetch info
        listname: Watching, Plan to Watch (for logging)
    """

    anime = list()

    try:
        jikan_res = requests.get(listurl.format(user, ""))

        # Verify status code
        if jikan_res.status_code != 200:
            Ayumi.debug(
                "jikan mode returned a bad status code on attempt to get {}'s {} list."
                .format(user, listname))
            raise Exception()

        # Make sure an anime actually exists under this name
        try:
            jikan_res_json = jikan_res.json()
        except:
            jikan_res_json = dict()  # Clean handling

        if 'anime' not in jikan_res_json:
            Ayumi.debug(
                "Jikan.moe did not return an anime list on attempt to get {}'s {} list."
                .format(user, listname))
            raise Exception()

        # Add all anime in the first page.
        for entry in jikan_res_json['anime']:
            anime.append(entry)
            Ayumi.debug("Added {} show {} to processing list.".format(
                listname, entry['title']))

        page = 2
        while (len(jikan_res_json['anime']) == 300):
            jikan_res = requests.get(listurl.format(user, str(page)))
            if jikan_res.status_code != 200:
                Ayumi.debug(
                    "Jikan returned a bad status code when attempting to get {}'s page {} {} list."
                    .format(user, str(page), listname))
                raise Exception()

            try:
                jikan_res_json = jikan_res.json()
            except:
                jikan_res_json = dict()

            if 'anime' not in jikan_res_json:
                Ayumi.debug(
                    "Jikan.moe did not return an anime list on attempt to get {}'s page {} {} list."
                    .format(user, str(page), listname))
                raise Exception()

            for entry in jikan_res_json['anime']:
                anime.append(entry)
                Ayumi.debug(
                    "Added {} page {} show {} to processing list.".format(
                        listname, str(page), entry['title']))

            page += 1

        Ayumi.debug("Returning list with {} entires.".format(str(len(anime))))
        return anime

    except:
        # raise some kind of exception - somehow Jikan couldn't be reached
        Ayumi.debug(
            "Akari encountered an unknown error when attempting to fetch {}'s {} list"
            .format(user, listname))
        raise Exception()
Example #27
def _check_equality(name1, name2) -> bool:
    Ayumi.debug("Regex comparing: {} | {}".format(name1, name2))
    try:
        # Anilist sometimes has weird leading/trailing spaces
        re_str1 = re.sub(r'[^\w]', '', name1)
        Ayumi.debug("Name 1 without puncutation: {}".format(re_str1))
        re_str2 = re.sub(r'[^\w]', '', name2)
        Ayumi.debug("Name 2 without puncutation: {}".format(re_str2))

        if re_str1 == re_str2:
            Ayumi.debug("Both show names are matching, returning True.")
            return True
        else:
            Ayumi.debug("Show names do not match, returning False.")
            return False
    except:
        Ayumi.debug(
            "Error occured while matching show names, returning False.")
        return False
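The comparison boils down to stripping non-word characters before an exact (case-sensitive) match; a standalone version:

import re

def normalised_equal(name1, name2):
    # Drop everything except word characters so spacing and punctuation differences are ignored.
    return re.sub(r'[^\w]', '', name1) == re.sub(r'[^\w]', '', name2)

print(normalised_equal("Show Title!", "Show  Title"))  # True
print(normalised_equal("Show Title", "Other Show"))    # False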
Example #28
def consume():
    try:
        with rabbitpy.Connection(
                'amqp://{username}:{password}@{host}:{port}/{vhost}'.format(
                    username=settings.get('RABBITMQ_USERNAME'),
                    password=settings.get('RABBITMQ_PASSWORD'),
                    host=settings.get('RABBITMQ_HOST'),
                    port=settings.get('RABBITMQ_PORT'),
                    vhost=settings.get('RABBITMQ_VHOST'))) as conn:
            with conn.channel() as channel:

                Ayumi.set_rabbitpy_channel(channel)

                queue = rabbitpy.Queue(
                    channel,
                    settings.get('NOTIFICATIONS_DISCORD_WEBHOOK_QUEUE',
                                 'nonexistent'))
                queue.declare(passive=True)

                Ayumi.info("Now listening for messages from AMQP provider.",
                           color=Ayumi.YELLOW)

                for message in queue.consume(prefetch=1):

                    try:
                        job = json.loads(message.body.decode('utf-8'))
                    except json.JSONDecodeError:
                        Ayumi.warning(
                            "Received a job that is invalid json, not processing.",
                            color=Ayumi.LRED)
                        message.reject()
                        continue

                    Ayumi.info("Received a new job: {}".format(
                        json.dumps(job)),
                               color=Ayumi.CYAN)
                    if metsuke.validate(job):
                        Ayumi.debug("Loaded show: {}".format(job['show']))
                        Ayumi.debug("Loaded episode: {}".format(
                            job['episode']))
                        Ayumi.debug("Loaded filesize: {}".format(
                            job['filesize']))
                        Ayumi.debug("Loaded sub type: {}".format(job['sub']))

                        embed = _generate_embed(metsuke.generate(job),
                                                hisha.search(job['show']))
                        Ayumi.info(
                            "Beginning sending embeds to webhook endpoints.",
                            color=Ayumi.CYAN)
                        for endpoint in settings.get(
                                'NOTIFICATIONS_DISCORD_WEBHOOK_ENDPOINTS'
                        ).to_list():
                            try:
                                requests.post(endpoint, json=embed, timeout=5)
                                Ayumi.debug(
                                    "Sent embed to {}".format(endpoint))
                            except:
                                Ayumi.warning(
                                    "Failed to send embed to {}".format(
                                        endpoint),
                                    color=Ayumi.RED)

                    else:
                        Ayumi.warning(
                            "Received a job that Metsuke was not able to validate.",
                            color=Ayumi.LRED)
                        Ayumi.warning(json.dumps(job), color=Ayumi.LRED)

                    Ayumi.info(
                        "Completed processing this message for {}".format(
                            job['episode']),
                        color=Ayumi.LGREEN)
                    message.ack()

    except rabbitpy.exceptions.AMQPConnectionForced:
        Ayumi.rabbitpy_channel = None
        Ayumi.critical(
            "Operator manually closed RabbitMQ connection, shutting down.",
            color=Ayumi.RED)

        # Use return for now because in some cases, calling exit() may invoke the retry() header.
        return
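The _generate_embed helper is not shown; a hedged sketch of posting a basic embed payload to a Discord webhook (the URL and embed contents are placeholders, not the project's actual format):

import requests

webhook_url = "https://discord.com/api/webhooks/<id>/<token>"  # placeholder
embed = {
    "embeds": [{
        "title": "My Show - 05",
        "description": "A new episode has been processed.",
    }]
}
try:
    requests.post(webhook_url, json=embed, timeout=5)
except requests.exceptions.RequestException:
    print("Failed to send embed")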
Example #29
@retry(delay=60, tries=3, exceptions=(shikyou.ShikyouResponseException, shikyou.ShikyouTimeoutException), backoff=1.5, max_delay=3600, logger=Ayumi.get_logger())
def on_new_file(src_path, channel):

    show_name = None
    episode_name = None

    new_file = src_path.replace(os.path.commonpath([settings.get('KOTEN_WATCH_PATH', DEFAULT_WATCH_PATH), src_path]) + "/", "")

    if m := util._show_manually_specified(new_file):
        Ayumi.info("Detected show name and episode name in event, using Mode 1.")
        show_name = m.group(1)
        episode_name = util._clean_episode_name(m.group(2))
        Ayumi.info("New show name: {}".format(show_name), color=Ayumi.LYELLOW)
        Ayumi.info("New episode name: {}".format(episode_name), color=Ayumi.LYELLOW)
    else:
        Ayumi.debug("Non-conformant episode provided, using Naomi to find show name.")
        episode_name = util._clean_episode_name(pathlib.PurePath(src_path).name)
        show_name = naomi.find_closest_title(anitopy.parse(new_file)['anime_title'])
        Ayumi.info("New show name: {}".format(show_name), color=Ayumi.LYELLOW)
        Ayumi.info("New episode name: {}".format(episode_name), color=Ayumi.LYELLOW)

        # There is an event where Anilist is down, and Naomi could return None.
        # In this case, use the assumed-parsed show as the title
        if not show_name:
            show_name = anitopy.parse(new_file)['anime_title']

    job = {
        "show": show_name,
        "episode": episode_name,
        "filesize": os.path.getsize(src_path),
        "sub": "softsub"