Example No. 1
def find_closest_title(title):
    """
    Finds the closest matching title from Anilist among airing, soon-to-air, and recently finished shows
    """
    now = datetime.datetime.now()
    date_next_month = int((now + datetime.timedelta(weeks=4)).strftime("%Y%m%d"))
    date_last_month = int((now - datetime.timedelta(weeks=4)).strftime("%Y%m%d"))
    shows = list()
    heap = list()

    shows.extend(_get_shows("RELEASING"))
    shows.extend(_get_shows("NOT_YET_RELEASED", start_date=date_next_month))
    shows.extend(_get_shows("FINISHED", end_date=date_last_month))

    for show in shows:
        ratio = _similarity(title, show)
        Ayumi.debug('Matched "{}" against "{}" for a ratio of {}'.format(title, show, ratio))
        heapq.heappush(heap, (ratio, show))

    top_5 = heapq.nlargest(5, heap)
    Ayumi.info("Displaying (up to) top 5 matches of {} results:".format(len(heap)), color=Ayumi.LBLUE)
    for top in top_5:
        Ayumi.info("{}: {}".format(top[1], top[0]), color=Ayumi.LBLUE)

    if top_5:
        Ayumi.info('Returning top match: {}'.format(top_5[0][1]), color=Ayumi.LGREEN)
        return top_5[0][1]
    else:
        Ayumi.warning("No shows were fetched by Naomi, returning None", color=Ayumi.LYELLOW)
        return None
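
A minimal usage sketch; the sample title below is an illustrative placeholder, not from the project:

# Hypothetical caller; find_closest_title returns the best Anilist title string or None.
best_match = find_closest_title("Some Currently Airing Show")
if best_match:
    print("Closest Anilist title:", best_match)
else:
    print("No candidate titles were fetched.")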
Example No. 2
def _check_exists(config: str, source: str, job: Job) -> bool:
    """
    Checks whether the job exists under the source.
    Note: does not validate the exact path, just that the job exists somewhere in:
    source/(...probably job.show)/job.episode
    """
    try:
        response = _run([
            "rclone", "--config={}".format(config), "lsjson", "-R",
            "{}/{}/".format(source, job.show)
        ])
        episode_list = loads(response.stdout.decode('utf-8'))
        for episode in episode_list:
            Ayumi.debug("Checking {} against episode {}".format(
                job.episode, episode['Name']))
            if episode['Name'] == job.episode:
                Ayumi.info("Found episode {} in {}".format(
                    job.episode, source))
                return True
        Ayumi.info("Didn't find episode {} in {}".format(job.episode, source))
        return False
    except:
        # Typically hit if the source doesn't exist.
        Ayumi.warning(
            "Error occured while checking source {} - does it exist?".format(
                source))
        return False
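
A hedged usage sketch, assuming Job is the metsuke.Job object seen in a later example (show, episode, filesize, sub); the remote name and config path are placeholders:

# All values below are illustrative placeholders.
job = metsuke.Job("Some Show", "Some Show - 01.mkv", 1473741824, "softsub")
if _check_exists("/path/to/rclone.conf", "remote:Premiered", job):
    Ayumi.info("Episode already exists under this source, skipping download.")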
Example No. 3
def _get_main_studio_info(studios):
    """
    Goes through the studio edges and returns the main (studio name, siteurl)

    Params:
        studios - The studios body from the Anilist GraphQL json response

    Returns: A tuple (studio name: str, site url: str), or (None, None) if not found
    """
    try:
        edges = studios['edges']
        for edge in edges:
            Ayumi.debug("Checking edge {}".format(edge['node']['name']))
            if edge['isMain']:
                Ayumi.debug("Found main studio edge, returning tuple")
                node = edge['node']
                return (node['name'], node['siteUrl'])
        # If a main studio isn't found, return (None, None)
        Ayumi.debug("Didn't find any main studio edge, returning (None, None)")
        return (None, None)
    except Exception as e:
        Ayumi.warning(
            "Didn't find any main studio edge due to error, returning None")
        Ayumi.warning(e)
        return (None, None)
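
The expected shape of the studios argument can be reconstructed from the field accesses above; the values here are illustrative:

studios = {
    "edges": [
        {"isMain": False, "node": {"name": "Studio B", "siteUrl": "https://anilist.co/studio/2"}},
        {"isMain": True, "node": {"name": "Studio A", "siteUrl": "https://anilist.co/studio/1"}},
    ]
}
name, url = _get_main_studio_info(studios)  # -> ("Studio A", "https://anilist.co/studio/1")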
Example No. 4
def observe():
    try:
        with rabbitpy.Connection('amqp://{username}:{password}@{host}:{port}/{vhost}'.format(
            username=settings.get('RABBITMQ_USERNAME'),
            password=settings.get('RABBITMQ_PASSWORD'),
            host=settings.get('RABBITMQ_HOST'),
            port=settings.get('RABBITMQ_PORT'),
            vhost=settings.get('RABBITMQ_VHOST')
        )) as conn:
            with conn.channel() as channel:

                channel.enable_publisher_confirms()
                Ayumi.set_rabbitpy_channel(channel)
                Ayumi.info("Now connected AMQP provider.", color=Ayumi.GREEN)

                event_handler = IzumiHandler(channel)
                observer = Observer()
                observer.schedule(event_handler, settings.get('KOTEN_WATCH_PATH', DEFAULT_WATCH_PATH), recursive=True)
                observer.start()
                Ayumi.info("Now observing: {}".format(settings.get('KOTEN_WATCH_PATH', DEFAULT_WATCH_PATH)), color=Ayumi.BLUE)

                try:
                    while True:
                        time.sleep(settings.get('KOTEN_SLEEP_INTERVAL', DEFAULT_SLEEP_INTERVAL))
                except:
                    Ayumi.warning("Detected SIGKILL or error, returning...", color=Ayumi.YELLOW)
                    observer.stop()
                observer.join()

    except rabbitpy.exceptions.AMQPConnectionForced:

        Ayumi.rabbitpy_channel = None
        Ayumi.critical("Operator manually closed RabbitMQ connection, shutting down.", color=Ayumi.RED)
        # Use return for now because in some cases, calling exit() may invoke the retry() header.
        return
Example No. 5
def _add_list_entries(list_name, list_json):
    """
    Helper method to add all list entries into a medialist (watching/paused/ptw)
    Params:
        list_name: the name of the list as it appears on Anilist (e.g. "Watching")
        list_json: anilist's raw api response (json format) {'data':'MediaListCollection'}

    Returns: A list with populated Anilist Media entries (a list of dicts)
    """
    try:

        entries = list()

        media_lists = list_json['data']['MediaListCollection']['lists']
        for media_list in media_lists:
            if list_name.lower() == media_list['name'].lower():
                for entry in media_list['entries']:
                    entries.append(entry['media'])

        return entries

    except:
        Ayumi.warning("Kishi was unable to process list entries for {}".format(
            list_name))
        raise Exception()
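
The list_json shape this helper expects, reconstructed from the key accesses above (values are illustrative):

list_json = {
    "data": {
        "MediaListCollection": {
            "lists": [
                {"name": "Watching", "entries": [{"media": {"id": 101, "title": {"romaji": "Some Show"}}}]},
                {"name": "Paused", "entries": []},
            ]
        }
    }
}
watching_media = _add_list_entries("Watching", list_json)  # -> [{"id": 101, "title": {...}}]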
Example No. 6
def _load_amqp_message_body(message) -> Dict:
    try:
        feeditem = loads(message.body.decode('utf-8'))
        return feeditem
    except JSONDecodeError:
        Ayumi.warning(
            "Received an AMQP message that is invalid JSON, will not process...", color=Ayumi.RED)
        return None
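
This excerpt uses bare loads and JSONDecodeError, so it presumably sits below module-level imports along the lines of:

from json import JSONDecodeError, loads
from typing import Dict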
Example No. 7
def download(job: Job, sources: List[str], tempfolder: str, config: str,
             flags: str) -> str:
    """
    Download the provided episode from the given sources.
    Returns the path of the downloaded file, or None if no source had it.
    job: Job to do!
    sources: list of rclone sources (EncoderConf.downloading_sources)
    tempfolder: path of the temporary folder
    config: path to the rclone config file
    flags: rclone flags
    """
    for source in sources:
        Ayumi.debug("Checking for existence from source: {}".format(source))
        if _check_exists(config, source, job):
            Ayumi.info(
                "Now downloading episode from source: {}".format(source))

            src_file = "{}/{}/{}".format(_clean(source), job.show, job.episode)
            Ayumi.debug("Sourcing from rclone path: {}".format(src_file))
            dest_file = "{}/{}".format(_clean(tempfolder), "temp")
            Ayumi.debug("Downloading to destination: {}".format(dest_file))

            command = [
                "rclone", "--config={}".format(config), "copyto", src_file,
                dest_file
            ]
            command.extend(flags.split())

            Ayumi.debug("Now running command: {}".format(" ".join(command)))
            Ayumi.info("Now starting download.", color=Ayumi.LCYAN)

            try:
                _run(command)
            except ShikyouResponseException:
                Ayumi.error(
                    "Rclone command returned a bad return code, contact the developer.",
                    color=Ayumi.LRED)
                raise ShikyouResponseException()
            except ShikyouTimeoutException:
                Ayumi.error("Rclone command timed out, are your sources okay?",
                            color=Ayumi.LRED)
                raise ShikyouTimeoutException()

            Ayumi.info("Completed downloading files.", color=Ayumi.LGREEN)
            return dest_file
        else:
            Ayumi.debug(
                "Requested episode doesn't exist under source {}".format(
                    source))

    Ayumi.warning("No download sources contained the file.")
    return None
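
The _clean helper is not shown in these excerpts; a plausible minimal implementation (an assumption) would just drop a trailing slash so the path joins above don't produce double slashes:

def _clean(path: str) -> str:
    # Assumed behavior: strip a single trailing slash, e.g. "remote:dir/" -> "remote:dir".
    return path[:-1] if path.endswith("/") else path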
Example No. 8
def is_user_watching_id(user, show_id):
    """
    Determines whether or not an Anilist user is watching a show
    Checks by show ID

    Params:
        user: username to look up
        show_id: ID of the show to look up

    Returns: a boolean - True if watching, False if not
    """

    try:
        show_id = int(show_id)  # Get the int equivalent value of the ID
    except:
        # Why would you not pass an integer in?
        Ayumi.critical(
            "Kishi ID search requires an input that can be converted to an int. Returning FALSE"
        )
        return False

    try:

        watching, paused, ptw = _kishi_list(user)

        for show in watching:
            if show_id == show['id']:
                Ayumi.debug("Found show ID {} in {}".format(
                    show_id, "watching"))
                return True

        for show in paused:
            if show_id == show['id']:
                Ayumi.debug("Found show ID {} in {}".format(show_id, "paused"))
                return True

        for show in ptw:
            if show_id == show['id']:
                Ayumi.debug("Found show ID {} in {}".format(
                    show_id, "planning"))
                return True

        Ayumi.debug("Didn't find a match for {}".format(show_id))
        return False

    except:
        # If any errors are encountered, return True (default assumption)
        Ayumi.warning(
            "An error was encountered while contacting Anilist. Defaulting to TRUE"
        )
        return True
Example No. 9
def _run(command: Iterable[str]) -> subprocess.CompletedProcess:
    # Subprocess passes signals properly to the child process, so we can just run it as is.

    try:
        response = subprocess.run(command, capture_output=True, timeout=3600)
    except subprocess.TimeoutExpired:
        Ayumi.warning("Command expired by timeout, killing...")
        raise ShikyouTimeoutException()

    if response.returncode != 0:
        Ayumi.warning(
            "Command returned a non-zero return code of {}, raising exception.".format(
                response.returncode))
        raise ShikyouResponseException()

    return response
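
The Shikyou exceptions raised here are defined elsewhere in the project; a minimal sketch of what they presumably look like:

class ShikyouResponseException(Exception):
    """Assumed: raised when the child process exits with a non-zero return code."""


class ShikyouTimeoutException(Exception):
    """Assumed: raised when the child process exceeds its timeout."""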
Example No. 10
def is_user_watching_names(user, show_name):
    """
    Determines whether or not an Anilist user is watching a show
    Checks by show name

    Params:
        user: username to look up
        show_name: name of the show to look up. This should already be the Anilist name.

    Returns: a boolean - True if watching, False if not
    """
    try:
        watching, paused, ptw = _kishi_list(user)

        for show in watching:
            for title in show['title'].values():
                if _check_equality(title, show_name):
                    Ayumi.debug("Matched {} to {} in {}".format(
                        title, show_name, "watching"))
                    return True

        for show in paused:
            for title in show['title'].values():
                if _check_equality(title, show_name):
                    Ayumi.debug("Matched {} to {} in {}".format(
                        title, show_name, "paused"))
                    return True

        for show in ptw:
            for title in show['title'].values():
                if _check_equality(title, show_name):
                    Ayumi.debug("Matched {} to {} in {}".format(
                        title, show_name, "planning"))
                    return True

        Ayumi.debug("Didn't find a match for {}".format(show_name))
        return False

    except:
        # If any errors are encountered, return True (default assumption)
        Ayumi.warning(
            "An error was encountered while contacting Anilist. Defaulting to TRUE"
        )
        return True
Example No. 11
def _query_request(query, search, status) -> dict:
    """
    Makes requests to Anilist and returns the response as JSON.

    Query: One of the Queries objects.
    Search: Name of show to search for.
    Status: Status of the show to search for.
    """
    try:
        Ayumi.debug(
            "Making request to {}, searching for {} under status {}".format(
                ANILIST_API_URL, search, status))
        ani_res = requests.post(ANILIST_API_URL,
                                json={
                                    'query': query,
                                    'variables': {
                                        'search': search,
                                        'status': status
                                    }
                                },
                                timeout=10)

        if ani_res.status_code != 200:
            Ayumi.warning(
                "Anilist returned unaccepted HTTP code {} upon request.".
                format(ani_res.status_code),
                color=Ayumi.LRED)
            raise Exception()

        # Get request response as JSON object.
        try:
            ani_json = ani_res.json()
            return ani_json['data']
        except:
            Ayumi.warning("Anilist returned a non-JSON response.",
                          color=Ayumi.LRED)
            raise Exception()

    except requests.exceptions.Timeout:
        Ayumi.warning("Request to Anilist timed out.", color=Ayumi.LRED)
        raise Exception()
    except requests.exceptions.ConnectionError:
        Ayumi.warning("Unable to contact Anilist, maybe it's down?",
                      color=Ayumi.LRED)
        raise Exception()
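
The query argument is one of the project's prebuilt GraphQL documents, which are not included in these excerpts. A plausible shape (an assumption, not the project's actual query) consistent with the search and status variables used above:

# Illustrative Anilist query only; the project's real Queries objects are not shown here.
EXAMPLE_QUERY = '''
query ($search: String, $status: MediaStatus) {
    Media(search: $search, status: $status, type: ANIME) {
        id
        title { romaji english native }
        studios { edges { isMain node { name siteUrl } } }
    }
}
'''
show_data = _query_request(EXAMPLE_QUERY, "Some Show", "RELEASING")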
Example No. 12
def _kitsu_basic_search(title):
    """Quick Kitsu implementation"""
    title_lower = title.lower()
    request_url = requests.utils.requote_uri(KITSU_API_URL + title_lower)
    Ayumi.debug("Created Kitsu request URL: {}".format(request_url))

    try:
        kitsu_res = requests.get(request_url, timeout=10)

        try:
            kitsu_json = kitsu_res.json()
            return kitsu_json
        except:
            Ayumi.warning("Kitsu did not return a valid JSON response.",
                          color=Ayumi.RED)
            raise Exception()
    except requests.exceptions.Timeout:
        Ayumi.warning("Kitsu request timed out.", color=Ayumi.LRED)
        raise Exception()
    except requests.exceptions.ConnectionError:
        Ayumi.warning("Unable to contact Kitsu, maybe it's down?",
                      color=Ayumi.LRED)
        raise Exception()
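
KITSU_API_URL is defined outside this excerpt; since the title is simply URL-quoted and appended, it is presumably Kitsu's text-filter search endpoint. A hedged sketch:

# Assumption: the constant points at Kitsu's anime text-filter endpoint.
KITSU_API_URL = "https://kitsu.io/api/edge/anime?filter[text]="

results = _kitsu_basic_search("some show")
# Kitsu returns a JSON:API-style body, e.g. results["data"][0]["attributes"]["canonicalTitle"].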
Example No. 13
def consume():
    try:
        with rabbitpy.Connection(
                'amqp://{username}:{password}@{host}:{port}/{vhost}'.format(
                    username=settings.get('RABBITMQ_USERNAME'),
                    password=settings.get('RABBITMQ_PASSWORD'),
                    host=settings.get('RABBITMQ_HOST'),
                    port=settings.get('RABBITMQ_PORT'),
                    vhost=settings.get('RABBITMQ_VHOST'))) as conn:
            with conn.channel() as channel:

                Ayumi.set_rabbitpy_channel(channel)

                queue = rabbitpy.Queue(
                    channel,
                    settings.get('NOTIFICATIONS_DISCORD_WEBHOOK_QUEUE',
                                 'nonexistent'))
                queue.declare(passive=True)

                Ayumi.info("Now listening for messages from AMQP provider.",
                           color=Ayumi.YELLOW)

                for message in queue.consume(prefetch=1):

                    try:
                        job = json.loads(message.body.decode('utf-8'))
                    except json.JSONDecodeError:
                        Ayumi.warning(
                            "Received a job that is invalid json, not processing.",
                            color=Ayumi.LRED)
                        message.reject()
                        continue

                    Ayumi.info("Received a new job: {}".format(
                        json.dumps(job)),
                               color=Ayumi.CYAN)
                    if metsuke.validate(job):
                        Ayumi.debug("Loaded show: {}".format(job['show']))
                        Ayumi.debug("Loaded episode: {}".format(
                            job['episode']))
                        Ayumi.debug("Loaded filesize: {}".format(
                            job['filesize']))
                        Ayumi.debug("Loaded sub type: {}".format(job['sub']))

                        embed = _generate_embed(metsuke.generate(job),
                                                hisha.search(job['show']))
                        Ayumi.info(
                            "Beginning sending embeds to webhook endpoints.",
                            color=Ayumi.CYAN)
                        for endpoint in settings.get(
                                'NOTIFICATIONS_DISCORD_WEBHOOK_ENDPOINTS'
                        ).to_list():
                            try:
                                requests.post(endpoint, json=embed, timeout=5)
                                Ayumi.debug(
                                    "Sent embed to {}".format(endpoint))
                            except:
                                Ayumi.warning(
                                    "Failed to send embed to {}".format(
                                        endpoint),
                                    color=Ayumi.RED)

                    else:
                        Ayumi.warning(
                            "Received a job that Metsuke was not able to validate.",
                            color=Ayumi.LRED)
                        Ayumi.warning(json.dumps(job), color=Ayumi.LRED)

                    Ayumi.info(
                        "Completed processing this message for {}".format(
                            job['episode']),
                        color=Ayumi.LGREEN)
                    message.ack()

    except rabbitpy.exceptions.AMQPConnectionForced:
        Ayumi.rabbitpy_channel = None
        Ayumi.critical(
            "Operator manually closed RabbitMQ connection, shutting down.",
            color=Ayumi.RED)

        # Use return for now because in some cases, calling exit() may invoke the retry() header.
        return
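
_generate_embed is not shown in these excerpts. Since its return value is POSTed directly as the webhook's JSON body, for Discord it would need to look roughly like the payload below (an illustrative sketch, not the project's actual embed):

# Hypothetical embed builder; the "embeds" field follows Discord's webhook payload format.
def _generate_embed(job, show_info) -> dict:
    return {
        "embeds": [{
            "title": job.show,
            "description": "Episode {} is now available.".format(job.episode),
        }]
    }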
Example No. 14
def sig_handler(sig, frame):
    Ayumi.warning("SIG command {} detected, exiting...".format(sig),
                  color=Ayumi.LRED)
    sys.exit()
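
This handler is meant to be registered with the standard signal module, for example:

import signal

# Handle Ctrl-C and polite termination requests with the handler above.
signal.signal(signal.SIGINT, sig_handler)
signal.signal(signal.SIGTERM, sig_handler)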
Example No. 15
def rss(last_guid=None):

    try:
        with rabbitpy.Connection(
                'amqp://{username}:{password}@{host}:{port}/{vhost}'.format(
                    username=settings.get_fresh('RABBITMQ_USERNAME'),
                    password=settings.get_fresh('RABBITMQ_PASSWORD'),
                    host=settings.get_fresh('RABBITMQ_HOST'),
                    port=settings.get_fresh('RABBITMQ_PORT'),
                    vhost=settings.get_fresh('RABBITMQ_VHOST'))) as conn:
            with conn.channel() as channel:

                Ayumi.set_rabbitpy_channel(channel)
                channel.enable_publisher_confirms()

                while True:

                    Ayumi.info("Now starting feed fetch.", color=Ayumi.LCYAN)

                    feed = feedparser.parse(
                        settings.get('ACQUISITION_RSS_FEED_URL', None))
                    accepted_shows = _load_accepted_shows()
                    Ayumi.debug(
                        "Loaded accepted shows map: {}".format(accepted_shows))
                    history = _load_history()
                    new_history = list()

                    for entry in feed.entries:

                        # Fetch data first
                        title, link, guid = entry.title, entry.link, entry.guid
                        Ayumi.debug(
                            'Encountered RSS item with title "{}", and guid "{}"'
                            .format(title, guid))

                        # If feed item with last GUID encountered, do not process any further
                        if guid == last_guid:
                            Ayumi.debug(
                                "Encountered RSS item with last_guid {} matching argument, breaking and writing history."
                                .format(last_guid),
                                color=Ayumi.YELLOW)
                            break

                        # Check the title data
                        # Use the parsed title to match user provided titles.
                        parsed_title = anitopy.parse(title)['anime_title']
                        if _strip_title(parsed_title) not in accepted_shows:
                            Ayumi.info(
                                'Feed item with title "{}" (show title: "{}") is not in accepted shows, skipping.'
                                .format(title, parsed_title))
                        else:
                            if guid in history:
                                # This item has been previously processed, skip it.
                                Ayumi.info(
                                    'Feed item with title "{}" (show title: "{}") has already been processed, skipping.'
                                    .format(title, parsed_title),
                                    color=Ayumi.GREEN)
                            else:
                                # A new feeditem! Let us process it.
                                Ayumi.info(
                                    'Feed item with title "{}" (show title: "{}") is in accepted shows, processing.'
                                    .format(title, parsed_title),
                                    color=Ayumi.YELLOW)
                                message = rabbitpy.Message(
                                    channel,
                                    json.dumps({
                                        "title": title,
                                        "link": link,
                                        "guid": guid,
                                        "show_title": accepted_shows[_strip_title(parsed_title)]
                                    }))
                                acquisition_rss_exchange_name = settings.get(
                                    'ACQUISITION_RSS_EXCHANGE')
                                while not message.publish(
                                        acquisition_rss_exchange_name,
                                        mandatory=True):
                                    Ayumi.warning(
                                        'Failed to publish feed item with title "{}" to exchange "{}", retrying in 60s...'
                                        .format(title,
                                                acquisition_rss_exchange_name),
                                        color=Ayumi.RED)
                                    sleep(60)
                                Ayumi.info(
                                    'Published feed item with title "{}" to exchange "{}".'
                                    .format(
                                        title,
                                        acquisition_rss_exchange_name,
                                    ),
                                    color=Ayumi.LGREEN)

                            # Keep all processed items in the new history - they age out automatically as they expire from the RSS feed
                            Ayumi.debug(
                                'Appending item "{}" with title "{}" (show title: "{}") to new_history for write.'
                                .format(guid, title, parsed_title),
                                color=Ayumi.YELLOW)
                            new_history.append(guid)

                    _write_history(new_history)

                    # Sleep till the next iteration
                    sleep_duration = settings.get(
                        'ACQUISITION_RSS_SLEEP_INTERVAL',
                        _DEFAULT_SLEEP_INTERVAL)
                    Ayumi.info(
                        "Now sleeping {} seconds.".format(sleep_duration),
                        color=Ayumi.LCYAN)
                    sleep(sleep_duration)

    except rabbitpy.exceptions.AMQPConnectionForced:
        Ayumi.rabbitpy_channel = None
        Ayumi.critical(
            "Operator manually closed RabbitMQ connection, shutting down.",
            color=Ayumi.RED)
        # Use return for now because in some cases, calling exit() may invoke the retry() header.
        return
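
_strip_title, _load_accepted_shows, _load_history and _write_history are helpers defined elsewhere. As an assumption, _strip_title probably normalizes titles so feed titles and configured titles compare consistently; a minimal sketch:

import re

def _strip_title(title: str) -> str:
    # Assumed normalization: lowercase and drop anything that isn't a letter or digit.
    return re.sub(r'[^a-z0-9]', '', title.lower())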
Example No. 16
def consume():
    try:
        with rabbitpy.Connection('amqp://{username}:{password}@{host}:{port}/{vhost}'.format(
            username=settings.get('RABBITMQ_USERNAME'),
            password=settings.get('RABBITMQ_PASSWORD'),
            host=settings.get('RABBITMQ_HOST'),
            port=settings.get('RABBITMQ_PORT'),
            vhost=settings.get('RABBITMQ_VHOST')
        )) as conn:
            with conn.channel() as channel:

                Ayumi.set_rabbitpy_channel(channel)

                queue = rabbitpy.Queue(channel, settings.get('DISTRIBUTORS_RCLONE_QUEUE'))
                queue.declare(passive=True)

                Ayumi.info("Now listening for messages from AMQP provider.", color=Ayumi.YELLOW)

                for message in queue.consume(prefetch=1):
                    try:
                        job = json.loads(message.body.decode('utf-8'))
                    except json.JSONDecodeError:
                        Ayumi.warning("Received a job that is invalid json, not processing.", color=Ayumi.LRED)
                        message.reject()
                        continue

                    Ayumi.info("Received a new job: {}".format(json.dumps(job)), color=Ayumi.CYAN)
                    if metsuke.validate(job):
                        Ayumi.debug("Loaded show: {}".format(job['show']))
                        Ayumi.debug("Loaded episode: {}".format(job['episode']))
                        Ayumi.debug("Loaded filesize: {}".format(job['filesize']))
                        Ayumi.debug("Loaded sub type: {}".format(job['sub']))

                        metsuke_job = metsuke.Job(
                            job['show'], job['episode'], job['filesize'], job['sub'])

                        with tempfile.NamedTemporaryFile(suffix=".conf", mode="w+b") as rconf, tempfile.TemporaryDirectory() as tempdir:
                            Ayumi.debug("Opening context managed rclone config file under path: {}.".format(rconf.name))
                            Ayumi.debug("Opening context managed rclone temporary directory under path: {}".format(tempdir))
                            rconf.write(str.encode(settings.get("RCLONE_CONFIG_FILE")))
                            rconf.flush()  # YOU MUST FLUSH THE FILE SO RCLONE CAN READ IT!
                            Ayumi.debug("Configurations written to temporary file. Size is {} bytes.".format(rconf.tell()))

                            dl_sources = None
                            up_dests = None
                            if job['sub'].lower() == "softsub":
                                dl_sources = settings.get("DISTRIBUTORS_RCLONE_SOFTSUB_DOWNLOAD")
                                up_dests = settings.get("DISTRIBUTORS_RCLONE_SOFTSUB_UPLOAD")
                            elif job['sub'].lower() == "hardsub":
                                dl_sources = settings.get("DISTRIBUTORS_RCLONE_HARDSUB_DOWNLOAD")
                                up_dests = settings.get("DISTRIBUTORS_RCLONE_HARDSUB_UPLOAD")

                            Ayumi.debug("Fresh fetched download sources as: {}".format(" ".join(dl_sources)))
                            Ayumi.debug("Fresh fetched upload sources as: {}".format(" ".join(up_dests)))

                            try:
                                temp_ep = shikyou.download(metsuke_job, dl_sources, tempdir, rconf.name, settings.get("RCLONE_FLAGS", ""))
                                if temp_ep:
                                    shikyou.upload(metsuke_job, up_dests, temp_ep, rconf.name, settings.get("RCLONE_FLAGS", ""))
                                else:
                                    Ayumi.warning("Unable to find requested job in any sources, nacking...", color=Ayumi.RED)
                                    message.nack()
                                    continue
                            except shikyou.ShikyouResponseException:
                                Ayumi.critical("Rclone threw an unexpected response code, rejecting.", color=Ayumi.RED)
                                message.reject()
                                continue
                            except shikyou.ShikyouTimeoutException:
                                Ayumi.warning("Rclone timed out whilhe executing, nacking.", color=Ayumi.RED)
                                message.nack()
                                continue

                        Ayumi.debug("Closed context managed rclone config file.")
                        Ayumi.debug("Closed context managed temporary directory.")

                    else:
                        Ayumi.warning("Received a job that Metsuke was not able to validate.", color=Ayumi.LRED)
                        Ayumi.warning(json.dumps(job), color=Ayumi.LRED)

                    Ayumi.info("Completed processing this message for {}".format(job['episode']), color=Ayumi.LGREEN)
                    message.ack()

    except rabbitpy.exceptions.AMQPConnectionForced:

        Ayumi.rabbitpy_channel = None
        Ayumi.critical("Operator manually closed RabbitMQ connection, shutting down.", color=Ayumi.RED)
        # Use return for now because in some cases, calling exit() may invoke the retry() header.
        return
Example No. 17
def bittorrent():
    try:
        with rabbitpy.Connection('amqp://{username}:{password}@{host}:{port}/{vhost}'.format(
            username=settings.get('RABBITMQ_USERNAME'),
            password=settings.get('RABBITMQ_PASSWORD'),
            host=settings.get('RABBITMQ_HOST'),
            port=settings.get('RABBITMQ_PORT'),
            vhost=settings.get('RABBITMQ_VHOST')
        )) as conn:
            with conn.channel() as channel:

                Ayumi.set_rabbitpy_channel(channel)
                channel.enable_publisher_confirms()

                queue_name = settings.get('ACQUISITION_BITTORRENT_QUEUE')
                Ayumi.debug("Connecting to queue: {}".format(queue_name))
                queue = rabbitpy.Queue(channel, queue_name)
                queue.declare(passive=True)

                Ayumi.info('Now listening for messages on queue: {}...'.format(
                    queue_name), color=Ayumi.LYELLOW)

                for message in queue.consume(prefetch=1):

                    Ayumi.info(
                        "Received new message, starting...", color=Ayumi.CYAN)

                    feeditem_preprocess = _load_amqp_message_body(message)
                    Ayumi.debug('Loaded message raw: "{}"'.format(
                        feeditem_preprocess))
                    if not feeditem_preprocess or not metsuke.validate_feeditem(feeditem_preprocess):
                        Ayumi.error('Invalid message received, rejecting. Output: "{}"'.format(
                            feeditem_preprocess), color=Ayumi.RED)
                        message.reject()
                        continue

                    # Load initial data
                    feeditem: metsuke.FeedItem = metsuke.generate_feeditem(
                        feeditem_preprocess)
                    shows_map = _load_shows_map()
                    overload_title = feeditem.show_title
                    Ayumi.info(
                        'Setting overload title: "{}"'.format(overload_title))
                    # If there is a central override, use it instead.
                    stripped_parsed_title = _strip_title(anitopy.parse(feeditem.title)['anime_title'])
                    if stripped_parsed_title in shows_map:
                        central_overload_title = shows_map[stripped_parsed_title]
                        Ayumi.info('Overwriting overload title with central overload title: "{}"'.format(
                            central_overload_title))
                        overload_title = central_overload_title

                    with tempfile.TemporaryDirectory() as temp_dir:

                        Ayumi.debug(
                            'Created temporary directory under path: "{}"'.format(temp_dir))

                        # Download the episode
                        try:
                            res = subprocess.run(
                                [
                                    "aria2c",
                                    "--seed-time=0",
                                    "--rpc-save-upload-metadata=false",
                                    "--bt-save-metadata=false",
                                    "--dir={}".format(temp_dir),
                                    feeditem.link
                                ]
                            )
                            if res.returncode != 0:
                                Ayumi.warning(
                                    "Aria2 did not return a 0 exit code, assuming download errored and nacking.", color=Ayumi.RED)
                                message.nack()
                                continue
                        except subprocess.TimeoutExpired:
                            Ayumi.warning(
                                "Download via aria2c timed out - nacking.", color=Ayumi.RED)
                            message.nack()
                            continue

                        if res.returncode != 0:
                            Ayumi.warning(
                                "Webtorrent did not have a return code of 0, nacking.", color=Ayumi.RED)
                            message.nack()
                            continue

                        # Rename it
                        potential_files = [f for f in os.listdir(
                            temp_dir) if f.endswith(".mkv")]
                        Ayumi.debug(
                            "Loaded potential files: {}".format(potential_files))
                        if len(potential_files) != 1:
                            Ayumi.warning(
                                "Found more than one .mkv file, rejecting this job.", color=Ayumi.RED)
                            message.reject()
                            continue
                        dl_file = potential_files[0]
                        Ayumi.info('Found file: "{}"'.format(dl_file))
                        dl_file_path = os.path.abspath(
                            '{}/{}'.format(_clean_title(temp_dir), potential_files[0]))
                        Ayumi.debug(
                            'dl_file_path: "{}"'.format(dl_file_path))

                        # Remove unneeded files
                        # TODO: THIS IS A HOTFIX, CHANGE LOGIC IN B2
                        bad_files = [f for f in os.listdir(
                            temp_dir) if not f.endswith(".mkv")]
                        Ayumi.debug("Found bad files: {}".format(bad_files))
                        for bf in bad_files:
                            try:
                                Ayumi.debug("Removing bad file: {}".format(bf))
                                os.remove(
                                    '{}/{}'.format(_clean_title(temp_dir), bf))
                            except:
                                Ayumi.debug("Removing bad tree: {}".format(bf))
                                shutil.rmtree(
                                    '{}/{}'.format(_clean_title(temp_dir), bf))

                        # Move the file to proper layout with updated name
                        dl_file_new_name = _generate_new_filename(dl_file)
                        Ayumi.info('Generated new episode name: "{}"'.format(
                            dl_file_new_name))
                        dl_file_new_dir = "{}/{}".format(
                            temp_dir, overload_title)
                        Ayumi.debug(
                            'dl_file_new_dir: "{}"'.format(dl_file_new_dir))
                        dl_file_new_path = "{}/{}".format(
                            dl_file_new_dir, dl_file_new_name)
                        Ayumi.debug(
                            'dl_file_new_path: "{}"'.format(
                                dl_file_new_path))
                        Ayumi.debug('Moving "{}" to "{}"'.format(
                            dl_file_path, dl_file_new_path))
                        os.mkdir(dl_file_new_dir)
                        shutil.move(dl_file_path, dl_file_new_path)

                        # Upload the file to rclone destination
                        with tempfile.NamedTemporaryFile(suffix=".conf", mode="w+b") as rconf:
                            rconf.write(str.encode(
                                settings.get("RCLONE_CONFIG_FILE")))
                            rconf.flush()
                            Ayumi.debug(
                                'Created temporary rclone file under path: "{}"'.format(rconf.name))
                            rclone_dest = _clean_title(settings.get(
                                "ACQUISITION_BITTORRENT_RCLONE_DEST"))
                            rclone_flags = settings.get("RCLONE_FLAGS", "")
                            command = [
                                "rclone", "--config={}".format(rconf.name), "copy", temp_dir, rclone_dest]
                            command.extend(rclone_flags.split())
                            Ayumi.debug(
                                'Rclone command to be run: "{}"'.format(command))

                            try:
                                Ayumi.info(
                                    'Now uploading new blob to: "{}"'.format(rclone_dest))
                                rclone_res = subprocess.run(
                                    command, timeout=3600)
                                if rclone_res.returncode != 0:
                                    Ayumi.warning('Rclone returned non-zero code of {}, nacking.'.format(
                                        rclone_res.returncode), color=Ayumi.LRED)
                                    message.nack()
                                    continue
                            except subprocess.TimeoutExpired:
                                Ayumi.warning(
                                    'Rclone upload timed out, nacking.', color=Ayumi.LRED)
                                message.nack()
                                continue

                        # Fetch information on the file to create a job
                        new_message = rabbitpy.Message(channel, dumps(
                            {
                                "show": overload_title,
                                "episode": dl_file_new_name,
                                "filesize": int(os.path.getsize(dl_file_new_path)),
                                "sub": "SOFTSUB"
                            }
                        ))
                        acquisition_bittorrent_exchange_name = settings.get(
                            'ACQUISITION_BITTORRENT_EXCHANGE')
                        Ayumi.info('Sending to exchange: "{}"'.format(
                            acquisition_bittorrent_exchange_name), color=Ayumi.CYAN)
                        while not new_message.publish(acquisition_bittorrent_exchange_name, mandatory=True):
                            Ayumi.warning(
                                "Failed to publish feed item, trying again in 60 seconds")
                            sleep(60)
                        Ayumi.info("Published feed item with title: " +
                                   overload_title, color=Ayumi.LGREEN)

                    message.ack()

    except rabbitpy.exceptions.AMQPConnectionForced:
        Ayumi.warning(
            "Operator manually closed RabbitMQ connection, shutting down.", color=Ayumi.LYELLOW)
        return
Example No. 18
def _get_shows(status: str, **kwargs):
    """
    Fetch the titles of all Anilist shows with the given status.
    Optional kwargs: year, start_date and end_date (YYYYMMDD ints) narrow the results.
    Returns a list of title strings.
    """

    shows = list()
    page = 1
    has_next_page = True

    while has_next_page:

        Ayumi.info("Now requesting shows from page {} of status {}...".format(page, status), color=Ayumi.CYAN)

        variables = {
            'page': page,
            'status': status
        }

        k_year = kwargs.get('year')
        if k_year:
            Ayumi.debug("Set seasonYear argument to {}".format(k_year), color=Ayumi.CYAN)
            variables['seasonYear'] = int(k_year)

        k_sd = kwargs.get('start_date')
        if k_sd:
            Ayumi.debug("Set startDate_lesser argument to {}".format(k_sd), color=Ayumi.CYAN)
            variables['startDate'] = int(k_sd)
        
        k_ed = kwargs.get('end_date')
        if k_ed:
            Ayumi.debug("Set endDate_greater argument to {}".format(k_ed), color=Ayumi.CYAN)
            variables['endDate'] = int(k_ed)

        try:
            ani_res = requests.post(
                ANILIST_API_URL,
                json={
                    'query': QUERY,
                    'variables': variables
                })
        except requests.exceptions.ConnectionError:
            Ayumi.warning("Unable to contact Anilist, the site or your connection may be down.", color=Ayumi.LRED)
            return shows

        if ani_res.status_code != 200:
            Ayumi.warning("Anilist returned unaccepted HTTP code {} upon request.".format(ani_res.status_code), color=Ayumi.LRED)
            return shows

        try:
            ani_json = ani_res.json()['data']['Page']
            has_next_page = ani_json['pageInfo']['hasNextPage']
            page += 1

            for media in ani_json['media']:
                for media_title in media['title'].values():
                    if media_title:
                        Ayumi.debug("Adding show {} to show list".format(media_title))
                        shows.append(media_title)

        except:
            Ayumi.warning("Unable to parse JSON response from Anilist.", color=Ayumi.LRED)
            return shows

    return shows
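
QUERY is defined outside this excerpt. A plausible shape (an assumption, not the project's actual query) consistent with the variables set above, the startDate_lesser/endDate_greater filters mentioned in the debug messages, and the fields read from the response:

# Illustrative only; the real QUERY used by the project is not shown in these excerpts.
QUERY = '''
query ($page: Int, $status: MediaStatus, $seasonYear: Int, $startDate: FuzzyDateInt, $endDate: FuzzyDateInt) {
    Page(page: $page) {
        pageInfo { hasNextPage }
        media(type: ANIME, status: $status, seasonYear: $seasonYear, startDate_lesser: $startDate, endDate_greater: $endDate) {
            title { romaji english native }
        }
    }
}
'''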
Example No. 19
    # Send the notification to RabbitMQ
    # Because of threading reasons the publishing could fail, but we're not concerned with guaranteeing it.
    try:
        message = rabbitpy.Message(
            channel,
            job,
            properties={
                "content_type": "application/json",
                "delivery_mode": 2
            })

        if message.publish(settings.get('KOTEN_EXCHANGE')):
            Ayumi.info("Job successfully published to RabbitMQ", color=Ayumi.LGREEN)
        else:
            Ayumi.warning("Job unsuccessfully published to RabbitMQ", color=Ayumi.LRED)
            raise Exception()

    except:
        # RabbitPy could have timed out, etc. at this point
        Ayumi.warning("Some kind of error occured when attempting to publish to RabbitMQ, printing json body.", color=Ayumi.LYELLOW)
        Ayumi.warning(str(job))

    if settings.get('KOTEN_CLEANUP'):
        Ayumi.info("Cleanup mode enabled, removing original file.", color=Ayumi.LYELLOW)

        # Two cleanup modes - only the file itself, or possibly also the parent folder.
        if util._show_manually_specified(new_file):
            Ayumi.info("Mode 1 detected, removing file and parent directory if allowed.")
            p = pathlib.Path(src_path)
            p.unlink()