Example #1
    def __init__(self, media_tweet: TweetCache, trigger: str = TRIGGER_SELF):
        self._log = logging.getLogger(__name__)
        self._trigger = trigger
        self.tweet_cache = media_tweet
        self.media = TweetManager.extract_media(media_tweet.tweet) or []
        self._downloads_enabled = config.getboolean('SauceNao', 'download_files', fallback=False)
        self._previews_enabled = config.getboolean('TraceMoe', 'enabled', fallback=False)

        # SauceNao
        self.minsim_mentioned = float(config.get('SauceNao', 'min_similarity_mentioned', fallback=50.0))
        self.minsim_monitored = float(config.get('SauceNao', 'min_similarity_monitored', fallback=65.0))
        self.minsim_searching = float(config.get('SauceNao', 'min_similarity_searching', fallback=70.0))
        self.persistent = config.getboolean('Twitter', 'enable_persistence', fallback=False)
        self.anime_link = config.get('SauceNao', 'source_link', fallback='anidb').lower()
        self.sauce = SauceNao(
                api_key=config.get('SauceNao', 'api_key', fallback=None),
                min_similarity=min(self.minsim_mentioned, self.minsim_monitored, self.minsim_searching),
                priority=[21, 22, 5, 37, 25]
        )

        # Twython
        self.twython = Twython(config.get('Twitter', 'consumer_key'), config.get('Twitter', 'consumer_secret'),
                               config.get('Twitter', 'access_token'), config.get('Twitter', 'access_secret'))

        self._sauce_cache = {}
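The SauceNao client above is configured with the loosest of the three similarity thresholds (min(...)), presumably so that stricter per-trigger filtering can be applied to the results afterwards. A standalone sketch of that idea; the pick_threshold helper and the threshold table are illustrative assumptions, not part of the original class:

# Illustrative only: map each trigger type to its similarity cutoff.
TRIGGER_MENTION = 'mentioned'
TRIGGER_MONITORED = 'monitored'
TRIGGER_SEARCH = 'searching'

THRESHOLDS = {
    TRIGGER_MENTION: 50.0,
    TRIGGER_MONITORED: 65.0,
    TRIGGER_SEARCH: 70.0,
}


def pick_threshold(trigger: str) -> float:
    # Unknown triggers fall back to the strictest cutoff
    return THRESHOLDS.get(trigger, max(THRESHOLDS.values()))


print(min(THRESHOLDS.values()))           # 50.0 -> what the SauceNao client itself is configured with
print(pick_threshold(TRIGGER_MONITORED))  # 65.0 -> applied when filtering results for monitored accounts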
Example #2
    def __init__(self):
        self.enabled = config.getboolean('Pixiv', 'enabled', fallback=False)
        self._refresh_token = config.get('Pixiv', 'refresh_token', fallback=None)
        self._log = logging.getLogger(__name__)

        self._pixiv = AppPixivAPI()
        self._pixiv.set_accept_language(config.get('Pixiv', 'language', fallback='en-US'))

        self._re_twitter = re.compile(r'^https?://(www\.)?twitter\.com/(?P<username>.+)$')

        if self.enabled:
            self._login()
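The _re_twitter pattern captures the account name in a named group. A standalone illustration of how such a match is used (shown with the dots escaped, as in the corrected pattern above):

import re

_re_twitter = re.compile(r'^https?://(www\.)?twitter\.com/(?P<username>.+)$')

match = _re_twitter.match('https://twitter.com/saucenao')
if match:
    print(match.group('username'))  # -> saucenao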
Example #3
    def __init__(self):
        self.log = logging.getLogger(__name__)

        # Tweet Cache Manager
        self.twitter = TweetManager()
        self.twython = Twython(config.get('Twitter', 'consumer_key'),
                               config.get('Twitter', 'consumer_secret'),
                               config.get('Twitter', 'access_token'),
                               config.get('Twitter', 'access_secret'))

        self.anime_link = config.get('SauceNao',
                                     'source_link',
                                     fallback='anidb').lower()

        self.nsfw_previews = config.getboolean('TraceMoe',
                                               'nsfw_previews',
                                               fallback=False)
        self.failed_responses = config.getboolean('SauceNao',
                                                  'respond_to_failed',
                                                  fallback=True)
        self.ignored_indexes = [
            int(i) for i in config.get(
                'SauceNao', 'ignored_indexes', fallback='').split(',') if i.strip()
        ]

        # Pixiv
        self.pixiv = Pixiv()

        # Cache some information about ourselves
        self.my = api.me()
        self.log.info(f"Connected as: {self.my.screen_name}")

        # Image URLs are md5 hashed and cached here to prevent duplicate API queries. The cache is cleared every 24 hours.
        # I'll update this in the future to use a real caching mechanism (database or redis)
        self._cached_results = {}

        # A cached list of IDs for parent posts we've already processed.
        # Used in the check_monitored() method to prevent re-posting sauces when posts are retweeted
        self._posts_processed = []

        # The ID cutoff; we populate this once via an initial query at startup
        try:
            self.mention_id = tweepy.Cursor(api.mentions_timeline,
                                            tweet_mode='extended',
                                            count=1).items(1).next().id
        except StopIteration:
            self.mention_id = 0

        try:
            self.self_id = tweepy.Cursor(api.user_timeline,
                                         tweet_mode='extended',
                                         count=1).items(1).next().id
        except StopIteration:
            self.self_id = 0

        self.monitored_since = {}
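The comments above describe keying cached lookup results by an md5 hash of the image URL and clearing them every 24 hours, although the code itself only initializes plain dicts and lists. A minimal standalone sketch of that caching idea, using a hypothetical ResultCache helper that is not part of the original project:

import hashlib
import time
import typing


class ResultCache:
    """Cache results keyed by the md5 of an image URL, expiring entries after max_age seconds."""

    def __init__(self, max_age: float = 86400.0):
        self._max_age = max_age
        self._entries = {}  # type: typing.Dict[str, typing.Tuple[float, typing.Any]]

    def get(self, image_url: str) -> typing.Optional[typing.Any]:
        key = hashlib.md5(image_url.encode('utf-8')).hexdigest()
        entry = self._entries.get(key)
        if entry is None:
            return None
        stored_at, value = entry
        if time.time() - stored_at > self._max_age:
            # Stale entry: drop it and treat the lookup as a miss
            del self._entries[key]
            return None
        return value

    def put(self, image_url: str, value: typing.Any) -> None:
        key = hashlib.md5(image_url.encode('utf-8')).hexdigest()
        self._entries[key] = (time.time(), value)


cache = ResultCache()
cache.put('https://pbs.twimg.com/media/example.jpg', 'sauce result')
print(cache.get('https://pbs.twimg.com/media/example.jpg'))  # -> sauce result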
Example #4
    async def check_monitored(self) -> None:
        """
        Checks monitored accounts for any new tweets
        Returns:
            None
        """
        monitored_accounts = str(config.get('Twitter', 'monitored_accounts', fallback=''))
        if not monitored_accounts:
            return

        monitored_accounts = [a.strip() for a in monitored_accounts.split(',')]

        for account in monitored_accounts:
            # Have we fetched a tweet for this account yet?
            if account not in self.monitored_since:
                # If not, get the last tweet ID from this account and wait for the next post
                tweet = next(
                    tweepy.Cursor(api.user_timeline,
                                  account,
                                  page=1,
                                  tweet_mode='extended').items())
                self.monitored_since[account] = tweet.id
                self.log.info(
                    f"[{account}] Monitoring tweets after {tweet.id}")
                continue

            # Get all tweets since our last check
            self.log.info(
                f"[{account}] Retrieving tweets since {self.monitored_since[account]}"
            )
            tweets = [
                *tweepy.Cursor(api.user_timeline,
                               account,
                               since_id=self.monitored_since[account],
                               tweet_mode='extended').items()
            ]
            self.log.info(f"[{account}] {len(tweets)} tweets found")
            for tweet in tweets:
                try:
                    # Update the ID cutoff before attempting to parse the tweet
                    self.monitored_since[account] = max(
                        [self.monitored_since[account], tweet.id])

                    # Make sure this isn't a comment / reply
                    if tweet.in_reply_to_status_id:
                        self.log.info(
                            f"[{account}] Tweet is a reply/comment; ignoring")
                        continue

                    # Make sure we haven't already processed this post
                    if tweet.id in self._posts_processed:
                        self.log.info(
                            f"[{account}] Post has already been processed; ignoring"
                        )
                        continue
                    self._posts_processed.append(tweet.id)

                    # Make sure this isn't a re-tweet
                    if 'RT @' in tweet.full_text or hasattr(
                            tweet, 'retweeted_status'):
                        self.log.info(f"[{account}] Retweeted post; ignoring")
                        continue

                    original_cache, media_cache, media = self.get_closest_media(
                        tweet, account)
                    self.log.info(
                        f"[{account}] Found new media post in tweet {tweet.id}: {media[0]}"
                    )

                    # Get the sauce
                    sauce_cache = await self.get_sauce(
                        media_cache,
                        log_index=account,
                        trigger=TRIGGER_MONITORED)
                    sauce = sauce_cache.sauce

                    self.log.info(
                        f"[{account}] Found {sauce.index} sauce for tweet {tweet.id}"
                        if sauce else
                        f"[{account}] Failed to find sauce for tweet {tweet.id}"
                    )

                    await self.send_reply(tweet_cache=original_cache,
                                          media_cache=media_cache,
                                          sauce_cache=sauce_cache,
                                          requested=False)
                except TwSauceNoMediaException:
                    self.log.info(
                        f"[{account}] No sauce found for tweet {tweet.id}")
                    continue
                except Exception as e:
                    self.log.exception(
                        f"[{account}] An unknown error occurred while processing tweet {tweet.id}: {e}"
                    )
                    continue
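The de-duplication above rests on two pieces of state: monitored_since, a per-account high-water mark of the newest tweet ID seen, and _posts_processed, a list of IDs already handled. A standalone sketch of the watermark part; the helper below is illustrative, not the project's code:

from typing import Dict, List

monitored_since = {}  # type: Dict[str, int]


def fresh_tweet_ids(account: str, fetched_ids: List[int]) -> List[int]:
    """Return only IDs newer than the stored cutoff, and advance the cutoff."""
    # The original seeds the cutoff from the account's newest tweet rather than 0
    cutoff = monitored_since.get(account, 0)
    fresh = [tweet_id for tweet_id in fetched_ids if tweet_id > cutoff]
    if fresh:
        monitored_since[account] = max(fresh)
    return fresh


print(fresh_tweet_ids('some_artist', [101, 102]))  # -> [101, 102]
print(fresh_tweet_ids('some_artist', [102, 103]))  # -> [103]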
Example #5
def _twitter_api(key, secret, token, token_secret) -> tweepy.API:
    """
    Authenticate with Twitter and verify the supplied credentials
    Returns:
        tweepy.api.API
    """
    auth = tweepy.OAuthHandler(key, secret)
    auth.set_access_token(token, token_secret)
    _api = tweepy.API(auth,
                      wait_on_rate_limit=True,
                      wait_on_rate_limit_notify=True)

    try:
        _api.verify_credentials()
    except Exception as e:
        logger.critical("Error loading API", exc_info=True)
        raise e
    logger.info("Twitter API ready")
    return _api


api = _twitter_api(config.get('Twitter', 'consumer_key'),
                   config.get('Twitter', 'consumer_secret'),
                   config.get('Twitter', 'access_token'),
                   config.get('Twitter', 'access_secret'))

if config.has_section('TwitterReadOnly'):
    readonly_api = _twitter_api(
        config.get('TwitterReadOnly', 'consumer_key'),
        config.get('TwitterReadOnly', 'consumer_secret'),
        config.get('TwitterReadOnly', 'access_token'),
        config.get('TwitterReadOnly', 'access_secret'))
else:
    readonly_api = None
Example #6
# Set up logging
import logging

from twsaucenao.config import config

logLevel = getattr(logging, str(config.get('Logging', 'log_level', fallback='ERROR')).upper())
logFormat = logging.Formatter("[%(asctime)s] %(levelname)s: %(message)s", "%Y-%m-%d %H:%M:%S")

log = logging.getLogger('twsaucenao')
log.setLevel(logLevel)

ch = logging.StreamHandler()
ch.setLevel(logLevel)
ch.setFormatter(logFormat)

log.addHandler(ch)
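Because the StreamHandler is attached to the package-level 'twsaucenao' logger, module loggers created elsewhere with logging.getLogger(__name__) propagate their records up to it automatically. A small standalone illustration:

import logging

log = logging.getLogger('twsaucenao')
log.setLevel(logging.INFO)

ch = logging.StreamHandler()
ch.setFormatter(logging.Formatter("[%(asctime)s] %(levelname)s: %(message)s", "%Y-%m-%d %H:%M:%S"))
log.addHandler(ch)

# A child logger such as 'twsaucenao.server' has no handler of its own,
# but its records bubble up to the parent's StreamHandler via propagation.
child = logging.getLogger('twsaucenao.server')
child.info("handled by the parent logger's handler")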
Example #7
import asyncio

from twsaucenao.config import config
from twsaucenao.log import log
from twsaucenao.models.database import TweetCache, TweetSauceCache
from twsaucenao.server import TwitterSauce

# Get our polling intervals
mentioned_interval = float(
    config.get('Twitter', 'mentioned_interval', fallback=15.0))
monitored_interval = float(
    config.get('Twitter', 'monitored_interval', fallback=60.0))
search_interval = float(config.get('Twitter', 'search_interval',
                                   fallback=60.0))

twitter = TwitterSauce()


# noinspection PyBroadException
async def mentions() -> None:
    """
    Respond to any mentions requesting sauce lookups
    Returns:
        None
    """
    while True:
        try:
            # Mentions
            await twitter.check_mentions()
            await asyncio.sleep(mentioned_interval)
        except Exception:
            # Log unexpected errors and keep polling instead of letting the loop die
            log.exception("An unknown error occurred while processing mentions")
            await asyncio.sleep(mentioned_interval)
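mentions() is one of what are presumably several polling loops, given that monitored and search intervals are read above as well. A minimal standalone sketch of running such coroutines concurrently; the names and bodies here are placeholders, not the project's actual entry point:

import asyncio


async def poll(label: str, interval: float) -> None:
    # Placeholder loop standing in for mentions()/monitored()/search()
    for _ in range(2):
        print(f"checking {label}")
        await asyncio.sleep(interval)


async def main() -> None:
    await asyncio.gather(
        poll("mentions", 0.1),
        poll("monitored", 0.1),
        poll("search", 0.1),
    )


asyncio.run(main())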
Example #8
    def __init__(self):
        self.log = logging.getLogger(__name__)

        # Tweet Cache Manager
        self.twitter = TweetManager()
        self.twython = Twython(config.get('Twitter', 'consumer_key'),
                               config.get('Twitter', 'consumer_secret'),
                               config.get('Twitter', 'access_token'),
                               config.get('Twitter', 'access_secret'))

        # SauceNao
        self.minsim_mentioned = float(
            config.get('SauceNao', 'min_similarity_mentioned', fallback=50.0))
        self.minsim_monitored = float(
            config.get('SauceNao', 'min_similarity_monitored', fallback=65.0))
        self.minsim_searching = float(
            config.get('SauceNao', 'min_similarity_searching', fallback=70.0))
        self.persistent = config.getboolean('Twitter',
                                            'enable_persistence',
                                            fallback=False)
        self.anime_link = config.get('SauceNao',
                                     'source_link',
                                     fallback='anidb').lower()
        self.sauce = SauceNao(api_key=config.get('SauceNao',
                                                 'api_key',
                                                 fallback=None),
                              min_similarity=min(self.minsim_mentioned,
                                                 self.minsim_monitored,
                                                 self.minsim_searching),
                              priority=[21, 22, 5])

        # Trace.moe
        self.tracemoe = None  # type: Optional[ATraceMoe]
        if config.getboolean('TraceMoe', 'enabled', fallback=False):
            self.tracemoe = ATraceMoe(
                config.get('TraceMoe', 'token', fallback=None))

        self.nsfw_previews = config.getboolean('TraceMoe',
                                               'nsfw_previews',
                                               fallback=False)

        # Pixiv
        self.pixiv = Pixiv()

        # Cache some information about ourselves
        self.my = api.me()
        self.log.info(f"Connected as: {self.my.screen_name}")

        # Image URLs are md5 hashed and cached here to prevent duplicate API queries. The cache is cleared every 24 hours.
        # I'll update this in the future to use a real caching mechanism (database or redis)
        self._cached_results = {}

        # A cached list of IDs for parent posts we've already processed.
        # Used in the check_monitored() method to prevent re-posting sauces when posts are retweeted
        self._posts_processed = []

        # The ID cutoff; we populate this once via an initial query at startup
        try:
            self.since_id = tweepy.Cursor(api.mentions_timeline,
                                          tweet_mode='extended',
                                          count=1).items(1).next().id
        except StopIteration:
            self.since_id = 0
        self.monitored_since = {}
Example #9
import pysaucenao
import tweepy
from pony.orm import *
from pysaucenao import GenericSource
from pysaucenao.containers import PixivSource, SauceNaoResults, VideoSource

from twsaucenao.api import api
from twsaucenao.config import config
from twsaucenao.log import log

db = Database()

if config.has_section('MySQL'):
    db.bind(provider='mysql',
            host=config.get('MySQL', 'hostname'),
            user=config.get('MySQL', 'username'),
            passwd=config.get('MySQL', 'password'),
            db=config.get('MySQL', 'database'),
            charset='utf8mb4')
else:
    db.bind(provider='sqlite', filename='database.sqlite', create_db=True)

TRIGGER_MENTION = 'mentioned'
TRIGGER_MONITORED = 'monitored'
TRIGGER_SEARCH = 'searching'


# noinspection PyMethodParameters
class TweetCache(db.Entity):
    tweet_id = PrimaryKey(int, size=64)
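The TweetCache entity definition is truncated here. As general background, pony.orm entities are declared against a Database and read or written inside a db_session; a self-contained illustration with a throwaway in-memory database (not the project's models):

from pony.orm import Database, PrimaryKey, Required, db_session

demo_db = Database()


class DemoTweet(demo_db.Entity):
    tweet_id = PrimaryKey(int, size=64)
    text = Required(str)


demo_db.bind(provider='sqlite', filename=':memory:')
demo_db.generate_mapping(create_tables=True)

with db_session:
    DemoTweet(tweet_id=1, text='example tweet')
    print(DemoTweet.get(tweet_id=1).text)  # -> example tweet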
Example #10
import logging
import os
import random
import typing
from configparser import ConfigParser

import tweepy

from twsaucenao.config import config

# Set up localization for use elsewhere in the application
_language = config.get('System', 'Language', fallback='english')
_language_config = ConfigParser()
_language_config.read(os.path.join('lang', f'{_language}.ini'), 'utf-8')


def lang(category: str,
         key: str,
         replacements: typing.Optional[dict] = None,
         default=None,
         user: typing.Optional[tweepy.models.User] = None):
    """
    Provides easy-to-use application localization in the form of ini configuration files

    Language strings can be added or altered in the data/lang folder
    """
    string = _language_config.get(category, key, fallback=default)  # type: str
    if string:
        if replacements:
            for rkey, rvalue in replacements.items():
                string = string.replace(f"{{{rkey}}}", str(rvalue))
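The replacement loop substitutes {placeholder} tokens in the configured string. For illustration only, the same substitution applied to a hypothetical template (the ini section, key, and text below are made up, not taken from the project's language files):

# Hypothetical lang/english.ini entry, for illustration only:
#   [Responses]
#   found = Hello {name}, here is your sauce: {url}

template = "Hello {name}, here is your sauce: {url}"
replacements = {'name': 'someone', 'url': 'https://example.com/12345'}

for rkey, rvalue in replacements.items():
    template = template.replace(f"{{{rkey}}}", str(rvalue))

print(template)  # -> Hello someone, here is your sauce: https://example.com/12345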
Example #11
# Set up logging
import logging

import sentry_sdk

from twsaucenao.config import config

logLevel = getattr(logging, str(config.get('System', 'log_level', fallback='ERROR')).upper())
logFormat = logging.Formatter("[%(asctime)s] %(levelname)s: %(message)s", "%Y-%m-%d %H:%M:%S")

log = logging.getLogger('twsaucenao')
log.setLevel(logLevel)

ch = logging.StreamHandler()
ch.setLevel(logLevel)
ch.setFormatter(logFormat)

log.addHandler(ch)


# Unless you're running your own custom fork of saucebot, you probably don't need this.
if config.has_option('System', 'sentry_logging') and config.getboolean('System', 'sentry_logging'):
    sentry_sdk.init(config.get('System', 'sentry_dsn'), traces_sample_rate=0.25)
Example #12
# Set up logging
import logging

from twsaucenao.config import config

logLevel = getattr(
    logging,
    str(config.get('System', 'log_level', fallback='ERROR')).upper())
logFormat = logging.Formatter("[%(asctime)s] %(levelname)s: %(message)s",
                              "%Y-%m-%d %H:%M:%S")

log = logging.getLogger('twsaucenao')
log.setLevel(logLevel)

ch = logging.StreamHandler()
ch.setLevel(logLevel)
ch.setFormatter(logFormat)

log.addHandler(ch)
Example #13
                del image
                encoded = b64encode(data.getvalue()).decode("utf-8")
                response = await self.session.post(url,
                                                   json={
                                                       "image": encoded,
                                                       "filter": search_filter
                                                   })
            else:
                # A plain URL string: let the trace.moe API fetch the image itself
                response = await self.session.get(url, params={"url": path})
            return loads(await response.text())
        elif isinstance(path, io.BufferedIOBase):
            # An already-open binary stream: read it and base64-encode the contents
            encoded = b64encode(path.read()).decode("utf-8")
            response = await self.session.post(url,
                                               json={
                                                   "image": encoded,
                                                   "filter": search_filter
                                               })
            return loads(await response.text())
        else:
            # Anything else is treated as a local file path
            with open(path, "rb") as f:
                encoded = b64encode(f.read()).decode("utf-8")
            response = await self.session.post(url,
                                               json={
                                                   "image": encoded,
                                                   "filter": search_filter
                                               })
            return loads(await response.text())


tracemoe = ATraceMoe(config.get('TraceMoe', 'token', fallback=None))