Code example #1
    def run(self):
        gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.2)
        tf_config = tf.ConfigProto(gpu_options=gpu_options)
        with tf.Session(config=tf_config) as sess:
            # predictor = the trained seq2seq model
            predictor = predict.EasyPredictor(sess)
            while True:
                reply = self.queue.get()
                make_reply_text(reply, predictor)
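
Code example #1 is the run method of a worker thread that pulls incoming tweets off a queue and answers them with the trained seq2seq model. The enclosing class is not shown in the excerpt; the following is only a minimal sketch of what it might look like (the class name ReplyWorker, its constructor, and the threading setup are assumptions, not part of the original code):

import threading

import tensorflow as tf  # needed by run() in code example #1

import predict  # project-local module that provides EasyPredictor


class ReplyWorker(threading.Thread):
    # Hypothetical wrapper for the run() method above: a daemon thread that
    # consumes queued tweets and generates replies with the seq2seq model.
    def __init__(self, reply_queue):
        threading.Thread.__init__(self)
        self.daemon = True
        self.queue = reply_queue  # a queue.Queue filled by the tweet listener

    # run(self) is the method shown in code example #1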
Code example #2
    def __init__(self, sess):
        # Twitter credentials are read from environment variables.
        consumer_key = os.getenv("consumer_key")
        consumer_secret = os.getenv("consumer_secret")
        access_token = os.getenv("access_token")
        access_token_secret = os.getenv("access_token_secret")

        self.auth = OAuthHandler(consumer_key, consumer_secret)
        self.auth.set_access_token(access_token, access_token_secret)
        self.api = tweepy.API(self.auth)
        # Trained seq2seq predictor, bound to the shared TensorFlow session.
        self.predictor = predict.EasyPredictor(sess)
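
Code example #2 is the constructor of the bot class; the class itself and its imports are not shown. Note that os.getenv returns None for unset variables, so a misconfigured environment only fails later inside tweepy. A small check like the following (an addition of this write-up, not part of the original code) makes that failure explicit at startup:

import os

# Names taken from the os.getenv calls in code example #2.
REQUIRED_ENV = ("consumer_key", "consumer_secret",
                "access_token", "access_token_secret")

missing = [name for name in REQUIRED_ENV if os.getenv(name) is None]
if missing:
    raise RuntimeError(
        "Missing Twitter credentials: {0}".format(", ".join(missing)))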
Code example #3
def twitter_bot():
    # Run inference on CPU only (no GPUs visible) and log device placement.
    tf_config = tf.ConfigProto(device_count={"GPU": 0},
                               log_device_placement=True)

    auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
    auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
    api = tweepy.API(auth)

    with tf.Session(config=tf_config) as sess:
        predictor = predict.EasyPredictor(sess)

        for tweet in tweets():
            status_id, status, bot_flag = tweet
            print("Processing {0}...".format(status.text))
            screen_name = status.author.screen_name
            replies = predictor.predict(status.text)
            if not replies:
                print("no reply")
                continue

            reply_body = replies[0]
            if reply_body is None:
                print("No reply predicted")
            else:
                try:
                    if is_contain(status.text, 'おすすめの本'):  # "recommended books"
                        special_reply(api,
                                      bot_flag,
                                      screen_name,
                                      status_id,
                                      code=1)
                    elif is_contain(status.text, '人工無能'):  # "artificial non-intelligence" (chatbot)
                        special_reply(api,
                                      bot_flag,
                                      screen_name,
                                      status_id,
                                      code=2)
                    elif is_contain(status.text, 'ありがとう'):  # "thank you"
                        special_reply(api,
                                      bot_flag,
                                      screen_name,
                                      status_id,
                                      code=3)
                    else:
                        post_reply(api, bot_flag, reply_body, screen_name,
                                   status_id)
                except tweepy.TweepError as e:
                    # duplicate status
                    if e.api_code == 187:
                        pass
                    else:
                        raise
            mark_tweet_processed(status_id)
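
Code example #3 also relies on helpers defined elsewhere in the project: tweets(), is_contain(), special_reply(), post_reply(), and mark_tweet_processed(). Their implementations are not part of the excerpt; the following is only a rough sketch of the two simplest ones, where the bodies are guesses at the intended behaviour rather than the author's code:

def is_contain(text, keyword):
    # Presumably a plain substring check on the tweet body.
    return keyword in text


def post_reply(api, bot_flag, reply_body, screen_name, status_id):
    # Presumably posts the generated reply as an @-mention to the author.
    # bot_flag is ignored in this sketch because its meaning is not shown.
    reply_text = "@{0} {1}".format(screen_name, reply_body)
    api.update_status(status=reply_text, in_reply_to_status_id=status_id)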
Code example #4
def twitter_bot():
    # Only allocate part of the gpu memory when predicting.
    gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.2)
    tf_config = tf.ConfigProto(gpu_options=gpu_options)

    # Twitter credentials are read from environment variables.
    consumer_key = os.getenv("consumer_key")
    consumer_secret = os.getenv("consumer_secret")
    access_token = os.getenv("access_token")
    access_token_secret = os.getenv("access_token_secret")

    auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
    auth.set_access_token(access_token, access_token_secret)
    api = tweepy.API(auth)
    with tf.Session(config=tf_config) as sess:
        predictor = predict.EasyPredictor(sess)

        for tweet in tweets():
            status_id, status, bot_flag = tweet
            print("Processing {0}...".format(status.text))
            screen_name = status.author.screen_name
            replies = predictor.predict(status.text)
            if not replies:
                print("no reply")
                continue
            reply_body = replies[0]
            if reply_body is None:
                print("No reply predicted")
            else:
                try:
                    post_reply(api, bot_flag, reply_body, screen_name,
                               status_id)
                except tweepy.TweepError as e:
                    # duplicate status
                    if e.api_code == 187:
                        pass
                    else:
                        raise
            mark_tweet_processed(status_id)
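
Code example #4 is the complete bot loop without the keyword-specific replies. For context, the module-level imports it depends on, plus an entry point, would look roughly like this (predict is the project's own seq2seq wrapper; tweets(), post_reply(), and mark_tweet_processed() are the project helpers discussed above and are not redefined here):

import os

import tensorflow as tf
import tweepy

import predict  # project-local seq2seq predictor module


if __name__ == "__main__":
    twitter_bot()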