class TestMarkovClass(unittest.TestCase):
    """Test that the Markov wrapper class behaves as expected."""

    def setUp(self):
        # db 11 keeps test keys clear of any real data
        self.markov = Markov(prefix="testclass", db=11)

    def test_add_line_to_index(self):
        samples = [
            ['i', 'ate', 'a', 'peach'],
            ['i', 'ate', 'one', 'peach'],
            ['i', 'ate', 'a', 'sandwich'],
        ]
        for sample in samples:
            self.markov.add_line_to_index(sample)
        # "i ate" is followed by "a" twice; "ate a" by "peach" once
        self.assertEqual(
            self.markov.client.zscore("testclass:i:ate", "a"), 2.0)
        self.assertEqual(
            self.markov.client.zscore("testclass:ate:a", "peach"), 1.0)

    def test_score_for_line(self):
        self.test_add_line_to_index()
        self.assertEqual(
            self.markov.score_for_line(['i', 'ate', 'a', 'peach']), 100)

    def test_generate(self):
        self.test_add_line_to_index()
        generated = self.markov.generate(max_words=3)
        assert 2 <= len(generated) <= 3
        generated = self.markov.generate(seed=['ate', 'one'], max_words=3)
        assert 'peach' in generated
        assert 'sandwich' not in generated

    def tearDown(self):
        """Clean up our redis keys."""
        client = self.markov.client
        for key in client.keys(self.markov.prefix + "*"):
            client.delete(key)
class TestMarkovClass(unittest.TestCase):
    """Test that the Markov wrapper class behaves as expected."""

    def setUp(self):
        # isolated redis database so tests never touch live keys
        self.markov = Markov(prefix="testclass", db=11)

    def test_add_line_to_index(self):
        first = ['i', 'ate', 'a', 'peach']
        second = ['i', 'ate', 'one', 'peach']
        third = ['i', 'ate', 'a', 'sandwich']
        self.markov.add_line_to_index(first)
        self.markov.add_line_to_index(second)
        self.markov.add_line_to_index(third)
        # two of the three lines continue "i ate" with "a"
        score_a = self.markov.client.zscore("testclass:i:ate", "a")
        self.assertEqual(score_a, 2.0)
        # only one line continues "ate a" with "peach"
        score_peach = self.markov.client.zscore("testclass:ate:a", "peach")
        self.assertEqual(score_peach, 1.0)

    def test_score_for_line(self):
        self.test_add_line_to_index()
        known_line = ['i', 'ate', 'a', 'peach']
        self.assertEqual(self.markov.score_for_line(known_line), 100)

    def test_generate(self):
        self.test_add_line_to_index()
        result = self.markov.generate(max_words=3)
        assert len(result) >= 2
        assert len(result) <= 3
        # seeding with "ate one" can only lead to "peach"
        result = self.markov.generate(seed=['ate', 'one'], max_words=3)
        assert 'peach' in result
        assert 'sandwich' not in result

    def tearDown(self):
        """Clean up our redis keys."""
        for key in self.markov.client.keys(self.markov.prefix + "*"):
            self.markov.client.delete(key)
def test_flush(self):
    """flush(prefix=...) must remove only the keys under that prefix."""
    brain_one = Markov(prefix="one", db=5)
    brain_two = Markov(prefix="two", db=5)
    for words in (['i', 'ate', 'a', 'peach'],
                  ['i', 'ate', 'one', 'peach'],
                  ['i', 'ate', 'a', 'sandwich']):
        brain_one.add_line_to_index(words)
    brain_two.add_line_to_index(['we', 'all', 'have', 'phones'])

    conn = redis.Redis(db=5)
    # three indexed lines -> six keys; one line -> three keys
    assert len(conn.keys("one:*")) == 6
    assert len(conn.keys("two:*")) == 3

    # flushing "one" must leave "two" untouched
    brain_one.flush(prefix="one")
    assert len(conn.keys("one:*")) == 0
    assert len(conn.keys("two:*")) == 3

    brain_two.flush(prefix="two")
    assert len(conn.keys("one:*")) == 0
    assert len(conn.keys("two:*")) == 0
def test_flush(self):
    """Verify flush(prefix=...) deletes one brain without harming another."""
    m1 = Markov(prefix="one", db=5)
    m2 = Markov(prefix="two", db=5)
    m1.add_line_to_index(['i', 'ate', 'a', 'peach'])
    m1.add_line_to_index(['i', 'ate', 'one', 'peach'])
    m1.add_line_to_index(['i', 'ate', 'a', 'sandwich'])
    important_line = ['we', 'all', 'have', 'phones']
    m2.add_line_to_index(important_line)

    r = redis.Redis(db=5)
    # sanity check: both prefixes populated before any flush
    assert len(r.keys("one:*")) == 6
    assert len(r.keys("two:*")) == 3

    m1.flush(prefix="one")
    # "one" is gone, "two" survives
    assert not r.keys("one:*")
    assert len(r.keys("two:*")) == 3

    m2.flush(prefix="two")
    # both brains are now empty
    assert not r.keys("one:*")
    assert not r.keys("two:*")
def index_feed(target):
    """Fetch *target*'s twitter timeline and index it into the Markov brain.

    Loads OAuth credentials from credentials.json; if the file is missing,
    a blank template is written and the process exits so the user can fill
    it in. Malformed JSON also exits with a message. Each fetched tweet is
    cleaned via clean_tweet() and added to a Markov index keyed by *target*.
    """
    # load config
    if os.path.isfile("credentials.json"):
        try:
            with open("credentials.json", "r") as f:
                credentials = json.load(f)
        except ValueError:
            # json.load raises ValueError on malformed input
            print("credentials.json is malformed.")
            exit()
    else:
        # write a blank template the user can fill in, then bail out
        empty_credentials = {u'consumer-key': u'',
                             u'consumer-secret': u'',
                             u'access-token': u'',
                             u'access-token-secret': '',
                             u'user-name': u''}
        with open("credentials.json", "w") as f:
            json.dump(empty_credentials, f, indent=4, separators=(',', ':'))
        print("credentials.json is missing. i've created a blank one for you.")
        exit()

    # authenticate
    auth = tweepy.OAuthHandler(credentials["consumer-key"],
                               credentials["consumer-secret"])
    auth.set_access_token(credentials["access-token"],
                          credentials["access-token-secret"])

    # connect to the api
    api = tweepy.API(auth)

    # add tweets to redis under the target's prefix
    tweet_data = Markov(target)

    try:
        # NOTE(review): the twitter API caps count per request (200 for
        # user_timeline), so 20000 is silently truncated — a tweepy.Cursor
        # would be needed to actually page through more tweets. Confirm.
        my_tweets = api.user_timeline(target, count=20000)
        for tweet in my_tweets:
            cleaned_tweet = clean_tweet(tweet.text)
            tweet_string = " ".join(cleaned_tweet)
            print('indexing tweet: "' + tweet_string + '"')
            tweet_data.add_line_to_index(cleaned_tweet)
    except Exception as e:
        # best-effort: report the API error and return without indexing
        print(e)
class MarkovPlugin(object):
    """IRC plugin wiring for the Markov brain.

    Learns from ordinary channel chatter, replies when the bot is
    addressed directly (QUERY_REGEX match), and rate-limits users who
    query it too often via ioloop-timeout-backed throttles.
    """

    def __init__(self):
        self.markov = Markov(prefix='irc')
        self.silent = False    # when True, never reply (learning continues)
        self.dumb = False      # when True, never learn from chatter
        self.throttles = []    # active MarkovThrottle objects, one per user

    @admin_command
    def silence(self, connection, event):
        # Admin: stop replying.
        self.silent = True

    @admin_command
    def unsilence(self, connection, event):
        # Admin: resume replying.
        self.silent = False

    @admin_command
    def dumb_markov(self, connection, event):
        # Admin: stop learning from channel messages.
        self.dumb = True

    @admin_command
    def learn_markov(self, connection, event):
        # Admin: resume learning.
        self.dumb = False

    @admin_command
    def flush_brain(self, connection, event):
        # Admin: wipe the brain's redis keys entirely.
        connection.logger.info('FLUSHING MARKOV BRAIN!!!')
        self.markov.flush()

    @admin_command
    def set_brain(self, connection, event):
        # Admin: switch to a different brain by changing the key prefix.
        prefix = event.command_params[0]
        connection.logger.info('SETTING MARKOV BRAIN {prefix: %s}' % prefix)
        self.markov.prefix = prefix

    def is_ignored(self, user):
        # A user is ignored once their throttle has tripped more than
        # 3 times; returns a falsy value when no throttle exists.
        t = self.get_throttle(user)
        return t and t.count > 3

    @plugin_hook
    def on_privmsg(self, connection, event):
        """Dispatch an incoming message: ignore, reply, or learn."""
        user = connection.users[event.nick]
        if self.is_ignored(user):
            # Keep extending the ignore window while they keep talking.
            connection.logger.warning('IGNORED {nick: %s}' % event.nick)
            self.do_throttle(connection.bot.ioloop, user,
                             timedelta(minutes=1))
            return
        m = re.match(QUERY_REGEX.format(connection.nick), event.text)
        if not self.silent and m:
            # Addressed directly: reply, then throttle the asker.
            tokens = tokenize_line(m.group('message'))
            self.do_reply(connection, event, tokens)
            ioloop = connection.bot.ioloop
            self.do_throttle(ioloop, user)
        elif not m and not self.dumb and not event.command:
            # Ordinary (non-command) chatter: feed it to the brain.
            connection.logger.info('Learning {message: %s}' % event.text)
            message = event.text
            tokens = tokenize_line(message)
            self.learn_message(tokens)

    def do_reply(self, connection, event, tokens):
        # Generate a reply seeded from the message tokens; fall back to a
        # canned apology when the brain has nothing yet.
        connection.logger.info('Loading reply {tokens: %s}' % repr(tokens))
        reply = load_reply_from_markov(self.markov, event.nick, tokens)
        if reply:
            connection.reply_with_nick(event, reply)
        else:
            connection.reply_with_nick(event, ('I have nothing to'
                                               ' say yet. Teach me more!'))

    def do_throttle(self, ioloop, user, timeout=TIMEOUT):
        """Create or refresh the user's throttle.

        Each call resets the expiry timeout; refreshing an existing
        throttle also bumps its trip count (see is_ignored).
        """
        throttle = self.get_throttle(user)
        if throttle:
            # Cancel the pending expiry before scheduling a new one.
            ioloop.remove_timeout(throttle.io_timeout)
            throttle.count += 1
        else:
            throttle = MarkovThrottle(user)
            self.throttles.append(throttle)
        rem_user_thrtl = partial(self.remove_throttle, user)
        throttle.io_timeout = ioloop.add_timeout(timeout, rem_user_thrtl)

    def remove_throttle(self, user):
        # Iterate over a copy so removing entries mid-loop is safe.
        for t in list(self.throttles):
            if t.user is user:
                self.throttles.remove(t)

    def get_throttle(self, user):
        # for/else: the else branch runs only when the loop finds no match.
        for t in self.throttles:
            if t.user is user:
                return t
        else:
            return None

    def learn_message(self, tokens):
        # Index one tokenized line into the brain.
        self.markov.add_line_to_index(tokens)
class TestMarkovClass(unittest.TestCase):
    """Test that the Markov wrapper class behaves as expected."""

    def setUp(self):
        # db 11 isolates test keys from any live data
        self.markov = Markov(prefix="testclass", db=11)

    def test_add_line_to_index(self):
        for words in (['i', 'ate', 'a', 'peach'],
                      ['i', 'ate', 'one', 'peach'],
                      ['i', 'ate', 'a', 'sandwich']):
            self.markov.add_line_to_index(words)
        # "i ate" -> "a" appears in two lines; "ate a" -> "peach" in one
        self.assertEqual(
            self.markov.client.zscore("testclass:i:ate", "a"), 2.0)
        self.assertEqual(
            self.markov.client.zscore("testclass:ate:a", "peach"), 1.0)

    def test_score_for_line(self):
        self.test_add_line_to_index()
        self.assertEqual(
            self.markov.score_for_line(['i', 'ate', 'a', 'peach']), 100)

    def test_generate(self):
        self.test_add_line_to_index()
        generated = self.markov.generate(max_words=3)
        assert 2 <= len(generated) <= 3
        # "ate one" can only continue to "peach"
        generated = self.markov.generate(seed=['ate', 'one'], max_words=3)
        assert 'peach' in generated
        assert 'sandwich' not in generated

    def test_flush(self):
        """flush(prefix=...) removes only keys under that prefix."""
        brain_one = Markov(prefix="one", db=5)
        brain_two = Markov(prefix="two", db=5)
        for words in (['i', 'ate', 'a', 'peach'],
                      ['i', 'ate', 'one', 'peach'],
                      ['i', 'ate', 'a', 'sandwich']):
            brain_one.add_line_to_index(words)
        brain_two.add_line_to_index(['we', 'all', 'have', 'phones'])

        conn = redis.Redis(db=5)
        assert len(conn.keys("one:*")) == 6
        assert len(conn.keys("two:*")) == 3

        brain_one.flush(prefix="one")
        assert len(conn.keys("one:*")) == 0
        assert len(conn.keys("two:*")) == 3

        brain_two.flush(prefix="two")
        assert len(conn.keys("one:*")) == 0
        assert len(conn.keys("two:*")) == 0

    def tearDown(self):
        """Clean up our redis keys."""
        client = self.markov.client
        for key in client.keys(self.markov.prefix + "*"):
            client.delete(key)
class TestMarkovClass(unittest.TestCase):
    """Test that the Markov wrapper class behaves as expected."""

    def setUp(self):
        # use a scratch redis database for test isolation
        self.markov = Markov(prefix="testclass", db=11)

    def test_add_line_to_index(self):
        first = ['i', 'ate', 'a', 'peach']
        second = ['i', 'ate', 'one', 'peach']
        third = ['i', 'ate', 'a', 'sandwich']
        self.markov.add_line_to_index(first)
        self.markov.add_line_to_index(second)
        self.markov.add_line_to_index(third)
        # two lines continue "i ate" with "a"; one continues "ate a" with "peach"
        score_a = self.markov.client.zscore("testclass:i:ate", "a")
        self.assertEqual(score_a, 2.0)
        score_peach = self.markov.client.zscore("testclass:ate:a", "peach")
        self.assertEqual(score_peach, 1.0)

    def test_score_for_line(self):
        self.test_add_line_to_index()
        known_line = ['i', 'ate', 'a', 'peach']
        self.assertEqual(self.markov.score_for_line(known_line), 100)

    def test_generate(self):
        self.test_add_line_to_index()
        result = self.markov.generate(max_words=3)
        assert len(result) >= 2
        assert len(result) <= 3
        # seeding with "ate one" can only lead to "peach"
        result = self.markov.generate(seed=['ate', 'one'], max_words=3)
        assert 'peach' in result
        assert 'sandwich' not in result

    def test_flush(self):
        """Flushing one prefix must not disturb another brain in the same db."""
        m1 = Markov(prefix="one", db=5)
        m2 = Markov(prefix="two", db=5)
        m1.add_line_to_index(['i', 'ate', 'a', 'peach'])
        m1.add_line_to_index(['i', 'ate', 'one', 'peach'])
        m1.add_line_to_index(['i', 'ate', 'a', 'sandwich'])
        important_line = ['we', 'all', 'have', 'phones']
        m2.add_line_to_index(important_line)

        r = redis.Redis(db=5)
        assert len(r.keys("one:*")) == 6
        assert len(r.keys("two:*")) == 3

        m1.flush(prefix="one")
        assert not r.keys("one:*")
        assert len(r.keys("two:*")) == 3

        m2.flush(prefix="two")
        assert not r.keys("one:*")
        assert not r.keys("two:*")

    def tearDown(self):
        """Clean up our redis keys."""
        for key in self.markov.client.keys(self.markov.prefix + "*"):
            self.markov.client.delete(key)