Example #1
import sqlite3
from sqlite3 import Error
from markov_chain import MarkovChain

chain = MarkovChain()

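# open the local SQLite database of script lines (the path is specific to the author's machine)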
database = '/home/drue/Deployment/star_trek_club/star_trek_db.sqlite3'
connection = sqlite3.connect(database)
cursor = connection.cursor()

char_name = 'PICARD'

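# look up the character's row id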
cursor.execute('SELECT id FROM characters WHERE name=?', (char_name, ))
char_id = cursor.fetchone()[0]

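# train on every line spoken by the character, stripping ellipses and double dashes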
cursor.execute('SELECT line FROM lines WHERE character_id=?', (char_id, ))
for result in cursor.fetchall():
    chain.train(result[0].replace('...', '').replace('--', ''))

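# persist the trained chain for this character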
chain.save_training(f'bin/star_trek/{char_name}.bin')
Example #2
                                    max_id=next_id,
                                    lang='en')
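        # skip tweets matching EXCLUDE_WORDS; scrub the rest with regex constants defined elsewhere in the script, then train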
        for t in tweets['statuses']:
            if EXCLUDE_WORDS.search(t['full_text']) is None:
                tweet = TEXT_ONLY.sub(' ', t['full_text'])
                tweet = RETWEET.sub(' ', tweet)
                tweet = USER_NAME.sub(' ', tweet)
                tweet = LINKS.sub(' ', tweet)
                tweet = AMPERSAND.sub('and', tweet)
                tweet = TYPO_HASHTAGS.sub(fix_hashtag, tweet)
                tweet = TYPO_PERIOD.sub(fix_period, tweet)
                tweet = TYPO_QUESTION.sub(fix_question, tweet)
                tweet = TYPO_EXCLAMATION.sub(fix_exclamation, tweet)
                tweet = LONE_PUNCTUATION.sub(' ', tweet)
                tweet = GT.sub('>', tweet)
                tweet = LT.sub('<', tweet)
                chain.train(tweet)
    print(f'len(chain.tree): {len(chain.tree)}')

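# adjust transition weights; aw_mult and the aw_* fitness functions presumably come from markov_algorithms (imported in Example #6)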
chain.bulk_adjust_weights(fitness_functions=[
    aw_mult(aw_favor_complexity, .001),
    aw_mult(aw_favor_punctuation, .00015),
    aw_mult(aw_favor_alternating_complexity, .1)
],
                          iterations=len(chain.tree))

chain.save_training('bin/twitter/trending.bin')

print(
    f'Sample tweet: {chain.generate_tweet(append_tag="Category: #trending")}')
Example #3
        break
print(f'# of tweets: {len(tweets)}')

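# train on each tweet after stripping links and normalising a few HTML entities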
for t in tweets:
    # if 'retweeted_status' in t:
    # 	continue
    # tweet = USER_NAME.sub(' ', t['full_text'])
    tweet = LINKS.sub(' ', t['full_text'])
    tweet = AMPERSAND.sub('&', tweet)
    tweet = GT.sub('>', tweet)
    tweet = LT.sub('<', tweet)
    chain.train(tweet)
print(f'length of chain: {len(chain.tree)}\n')

# chain.bulk_adjust_weights(fitness_functions=[aw_mult(aw_favor_complexity, .001), aw_mult(aw_favor_punctuation, .00015), aw_mult(aw_favor_alternating_complexity, .1)], iterations=len(chain.tree))
chain.save_training(f'bin/twitter/{user}.bin')

tweets = tweets[:100]  # keep at most the first 100 tweets
tweets.reverse()

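# print each original tweet and sort its words by length, presumably to pick a seed for a generated reply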
print(f'{"—"*64}\n')
replies = 0
for t in tweets:
    replies += 1
    print(f'___tweet #{replies}___\n')
    print(f'___original tweet:___\n{t["full_text"]}\n')

    words = sorted(re.split(r'[^a-zA-Z#]', t['full_text']),
                   key=lambda w: len(w),
                   reverse=True)
    begin = None
Example #4
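	# excerpt from inside a loop over user handles: pull up to 200 of each user's own tweets (no retweets) and train on those that pass the EXCLUDE_WORDS filter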
	print(f'search_term: {user}')
	tweets = twit.statuses.user_timeline(
		screen_name=user, count=200, tweet_mode='extended',
		include_rts=False, trim_user=True)
	for t in tweets:
		if EXCLUDE_WORDS.search(t['full_text']) is None:
			tweet = TEXT_ONLY.sub(' ', t['full_text'])
			tweet = USER_NAME.sub(' ', tweet)
			tweet = LINKS.sub(' ', tweet)
			tweet = TYPO_HASHTAGS.sub(fix_hashtag, tweet)
			tweet = TYPO_PERIOD.sub(fix_period, tweet)
			tweet = TYPO_QUESTION.sub(fix_question, tweet)
			tweet = TYPO_EXCLAMATION.sub(fix_exclamation, tweet)
			tweet = LONE_PUNCTUATION.sub(' ', tweet)
			tweet = AMPERSAND.sub('and', tweet)
			tweet = GT.sub('>', tweet)
			tweet = LT.sub('<', tweet)
			chain.train(tweet)
		# chain.train(t['full_text'])
	print(f'len(chain.tree): {len(chain.tree)}')

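# re-weight the chain; judging by its name, dg_disfavor_consecutive_hashtags penalises runs of consecutive hashtags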
chain.bulk_adjust_weights(fitness_functions=[
    aw_mult(aw_favor_complexity, .001),
    aw_mult(aw_favor_punctuation, .00015),
    aw_mult(dg_disfavor_consecutive_hashtags, .001)
],
                          iterations=len(chain.tree))

print('Sample tweet:', chain.generate_tweet())

# chain.save_training('bin/twitter/apologists.bin')
# chain.save_training('bin/twitter/atheists.bin')
chain.save_training('bin/twitter/news.bin')
# chain.save_training('bin/twitter/newagers.bin')
# chain.save_training('bin/twitter/churches.bin')
# chain.save_training('bin/twitter/trumpsterfire.bin')
# chain.save_training('bin/twitter/meta.bin')
Example #5
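    # excerpt from a paginated search loop: next_results in the search metadata carries the max_id for the next request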
    next_id = re.split(r'\D+', tweets['search_metadata']['next_results'])[1]
    try:
        tweets = twit.search.tweets(q=query,
                                    count=100,
                                    lang='en',
                                    result_type='recent',
                                    tweet_mode='extended',
                                    include_entities=False,
                                    max_id=next_id)
    except Exception as e:
        print('____an error occurred____')
        print(f'____search ended at i = {i+1}____')
        break
    for t in tweets['statuses']:
        if EXCLUDE_WORDS.search(t['full_text']) is None:
            tweet = clean_tweet(t['full_text'])
            chain.train(tweet)
print(f'____len(chain.tree) = {len(chain.tree)}____')

print('____adjusting weights, this may take a moment____')
chain.bulk_adjust_weights(fitness_functions=[
    aw_mult(aw_favor_complexity, .001),
    aw_mult(dg_disfavor_consecutive_hashtags, .001)
],
                          iterations=len(chain.tree))
print('____done____')

chain.save_training('bin/twitter/beliefs.bin')

print('____sample tweet____:\n', chain.generate_tweet())
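
Note: Example #5 calls a clean_tweet helper that is not shown here. The sketch below is one plausible version, assuming it simply bundles the same kind of regex cleanup Examples #2 and #4 apply inline; the patterns are stand-ins for the project's own constants, which likely also cover retweets, typo fixes and lone punctuation.

import re

# Stand-in patterns: the real LINKS, USER_NAME, AMPERSAND, GT and LT constants
# are defined elsewhere in the original scripts and may differ.
LINKS = re.compile(r'https?://\S+')
USER_NAME = re.compile(r'@\w+')
AMPERSAND = re.compile(r'&amp;')
GT = re.compile(r'&gt;')
LT = re.compile(r'&lt;')


def clean_tweet(text):
    """Strip links and @mentions, then decode a few common HTML entities."""
    text = LINKS.sub(' ', text)
    text = USER_NAME.sub(' ', text)
    text = AMPERSAND.sub('and', text)
    text = GT.sub('>', text)
    text = LT.sub('<', text)
    return text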
Example #6
from markov_chain import MarkovChain
from markov_algorithms import *

chain = MarkovChain()

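# build a single chain from both scripture texts (verbose=True presumably reports progress)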
chain.train_on_file(filename='training_txt/quran.txt', verbose=True)
chain.train_on_file(filename='training_txt/new_testament.csv', verbose=True)

print(f'len(chain.tree): {len(chain.tree)}\n')

# print('Adjusting weights. This may take a while.\n_\n')
# chain.bulk_adjust_weights(fitness_functions=[aw_mult(aw_favor_complexity, .001), aw_mult(aw_favor_punctuation, .00015), aw_mult(aw_favor_alternating_complexity, .1)], iterations=len(chain.tree))

chain.save_training('bin/quran_testament.bin')

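# print eight samples; append_tag=None and follow=False presumably disable the Twitter-specific behaviour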
for i in range(8):
    print(chain.generate_tweet(append_tag=None, follow=False), '\n_\n')