Example #1
def markovme():
    instructions = []
    routes = json.loads(request.args['routes'])
    for route in routes:
        seed = route["instruction"].split()[:1]
        line = " ".join(markov.generate(model, 1, seed))
        while (len(line) > 100):
            line = " ".join(markov.generate(model, 1, seed))
            line = line.partition(".")[0].capitalize() + '.'
        instructions.append(line)
    return jsonify(status='success', result=instructions)
Example #2
def new(username, include):
    try:
        if not tweets.find_one({"username": username}):
            tweets.insert_one({
                "username": username,
                "tweets": scrape(username)
            })
        obj = tweets.find_one({"username": username})

        model, rmodel = train([e['full_text'] for e in obj["tweets"]])
        if include:
            text = generate_with(model, rmodel, include)
        else:
            text = generate(model)
        return json.dumps({
            'success': True,
            'message': ' '.join(text),
            'name': obj['tweets'][0]['user']['name'],
            'avatar': obj['tweets'][0]['user']['profile_image_url_https']
        })
    except Exception:
        return json.dumps({
            'success': False,
            'message': 'Oops! An error occurred.',
        })
Example #3
	def test_generate(self):
		model = {
				('a', 'b'): ['c'],
				('b', 'c'): ['d'],
				('c', 'd'): [None]}
		self.assertEqual(['a', 'b', 'c', 'd'],
				markov.generate(model, 2, ('a', 'b')))
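The test above pins down the model shape used throughout this listing: keys are n-token tuples, values are lists of possible successors, and a None successor ends generation. A minimal sketch of the behaviour the assertion implies (illustration only; the real markov.generate may handle seeds and randomness differently):

import random

def generate_sketch(model, n, seed):
    # Walk the chain from the seed: look up the last n tokens as a tuple,
    # pick one of the recorded successors, and stop on None or an unseen state.
    output = list(seed)
    while True:
        state = tuple(output[-n:])
        choices = model.get(state)
        if not choices:
            break
        nxt = random.choice(choices)
        if nxt is None:
            break
        output.append(nxt)
    return output

With the model from the test, generate_sketch(model, 2, ('a', 'b')) can only return ['a', 'b', 'c', 'd'], matching the assertion.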
Example #4
def handle_query():
    print(request.form)
    #source = SOURCES[int(request.form['source'])]
    source = "RedditSubmission"
    author = request.form['author']
    length = request.form['length']
    subreddit = "askreddit"
    if length:
        length = int(length)
    else:
        length = 500
    filters = []
    if author != '':
        filters.append('author="{0}"'.format(author))
    if subreddit != '':
        filters.append('UPPER(subreddit)=UPPER("{0}")'.format(subreddit))
    query = """
    SELECT text, url, title FROM {0}
    """.format(source)
    if filters:
        query += "WHERE " + ",".join(filters)
    print(query)

    result = db.execute(query)
    texts, urls, titles = zip(*result)

    print(len(result))
    generated_text = markov.generate(" ".join(titles), length)

    return jsonify(generated_text)
Example #5
async def on_message(message):
    # print(message)
    if message.author == client.user:
        return

    if message.content.lower().startswith("$tellme"):
        starting = message.content.split()[1].lower()
        num = int(message.content.split()[2])
        out = " ".join(markov.generate(starting, num))
        await message.channel.send(out)

    if message.content.lower().startswith('hows it going bot alan'):
        await message.channel.send(" ".join(markov.generate("i'm", 50)))

    if message.content.startswith('give me a sentence bot alan'):
        await message.channel.send(" ".join(markov.generate("the", 50)))
Example #6
def main_example():
    """Show a usage example with the example data"""
    corpus = '\n'.join(parse_wiki.get_entries_from_example())
    # print(corpus, end='\n\n')
    model = markov.train(corpus, order=6)
    start = random.choice(tuple(model.keys()))
    print(''.join(start) + ''.join(
        markov.generate(model, start=start, maxlen=1000, stop_items='.')))
Example #7
 def generate_text(self, text, n, seed=None, max_iterations=100):
     model = self.current_model
     if self.text_changed or not self.current_model:
         model = markov.build_model(text, n)
         self.current_model = model
         self.text_changed = False
     new_text = markov.generate(model, n, seed, max_iterations)
     return new_text
Example #8
 def test_generate_number_of_sentences(self):
     corpus = parse(
         "This is some text. This is some more text. And some more. Have some more text."
     )
     for x in range(1, 5):
         generated = generate(corpus, x)
         number_of_sentences = generated.count(".")
         self.assertEqual(x, number_of_sentences)
Example #9
def markovme():
    instructions = []
    routes = json.loads(request.args['routes'])

    for route in routes:
        seed = route["instruction"].split()[:2]
        line = " ".join(markov.generate(markov_model, 2, seed))
        instructions.append(line)
    return jsonify(status='success', result=instructions)
Example #10
def listen(socketInUse):
  while True:
    line = socketInUse.recv(4096)
    if line != "":
      if line != "\r\n":
        if "PING" in line:
          socketInUse.send("PONG\r\n")
        if "PRIVMSG" in line:
          sender = line[1:line.find('!')]
          message = line[line.rfind(':')+1:-2]
          if "nomad" in message.lower():
            if sender == owner and "quit" in message:
              exit()
            print sender,"says",message
            response = markov.generate(1000, table)
            while len(response) == 0:
              response = markov.generate(1000, table)
            socketInUse.send('PRIVMSG #nomad '+response[random.randint(0,1000)]+'\r\n')
Example #11
	def test_max_iterations(self):
		model = {
				('a', 'b'): ['c'],
				('b', 'c'): ['d'],
				('c', 'd'): ['c'],
				('d', 'c'): ['d']}
		expected = list('abcdcdc')
		self.assertEqual(markov.generate(model, 2, ('a', 'b'), max_iterations=5),
				expected)
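Together with the previous test, this one fixes what max_iterations means: it caps the number of tokens generated after the seed, so the two seed items plus five generated items give the seven-character result. A hedged extension of the earlier sketch with that cap:

import random

def generate_sketch(model, n, seed, max_iterations=100):
    # As in the earlier sketch, but append at most max_iterations tokens.
    output = list(seed)
    for _ in range(max_iterations):
        state = tuple(output[-n:])
        choices = model.get(state)
        if not choices:
            break
        nxt = random.choice(choices)
        if nxt is None:
            break
        output.append(nxt)
    return output

model = {('a', 'b'): ['c'], ('b', 'c'): ['d'],
         ('c', 'd'): ['c'], ('d', 'c'): ['d']}
assert generate_sketch(model, 2, ('a', 'b'), max_iterations=5) == list('abcdcdc')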
Example #12
 def test_max_iterations(self):
     model = {
         ('a', 'b'): ['c'],
         ('b', 'c'): ['d'],
         ('c', 'd'): ['c'],
         ('d', 'c'): ['d']
     }
     expected = list('abcdcdc')
     self.assertEqual(
         markov.generate(model, 2, ('a', 'b'), max_iterations=5), expected)
Example #13
    def run(self):
        while True:
            event, values = self.window.read()
            #print(event, values)
            if event in (None, 'Exit'):
                break
            if event == '-B1-':
                # Generates text based on the Markov chain module created in markov.py.
                # The user can choose an order and the maximum iterations. Each generation
                # is printed on its own separate line.
                if len(values['-IN-']) > 1:
                    order = 2
                    max_iterations = 100
                    if len(values['-D1-']) > 0 and len(values['-IN2-']) > 0:
                        order = int(values['-D1-'])
                        max_iterations = int(values['-IN2-'])
                    text = self.generate_text(values['-IN-'], order, None,
                                              max_iterations)
                    self.window['-OUTPUT-'].print(text, end='')
            if event == '-FILE-':
                # Opens a file to read into the input box
                if values['-FILE-']:
                    with open(values['-FILE-'], 'r') as f:
                        text = f.read()
                        self.window['-IN-'].update(text)
            if event == '-IN-':
                self.text_changed = True
                # Autocomplete-like feature that allows dynamic text generation based on
                # the last few characters of the text. Changes dynamically as the user types.
                if values['-DG-']:
                    order = 2
                    max_iterations = 100
                    if len(values['-D1-']) > 0 and len(values['-IN2-']) > 0:
                        order = int(values['-D1-'])
                        max_iterations = int(values['-IN2-'])
                    order = min(order, len(values['-IN-'][:-1]))
                    # TODO: Add dynamic model updating when user types in new text
                    model = markov.build_model(values['-IN-'][:-1], order,
                                               self.current_model)
                    new_text = markov.generate(model, order,
                                               values['-IN-'][-order - 1:-1],
                                               max_iterations)
                    self.window['-OUTPUT-'].update('')
                    self.window['-OUTPUT-'].print(values['-IN-'][:-1], end='')
                    self.window['-OUTPUT-'].print(new_text[order:],
                                                  text_color='white',
                                                  background_color='red',
                                                  end='')
                    self.current_text = values['-IN-'][:-1]

            if event == 'Clear':
                # Clear the output box
                self.window['-OUTPUT-'].update('')

        self.window.close()
Example #14
def markov_single():
    """
    Generates a quote using a markov chain, optionally constraining input to a speaker or submitter.
    """
    submitter = request.args.get('submitter')
    speaker = request.args.get('speaker')
    query = query_builder(None, None, submitter, speaker)
    if query.all() is None:
        return "none"
    markov.reset()
    markov.parse([quote.quote for quote in query.all()])
    return jsonify(markov.generate())
Example #15
async def send_tweet_diana():
    await cerise.wait_until_ready()
    while not cerise.is_closed():
        action = random.randint(0, 1)
        if action == 0:
            for i in range(8):
                await diana.retweet(random.choice(random.choice(diana.followers).tweets))
                await asyncio.sleep(900000)
        else:
            output = markov.generate('diana', k=2, n=random.randint(1, 50))
            await diana.send(output)
            await asyncio.sleep(7200000)
Example #16
    def test_generate(self):
        """
        Test the generate function
        """
        self.test_add_line_to_index()
        generated = generate(self.client, prefix=self.prefix, max_words=3)
        assert len(generated) >= 2
        assert len(generated) <= 3
        generated = generate(self.client, seed=['ate','one'], prefix=self.prefix, max_words=3)
        assert generated[2] == 'peach'
        assert 'sandwich' not in generated

        #test that relevant terms will be chosen when the relevant_terms argument is passed in
        generated = generate(self.client, relevant_terms=["peach",], prefix=self.prefix)       
        assert 'peach' in generated
        generated = generate(self.client, relevant_terms=["sandwich",], prefix=self.prefix)
        assert 'sandwich' in generated

        #there are no pizza keys!
        generated = generate(self.client, relevant_terms=["pizza",], prefix=self.prefix)
        assert len(generated) == 0
Example #17
def generate_custom(req):
    text = req['text']
    length = 100
    if 'length' in req:
        length = req['length']

    degree = 1
    if 'degree' in req:
        degree = req['degree']

    gen_text = markov.generate(text, length, degree)

    response = {'text': gen_text}
    return response
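A hypothetical invocation of the handler above, with illustrative values for the keys it reads (text, length, degree); the corpus string here is made up, and markov.generate is whatever this project's module exposes:

req = {
    'text': 'the quick brown fox jumps over the lazy dog',  # illustrative corpus
    'length': 20,
    'degree': 1,
}
print(generate_custom(req)['text'])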
Example #18
    def test_generate(self):
        """
        Test the generate function
        """
        self.test_add_line_to_index()
        generated = generate(self.client, prefix=self.prefix, max_words=3)
        assert len(generated) >= 2
        assert len(generated) <= 3
        generated = generate(self.client,
                             seed=['ate', 'one'],
                             prefix=self.prefix,
                             max_words=3)
        assert generated[2] == 'peach'
        assert 'sandwich' not in generated

        #test that relevant terms will be chosen when the relevant_terms argument is passed in
        generated = generate(self.client,
                             relevant_terms=[
                                 "peach",
                             ],
                             prefix=self.prefix)
        assert 'peach' in generated
        generated = generate(self.client,
                             relevant_terms=[
                                 "sandwich",
                             ],
                             prefix=self.prefix)
        assert 'sandwich' in generated

        #there are no pizza keys!
        generated = generate(self.client,
                             relevant_terms=[
                                 "pizza",
                             ],
                             prefix=self.prefix)
        assert len(generated) == 0
Example #19
def create_chain(type, id):
    chain = Chain(c, type)
    if not os.path.exists('static/images/{}/chain_{}'.format(type, id)):
        os.makedirs('static/images/{}/chain_{}'.format(type, id))

    if not os.path.exists('chain_data/{}/chain_{}'.format(type, id)):
        os.makedirs('chain_data/{}/chain_{}'.format(type, id))

    z = markov.noise()
    img = markov.generate(z)
    img_name = '{}/chain_{}/0.jpg'.format(type, id)
    markov.save_image(img, 'static/images/' + img_name)
    z = z.detach().numpy()
    chain.add_link(z, img_name)
    print(chain.type, chain.id)
    generate_proposal(chain)
    print('Created chain:', '{}/chain_{}'.format(type, id))
    return chain
Example #20
def generate_proposal(chain):
    # chain_key = '{}/{}'.format(chain_type, chain_id)
    # chain = chains[chain_key]
    print('z shape', chain.get_z().shape)
    new_z = markov.mutate(chain.get_z())
    # print(new_z)

    new_image = markov.generate(new_z)
    print(new_image.shape)
    index = len(chain)

    proposal_name = '{}/chain_{}/{}.jpg'.format(chain.type, chain.id, index)
    markov.save_image(new_image, 'static/images/' + proposal_name)
    chain.add_proposal(new_z, proposal_name)

    if index % 100 == 0:
        dump(chain, 'chain_data/{}/chain_{}/{}'.format(chain.type, chain.id,
                                                       index))

    return proposal_name
Example #21
def markovme():
    instructions = []
    routes = json.loads(request.args['routes'])
    print(routes)
    for route in routes:
        seed = route["instruction"].split()[:1]
        line = " ".join(markov.generate(model, 1, seed))
        doc = nlp(line)
        nouns = [item.text for item in doc if item.pos_ == 'NOUN']
        adjs = [item.text for item in doc if item.pos_ == 'ADJ']
        while (len(line) > 110):
            if (randint(1, 10) > 9):
                line = line.partition(choice(nouns))[0] + choice(nouns)
                line = " ".join(
                    line.split()).capitalize().partition(".")[0] + '.'
            else:
                line = line.partition(choice(adjs))[0] + choice(adjs)
                line = " ".join(
                    line.split()).capitalize().partition(".")[0] + '.'
        instructions.append(line)
    return jsonify(status='success', result=instructions)
Example #22
APP_KEY = ''
APP_SECRET = ''
ACCESS_TOKEN = ''
ACCESS_SECRET = ''

twitter = Twython(APP_KEY, APP_SECRET, ACCESS_TOKEN, ACCESS_SECRET)

# read the list of cities and generate city names

with open("ut-cities.txt") as f:
    text_cities = f.read()

line_cities = text_cities.strip()
model = markov.build_model(line_cities, 2)
markov_ut = markov.generate(model, 2)

full_list_cities = ''.join(markov_ut)
list_ut = full_list_cities.split()


def count_letters(word):
    return len(word) - word.count(' ')


final_list = []
for line in list_ut:
    if (15 > count_letters(line) > 4) and (line.istitle() is True):
        final_list.append(line)

ut_cities0 = final_list[0] + ', UT' + "\n"
Example #23
async def generate(ctx):
    await ctx.send(markov.generate())
Example #24
def parse(buffer):
    global channel, users

    replacement = ""
    capitalise = False
    lowercase = False
    all_capitals = False
    if not buffer:
        return ""

    regex = re.search(r"==(.+)$", buffer)
    pattern = None
    if regex:
        buffer = buffer[:len(buffer) - len(regex.group(0))]
        pattern = re.escape(regex.group(1)).replace("\\*", "*").replace(
            "\\?", "?").replace("?", ".").replace("*", ".*")

    if buffer[-1] == "^":
        capitalise = True
        buffer = buffer[:-1]
    elif buffer[-1] == "_":
        lowercase = True
        buffer = buffer[:-1]

    if buffer == buffer.upper() and buffer.upper() != buffer.lower():
        all_capitals = True

    buffer = buffer.lower()
    if "|" in buffer:
        buffer = choice(buffer.split("|"))

    for character in buffer:
        if character == "u":
            replacement += get_word_from_bank(bank=users, pattern=pattern)
        elif character in ("🚖", "🚗", "🚘", "🚙"):
            replacement += get_word_from_bank(file="banks/automerken.txt",
                                              pattern=pattern)
        elif character in ("🚲"):
            replacement += get_word_from_bank(file="banks/fietsmerken.txt",
                                              pattern=pattern)
        elif character in ("🚂", "🚄", "🚅", "🚞", "🚆", "🚇"):
            replacement += get_word_from_bank(file="banks/treinen.txt",
                                              pattern=pattern)
        elif character in ("✈"):
            replacement += get_word_from_bank(file="banks/vliegtuigen.txt",
                                              pattern=pattern)
            break
        elif character in ("🪐"):
            replacement += get_word_from_bank(file="banks/planeten.txt",
                                              pattern=pattern)
        elif character == "v":
            replacement += get_word_from_bank(
                bank=["a", "e", "i", "o", "u", "y"], pattern=pattern)
        elif character == "c":
            replacement += get_word_from_bank(bank=[
                "b", "c", "d", "f", "g", "h", "j", "k", "l", "m", "n", "p",
                "q", "r", "s", "t", "v", "w", "x", "z"
            ],
                                              pattern=pattern)
        elif character == "n":
            noun = get_word_from_bank(file="banks/zelfstnw.txt",
                                      pattern=pattern).split("|")
            if len(noun) == 1:
                replacement += noun[0]
            elif len(noun) == 2:
                replacement += noun[0]
            else:
                replacement += noun[0] + noun[1]
        elif character == "e":
            replacement += get_word_from_bank(file="banks/bedrijven.txt",
                                              pattern=pattern)
        elif character == "g":
            replacement += get_word_from_bank(file="banks/genres.txt",
                                              pattern=pattern)
        elif character == "i":
            which = "lit"
            if len(buffer) > 1 and buffer[1] == ":":
                bits = buffer.split(":")
                bits[1] = bits[1].replace("list", "")
                if bits[1] in ("lit", "shit"):
                    which = bits[1]
            try:
                replacement += requests.get("https://api.tnl.social/list/%s" %
                                            which,
                                            timeout=5).text
            except (requests.RequestException, ConnectionRefusedError) as e:
                replacement += "stijn" if which == "lit" else "poep"
            break
        elif character == "a":
            replacement += get_word_from_bank(file="banks/bijvnw.txt",
                                              pattern=pattern)
        elif character == "s":
            replacement += get_word_from_bank(file="banks/scheldwoorden.txt",
                                              pattern=pattern)
        elif character == "p":
            replacement += get_word_from_bank(file="banks/beroemdheden.txt",
                                              pattern=pattern)
        elif character == "b":
            replacement += get_word_from_bank(file="banks/partijen.txt",
                                              pattern=pattern)
        elif character in ("l", "🏙", "🌃", "🌆", "🌇"):
            replacement += get_word_from_bank(file="banks/plaatsen.txt",
                                              pattern=pattern)
        elif character in ("🏳", "🌍", "🌎", "🌏", "🗺"):
            replacement += get_word_from_bank(file="banks/landen.txt",
                                              pattern=pattern)
        elif character == "<":
            try:
                index = max(1, int(buffer[1:]))
            except ValueError:
                replacement = character + buffer
                break
            if index <= len(replacements):
                replacement = replacements[index - 1]
                break
        elif character == "#":
            nmin = None
            if len(buffer) > 1 and buffer[1] == ":":
                bits = buffer.split(":")[1].split("-")
                if len(bits) == 2:
                    try:
                        nmin = int(bits[0])
                        nmax = int(bits[1])
                        if nmin >= nmax:
                            raise ValueError()
                    except ValueError:
                        nmin = 0
                        nmax = 100
            if not nmin:
                nmin = 0
                nmax = 100
            replacement += str(choice(range(nmin, nmax)))
            break
        elif character == "m" and buffer[1] == ":":
            corpus = buffer.split(":")[1]
            if corpus in ("speld", "surpator", "linus"):
                try:
                    replacement += requests.get(
                        "https://api.tnl.social/markov/%s" % corpus,
                        timeout=5).text
                except (requests.RequestException,
                        ConnectionRefusedError) as e:
                    replacement += "stijn wint markovcompetitie 2020 en nu krijgt hij elke week basilicumplantje dat direct sterft " if corpus == "speld" else "Actie tegen hondelulzonneklepwafelijzers in het OV"
                break

            corpus += ".txt"
            sentence = generate(corpus, pattern=pattern, max_attempts=250)
            if sentence is not None:
                replacement = sentence
                break
            else:
                replacement += character
        else:
            replacement += character

    if lowercase:
        replacement = replacement.lower()

    if capitalise:
        replacement = replacement.title()

    if all_capitals:
        replacement = replacement.upper()

    replacements.append(replacement)
    return replacement
Example #25
def next_word(word):
	ret = markov.generate(model, n, seed=word, max_iterations=1)
	while ( (len(ret) < 2) or (len(ret[1].strip()) <1) ):
		ret = markov.generate(model, n, max_iterations=1)
	print 'next word: ' + ret[1].strip()
	return ret[1].strip()
Example #26
# build model
model = {}

lines = []
for line in open(file_name, 'r'):
	line = line.strip()
	words = line.split(' ')
	upper_words = []
	for word in words:
		upper_word = word.upper()
		# filter out non alpha but leave apostrophes
		for char in upper_word:
			if not char.isalpha() and char != "'":
				upper_word = upper_word.replace(char, "")
		upper_words.append(upper_word)
	lines.append(upper_words)

model = markov.generate_model_from_token_lists(lines, n)

# save pickle
with open('yelp_markov.pickle', 'wb') as handle:
	pickle.dump(model, handle)

print random.choice(model.keys())

# print model
print markov.generate(model, n, max_iterations=3)

def nextword(word):
	return markov.generate(model, n, seed=word, max_iterations=1)
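A matching reload step for the pickle written above, assuming a later run wants the same model back before calling markov.generate again (a sketch, not part of the original script):

import pickle

# Load the model saved as 'yelp_markov.pickle' by the script above.
with open('yelp_markov.pickle', 'rb') as handle:
    model = pickle.load(handle)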
Example #27
while c < files:
	names.append(binascii.hexlify(os.urandom(2)).decode('ascii') + '.html')
	url = '/trumpify/trump/' + names[c]
	pagelist.append('randomlinks[' + str(c) + ']=\"' + url + '\" \n')
	i = open('site/template.html', 'r')
	itext = i.read()
	i.close()	
	c += 1
for page in pagelist:
	pagestr += page
i = open('site/index.html', 'w')
itext = itext.replace(jsinsert, pagestr)
i.write(itext)
i.close()
for name in names:
	seed = random.randint(0, 1)
	print(name + '\n')
	f = open('site/index.html', 'r')
	ftext = f.read()
	f.close()
	if seed == 0:
		cmd = subprocess.check_output(['sh','dada-execute','trumpspeech.pb'], universal_newlines=True).replace('\n', '')
	else:
		cmd = markov.generate('trumpspeech.txt', 400)
	time.sleep(1)
	newtext = ftext.replace(insertspot, startinsert + cmd + endinsert)
	f = open('site/trump/' + name, 'w')
	f.write(newtext)
	f.close()
print(pagestr)
Example #28
def handle_privmsg(sock,line,state_change,state_file,lines_since_write,lines_since_sort_chk):
	global gen_cmd
	
	#get some information (user, nick, host, etc.)
	success,info,line=get_token(line,' ')
	info=info.lstrip(':')
	success,nick,info=get_token(info,'!')
	success,realname,info=get_token(info,'@')
	success,hostmask,info=get_token(info,' ')
	success,privmsg_cmd,line=get_token(line,' ')
	success,channel,line=get_token(line,' ')
	
	if(line.startswith(':')):
		line=line[1:]
	
	#debug
	log_line('['+channel+'] <'+nick+'> '+line)
	
	#ignore blacklisted users,
	#but throw some output on the console so we know that's happening
	if nick in ignored_users:
		print('Warn: ignored line from '+nick+' because their nick is blacklisted (ignored)')
		return (lines_since_write,lines_since_sort_chk)
	
	#strip trailing whitespace because users expect that to not matter
	line=line.rstrip(' ').rstrip("\t")
	
	#and now because whitespace is gone it's possible to have a blank line
	#so ignore blank lines
	if(line==''):
		return (lines_since_write,lines_since_sort_chk)
	
	#if they PM'd us, then PM 'em right back
	#that'll show 'em
	is_pm=False
	if(channel==bot_nick):
		is_pm=True
		channel=nick
	
	success,cmd,line_post_cmd=get_token(line,' ')
	
	dbg_str=''
	
	#at ente's request; allow users in "debug" channels to read the bot's mind
#	net_dbg=False
	net_dbg=True
	
	cmd_esc='!'
	
	#support question/answer style markov chain-ing stuff
	if(cmd.startswith(bot_nick)):
		output,dbg_str=markov.gen_from_str(state_change,use_pg,db_login,irc_str_map(line_post_cmd),random.randint(0,1)+1,retries_left=3)
		
		#if it didn't have that word as a starting state,
		#then just go random (fall back functionality)
		if(output==''):
			output,dbg_str=markov.generate(state_change,use_pg=use_pg,db_login=db_login,back_gen=False)
		
		#prevent generating commands directed towards other bots,
		#if configured to do that
		if(not gen_cmd):
			if(output.startswith('!')):
				output='\\'+output
		
		dbg_str='[dbg] generated from line \"'+line_post_cmd+'\"'+"\n"+dbg_str
		
		py3queueln(sock,'PRIVMSG '+channel+' :'+output,1)
		
		#because people often talk to the bot in complete phrases,
		#go ahead and include these lines in the learning set
		lines_since_write,lines_since_sort_chk=learn_from(line,state_change,state_file,lines_since_write,lines_since_sort_chk)
		
		dbg_output(sock,dbg_str)
		
		return (lines_since_write,lines_since_sort_chk)
		
	#if this was a command for the bot
	cmd_handled,cmd_dbg_str=handle_bot_cmd(sock,cmd_esc,cmd,line_post_cmd,channel,nick,is_pm,state_change,use_pg,db_login)
	if(cmd_handled):
		#then it's handled and we're done
		
		#debug if the command gave us a debug string
		dbg_str=cmd_dbg_str
	#if it wasn't a command, then add this to the markov chain state and update the file on disk
	else:
		#if this was a pm then let the user know how to get help if they want it
		if(is_pm):
			py3queueln(sock,'PRIVMSG '+channel+' :learning... (use '+cmd_esc+'help to get help, or '+cmd_esc+'wut to generate text)',3)
		
		lines_since_write,lines_since_sort_chk=learn_from(line,state_change,state_file,lines_since_write,lines_since_sort_chk)
	
	#if we're debugging over the network, then output to the debug channels
	if(net_dbg):
		dbg_output(sock,dbg_str)
	
	return (lines_since_write,lines_since_sort_chk)
Example #29
 def test_generate(self):
     corpus = parse("This is some text.")
     generated = generate(corpus)
     self.assertEqual("This is some text.", generated)
Example #30
import sys
import markov

text = sys.stdin.read()
model = markov.build_model(text.split(), 4)
generated = markov.generate(model, 4)
print ' '.join(generated)
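The tests earlier in this listing show the model shape build_model has to produce here: n-item tuples of consecutive tokens mapped to lists of observed successors, with None marking the end of the corpus. A hedged sketch of a builder satisfying that contract for the whitespace-split tokens used above; the real markov.build_model may differ:

def build_model_sketch(tokens, n):
    # For every run of n consecutive tokens, record the token that follows it
    # (None at the very end so generation can terminate).
    tokens = list(tokens)
    model = {}
    for i in range(len(tokens) - n + 1):
        state = tuple(tokens[i:i + n])
        nxt = tokens[i + n] if i + n < len(tokens) else None
        model.setdefault(state, []).append(nxt)
    return model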
Example #31
def nextword(word):
	return markov.generate(model, 3, seed=word, max_iterations=1)
Example #32
import markov

ngram = 3

text = open("./prince.txt").read()
model = markov.build_model(text, ngram)

print ''.join(markov.generate(model, ngram, None, 500))
Example #33
 def test_generate(self):
     model = {('a', 'b'): ['c'], ('b', 'c'): ['d'], ('c', 'd'): [None]}
     self.assertEqual(['a', 'b', 'c', 'd'],
                      markov.generate(model, 2, ('a', 'b')))
Example #34
import sys
import markov

text = sys.stdin.read()
model = markov.build_model(text.split(), 3)
generated = markov.generate(model, 3)
print ' '.join(generated)
Example #35
def nextword(word):
	return markov.generate(model, n, seed=word, max_iterations=1)
Example #36
def nextword(word):
	return markov.generate(model, 3, seed=word, max_iterations=1)


def writesong(songlength, first):
	song = [first]
	for i in range(songlength):
		song += nextword(str(song[-1]))
	return song

# choose a random song length from list of song lengths in corpus
lengthofsong = random.choice([len(x) for x in corpus if len(x) > 10])
print "Song length will be {}".format(lengthofsong)


firstnote = markov.generate(model, n, max_iterations=3)[0]
# print "first note: {}".format(firstnote)

print "Here is the song in abc format:"
song = writesong(lengthofsong, firstnote)
dob = datetime.datetime.now().strftime('%H:%M')
print song

# make song file
songname = "my_songs/markov_song_{}.abc".format(dob)
print "\n\nYou can find the song in {}".format(songname)
lastpart = lengthofsong - lengthofsong%4

# hack to include dictionary at the beginning of every abc file
# will add a more sophisticated way to generate the values in the future
title = "Markov Song {}".format(dob)
Example #37
import clean_chat
import markov
import reportlab_markov
import sentiment_analysis

# remove empty lines
chat = clean_chat.remove_white_lines('chat.txt')

# replace names
chat = clean_chat.replace_names(chat.name)

# generate chat WORKS WELL
generated = markov.generate(chat.name)

# make times chronological again WORKS WELL
timestamps, chronological = clean_chat.make_time_chronological(
    'chat.txt', generated.name)

# get sentiments, store in file, prepare everything to be able to insert sentiment pictures
sentiment_analysis.compute_sentiment(chronological.name)
sentiment_senders = sentiment_analysis.process_sentiment(chronological.name)

# add titles
with_titles = clean_chat.add_titles_clean_timestamps(timestamps,
                                                     chronological.name)

# insert sentiments, needed for image input
final = sentiment_analysis.insert_sentiments(with_titles.name)

# prepare images
reportlab_markov.prepare_images()
Example #38
        chain = load_chain(type, c)
        chains[chain_key] = chain

        chain_z_unique = np.zeros((0, 100, 1, 1))
        chain_z = chain.z_vals
        print(chain_z[0:1].shape)
        chain_z_unique = np.append(chain_z_unique, chain_z[0:1], axis=0)


        for i in range(1, len(chain)):
            if not np.array_equal(chain_z[i],chain_z_unique[-1]):
                chain_z_unique = np.append(chain_z_unique, chain_z[i:i+1], axis=0)
        
        total += chain_z.shape[0]
        accepted += chain_z_unique.shape[0]

        chain_z_avg = np.average(chain_z_unique, axis=0)
        chain_z_avg = np.expand_dims(chain_z_avg, axis=0)
        # print(chain_z_unique.shape)
        # print(chain_z_avg.shape)
        chain_avg = np.average(chain_z, axis=0)
        chain_avg = np.expand_dims(chain_avg, axis=0)

        img = markov.generate(chain_avg)

        image_grid[(64 * t):64 * (t+1), (64 * c):64 * (c+1)] = img

# markov.save_image(image_grid, 'image_grid_1.png')
print(accepted)
print(total)
print(accepted/total)
Example #39
def handle_bot_cmd(sock, cmd_esc, cmd, line_post_cmd, channel, nick, is_pm,
                   state_change, use_pg, db_login):
    global gen_cmd
    global unit_conv_list
    handled = False

    dbg_str = ''

    #check if this was a bot command
    if ((cmd == (cmd_esc + 'wut')) or (cmd == cmd_esc)):
        output = ''
        if (line_post_cmd != ''):
            output, dbg_str = markov.gen_from_str(state_change,
                                                  use_pg,
                                                  db_login,
                                                  irc_str_map(line_post_cmd),
                                                  random.randint(0, 1) + 1,
                                                  retries_left=3)
        if (output == ''):
            output, dbg_str = markov.generate(state_change,
                                              use_pg=use_pg,
                                              db_login=db_login,
                                              back_gen=False)

        #prevent generating commands directed towards other bots,
        #if configured to do that
        if (not gen_cmd):
            if (output.startswith('!')):
                output = '\\' + output

        py3queueln(sock, 'PRIVMSG ' + channel + ' :' + output, 1)
        dbg_str = '[dbg] generated from line \"' + line_post_cmd + '\"' + "\n" + dbg_str
        handled = True
    elif (cmd == (cmd_esc + 'help')):
        if (is_pm):
            py3queueln(
                sock,
                'PRIVMSG ' + channel + ' :This is a simple markov chain bot',
                3)
            py3queueln(
                sock, 'PRIVMSG ' + channel + ' :' + cmd_esc +
                'wut                       -> generate text based on markov chains',
                3)
            py3queueln(
                sock, 'PRIVMSG ' + channel + ' :' + cmd_esc +
                'help                      -> displays this command list', 3)
            py3queueln(
                sock, 'PRIVMSG ' + channel + ' :' + cmd_esc +
                'shup [min nice lvl]       -> clears low-priority messages from sending queue (authorized users can clear higher priority messages)',
                3)
            py3queueln(
                sock, 'PRIVMSG ' + channel + ' :' + cmd_esc +
                'part                      -> parts current channel (you can invite me to get back)',
                3)
            py3queueln(
                sock, 'PRIVMSG ' + channel + ' :' + cmd_esc +
                'calc <expression>         -> simple calculator; supports +,-,*,/,and ^; uses rpn internally',
                3)
            #			py3queueln(sock,'PRIVMSG '+channel+' :'+cmd_esc+'wiki <topic>              -> [EXPERIMENTAL] grabs first paragraph from wikipedia',3)
            py3queueln(
                sock, 'PRIVMSG ' + channel + ' :' + cmd_esc +
                'source                    -> links the github url for this bot\'s source code',
                3)
            py3queueln(
                sock, 'PRIVMSG ' + channel + ' :' + cmd_esc +
                'omdb <movie name>         -> grabs movie information from the open movie database',
                3)
            py3queueln(
                sock, 'PRIVMSG ' + channel + ' :' + cmd_esc +
                'splchk <word> [edit dist] -> checks given word against a dictionary and suggests fixes',
                3)
            py3queueln(
                sock, 'PRIVMSG ' + channel + ' :' + cmd_esc +
                'dieroll [sides]           -> generates random number in range [1,sides]',
                3)
            py3queueln(
                sock, 'PRIVMSG ' + channel + ' :' + cmd_esc +
                'time [utc offset tz]      -> tells current UTC time, or if a timezone is given, current time in that timezone',
                3)
            py3queueln(
                sock, 'PRIVMSG ' + channel + ' :' + cmd_esc +
                'timecalc <%R> <tz1> <tz2> -> tells what the given time (%R == hours:minutes on a 24-hour clock) at the first utc-offset timezone will be at the second utc-offset timezone',
                3)
            for conversion in unit_conv_list:
                help_str = 'PRIVMSG ' + channel + ' :' + cmd_esc + conversion.from_abbr + '->' + conversion.to_abbr + ' <value>'
                while (len(help_str) <
                       len('PRIVMSG ' + channel + ' :' + cmd_esc +
                           'XXXXXXXXXXXXXXXXXXXXXXXXXX')):
                    help_str += ' '
                help_str += '-> converts ' + conversion.dimension + ' from ' + conversion.from_disp + ' to ' + conversion.to_disp
                py3queueln(sock, help_str, 3)

        else:
            py3queueln(
                sock, 'PRIVMSG ' + channel +
                ' :This is a simple markov chain bot; use ' + cmd_esc +
                'wut or address me by name to generate text; PM !help for more detailed help',
                3)

        handled = True
    #clear (low-priority) messages from the output queue
    elif ((cmd == (cmd_esc + 'shup')) or (cmd == (cmd_esc + 'shoo'))):
        #the minimum nice value to clear messages from the output queue
        nice_lvl = 4
        try:
            nice_lvl = int(line_post_cmd.strip(' '))
        except ValueError:
            nice_lvl = 4

        #authorized users can suppress high-priority output
        if (nick in authed_users):
            nice_lvl = max(nice_lvl, 1)
        #unauthorized users can only suppress low-priority output
        else:
            nice_lvl = max(nice_lvl, 4)

        py3clearq(nice_lvl)
        py3queueln(
            sock, 'PRIVMSG ' + channel +
            ' :Info: outgoing message queue cleared of low-priority messages (nice_lvl='
            + str(nice_lvl) + ')', 1)
        handled = True
    elif (cmd == (cmd_esc + 'part')):
        if (not is_pm):
            py3queueln(
                sock, 'PART ' + channel +
                ' :Goodbye for now (you can invite me back any time)', 1)
        else:
            py3queueln(
                sock, 'PRIVMSG ' + channel +
                ' :part from where, asshole? this is a PM!', 1)
        handled = True
    #conversions are their own function now
    elif (handle_conversion(sock, cmd_esc, cmd, line_post_cmd, channel)):
        handled = True
    elif (cmd == (cmd_esc + 'calc')):
        try:
            err_msgs, result = rpn.rpn_eval(rpn.rpn_translate(line_post_cmd))
            if (len(result) == 1):
                py3queueln(sock, 'PRIVMSG ' + channel + ' :' + str(result[0]),
                           1)
            else:
                py3queueln(
                    sock, 'PRIVMSG ' + channel +
                    ' :Warn: An error occurred during evaluation; simplified RPN expression is '
                    + str(result), 1)
                for err_idx in range(0, len(err_msgs)):
                    py3queueln(
                        sock, 'PRIVMSG ' + channel + ' :Err #' + str(err_idx) +
                        ': ' + str(err_msgs[err_idx]), 3)
        #TODO: figure out why divide-by-0 is triggering a ValueError here, it should be handled elsewhere
        except ValueError:
            py3queueln(
                sock, 'PRIVMSG ' + channel +
                ' :Err: Could not parse expression (ValueError) (divide by zero?)',
                1)
        except IndexError:
            py3queueln(
                sock, 'PRIVMSG ' + channel +
                ' :Err: Could not parse expression (IndexError) (mismatched parens?)',
                1)
        except:
            py3queueln(
                sock, 'PRIVMSG ' + channel +
                ' :Err: Unhandled exception in rpn parsing; tell neutrak the command you used to get this and he\'ll look into it',
                1)
        handled = True
    elif (cmd == (cmd_esc + 'wiki')):
        #disabled because we have another bot to do this now
        return (True, dbg_str)

        #TODO: handle more specific errors; this is super nasty but should keep the bot from crashing
        try:
            wiki_title = line_post_cmd.replace(' ', '_')
            wiki_url = 'https://en.wikipedia.org/wiki/' + wiki_title
            response = http_cat.get_page(wiki_url)

            response_type = response[0].split("\n")[0].rstrip("\r")

            #if we get a 301 moved and the page requested was lower case then
            #before giving up try it as upper-case
            if ((response_type.find('301 Moved') >= 0)
                    and (line_post_cmd[0] == line_post_cmd[0].lower())):
                return handle_bot_cmd(sock, cmd_esc, cmd,
                                      (line_post_cmd[0].upper()) +
                                      (line_post_cmd[1:]), channel, nick,
                                      is_pm, state_change, use_pg, db_login)

            if (response_type.find('200 OK') < 0):
                py3queueln(
                    sock,
                    'PRIVMSG ' + channel + ' :Err: \"' + response_type + '\"',
                    1)
            else:
                wiki_text = response[1]
                if (wiki_text == ''):
                    py3queueln(
                        sock, 'PRIVMSG ' + channel +
                        ' :Err: wiki got null page text', 1)
                else:
                    #get the first paragraph and throw out nested html tags
                    wiki_text = http_cat.html_parse_first(
                        wiki_text, '<p>', '</p>')
                    max_p_len = 768
                    wiki_text = wiki_text[0:max_p_len]
                    line_len = 300
                    while (wiki_text != ''):
                        line_delimiter = '. '
                        prd_idx = wiki_text.find(line_delimiter)
                        if (prd_idx >= 0):
                            prd_idx += len(line_delimiter)
                            py3queueln(
                                sock, 'PRIVMSG ' + channel + ' :' +
                                wiki_text[0:prd_idx], 1)
                            wiki_text = wiki_text[prd_idx:]
                        else:
                            py3queueln(
                                sock, 'PRIVMSG ' + channel + ' :' +
                                wiki_text[0:line_len], 1)
                            wiki_text = wiki_text[line_len:]
                py3queueln(sock, 'PRIVMSG ' + channel + ' :' + wiki_url,
                           1)  #link the wiki page itself?
        except:
            py3queueln(
                sock,
                'PRIVMSG ' + channel + ' :Err: wiki failed to get page text',
                1)
        handled = True
    elif (cmd == (cmd_esc + 'source')):
        py3queueln(
            sock,
            'PRIVMSG ' + channel + ' :bot source code: ' + SOURCE_CODE_URL, 1)
        handled = True
    elif ((cmd == (cmd_esc + 'omdb')) or (cmd == (cmd_esc + 'imdb'))):
        handle_omdb(sock, cmd_esc, cmd, line_post_cmd, channel, is_pm)
        handled = True
    elif ((cmd == (cmd_esc + 'splchk')) or (cmd == (cmd_esc + 'spellcheck'))
          or (cmd == (cmd_esc + 'sp')) or (cmd == (cmd_esc + 'spell'))):
        handle_spellcheck(sock, cmd_esc, cmd, line_post_cmd, channel, is_pm)
        handled = True
    elif (cmd == (cmd_esc + 'dieroll')):
        sides = 6
        if (line_post_cmd != ''):
            try:
                sides = int(line_post_cmd)
            except ValueError:
                py3queueln(
                    sock, 'PRIVMSG ' + channel +
                    ' :Warn: Invalid number of sides, assuming d-6', 1)
                sides = 6
        if (sides < 1):
            py3queueln(
                sock, 'PRIVMSG ' + channel +
                ' :Warn: Number of sides less than 1, setting number of sides 1 (this will return 1)',
                1)
            sides = 1

        value = random.randint(1, sides)
        py3queueln(
            sock, 'PRIVMSG ' + channel + ' :Rolled a ' + str(value) +
            ' with a d' + str(sides), 1)

        handled = True
    elif (cmd == (cmd_esc + 'time')):
        tz = 0
        if (line_post_cmd != ''):
            try:
                tz = float(line_post_cmd)
            except ValueError:
                py3queueln(
                    sock, 'PRIVMSG ' + channel + ' :Err: ' + line_post_cmd +
                    ' is not a valid UTC-offset timezone; will give UTC time instead...',
                    1)
        if (abs(tz) > 24):
            py3queueln(
                sock, 'PRIVMSG ' + channel +
                ' :Err: timezone offsets from utc cannot be outside the range [-24,24] because that makes no sense; giving UTC time...'
            )
            tz = 0
        current_time = time.asctime(time.gmtime(time.time() + (tz * 60 * 60)))
        py3queueln(
            sock, 'PRIVMSG ' + channel + ' :Current time is ' + current_time +
            ' (UTC ' + ('+' + str(tz) if tz >= 0 else str(tz)) + ')')
        handled = True
    elif (cmd == (cmd_esc + 'timecalc')):
        handle_timecalc(sock, cmd_esc, cmd, line_post_cmd, channel, is_pm)
        handled = True
    elif (cmd.startswith(cmd_esc)):
        try:
            #alternate conversion syntax
            #check if the "command" is a valid floating point number
            conv_arg = float(cmd[len(cmd_esc):])

            #the line after the "command" is the command checked against the conversion list
            #some arguments here are a little weird because they're being transposed
            found_conversion = False
            for conversion in unit_conv_list:
                #we found the requested conversion, so do the thing and output the result
                #note that "X to Y" gets translated here as "X->Y"
                if (conversion.chk_cmd(
                        cmd_esc,
                        cmd_esc + line_post_cmd.replace(' to ', '->'))):
                    conversion.output_conv(sock, channel, conv_arg)
                    found_conversion = True

            #this was a valid number, but something went wrong during conversion
            if (not found_conversion):
                py3queueln(
                    sock, 'PRIVMSG ' + channel +
                    ' :Err: Conversion not found ' + line_post_cmd, 1)

            #in any case if we got a number don't handle this line any more
            handled = True
        #the "command" wasn't a valid floating point number,
        #so output an error for PM, or just do nothing in a channel
        except ValueError:
            if (is_pm):
                py3queueln(
                    sock,
                    'PRIVMSG ' + channel + ' :Warn: Invalid command: \"' +
                    cmd + '\"; see ' + cmd_esc + 'help for help', 1)

        #this prevents the bot from learning from unrecognized ! commands
        #(which are usually meant for another bot)


#		handled=True
#this was added at the request of NuclearWaffle, in an attempt, and I'm quoting here
#to "f**k with Proview"
#	elif((len(cmd)>1) and odd_quest(cmd)):
#		output,dbg_str=markov.generate(state_change,use_pg=use_pg,db_login=db_login,back_gen=False)
#
#		#prevent generating commands directed towards other bots,
#		#if configured to do that
#		if(not gen_cmd):
#			if(output.startswith('!')):
#				output='\\'+output
#
#		py3queueln(sock,'PRIVMSG '+channel+' :'+output,1)
#		handled=True

    return (handled, dbg_str)
Example #40
def handle_privmsg(sock, line, state_change, state_file, lines_since_write,
                   lines_since_sort_chk):
    global gen_cmd

    #get some information (user, nick, host, etc.)
    success, info, line = get_token(line, ' ')
    info = info.lstrip(':')
    success, nick, info = get_token(info, '!')
    success, realname, info = get_token(info, '@')
    success, hostmask, info = get_token(info, ' ')
    success, privmsg_cmd, line = get_token(line, ' ')
    success, channel, line = get_token(line, ' ')

    if (line.startswith(':')):
        line = line[1:]

    #debug
    log_line('[' + channel + '] <' + nick + '> ' + line)

    #ignore blacklisted users,
    #but throw some output on the console so we know that's happening
    if nick in ignored_users:
        print('Warn: ignored line from ' + nick +
              ' because their nick is blacklisted (ignored)')
        return (lines_since_write, lines_since_sort_chk)

    #strip trailing whitespace because users expect that to not matter
    line = line.rstrip(' ').rstrip("\t")

    #and now because whitespace is gone it's possible to have a blank line
    #so ignore blank lines
    if (line == ''):
        return (lines_since_write, lines_since_sort_chk)

    #if they PM'd us, then PM 'em right back
    #that'll show 'em
    is_pm = False
    if (channel == bot_nick):
        is_pm = True
        channel = nick

    success, cmd, line_post_cmd = get_token(line, ' ')

    dbg_str = ''

    #at ente's request; allow users in "debug" channels to read the bot's mind
    #	net_dbg=False
    net_dbg = True

    cmd_esc = '!'

    #support question/answer style markov chain-ing stuff
    if (cmd.startswith(bot_nick)):
        output, dbg_str = markov.gen_from_str(state_change,
                                              use_pg,
                                              db_login,
                                              irc_str_map(line_post_cmd),
                                              random.randint(0, 1) + 1,
                                              retries_left=3)

        #if it didn't have that word as a starting state,
        #then just go random (fall back functionality)
        if (output == ''):
            output, dbg_str = markov.generate(state_change,
                                              use_pg=use_pg,
                                              db_login=db_login,
                                              back_gen=False)

        #prevent generating commands directed towards other bots,
        #if configured to do that
        if (not gen_cmd):
            if (output.startswith('!')):
                output = '\\' + output

        dbg_str = '[dbg] generated from line \"' + line_post_cmd + '\"' + "\n" + dbg_str

        py3queueln(sock, 'PRIVMSG ' + channel + ' :' + output, 1)

        #because people often talk to the bot in complete phrases,
        #go ahead and include these lines in the learning set
        lines_since_write, lines_since_sort_chk = learn_from(
            line, state_change, state_file, lines_since_write,
            lines_since_sort_chk)

        dbg_output(sock, dbg_str)

        return (lines_since_write, lines_since_sort_chk)

    #if this was a command for the bot
    cmd_handled, cmd_dbg_str = handle_bot_cmd(sock, cmd_esc, cmd,
                                              line_post_cmd, channel, nick,
                                              is_pm, state_change, use_pg,
                                              db_login)
    if (cmd_handled):
        #then it's handled and we're done

        #debug if the command gave us a debug string
        dbg_str = cmd_dbg_str
    #if it wasn't a command, then add this to the markov chain state and update the file on disk
    else:
        #if this was a pm then let the user know how to get help if they want it
        if (is_pm):
            py3queueln(
                sock, 'PRIVMSG ' + channel + ' :learning... (use ' + cmd_esc +
                'help to get help, or ' + cmd_esc + 'wut to generate text)', 3)

        lines_since_write, lines_since_sort_chk = learn_from(
            line, state_change, state_file, lines_since_write,
            lines_since_sort_chk)

    #if we're debugging over the network, then output to the debug channels
    if (net_dbg):
        dbg_output(sock, dbg_str)

    return (lines_since_write, lines_since_sort_chk)
Example #41
# --------

supreme_conversations = open("data/supreme.conversations.txt")
movie_quotes = open("data/moviequotes.memorable_quotes.txt")

data = ""

for line in movie_quotes:
    line = line.strip()
    line = line.split('+++$+++')
    data += line[-1]

words = data.split()
words[0:2]
model = markov.build_model(words, 2)
" ".join(markov.generate(model, 2, ['the', 'police']))

# get the current text
msword_text = asrun(current_text).split('\r')
decoded_text = []
for line in msword_text:
    decoded_text.append(line.decode('utf-8').lower())


for i in range(2):
    asrun(linebreak()) # add a new line and then begin writing

for line in decoded_text:
    if len(line) > 2:
        seed = line.split()[0:2]
        generated = markov.generate(model, 2, seed)
Example #42
 def test_generate_number_of_sentences(self):
     corpus = parse("This is some text. This is some more text. And some more. Have some more text.")
     for x in range(1, 5):
         generated = generate(corpus, x)
         number_of_sentences = generated.count(".")
         self.assertEqual(x, number_of_sentences)
Example #43
def handle_bot_cmd(sock,cmd_esc,cmd,line_post_cmd,channel,nick,is_pm,state_change,use_pg,db_login):
	global gen_cmd
	global unit_conv_list
	handled=False
	
	dbg_str=''
	
	#check if this was a bot command
	if((cmd==(cmd_esc+'wut')) or (cmd==cmd_esc)):
		output=''
		if(line_post_cmd!=''):
			output,dbg_str=markov.gen_from_str(state_change,use_pg,db_login,irc_str_map(line_post_cmd),random.randint(0,1)+1,retries_left=3)
		if(output==''):
			output,dbg_str=markov.generate(state_change,use_pg=use_pg,db_login=db_login,back_gen=False)
		
		#prevent generating commands directed towards other bots,
		#if configured to do that
		if(not gen_cmd):
			if(output.startswith('!')):
				output='\\'+output
		
		py3queueln(sock,'PRIVMSG '+channel+' :'+output,1)
		dbg_str='[dbg] generated from line \"'+line_post_cmd+'\"'+"\n"+dbg_str
		handled=True
	elif(cmd==(cmd_esc+'help')):
		if(is_pm):
			py3queueln(sock,'PRIVMSG '+channel+' :This is a simple markov chain bot',3)
			py3queueln(sock,'PRIVMSG '+channel+' :'+cmd_esc+'wut                       -> generate text based on markov chains',3)
			py3queueln(sock,'PRIVMSG '+channel+' :'+cmd_esc+'help                      -> displays this command list',3)
			py3queueln(sock,'PRIVMSG '+channel+' :'+cmd_esc+'shup [min nice lvl]       -> clears low-priority messages from sending queue (authorized users can clear higher priority messages)',3)
			py3queueln(sock,'PRIVMSG '+channel+' :'+cmd_esc+'part                      -> parts current channel (you can invite me to get back)',3)
			py3queueln(sock,'PRIVMSG '+channel+' :'+cmd_esc+'calc <expression>         -> simple calculator; supports +,-,*,/,and ^; uses rpn internally',3)
#			py3queueln(sock,'PRIVMSG '+channel+' :'+cmd_esc+'wiki <topic>              -> [EXPERIMENTAL] grabs first paragraph from wikipedia',3)
			py3queueln(sock,'PRIVMSG '+channel+' :'+cmd_esc+'source                    -> links the github url for this bot\'s source code',3)
			py3queueln(sock,'PRIVMSG '+channel+' :'+cmd_esc+'omdb <movie name>         -> grabs movie information from the open movie database',3)
			py3queueln(sock,'PRIVMSG '+channel+' :'+cmd_esc+'splchk <word> [edit dist] -> checks given word against a dictionary and suggests fixes',3)
			py3queueln(sock,'PRIVMSG '+channel+' :'+cmd_esc+'dieroll [sides]           -> generates random number in range [1,sides]',3)
			py3queueln(sock,'PRIVMSG '+channel+' :'+cmd_esc+'time [utc offset tz]      -> tells current UTC time, or if a timezone is given, current time in that timezone',3)
			py3queueln(sock,'PRIVMSG '+channel+' :'+cmd_esc+'timecalc <%R> <tz1> <tz2> -> tells what the given time (%R == hours:minutes on a 24-hour clock) at the first utc-offset timezone will be at the second utc-offset timezone',3)
			for conversion in unit_conv_list:
				help_str='PRIVMSG '+channel+' :'+cmd_esc+conversion.from_abbr+'->'+conversion.to_abbr+' <value>'
				while(len(help_str)<len('PRIVMSG '+channel+' :'+cmd_esc+'XXXXXXXXXXXXXXXXXXXXXXXXXX')):
					help_str+=' '
				help_str+='-> converts '+conversion.dimension+' from '+conversion.from_disp+' to '+conversion.to_disp
				py3queueln(sock,help_str,3)

		else:
			py3queueln(sock,'PRIVMSG '+channel+' :This is a simple markov chain bot; use '+cmd_esc+'wut or address me by name to generate text; PM !help for more detailed help',3)
			
		handled=True
	#clear (low-priority) messages from the output queue
	elif((cmd==(cmd_esc+'shup')) or (cmd==(cmd_esc+'shoo'))):
		#the minimum nice value to clear messages from the output queue
		nice_lvl=4
		try:
			nice_lvl=int(line_post_cmd.strip(' '))
		except ValueError:
			nice_lvl=4
		
		#authorized users can suppress high-priority output
		if(nick in authed_users):
			nice_lvl=max(nice_lvl,1)
		#unauthorized users can only suppress low-priority output
		else:
			nice_lvl=max(nice_lvl,4)
		
		py3clearq(nice_lvl)
		py3queueln(sock,'PRIVMSG '+channel+' :Info: outgoing message queue cleared of low-priority messages (nice_lvl='+str(nice_lvl)+')',1)
		handled=True
	elif(cmd==(cmd_esc+'part')):
		if(not is_pm):
			py3queueln(sock,'PART '+channel+' :Goodbye for now (you can invite me back any time)',1)
		else:
			py3queueln(sock,'PRIVMSG '+channel+' :part from where, asshole? this is a PM!',1)
		handled=True
	#conversions are their own function now
	elif(handle_conversion(sock,cmd_esc,cmd,line_post_cmd,channel)):
		handled=True
	elif(cmd==(cmd_esc+'calc')):
		try:
			err_msgs,result=rpn.rpn_eval(rpn.rpn_translate(line_post_cmd))
			if(len(result)==1):
				py3queueln(sock,'PRIVMSG '+channel+' :'+str(result[0]),1)
			else:
				py3queueln(sock,'PRIVMSG '+channel+' :Warn: An error occurred during evaluation; simplified RPN expression is '+str(result),1)
				for err_idx in range(0,len(err_msgs)):
					py3queueln(sock,'PRIVMSG '+channel+' :Err #'+str(err_idx)+': '+str(err_msgs[err_idx]),3)
		#TODO: figure out why divide-by-0 is triggering a ValueError here, it should be handled elsewhere
		except ValueError:
			py3queueln(sock,'PRIVMSG '+channel+' :Err: Could not parse expression (ValueError) (divide by zero?)',1)
		except IndexError:
			py3queueln(sock,'PRIVMSG '+channel+' :Err: Could not parse expression (IndexError) (mismatched parens?)',1)
		except:
			py3queueln(sock,'PRIVMSG '+channel+' :Err: Unhandled exception in rpn parsing; tell neutrak the command you used to get this and he\'ll look into it',1)
		handled=True
	elif(cmd==(cmd_esc+'wiki')):
		#disabled because we have another bot to do this now
		return (True,dbg_str)
		
		#TODO: handle more specific errors; this is super nasty but should keep the bot from crashing
		try:
			wiki_title=line_post_cmd.replace(' ','_')
			wiki_url='https://en.wikipedia.org/wiki/'+wiki_title
			response=http_cat.get_page(wiki_url)
			
			response_type=response[0].split("\n")[0].rstrip("\r")
			
			#if we get a 301 moved and the page requested was lower case then
			#before giving up try it as upper-case
			if((response_type.find('301 Moved')>=0) and (line_post_cmd[0]==line_post_cmd[0].lower())):
				return handle_bot_cmd(sock,cmd_esc,
					cmd,
					(line_post_cmd[0].upper())+(line_post_cmd[1:]),
					channel,
					nick,is_pm,state_change,use_pg,db_login)
			
			if(response_type.find('200 OK')<0):
				py3queueln(sock,'PRIVMSG '+channel+' :Err: \"'+response_type+'\"',1)
			else:
				wiki_text=response[1]
				if(wiki_text==''):
					py3queueln(sock,'PRIVMSG '+channel+' :Err: wiki got null page text',1)
				else:
					#get the first paragraph and throw out nested html tags
					wiki_text=http_cat.html_parse_first(wiki_text,'<p>','</p>')
					max_p_len=768
					wiki_text=wiki_text[0:max_p_len]
					line_len=300
					while(wiki_text!=''):
						line_delimiter='. '
						prd_idx=wiki_text.find(line_delimiter)
						if(prd_idx>=0):
							prd_idx+=len(line_delimiter)
							py3queueln(sock,'PRIVMSG '+channel+' :'+wiki_text[0:prd_idx],1)
							wiki_text=wiki_text[prd_idx:]
						else:
							py3queueln(sock,'PRIVMSG '+channel+' :'+wiki_text[0:line_len],1)
							wiki_text=wiki_text[line_len:]
				py3queueln(sock,'PRIVMSG '+channel+' :'+wiki_url,1) #link the wiki page itself?
		except:
			py3queueln(sock,'PRIVMSG '+channel+' :Err: wiki failed to get page text',1)
		handled=True
	elif(cmd==(cmd_esc+'source')):
		py3queueln(sock,'PRIVMSG '+channel+' :bot source code: '+SOURCE_CODE_URL,1)
		handled=True
	elif((cmd==(cmd_esc+'omdb')) or (cmd==(cmd_esc+'imdb'))):
		handle_omdb(sock,cmd_esc,cmd,line_post_cmd,channel,is_pm)
		handled=True
	elif((cmd==(cmd_esc+'splchk')) or (cmd==(cmd_esc+'spellcheck')) or (cmd==(cmd_esc+'sp')) or (cmd==(cmd_esc+'spell'))):
		handle_spellcheck(sock,cmd_esc,cmd,line_post_cmd,channel,is_pm)
		handled=True
	elif(cmd==(cmd_esc+'dieroll')):
		sides=6
		if(line_post_cmd!=''):
			try:
				sides=int(line_post_cmd)
			except ValueError:
				py3queueln(sock,'PRIVMSG '+channel+' :Warn: Invalid number of sides, assuming d-6',1)
				sides=6
		if(sides<1):
			py3queueln(sock,'PRIVMSG '+channel+' :Warn: Number of sides less than 1, setting number of sides 1 (this will return 1)',1)
			sides=1
		
		value=random.randint(1,sides)
		py3queueln(sock,'PRIVMSG '+channel+' :Rolled a '+str(value)+' with a d'+str(sides),1)
		
		handled=True
	elif(cmd==(cmd_esc+'time')):
		tz=0
		if(line_post_cmd!=''):
			try:
				tz=float(line_post_cmd)
			except ValueError:
				py3queueln(sock,'PRIVMSG '+channel+' :Err: '+line_post_cmd+' is not a valid UTC-offset timezone; will give UTC time instead...',1)
		if(abs(tz)>24):
			py3queueln(sock,'PRIVMSG '+channel+' :Err: timezone offsets from utc cannot be outside the range [-24,24] because that makes no sense; giving UTC time...')
			tz=0
		current_time=time.asctime(time.gmtime(time.time()+(tz*60*60)))
		py3queueln(sock,'PRIVMSG '+channel+' :Current time is '+current_time+' (UTC '+('+'+str(tz) if tz>=0 else str(tz))+')')
		handled=True
	elif(cmd==(cmd_esc+'timecalc')):
		handle_timecalc(sock,cmd_esc,cmd,line_post_cmd,channel,is_pm)
		handled=True
	elif(cmd.startswith(cmd_esc)):
		try:
			#alternate conversion syntax
			#check if the "command" is a valid floating point number
			conv_arg=float(cmd[len(cmd_esc):])
			
			#the line after the "command" is the command checked against the conversion list
			#some arguments here are a little weird because they're being transposed
			found_conversion=False
			for conversion in unit_conv_list:
				#we found the requested conversion, so do the thing and output the result
				#note that "X to Y" gets translated here as "X->Y"
				if(conversion.chk_cmd(cmd_esc,cmd_esc+line_post_cmd.replace(' to ','->'))):
					conversion.output_conv(sock,channel,conv_arg)
					found_conversion=True
			
			#this was a valid number, but something went wrong during conversion
			if(not found_conversion):
				py3queueln(sock,'PRIVMSG '+channel+' :Err: Conversion not found '+line_post_cmd,1)
			
			#in any case if we got a number don't handle this line any more
			handled=True
		#the "command" wasn't a valid floating point number,
		#so output an error for PM, or just do nothing in a channel
		except ValueError:
			if(is_pm):
				py3queueln(sock,'PRIVMSG '+channel+' :Warn: Invalid command: \"'+cmd+'\"; see '+cmd_esc+'help for help',1)
		
		#this prevents the bot from learning from unrecognized ! commands
		#(which are usually meant for another bot)
#		handled=True
	#this was added at the request of NuclearWaffle, in an attempt, and I'm quoting here
	#to "f**k with Proview"
#	elif((len(cmd)>1) and odd_quest(cmd)):
#		output,dbg_str=markov.generate(state_change,use_pg=use_pg,db_login=db_login,back_gen=False)
#		
#		#prevent generating commands directed towards other bots,
#		#if configured to do that
#		if(not gen_cmd):
#			if(output.startswith('!')):
#				output='\\'+output
#		
#		py3queueln(sock,'PRIVMSG '+channel+' :'+output,1)
#		handled=True
	
	return (handled,dbg_str)