Example no. 1
def slow_sorted_lines(line_count=1000):
    # Load every chain state from the database and flatten the stored
    # word lists into one big sorted list.
    states = models.session.query(models.Chain.state).all()
    full_word_list = [w for s in states for w in s[0]['word_list']]
    full_word_list.sort()
    for i in range(line_count):
        logger.info('slow gen: %s' % i)
        # Move through the sorted list in proportion to the line index and
        # take a window of up to 100 words around the current position.
        list_position = math.floor(
            (float(i) / line_count) * len(full_word_list)
        )
        current_slice = full_word_list[
            max(list_position - 50, 0): list_position + 50
        ]
        # Build a ten-word line from the current window, pausing briefly
        # so the output trickles out slowly.
        line = ' '.join(
            random.choice(current_slice) for _ in range(10)
        )
        time.sleep(.05)
        yield line
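The query above looks like SQLAlchemy: each `Chain` row appears to carry a `state` column that deserialises to a dict with a `word_list` key. A minimal sketch of what such a `models` module could look like, purely as an assumption (the table name, column types, and database URL are placeholders, not taken from the project):

# Hypothetical sketch of the models module that slow_sorted_lines queries;
# the real project may define Chain differently.
from sqlalchemy import Column, Integer, JSON, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

Base = declarative_base()

class Chain(Base):
    __tablename__ = 'chains'
    id = Column(Integer, primary_key=True)
    # JSON blob holding a serialised chain state, including a 'word_list' key.
    state = Column(JSON)

engine = create_engine('sqlite:///markov.db')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()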
Example no. 2
def readurl(request):
    """
    Read the text at the provided URL
    (or via the Twitter API if you provide http://twitter.com/{username})
    and feed it into a new MarkovGenerator.
    """
    url = request.match_info.get('url', "Anonymous")
    file_queue = request.app['file_queue']

    logger.info('file queue size: %s' % file_queue.qsize())
    logger.info('handling url: %s' % url)
    # Start from a fresh chain for the new source.
    request.app['mk'].set_chain(markov.MarkovGenerator())
    try:
        # Hand the URL to the background dripfeeder; this raises if the
        # (bounded) queue is already full.
        file_queue.put_nowait(url)
        request.app['mk'].sources.append(url)
        success = True
    except asyncio.QueueFull:
        success = False
    logger.info('file queue size: %s' % file_queue.qsize())
    return web.json_response(dict(
        success=success,
        modelName='_'.join(url.split('/')[-2:])
    ))
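For this handler to work, the shared `file_queue` and `mk` objects (the latter being something with `set_chain`, `sources`, `receive`, and `draw`) have to be stored on the aiohttp application before the server starts. A rough sketch of that wiring under stated assumptions: the route patterns, the queue size, and the `make_markov()` factory are guesses, not project code.

# Hypothetical app wiring for the readurl/draw handlers; route patterns,
# queue size and make_markov() are assumptions, not project code.
import asyncio
from aiohttp import web

def build_app():
    app = web.Application()
    # Shared state read by the handlers and the background coroutines.
    app['file_queue'] = asyncio.Queue(maxsize=10)
    app['mk'] = make_markov()  # hypothetical factory for the shared Markov wrapper
    # {url:.+} lets the captured segment contain slashes, i.e. a full URL.
    app.router.add_route('GET', '/readurl/{url:.+}', readurl)
    app.router.add_route('GET', '/draw', draw)
    return app

web.run_app(build_app(), port=8080)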
Example no. 3
def dripfeeder(file_queue, bigram_queue):
    while True:
        item = yield from file_queue.get()
        logger.info('handling %s' % item)
        # Pick a bigram source based on what kind of item was queued.
        if 'twitter.com' in item:
            logger.info('handling %s as twitter' % item)
            bi_gen = bigrams_from_twitter(item)
        elif 'demo' in item:
            logger.info('handling %s as demo!!' % item)
            bi_gen = demonstration_bigrams()
        elif item.startswith('http'):
            logger.info('handling %s as text url' % item)
            bi_gen = bigrams_from_url(item)
        elif item.startswith('static'):
            logger.info('handling %s as local file' % item)
            bi_gen = bigrams_from_upload(item)
        else:
            # Unknown item: skip it rather than hit an undefined bi_gen below.
            logger.warning('ignoring unrecognised item %s' % item)
            continue
        # Drip the bigrams into the queue, yielding to the event loop
        # after each one so other coroutines can run.
        for bigram in bi_gen:
            logger.info("putting %s" % str(bigram))
            yield from bigram_queue.put(bigram)
            yield from asyncio.sleep(0)
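The `bigrams_from_*` and `demonstration_bigrams` helpers are not shown in this listing; each is expected to yield word pairs that `mk.receive` can consume. A rough idea of what one of them could look like, assuming plain whitespace tokenisation and a `urllib` fetch (the project's real helpers may well differ):

# Hypothetical version of one bigram source; the real helpers may fetch
# and tokenise differently.
import urllib.request

def bigrams_from_url(url):
    with urllib.request.urlopen(url) as response:
        text = response.read().decode('utf-8', errors='replace')
    words = text.split()
    # Yield consecutive word pairs, e.g. "the quick brown" ->
    # ('the', 'quick'), ('quick', 'brown').
    for first, second in zip(words, words[1:]):
        yield (first, second)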
Example no. 4
def receiver(queue, mk):
    # Forward every bigram from the queue into the shared Markov model,
    # yielding to the event loop between items.
    while True:
        item = yield from queue.get()
        logger.info("receiving %s" % str(item))
        mk.receive(item)
        yield from asyncio.sleep(0)
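Neither `dripfeeder` nor `receiver` runs on its own; both have to be scheduled on the event loop alongside the web server. A minimal startup sketch, assuming the generator functions are registered as old-style coroutines (e.g. decorated with @asyncio.coroutine, which this listing may have dropped) and with the queue sizes being guesses:

# Hypothetical startup for the two background coroutines; queue sizes and
# make_markov() (see the earlier sketch) are assumptions.
import asyncio

loop = asyncio.get_event_loop()
file_queue = asyncio.Queue(maxsize=10)
bigram_queue = asyncio.Queue(maxsize=1000)
mk = make_markov()  # hypothetical factory for the shared Markov wrapper

# Schedule both coroutines as long-running background tasks.
asyncio.ensure_future(dripfeeder(file_queue, bigram_queue))
asyncio.ensure_future(receiver(bigram_queue, mk))
loop.run_forever()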
Example no. 5
def get_timeline(twitter_url, item_count=200):
    # Pull the username out of a URL like http://twitter.com/{username}.
    tokens = twitter_url.split('/')
    user = tokens[tokens.index('twitter.com') + 1]
    logger.info('fetching tweets for user %s' % user)
    # Page through the user's timeline via tweepy, up to item_count tweets.
    return tweepy.Cursor(api.user_timeline, id=user).items(item_count)
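`get_timeline` relies on a module-level tweepy `api` object that has already been authenticated elsewhere in the project. A typical way to build one, with the credential strings as placeholders:

# Hypothetical tweepy setup producing the module-level `api` object;
# the credential values are placeholders.
import tweepy

auth = tweepy.OAuthHandler('CONSUMER_KEY', 'CONSUMER_SECRET')
auth.set_access_token('ACCESS_TOKEN', 'ACCESS_TOKEN_SECRET')
api = tweepy.API(auth)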
Example no. 6
def draw(request):
    # Generate a line of text from the shared Markov model and return it as JSON.
    mk = request.app['mk']
    logger.info('DRAWING!')
    text = mk.draw()
    return web.json_response(text)