Exemplo n.º 1
0
def watch_stream():
    """Watch the Twitter streaming API for the current hashtag and enqueue photos.

    Runs forever: streams tweets matching the hashtag stored in Redis under
    "hashtag", counts every tweet in "stats:tweets", and enqueues a get_image
    job for each non-retweet photo tweet. If the hashtag value in Redis
    changes, the stream is restarted with the new hashtag. Network hiccups
    (IncompleteRead / ProtocolError) are logged and the stream is reopened.
    """
    twitter_creds = utils.twitter_creds()
    redis_queue = utils.get_rq_redis_conn()
    hashtag = redis_queue.get("hashtag")
    q = utils.get_rq()

    # Set up the Twitter streaming connector from the stored credentials.
    twitter_api = TwitterAPI(
        consumer_key=twitter_creds['consumer_key'].encode('ascii', 'ignore'),
        consumer_secret=twitter_creds['consumer_secret'].encode('ascii', 'ignore'),
        access_token_key=twitter_creds['access_token'].encode('ascii', 'ignore'),
        access_token_secret=twitter_creds['token_secret'].encode('ascii', 'ignore')
    )

    watcher_logger.info("Waiting for tweets...")
    while True:
        try:
            for tweet in twitter_api.request('statuses/filter', {'track': hashtag}).get_iterator():
                if hashtag != redis_queue.get("hashtag"):
                    watcher_logger.info("Hashtag changed from %s, breaking loop to restart with new hashtag", hashtag)
                    hashtag = redis_queue.get("hashtag")
                    break  # reopen the stream with the new hashtag
                redis_queue.incr("stats:tweets")  # let Redis know we got another one
                watcher_logger.debug("received tweet with tag %s", hashtag)
                try:
                    # Raises KeyError when the tweet carries no media - expected,
                    # caught below.
                    media = tweet['entities']['media'][0]
                    if media['type'] == 'photo':
                        # BUG FIX: the original tested "'retweeted' not in tweet",
                        # which contradicted its own log message ('retweeted' is a
                        # standard field present on tweet objects). Retweets are
                        # identified by the presence of 'retweeted_status'.
                        if 'retweeted_status' in tweet:
                            watcher_logger.info("Tweet was a RT - ignoring")
                            continue
                        watcher_logger.info("Dispatching tweet (%s) with URL %s", hashtag, media['media_url'])
                        # Add a job to the queue, calling get_image() with the
                        # image URL and a timeout of 60s.
                        q.enqueue(
                            get_image,
                            media['media_url'],
                            ttl=60,
                            result_ttl=60,
                            timeout=60
                        )
                except KeyError as e:
                    # Tweet without media - expected behavior, so ignoring.
                    # (FIX: e.message does not exist in Python 3; log e itself.)
                    watcher_logger.debug("Caught a key error for tweet, expected behavior, so ignoring: %s", e)
                except Exception as e:
                    watcher_logger.critical("UNEXPECTED EXCEPTION: %s", e)
        except httplib.IncompleteRead as e:
            watcher_logger.warning("HTTP Exception %s", e)
        except ProtocolError as e:
            watcher_logger.warning("Protocol Exception %s", e)
Exemplo n.º 2
0
from rq.decorators import job #funtion decoration
import dweepy #see dweet.io
import logging, logging.config
import utils
import datetime

# Configure logging from the shared project logging config.
logging.config.dictConfig(utils.get_log_dict())

logger = logging.getLogger('vascodagama.dashboard')


# Redis connection for image data, the RQ Redis connection, and the RQ queue.
# Setup our redis and RQ connections.   see twitter_watch for more details.
redis_images = utils.get_images_redis_conn()
r = utils.get_rq_redis_conn()
q = utils.get_rq()

# Project configuration values (used below for the dweet.io "thing" name).
configstuff = utils.configstuff()

@job("dashboard", connection=r, timeout=10, result_ttl=10)
def send_update(metric, value):
    """Publish a single metric update to the dashboard via dweet.io.

    Runs as an RQ job on the "dashboard" queue (10s timeout, result kept
    for 10s).

    :param metric: name of the metric being updated
    :param value: new value for that metric
    """
    # Lazy %-style args: the message is only formatted when DEBUG is enabled.
    logger.debug("Sending update for %s: %s", metric, value)
    dweepy.dweet_for(configstuff['dweet_thing'], {metric: value})




def update_dashboard(): # the primary function.
    """Refresh the dashboard metrics.

    NOTE(review): only this log statement is visible in this chunk - the
    body looks truncated at the snippet boundary; confirm against the full
    source before relying on this function doing any actual updating.
    """
    logger.info("updating")

Exemplo n.º 3
0
import logging, logging.config #logging
import os,json #OS functions
from cloudfoundry import CloudFoundryInterface #The CF interface written by Matt Cowger
import redis
import utils
import boto  # the library for interacting with AWS services
from config import Config #Easy config files

# Configure logging from the shared project logging config.
logging.config.dictConfig(utils.get_log_dict())
logger = logging.getLogger('vascodagama.scaler')



# Redis connection for image data, the RQ Redis connection, and the RQ queue.
redis_images = utils.get_images_redis_conn()
redis_queue = utils.get_rq_redis_conn()
q = utils.get_rq()


def batch_gen(data, batch_size):
    """Yield successive chunks of *data*, each at most *batch_size* items long.

    *data* must support len() and slicing; the final chunk may be shorter
    than *batch_size*.
    """
    for start in range(0, len(data), batch_size):
        chunk = data[start:start + batch_size]
        yield chunk

def clear_app():
    s3_creds = utils.s3_creds()
    hashtag = redis_queue.get("hashtag")
    logger.info("Got request to reset. Will clear the db and bucket")
    logger.debug("flushing redis image db")
    redis_images.flushdb()
    logger.debug("flushing redis queue db")
    redis_queue.flushdb()