Example #1
0
def store_to_redis(image_key):
    """
    Record an image's metadata in redis under the key's UUID.
    """
    ttl_seconds = 60 * 60 * 23  # URL signature and redis key both live for 23 hours
    conn = utils.get_images_redis_conn()
    batch = conn.pipeline()  # several ops in a row, so batch them in a pipeline
    # Hash fields: filename (the key's UUID doubles as the filename), a signed
    # URL valid for the same 23-hour window, and the image size -- cheaper to
    # read back from redis than to ask ViPR for it.
    batch.hset(image_key.key, "filename", image_key.key)
    batch.hset(image_key.key, "url", image_key.generate_url(ttl_seconds))
    batch.hset(image_key.key, "size", image_key.size)
    batch.expire(image_key.key, ttl_seconds)  # drop the whole hash when the URL goes stale

    batch.execute()  # fire the batched ops
    worker_logger.info("Stored image to redis: {}".format(image_key))
Example #2
0
from rq.decorators import job #funtion decoration
import dweepy #see dweet.io
import logging, logging.config
import utils
import datetime

# Apply the shared logging configuration provided by utils.
logging.config.dictConfig(utils.get_log_dict())

logger = logging.getLogger('vascodagama.dashboard')  # module-level logger for the dashboard


redis_images = utils.get_images_redis_conn()  # connection to the redis DB holding image metadata
r = utils.get_rq_redis_conn()  # separate connection used by RQ (each connection instance talks to one DB)
q = utils.get_rq()  # the RQ queue object jobs are enqueued on

# Setup our redis and RQ connections.  See twitter_watch for more details.
configstuff = utils.configstuff()  # config values; 'dweet_thing' is read by send_update below

@job("dashboard", connection=r, timeout=10, result_ttl=10)
def send_update(metric, value):
    """Publish one metric/value update to dweet.io (runs as an RQ job)."""
    logger.debug("Sending update for {}: {}".format(metric, value))
    payload = {metric: value}
    dweepy.dweet_for(configstuff['dweet_thing'], payload)




def update_dashboard(): # the primary entry point for refreshing the dashboard.
    """
    Refresh the dashboard.

    NOTE(review): in this view the body only emits a log line; the rest of
    the implementation appears truncated by the paste -- confirm against the
    full file before relying on this.
    """
    logger.info("updating")

Example #3
0
import redis  #and redis
from config import Config  # and the config files
import os  #and some OS functions
import json  #json functions

# Apply the shared logging configuration.
# NOTE(review): 'logging' and 'utils' are not imported in the visible import
# block above -- confirm they are imported earlier in the full file.
logging.config.dictConfig(utils.get_log_dict())
worker_logger = logging.getLogger("vascodagama.worker")  # logger for worker-side messages
watcher_logger = logging.getLogger("vascodagama.watcher")  # logger for the watcher component

logger = logging.getLogger('vascodagama.images')  # primary logger for this module

#setup flask
# NOTE(review): Flask is not imported in the visible lines -- confirm the import exists above.
app = Flask(__name__)

#connect to redis
redis_images = utils.get_images_redis_conn()

#Setup a connection that will be used by RQ (each redis connection instance only talks to 1 DB)
redis_queue = utils.get_rq_redis_conn()


#gets a list of random URLS from refis.
def get_random_urls(count=100):
    pipe_keys = redis_images.pipeline()  #setup 2 batches
    pipe_urls = redis_images.pipeline()
    keys = []
    for i in range(0, count):  # get 'count' random keys
        pipe_keys.randomkey()

    for key in pipe_keys.execute():  #for each one of those random keys
        pipe_urls.hget(key, "url")  #get the URL property.