def _setup_queues(self, host="smalls", port=8000, db=0):
    """
    Set up the two Redis-backed HotQueues used to push work and pull results.

    Parameters default to the previously hard-coded connection values, so
    existing callers are unaffected.

    :param host: Redis server hostname.
    :param port: Redis server port.
    :param db: Redis database index.
    """
    # TODO: source these connection settings (and the graph) from config
    self.processing_queue = hq.HotQueue("processor", serializer=json,
                                        host=host, port=port, db=db)
    self.completed_queue = hq.HotQueue("completed", serializer=json,
                                       host=host, port=port, db=db)
def hotqueue_test(num): print 'hotqueue_test:' MSG = { 'test': "haha", } queue = hotqueue.HotQueue("testqueue", host="127.0.0.1", port=6379, db=10) st = time.time() tic = lambda: 'at seconds %s' % (time.time()-st) for i in xrange(num): queue.put(MSG) ct = time.time() - st print 'hotqueue put %s times %s' % (num,tic()) st = time.time() tic = lambda: 'at seconds %s' % (time.time()-st) for i in xrange(num): queue.get() ct = time.time() - st print 'hotqueue get %s times %s' % (num,tic())
def test_get_hotqueue(self):
    """A message put through HotQueue is readable via MicroQueue.get()."""
    producer = hotqueue.HotQueue('get', host='127.0.0.1',
                                 port=self.redis_test_port, serializer=json)
    consumer = microqueue.MicroQueue('get', host='127.0.0.1',
                                     port=self.redis_test_port)
    producer.put("test")
    self.assertEqual(consumer.get(), "test")
def __init__(self):
    """
    Build the Tornado Application: routing table, app settings, and all
    backing services (MongoDB, Redis, HotQueue workers, memcached, PubNub, S3).
    """
    self.config = self._get_config()
    # URL routing table; room-scoped handlers capture the room id from the path.
    handlers = [
        url(r'/', IndexHandler, name='index'),
        url(r'/auth/google', GoogleAuthHandler, name='auth_google'),
        url(r'/logout', LogoutHandler, name='logout'),
        url(r'/home', HomeHandler, name='home'),
        url(r'/new', NewRoomHandler, name='new'),
        url(r'/rooms/(?P<id>\w+)', MessagesHandler, name='room'),
        url(r'/rooms/(?P<id>\w+)/messages', MessagesHandler, name='messages'),
        url(r'/rooms/(?P<id>\w+)/files', FilesHandler, name='files'),
        url(r'/rooms/(?P<id>\w+)/transcripts', TranscriptsHandler, name='transcripts'),
        url(r'/rooms/(?P<id>\w+)/transcripts/(?P<date>.+)', TranscriptsHandler, name='transcripts_by_date'),
        url(r'/rooms/(?P<id>\w+)/settings', SettingsHandler, name='settings'),
        url(r'/rooms/(?P<id>\w+)/say', NewMessageHandler, name='new_message'),
        url(r'/rooms/(?P<id>\w+)/upload', UploadHandler, name='upload'),
        url(r'/rooms/(?P<id>\w+)/leave', LeaveRoomHandler, name='leave_room'),
        url(r'/rooms/(?P<id>\w+)/delete', DeleteRoomHandler, name='delete_room'),
        url(r'/rooms/(?P<id>\w+)/invite', NewInvitationHandler, name='invite'),
        url(r'/rooms/(?P<id>\w+)/invitations', InvitationsHandler, name='invitations'),
        url(r'/rooms/(?P<id>\w+)/members', MembersHandler, name='members'),
        url(r'/i', InvitationHandler, name='invitation'),
    ]
    # Standard Tornado app settings; XSRF protection is on, and unauthenticated
    # requests are redirected to the Google auth handler.
    settings = dict(
        debug=self.config.debug,
        login_url='/auth/google',
        static_path=os.path.join(os.path.dirname(__file__), "static"),
        template_path=os.path.join(os.path.dirname(__file__), 'templates'),
        xsrf_cookies=True,
        cookie_secret=self.config.cookie_secret,
        ui_modules=uimodules,
    )
    tornado.web.Application.__init__(self, handlers, **settings)
    # MongoDB connection; database name comes from config.
    self.connection = pymongo.Connection()
    self.db = self.connection[self.config.mongodb_database]
    # TODO Configurable settings for redis
    self.redis = redis.Redis(host='localhost', port=6379, db=0)
    # Background-work queues consumed by the uploader/mailer workers
    # (see the matching module-level HotQueue('upload'/'mail') declarations).
    self.upload_queue = hotqueue.HotQueue('upload', host='localhost', port=6379, db=0)
    self.mail_queue = hotqueue.HotQueue('mail', host='localhost', port=6379, db=0)
    # TODO create indexes here
    self.memcache = pylibmc.Client(self.config.memcache_servers, binary=True,
                                   behaviors={"tcp_nodelay": True, "ketama": True})
    self.pubnub = pubnub.Pubnub(self.config.pubnub_publish_key,
                                self.config.pubnub_subscribe_key,
                                self.config.pubnub_secret_key,
                                self.config.pubnub_ssl_on)
    self.s3 = boto.s3.connection.S3Connection(
        self.config.aws_access_key_id,
        self.config.aws_secret_access_key)
import uuid
import datetime
import hotqueue

# Sets the redis IP_ADDRESS from the service.
redis_ip = os.environ.get('REDIS_IP')
if not redis_ip:
    # NOTE(review): bare Exception with no message — a descriptive error
    # ("REDIS_IP not set") would help debugging; left as-is in this pass.
    raise Exception()
#redis_ip = '10.101.199.157'

app = Flask(__name__)

# Separate logical Redis databases: db 0/2/3 for direct key storage and
# db 1 for the HotQueue work queue.  (Roles presumed from the variable
# names rd/rd_jobs/rd_imgs — TODO confirm against the worker code.)
rd = redis.StrictRedis(host=redis_ip, port=6379, db=0)
rd_jobs = redis.StrictRedis(host=redis_ip, port=6379, db=2)
rd_imgs = redis.StrictRedis(host=redis_ip, port=6379, db=3)
q = hotqueue.HotQueue('queue', host=redis_ip, port=6379, db=1)

# Load and return the parsed contents of data.json.
def get_data():
    with open("data.json", "r") as json_file:
        userdata = json.load(json_file)
    return userdata

@app.route('/', methods=['GET'])
def instructions():
    # Help text listing the available routes.
    # NOTE(review): the returned string is truncated in this chunk — it
    # continues (and closes) beyond the visible source.
    return """
    The route are as follows:
    curl <host>:<flask_port>/         # General info.
    curl <host>:<flask_port>/load     # Adds data.json info to the database.
import smtplib
# Python 2-era email module paths (email.MIMEMultipart / email.MIMEText).
from email.MIMEMultipart import MIMEMultipart
from email.MIMEText import MIMEText
from email.header import Header
import hotqueue
import yaml
import tornado.options
import tornado.web
from tornado.options import define, options

define("config_file", default="app_config.yml", help="app_config file")

# Work queue this mailer consumes; matches the application's
# HotQueue('mail', ...) producer with the same host/port/db.
queue = hotqueue.HotQueue('mail', host='localhost', port=6379, db=0)


class Model(dict):
    """Like tornado.web._O but does not whine for non-existent attributes"""

    def __getattr__(self, name):
        # Missing keys resolve to None instead of raising AttributeError.
        try:
            return self[name]
        except KeyError:
            return None

    def __setattr__(self, name, value):
        self[name] = value


class Mailer(object):
    # NOTE(review): class body continues beyond this chunk.
import time
import hotqueue
import plotting

# Usage:
# $ python serial_python.py --server
# which will setup a series of jobs in Redis via a HotQueue

# area of space to investigate
x1, x2, y1, y2 = -2.13, 0.77, -1.3, 1.3
# plot in 2D for a fast output or 3D for a slower but prettier output
SHOW_IN_3D = False

# setup input and output queues (default HotQueue connection: localhost Redis)
hq_in = hotqueue.HotQueue('mandelbrot_in')
hq_out = hotqueue.HotQueue('mandelbrot_out')


def calculate_z((q, maxiter)):  # Python 2 tuple-parameter unpacking
    """Pure python with complex datatype, iterating over list of q and z"""
    output = [0] * len(q)
    for i in range(len(q)):
        zi = 0 + 0j
        qi = q[i]
        if i % 1000 == 0:
            # print out some progress info since it is so slow...
            print "%0.2f%% complete" % (1.0 / len(q) * i * 100)
        output[i] = maxiter  # force max value if we exceed maxiter
        # NOTE(review): function body continues beyond this chunk.
    # NOTE(review): this chunk begins mid-function — the enclosing `def`
    # (apparently the `main(msg)` called in __main__ below) is above this view.
    # Cap the suppression parameter at 25 before selecting the point subset.
    if k > 25:
        k = 25
    subset = spatial_suppression(matches, bounds, k=k)
    # Push the points through
    overlaps = msg['overlaps']
    oid = msg['oid']
    pts = deepen(subset, fundamentals, overlaps, oid)
    return pts


def finalize(data, queue):
    """Make `data` JSON-serializable (ndarray -> list) and push it onto `queue`."""
    for k, v in data.items():
        if isinstance(v, np.ndarray):
            data[k] = v.tolist()
    queue.put(data)


if __name__ == '__main__':
    # Worker entry point: pull one message from the 'processor' queue,
    # run the pipeline, and push the result onto the 'completed' queue.
    # Connection values mirror _setup_queues elsewhere in the project.
    queue = hq.HotQueue('processor', serializer=json, host="smalls", port=8000, db=0)
    fqueue = hq.HotQueue('completed', serializer=json, host="smalls", port=8000, db=0)
    msg = queue.get()
    data = {}
    pts = main(msg)
    data['points'] = pts
    data['success'] = True
    data['callback'] = 'create_network_callback'
    finalize(data, fqueue)
import boto.s3.key
import boto.s3.connection
import hotqueue
import Image  # PIL's Python 2-era top-level import
import yaml
import pymongo
import tornado.options
from tornado.options import define, options
import tornado.web
import pubnub_sync

define("config_file", default="app_config.yml", help="app_config file")

# Work queue this uploader consumes; matches the application's
# HotQueue('upload', ...) producer with the same host/port/db.
queue = hotqueue.HotQueue('upload', host='localhost', port=6379, db=0)


class Model(dict):
    """Like tornado.web._O but does not whine for non-existent attributes"""

    def __getattr__(self, name):
        # Missing keys resolve to None instead of raising AttributeError.
        try:
            return self[name]
        except KeyError:
            return None

    def __setattr__(self, name, value):
        self[name] = value


class Uploader(object):
    def __init__(self):
        # NOTE(review): constructor body continues beyond this chunk.