Beispiel #1
0
class AnmadQueues:
    """Container for the three HotQueue instances used by anmad.

    Holds a pre-run queue, a run queue and an info/log queue, plus
    cached list snapshots of each (refreshed via update_job_lists).
    """

    def __init__(self, prequeue, queue, info):
        """Create the three queues and take an initial snapshot."""
        self.prequeue = HotQueue(prequeue)
        self.queue = HotQueue(queue)
        self.info = HotQueue(info)
        self.update_job_lists()

    def update_job_lists(self):
        """Reset queue_message vars."""
        # Re-read each queue into a plain list snapshot.
        self.prequeue_list = read_queue(self.prequeue)
        self.queue_list = read_queue(self.queue)
        # Keep the info queue bounded before snapshotting it.
        trim_queue(self.info, 100)
        info_entries = read_queue(self.info)
        info_entries.reverse()  # newest entries first
        self.info_list = info_entries

    def prequeue_job(self, job):
        """Adds an item to the pre-run queue (wrapped in a list)."""
        self.prequeue.put([job])

    def queue_job(self, job):
        """Adds an item to the run queue."""
        self.queue.put(job)

    def clear(self):
        """Clears both job queues: run queue first, then pre-run queue."""
        self.queue.clear()
        self.prequeue.clear()

    def clearinfo(self):
        """Clears the info queue."""
        self.info.clear()
Beispiel #2
0
 def test_arguments(self):
     """Test that HotQueue.__init__ accepts arguments correctly, and that
     the Redis key is correctly formed.
     """
     kwargs = {
         'name': "testqueue",
         'serializer': DummySerializer,
         'host': "localhost",
         'port': 6379,
         'db': 0}
     # Instantiate the HotQueue instance:
     self.queue = HotQueue(**kwargs)
     # Ensure that the properties of the instance are as expected:
     self.assertEqual(self.queue.name, kwargs['name'])
     self.assertEqual(self.queue.key, "hotqueue:%s" % kwargs['name'])
     self.assertEqual(self.queue.serializer, kwargs['serializer'])
     # Connection parameters land on the name-mangled private client.
     # (A duplicated host assertion was removed here.)
     self.assertEqual(self.queue._HotQueue__redis.host, kwargs['host'])
     self.assertEqual(self.queue._HotQueue__redis.port, kwargs['port'])
     self.assertEqual(self.queue._HotQueue__redis.db, kwargs['db'])
     # Instantiate a HotQueue instance with only the required args:
     self.queue = HotQueue(kwargs['name'])
     # Ensure that the properties of the instance are as expected:
     self.assertEqual(self.queue.name, kwargs['name'])
     self.assertEqual(self.queue.key, "hotqueue:%s" % kwargs['name'])
     self.assertTrue(self.queue.serializer is pickle)  # Defaults to cPickle or pickle
Beispiel #3
0
def add_to_queue():
    """
    we use HotQueue to store input info
    :return:
    """
    with open(r"C:\Users\29625\Desktop\Trans_xtoy_1.csv","r") as f:
        reader = csv.reader(f)
        redis_config = config["redis"]
        queue = HotQueue(name="amap_busline_route", host=redis_config["host"],
                         port=redis_config["port"], db=redis_config["db"])


        print "start adding to queue"
        for row in reader:
            num = row[0]
            origin_lng = row[1]
            origin_lng = origin_lng.strip(" ")
            origin_lat = row[2]
            origin_lat = origin_lat.strip(" ")
            destination_lng = row[3]
            destination_lng = destination_lng.strip(" ")
            destination_lat = row[4]
            destination_lat = destination_lat.strip(" ")
            origin = origin_lng + ","+origin_lat
            destination = destination_lng + ","+destination_lat
            print origin + " " + destination
            queue.put((num,origin,destination))
        print "finished adding to queue"
def make(name, path, sentinals):
    """Load tab-separated lines from *path* into the HotQueue *name*.

    After the data, *sentinals* None markers are enqueued so each
    consumer can detect end-of-stream.
    """
    q = HotQueue(name)
    with open(path) as f:
        # Iterate the file lazily instead of materialising readlines().
        for line in f:
            q.put(line.strip().split('\t'))

    for _ in xrange(sentinals):
        q.put(None)
Beispiel #5
0
 def unsubscribe_uuid(cls, uuid):
     """Publish an 'unsubscribe' command for *uuid* on the current
     connection thread's queue and return the command dict."""
     if True:  #TODO: check uuid
         command = {'cmd': 'unsubscribe', 'params': str(uuid)}
         thread_queue = HotQueue("connection_thread_id_queue=" + str(g.connection_thread_id))
         thread_queue.put(json.dumps(command))
         return command
     else:
         return None
Beispiel #6
0
 def unsubscribe_uuid(cls, uuid):
     """Send an 'unsubscribe' command for *uuid* to the connection thread."""
     if not True:  #TODO: check uuid
         return None
     msg = {'cmd': 'unsubscribe', 'params': str(uuid)}
     target = "connection_thread_id_queue=" + str(g.connection_thread_id)
     HotQueue(target).put(json.dumps(msg))
     return msg
Beispiel #7
0
 def open(self):
     print "Opening"
     logging.info("opening")
     print "Initializing queue"
     queue = HotQueue("myqueue", host="localhost", port=6379, db=0)
     count = 0
     while count < 100:
             count = count + 1
             print "adding " + str(count) + " to queue"
             queue.put(count)
Beispiel #8
0
 def open(self):
     print "Opening"
     logging.info("opening")
     print "Initializing queue"
     queue = HotQueue("myqueue", host="localhost", port=6379, db=0)
     count = 0
     while count < 100:
         count = count + 1
         print "adding " + str(count) + " to queue"
         queue.put(count)
 def subscribe_greenlet(ps, connection_thread_id):
     """Consume subscribe/unsubscribe commands for one connection thread.

     Blocks on the thread's HotQueue and applies each JSON command to the
     pubsub object *ps*. Malformed messages are ignored so the loop
     stays alive.
     """
     queue = HotQueue("connection_thread_id_queue=" + str(connection_thread_id))
     for msg in queue.consume():
         try:
             cmd = json.loads(msg)
             if cmd['cmd'] == 'subscribe':
                 ps.subscribe('uuid=' + cmd['params'])
             elif cmd['cmd'] == 'unsubscribe':  # fixed: was 'subscribe' twice, branch unreachable
                 ps.unsubscribe('uuid=' + cmd['params'])
         except Exception:  # narrowed from bare except; still best-effort
             pass
def output_category(cat,data):
    """Pretty-print *data* for category *cat* and optionally enqueue it.

    When appcfg['redis'] is truthy, every item is pushed onto the
    HotQueue configured in appcfg.
    """
    # NOTE(review): redis_enabled is declared global but never used here;
    # presumably appcfg['redis'] supersedes it -- confirm.
    global redis_enabled, appcfg

    pp.pprint(data)
    print "{cat} Entries: {size}".format(cat=cat,size=len(data))
    if appcfg['redis']:
        queue = HotQueue(
            appcfg['queue_name'], host=appcfg['redis_host'], port=appcfg['redis_port'], db=appcfg['redis_db']
        )
        for item in data:
            queue.put(item)
Beispiel #11
0
def put(queuename,message=None,senderName=None):
    """Enqueue *message* (or the request form 'body') onto *queuename*.

    Returns (json_status, 200) on success, or ('', 400) when no message
    could be determined.  The original raised NameError on the 400 path
    because put_status was never bound before use.
    """
    if not message:
        message = str(request.form['body'])
    if not message:
        # No message available: fail fast instead of referencing an
        # unbound put_status.
        return '', 400
    queue = HotQueue(queuename, senderName=senderName, originIPAddress=request.remote_addr, host="localhost", port=6379, db=0)
    # queue.put returns the enqueued HQMessage(s); report the first one.
    put_status = queue.put(message)[0]
    return put_status.to_json(), 200
Beispiel #12
0
class RedisMQ(object):
    """Minimal Flask extension exposing a HotQueue-backed task queue.

    Functions decorated with :meth:`task` gain an ``async`` attribute
    that enqueues the call; :meth:`work` consumes the queue and runs the
    registered functions.
    """

    def __init__(self, app=None):
        if app is not None:
            self.init_app(app)
        else:
            self.app = None

        # Maps "module.func_name" -> callable for the worker to dispatch.
        self._tasks = {}

    def init_app(self, app):
        """Read REDISMQ_* settings from *app* and build the HotQueue."""
        name = app.config.setdefault('REDISMQ_NAME', 'mq')
        host = app.config.setdefault('REDISMQ_HOST', 'localhost')
        port = app.config.setdefault('REDISMQ_PORT', 6379)
        db = app.config.setdefault('REDISMQ_DB', 0)
        password = app.config.setdefault('REDISMQ_PASSWORD', None)
        # (Removed two no-op app.config.get(...) expressions whose values
        # were discarded; block/timeout are read again in work().)

        if not hasattr(app, 'extensions'):
            app.extensions = {}
        app.extensions['redismq'] = self

        self.app = app
        self._hq = HotQueue(name, host=host, port=port, db=db,
                            password=password)

    def task(self, func):
        """Register *func* and attach an async enqueueing wrapper to it."""
        func_name = "%s.%s" % (func.__module__, func.__name__)
        self._tasks[func_name] = func

        @wraps(func)
        def _func(*args, **kwargs):
            self._hq.put((func_name, args, kwargs))

        # setattr because "async" is a reserved word in modern Python.
        setattr(func, "async", _func)
        return func

    def work(self, *args, **kwargs):
        """Consume the queue forever, dispatching messages to tasks."""
        kwargs.update({
            'block': self.app.config.get('REDISMQ_BLOCK', True),
            'timeout': self.app.config.get('REDISMQ_BLOCK_TIMEOUT', 0)
        })

        @self._hq.worker(*args, **kwargs)
        def _worker(msg):
            try:
                func_name, args, kwargs = msg
                self._tasks[func_name](*args, **kwargs)
            except Exception:
                # Swallow task errors so one bad message does not kill
                # the worker (was py2-only "except Exception, e").
                pass

        return _worker()
Beispiel #13
0
 def subscribe_greenlet(ps, connection_thread_id):
     """Dispatch queued subscribe/unsubscribe commands to pubsub *ps*.

     Consumes JSON command messages from this connection thread's
     HotQueue; bad messages are ignored.
     """
     queue = HotQueue("connection_thread_id_queue=" +
                      str(connection_thread_id))
     for msg in queue.consume():
         try:
             cmd = json.loads(msg)
             if cmd['cmd'] == 'subscribe':
                 ps.subscribe('uuid=' + cmd['params'])
             elif cmd['cmd'] == 'unsubscribe':  # fixed: duplicate 'subscribe' test made this dead
                 ps.unsubscribe('uuid=' + cmd['params'])
         except Exception:  # narrowed from bare except
             pass
Beispiel #14
0
def get(queuename):
    """Pop one message from *queuename*.

    Returns (json_message, 200), or ('', 204) when the queue is empty.
    Removed an unused reservation_id and a dead ``HQMessage()``
    assignment that was immediately overwritten.
    """
    queue = HotQueue(queuename, host="localhost", port=6379, db=0)
    hqmessage = queue.get()
    if hqmessage:
        return hqmessage.to_json(), 200
    # Queue empty: no content.
    return '', 204
    def __init__(self, in_cnt, out_cnt):
        """Build paired producer/consumer HotQueues for checksum work.

        in_cnt / out_cnt: how many producer-side and consumer-side
        queues to create.  All keys share a time-based prefix so one
        run's queues are grouped together.
        """
        self.in_cnt = in_cnt
        self.out_cnt = out_cnt

        # Unique key prefix per run, derived from the current epoch second.
        time_uniq = int(time.time())
        checksum_queue_key_prefix = "checksum-%d-" % time_uniq
        redis_host, redis_port = REDIS_INFO.split(":")

        # Producer-side connections, one per table being compared.
        self.in_queues = [HotQueue(checksum_queue_key_prefix + str(i),
                                   host=redis_host, port=redis_port, db=11) for i in range(in_cnt)]
        # Consumer-side queues, one per backend consumer.  Indexes that
        # overlap with in_queues address the same underlying Redis queue.
        self.out_queues = [HotQueue(checksum_queue_key_prefix + str(i),
                                    host=redis_host, port=redis_port, db=11) for i in range(out_cnt)]
Beispiel #16
0
def Main():
	"""Listener loop: consume commit payloads from Redis and store them.

	Reads connection settings from config.json, then for each item on
	the listener queue builds file diffs, inserts a commit document into
	Mongo, links it to its repo and user, and publishes a notification
	on the 'cy-pullcommits' channel.
	"""
	config = file_read('config.json')
	config = json.loads(config)
	HOST = config['mongohost']
	PORT = config['port']
	DB_NAME = config['database_name']
	LISTENER_QUEUE = config['listener_queue']
	RESPONSE_QUEUE = config['response_queue']

	file_dir = config['repo_directory']
	client = MongoClient(HOST,PORT)
	
	db = client[DB_NAME]
	
	# JSON-serialised queues: listen is consumed below; response is
	# built but unused (direct Redis publish is used instead -- see end).
	listen = HotQueue(LISTENER_QUEUE,serializer=json)

	response = HotQueue(RESPONSE_QUEUE,serializer=json)

	r = redis.StrictRedis(host='localhost', port=6379, db=0)

	print "\nPython-Redis-Listener-Started "

	for item in listen.consume():
		
		# Diff every file contained in the pushed payload.
		files = []
		for _file  in item['files']:
			files.append(diff_file(_file['name'],_file['path'],_file['isnew'],_file['tag'],db,item['repoid'],file_dir))
		
		commits = {
					'changes'  : files,
					'desc'     : item['desc'],
					'created'  : datetime.datetime.utcnow(),
					#'comment'  : [],
					'repo'     : {'id':ObjectId(item['repoid']),'slug':item['reposlug']},
					'user'     : {'id':ObjectId(item['userid']),'username':item['username']}
				}

		commitid = db.commits.insert(commits)
		
		# Cross-link the new commit from its repo and its author.
		db.repos.update({'_id':commits['repo']['id']},{'$push':{'commits':commitid}})
		
		db.users.update({'_id':commits['user']['id']},{'$push':{'commits':commitid}})
		
		responseobj= {'commitid':str(commitid),'userid':str(commits['user']['id'])}

		#response.put(responseobj)
		r.publish('cy-pullcommits', json.dumps(responseobj))
		
		print commits
Beispiel #17
0
class RedisInput(object):
    """Input plugin that consumes JSON events from a Redis HotQueue."""

    input_name = "redis"

    def __init__(self, host, queue, port=6379):
        self.host = host
        self.port = port
        # Starts as the queue *name*; replaced by a live HotQueue in
        # handle_input().
        self.queue = queue

    def handle_input(self):
        """Consume the queue forever, decoding and forwarding each event."""
        try:
            self.queue = HotQueue(self.queue, serializer=json, host=self.host, port=self.port, db=0)
            for event in self.queue.consume():
                # Give every bound format module a chance to decode the payload.
                for fmt in self.format_modules:
                    if fmt.bind and self.input_name in fmt.bind:
                        event = fmt.decode(event)
                self.output_threads.write(event)
        except Exception as err:
            # Normalise any failure into the error run() knows how to retry.
            raise EveConnectionError(err)

    def run(self, format_modules, output_modules):
        """Start the output threads and keep the input handler alive."""
        self.format_modules = format_modules
        self.output_modules = output_modules
        self.output_threads = OutputThreads(self.output_modules, self.format_modules)
        while True:
            try:
                self.handle_input()
            except EveConnectionError as err:
                logger.error("connection error in input handler %s: %r - retrying in 1 second" % (self.input_name, err))
                sleep(1)
Beispiel #18
0
    def initQueue(self):
        """Build one HotQueue per configured queue name."""
        # Dictionary of HotQueue instances keyed by queue name.
        self.q_dict = {}

        # Crawl queues: one per entry in self.q_list.
        for q in self.q_list:
            self.q_dict[q] = HotQueue(q, host=self.redis_ip, port=self.redis_port, password=self.redis_passwd, db=self.QUEUE_DB)
Beispiel #19
0
def main():
    queue = HotQueue(main_config.INDEX_QUEUE, 
                     host=main_config.REDIS_HOST, 
                     port=main_config.REDIS_PORT)
    index = get_index(main_config.WHOOSH_INDEX_DIR)
    writer = BufferedWriter(index, limit=10)
    try:
        for doc_id in queue.consume():
            print "looking at {}".format(doc_id)
            doc = Document.query.get(doc_id)
            if doc:
                write_doc(doc, writer)
            else:
                print "no doc with doc_id {}".format(doc_id)
    finally:
       writer.close()
Beispiel #20
0
def get_queue(name):
    """Return a HotQueue with the given *name*, configured from config.ini.

    Reads the redis host and password from the [redis] section.
    """
    parser = ConfigParser()
    parser.read('./config.ini')
    return HotQueue(name,
                    host=parser.get('redis', 'host'),
                    password=parser.get('redis', 'password'))
Beispiel #21
0
 def test_from_key(self):
     """Test the HotQueue.from_key static method"""
     redis = Redis()
     queues = {
         '_test:tens': range(10, 20),
         '_test:twenties': range(20, 30),
         '_test:thirties': range(30, 40)
     }
     # Populate each queue with its expected values.
     for queue_name, values in queues.items():
         HotQueue(queue_name).put(*values)

     # Rebuild each queue from its Redis key and verify round-tripping.
     for key in redis.keys('hotqueue:_test:*'):
         rebuilt = HotQueue.from_key(key)
         self.assertEqual(list(rebuilt.consume(block=False)),
                          queues[rebuilt.name])
Beispiel #22
0
 def __init__(self, source):
     """Set up the URL feed for *source*: its HotQueue and SQL strings,
     then start feeding."""
     self.url_table = source + "_url_table"
     redis_config = config["redis"]
     # Build connection kwargs once; include the password only when it is
     # configured ("in" replaces the py2-only dict.has_key() and the two
     # near-duplicate HotQueue(...) branches).
     queue_kwargs = {
         'name': self.url_table,
         'host': redis_config["host"],
         'port': redis_config["port"],
         'db': redis_config["db"],
     }
     if "password" in redis_config:
         queue_kwargs['password'] = redis_config["password"]
     self.queue = HotQueue(**queue_kwargs)
     # SQL templates used by the feed loop.
     self.sql = "select urlmd5, url, type, boundary from " + self.url_table + " where status=0 limit 5000"
     self.update_sql_base = "update " + self.url_table + " set status=100 where urlmd5='%s'"
     self.run_sign = True
     self.start_feed()
Beispiel #23
0
 def sent_to_broker(self, newLogString, host="localhost", port=6379, db=1):
     '''function sent_to_broker

     Push a log/text string onto the "logqueue" broker queue.

     Keyword Arguments:
         newLogString {str} -- message to enqueue
         host {str} -- broker host; overridden by QUEUE/host config (default: {"localhost"})
         port {number} -- broker port; overridden by QUEUE/port config (default: {6379})
         db {number} -- redis database index (default: {1})
     '''
     # Configured values win over the defaults passed in.
     if self.read_config('QUEUE', 'host'):
         host = self.read_config('QUEUE', 'host')
     if self.read_config('QUEUE', 'port'):
         port = int(self.read_config('QUEUE', 'port'))
     from hotqueue import HotQueue
     log_queue = HotQueue("logqueue", host=host, port=port, db=db)
     log_queue.put(newLogString)
Beispiel #24
0
def main():
    """Worker loop: print queue items until a None sentinel arrives."""
    pid = os.getpid()
    print(f"{pid} starting")
    queue = HotQueue(sys.argv[1])
    for item in queue.consume():
        if item is None:
            # Sentinel received: this worker is done.
            print(f"{pid} terminating")
            return
        print(f"{item} printed by {pid}")
        time.sleep(2)
Beispiel #25
0
 def client(self, queue):
     """Return a :class:`hotqueue.HotQueue` for *queue*.

     *queue* may already be a HotQueue instance (returned unchanged) or
     a queue name, which is combined with
     :attr:`~hirefire.procs.hotqueue.HotQueueProc.connection_params`.
     """
     # Pass pre-built instances straight through; otherwise build one.
     return queue if isinstance(queue, HotQueue) else HotQueue(queue, **self.connection_params)
Beispiel #26
0
	def __init__(self, jukebox):
		"""Initialise the command thread for *jukebox*.

		Sets up empty playback state and the HotQueue on which play
		commands arrive.
		"""
		cmd.Cmd.__init__(self)
		threading.Thread.__init__(self)
		self.jukebox = jukebox
		# Current playback state; populated once something is playing.
		self.playlist = None
		self.track = None
		self.results = False
		
		# Queue on which play commands arrive.
		self.queue_play = HotQueue("wekkie:play")
def create_queue(data):
    """Attach a HotQueue to data['hotqueue'] from its connection settings.

    Returns True on success, False if the queue could not be created
    (missing keys or connection setup failure).
    """
    try:
        data['hotqueue'] = HotQueue(data['queue'],
                                    serializer=json,
                                    host=data['host'],
                                    port=data['port'],
                                    db=0)
    except Exception:
        # Narrowed from a bare except so KeyboardInterrupt/SystemExit
        # still propagate; any setup failure reports False.
        return False
    else:
        return True
Beispiel #28
0
    def handle_input(self):
        """Consume JSON events from the Redis queue and forward them.

        Replaces self.queue (initially a queue name) with a live
        HotQueue, then blocks on consume(); any failure is re-raised as
        EveConnectionError so the caller can retry.
        """
        try:
            self.queue = HotQueue(self.queue, serializer=json, host=self.host, port=self.port, db=0)

            for data in self.queue.consume():
                # Give each bound format module a chance to decode the payload.
                for fmt in self.format_modules:
                    if fmt.bind and self.input_name in fmt.bind:
                        data = fmt.decode(data)

                self.output_threads.write(data)
        except Exception as err:
            raise EveConnectionError(err)
Beispiel #29
0
def queue_processor():
    """Post-processing worker: extract text from queued uploaded documents.

    Consumes document UUIDs from the postprocessing queue, resolves each
    to a file in the upload folder, and for PDFs stores the extracted
    text in a new ContentDocument.
    """
    queue = HotQueue("postprocessing_queue")
    ocr = OCR.Engine()  # NOTE(review): created but never used below -- confirm intent

    for uuid in queue.consume():
        print str(uuid)
        dm = DocumentManager()
        doc = dm.getDocument(uuid)
        print str(doc.url)
        url = urlparse(doc.url)
        # Map the document URL path onto the local upload folder.
        filename = os.path.join(UPLOAD_FOLDER, url.path)
        m = magic.Magic()
        print filename + ' ' + str(m.id_filename(filename))

        # Only PDFs are processed; other file types are skipped.
        if 'PDF' in str(m.id_filename(filename)):
            pdf_text = PDFProcessor.extractContent(str(filename))
            cm = ContentDocument()
            cm.content = unicode(pdf_text, encoding='utf-8')
            cm.save()
            #ocr_text = ocr.interpret(str(filename))
            print pdf_text
def queue_processor():
        """Worker loop extracting text from queued PDF documents.

        Consumes document UUIDs from the postprocessing queue and stores
        extracted PDF text as ContentDocument records.
        """
        queue = HotQueue("postprocessing_queue")
        ocr = OCR.Engine()  # NOTE(review): created but never used -- confirm intent

        for uuid in queue.consume():
            print str(uuid)
            dm = DocumentManager()
            doc = dm.getDocument(uuid)
            print str(doc.url)
            url = urlparse(doc.url)
            # Map the document URL path onto the local upload folder.
            filename = os.path.join(UPLOAD_FOLDER, url.path)
            m = magic.Magic()
            print filename + ' ' + str(m.id_filename(filename))

            # Only PDFs are processed; other file types are skipped.
            if 'PDF' in  str(m.id_filename(filename)):
                pdf_text = PDFProcessor.extractContent(str(filename))
                cm = ContentDocument()
                cm.content = unicode(pdf_text, encoding='utf-8')
                cm.save()
                #ocr_text = ocr.interpret(str(filename))
                print pdf_text
Beispiel #31
0
 def test_arguments(self):
     """Check HotQueue.__init__ argument handling and Redis key format."""
     kwargs = {
         'name': "testqueue",
         'serializer': DummySerializer,
         'host': "localhost",
         'port': 6379,
         'db': 0}
     # Full keyword construction:
     queue = HotQueue(**kwargs)
     self.queue = queue
     self.assertEqual(queue.name, kwargs['name'])
     self.assertEqual(queue.key, "hotqueue:%s" % kwargs['name'])
     self.assertEqual(queue.serializer, kwargs['serializer'])
     # Name-only construction falls back to the default serializer:
     queue = HotQueue(kwargs['name'])
     self.queue = queue
     self.assertEqual(queue.name, kwargs['name'])
     self.assertEqual(queue.key, "hotqueue:%s" % kwargs['name'])
     self.assertTrue(queue.serializer is pickle)  # Defaults to cPickle
Beispiel #32
0
 def __init__(self, interval_upp, interval_down, source,need_proxy=True):
     """Crawler thread for *source*: set up its URL queue and proxy.

     interval_upp/interval_down bound the crawl interval; need_proxy
     controls whether requests are routed through the proxy pool.
     """
     Thread.__init__(self)
     redis_config = config["redis"]
     url_table = source + "_url_table"
     # "in" replaces the py2-only has_key(); the password is optional and
     # the two near-duplicate HotQueue(...) branches are merged.
     queue_kwargs = {'name': url_table,
                     'host': redis_config["host"],
                     'port': redis_config["port"],
                     'db': redis_config["db"]}
     if "password" in redis_config:
         queue_kwargs['password'] = redis_config["password"]
     self.queue = HotQueue(**queue_kwargs)
     self.activity = 0.0
     self.spider = None
     self.check_round = 20
     self.interval_upp = interval_upp  # upper bound of the crawl interval
     self.interval_down = interval_down  # lower bound of the crawl interval
     self.lock = RLock()
     self.tuple_from_queue = ()
     self.proxy_manger = ProxyMgr()
     self.proxy_ip = self.proxy_manger.get_proxy()[1]
     self.CHKTHD = 20
     self.proxy = {'http': 'http://' + self.proxy_ip}
     self.need_proxy = need_proxy
Beispiel #33
0
    def configure(self, gconfig={}, **options):
        """Apply scheduler configuration from *gconfig* plus overrides.

        Raises SchedulerAlreadyRunningError when called on a running
        scheduler.  NOTE(review): gconfig={} is a shared mutable
        default -- safe only while it is never mutated here.
        """
        if self.running:
            raise SchedulerAlreadyRunningError

        # Merge 'main.'-prefixed global options with explicit overrides.
        config = combine_opts(gconfig, 'main.', options)
        self._config = config

        self.misfire_grace_time = int(config.pop('misfire_grace_time', 1))
        self.coalesce = asbool(config.pop('coalesce', True))
        self.daemonic = asbool(config.pop('daemonic', True))
        self.standalone = asbool(config.pop('standalone', False))

        # Resolve timezone: a named tz string, a tzinfo object, or local.
        timezone = config.pop('timezone', None)
        self.timezone = gettz(timezone) if isinstance(timezone, basestring) else timezone or tzlocal()

        # config threadpool (job execution workers)
        threadpool_opts = combine_opts(config, 'threadpool.')
        self._worker_threadpool = ThreadPool(**threadpool_opts)

        # config jobstore (persistent job definitions)
        jobstore_opts = combine_opts(config, 'jobstore.')
        self._job_store = SQLAlchemyJobStore(**jobstore_opts)

        # config syncqueue (job-change notifications via HotQueue)
        syncqueue_opts = combine_opts(config, 'syncqueue.')
        self._changes_queue = HotQueue(**syncqueue_opts)

        # config statstore (execution reports)
        statstore_opts = combine_opts(config, 'statstore.')
        self._stat_store = JobReporter(**statstore_opts)

        # config statqueue (job statistics via HotQueue)
        statqueue_opts = combine_opts(config, 'statqueue.')
        self._stats_queue = HotQueue(**statqueue_opts)

        # configure logger
        self.logger = logging.getLogger(__name__)
        self.logger.setLevel(logging.DEBUG)
Beispiel #34
0
def acknack(reservation_uuid):
    """Ack or nack the reservation identified by *reservation_uuid*.

    A 'nack' path segment or a PUT request selects nack; otherwise ack.
    Returns (json_message, 200) on success or ('', 400) when the
    reservation was not found.
    """
    # NOTE(review): HotQueue is constructed without a queue name; this
    # relies on the local HotQueue fork's ack/nack API -- confirm.
    hq = HotQueue(host="localhost", port=6379, db=0)

    # Nack when requested explicitly via the path or by using PUT.
    nack = 'nack' in request.path or 'PUT' in request.method

    msg = hq.nack(reservation_uuid) if nack else hq.ack(reservation_uuid)

    if msg:
        # (Removed a dead "hqmessage = HQMessage()" that was immediately
        # overwritten by msg.)
        return msg.to_json(), 200
    return '', 400
Beispiel #35
0
def do_with_error(path):
    """Re-enqueue CSV rows whose (1-based) line numbers are listed in *path*.

    Used to retry failed routing requests: each listed row from the
    source CSV is pushed back onto the amap_busline_route queue as
    (num, "lng,lat", "lng,lat").
    """
    redis_config = config["redis"]
    queue = HotQueue(name="amap_busline_route", host=redis_config["host"],
                     port=redis_config["port"], db=redis_config["db"])
    with open(r"C:\Users\29625\Desktop\Trans_xtoy_1.csv", "r") as f_1:
        reader = csv.reader(f_1)
        reader = list(reader)  # materialise so rows can be indexed by number
        with open(path, 'r') as f:
            lines = f.readlines()
        for line in lines:
            # Each error line holds a 1-based row number into the CSV.
            num = line.strip("\n")
            row = reader[int(num)-1]
            # Columns: id, origin lng/lat, destination lng/lat.
            origin_lng = row[1]
            origin_lng = origin_lng.strip(" ")
            origin_lat = row[2]
            origin_lat = origin_lat.strip(" ")
            destination_lng = row[3]
            destination_lng = destination_lng.strip(" ")
            destination_lat = row[4]
            destination_lat = destination_lat.strip(" ")
            origin = origin_lng + "," + origin_lat
            destination = destination_lng + "," + destination_lat
            print origin + " " + destination
            queue.put((num, origin, destination))
Beispiel #36
0
    def init_app(self, app):
        """Read REDISMQ_* settings from *app* and create the HotQueue.

        Also registers this extension instance under
        app.extensions['redismq'].
        """
        name = app.config.setdefault('REDISMQ_NAME', 'mq')
        host = app.config.setdefault('REDISMQ_HOST', 'localhost')
        port = app.config.setdefault('REDISMQ_PORT', 6379)
        db = app.config.setdefault('REDISMQ_DB', 0)
        password = app.config.setdefault('REDISMQ_PASSWORD', None)
        # (Removed two no-op app.config.get(...) expressions whose values
        # were discarded; block/timeout are read where they are used.)

        if not hasattr(app, 'extensions'):
            app.extensions = {}
        app.extensions['redismq'] = self

        self.app = app
        self._hq = HotQueue(name, host=host, port=port, db=db,
                            password=password)
Beispiel #37
0
def init_workers(num):
    """Spawn the scraping pipeline: one scout, *num* workers, num-1 parsers.

    The main process doubles as the final parser; blocks until every
    child has joined, then clears both queues.
    """
    # NOTE(review): scout/worker/parser globals are rebound from callables
    # to Process objects below -- fragile on repeated calls; confirm.
    global scout, worker, parser
    
    # q contains list of threads, scraped by the scout
    q = HotQueue(home)

    # qf contains list of tuples of len 3
    # contains source code of scraped pages and metadata, for parser
    # (subforum name, link of post, post source)
    qf = HotQueue(home + "_sources")
    
    # boolean variable that describes whether the Parser is training
    # (only used for GenericParser)
    qt = Value('b', False)
    
    workers = []
    scout = multiprocessing.Process(target=scout, args=(q,))
    scout.start()
    logger.info("Starting scout process %s" % str(scout.pid))
    workers.append(scout)
    # Give the scout a head start before consumers attach.
    sleep(15)
    for i in xrange(num):
        tmp = multiprocessing.Process(target=worker, args=(q,qf,qt,delay, delay_range))
        tmp.start()
        logger.info("Starting worker process %s" % str(tmp.pid))
        workers.append(tmp)
    for i in xrange(num-1):
        tmp = multiprocessing.Process(target=parser, args=(qf,qt))
        tmp.start()
        logger.info("Starting parser process %s" % str(tmp.pid))
        workers.append(tmp)
    # Run the final parser inline so the main process does useful work.
    parser(qf,qt, main=True)
    logger.info("Using main process as parser")
    for worker in workers:
        worker.join()
    logger.info("All done.")
    q.clear()
    qf.clear()
Beispiel #38
0
def main():
	"""Voice-controlled alarm setter.

	Obtains text from a --string option, a prerecorded --filename, or a
	fresh microphone recording, then parses an alarm time and artist URI
	from it and pushes them onto the wekkie time/uri queues.
	"""
	parser = OptionParser(usage="usage: %prog [options]",
                          version="%prog 1.0")
	parser.add_option("-f", "--filename",
                      dest="filename",
                      help="Use prerecorded audio file")
	parser.add_option("-s", "--string",
                      dest="string",
                      help="Use custom string",)
    
	(options, args) = parser.parse_args()

	text = None
	if options.string:
		text = options.string
	elif options.filename:
		text = googlevoice.recognize(options.filename)
	else:
		# Record audio:
		var = raw_input("Druk <Enter> om de opname te starten: ")
		# Random, URL-safe temporary filename for the recording.
		filename = "tmp/recordings/%s.wav" % base64.b64encode(os.urandom(6)).replace("/", "")
		soundrecorder.record(filename)
		text = googlevoice.recognize(filename)
	
	if text:
		text = text.lower()
		time = parsetime.parse(text)
		artist_uri = parseartist.parse(text)

		# TODO: read Spotify user from properties
		default_uri = "spotify:user:bertspaan:starred"
		if not artist_uri:			
			artist_uri = default_uri

		print "Alarm:", time, "with", artist_uri

		# Hand the parsed alarm off to the player via HotQueues.
		if time:
			queue_time = HotQueue("wekkie:time")
			queue_time.put(time)
		if artist_uri:			
			queue_spotify_uri = HotQueue("wekkie:uri")
			queue_spotify_uri.put(artist_uri)
Beispiel #39
0
def main(argv) :
	"""Register or unregister a feature server instance.

	argv: [prog, featureName, featureStatus ("ON"/other), featureServer (port)].
	ON: enqueue the server URL and bump the feature's instance counter.
	Otherwise: pop entries until this server's URL is found, decrement
	the counter, and re-queue any other entries popped along the way.
	"""
	global q

	featureName = argv[1]
	featureStatus = argv[2]
	featureServer = argv[3]

	q = HotQueue(('server_'+featureName), host="localhost", port=6379, db=0)
	r_server = redis.Redis('localhost')

	
	if featureStatus == "ON":
		q.put('http://127.0.0.1:'+featureServer)
		# Track how many instances of this feature are registered.
		if(not r_server.exists(featureName)):
			r_server.set(featureName, '1')
		else:
			r_server.incr(featureName)
		print 'set key value is: ' + r_server.get(featureName) 
	else :
		# Drain until this server's URL turns up, keeping the others.
		k = list()	
		while(1) :
			x = q.get()
			print "Queue get : ", x
			if x is None : 
				break;
			else :
				if x == ('http://127.0.0.1:'+featureServer) :
					print("Removing : ",x)
					if(r_server.get(featureName) > 0):
						r_server.decr(featureName)
					print 'set key value is: ' + r_server.get(featureName) 
					break
				else:
					k.append(x)
		# Restore the entries that were not the target.
		while len(k) != 0:
			x = k.pop(0)
			q.put(x)
	printQueue()
	print "Queue name : ",('server_'+featureName)
Beispiel #40
0
DEBUG = config.getboolean('Consumer', 'debug')
TERM_OUT = config.getboolean('Consumer', 'term_out')
mcserver = config.get('Memcache', 'server')
mckey = config.get('Memcache', 'key')
statkey = config.get('Memcache', 'statkey')

# Max number of greenlet workers
MAX_NUM_POOL_WORKERS = 75

# item list of stuff we want immediately updated in stats
fastupdate = [34, 35, 36, 37, 38, 39, 40, 29668]

# use a greenlet pool to cap the number of workers at a reasonable level
greenlet_pool = Pool(size=MAX_NUM_POOL_WORKERS)

queue = HotQueue("emdr-messages", host=redisdb, port=6379, db=0)
statqueue = HotQueue("e43-stats", host=redisdb, port=6379, db=0)

# Handle DBs without password
if not dbpass:
    # Connect without password
    dbcon = psycopg2.connect("host=" + dbhost + " user="******" dbname=" + dbname + " port=" + dbport)
else:
    dbcon = psycopg2.connect("host=" + dbhost + " user="******" password="******" dbname=" + dbname +
                             " port=" + dbport)

#connect to memcache
mc = pylibmc.Client([mcserver],
                    binary=True,
from hotqueue import HotQueue
import redis
import os
from uuid import uuid4

# Redis connection settings come from the environment; fail fast if unset.
redis_ip = os.environ.get('REDIS_IP')
if not redis_ip:
    raise Exception()

app = Flask(__name__)
# Three Redis handles on separate dbs -- presumably db=1 for job records,
# db=0 for the dataset (decoded to str), db=2 for the work queue; confirm
# against the route handlers.
rd = redis.StrictRedis(host=redis_ip, port=6379, db=1)
rd_dataset = redis.StrictRedis(host=redis_ip,
                               port=6379,
                               db=0,
                               decode_responses=True)
q = HotQueue('queue', host=redis_ip, port=6379, db=2)


@app.route('/', methods=['GET'])
def instructions():
    return """
    Try these routes:
    
    /                                       informational
    /run                                    (GET) job instructions
    /run/<stats>/<specific_stat>            (POST) submit job
    /jobs                                   get list of past jobs
    /jobs/<UUID>                            get job status
    /delete                                 (GET) delete instructions
    /delete                                 (DELETE) delete job
    /download/<UUID>                        download img from job 
Beispiel #42
0
# Create the target database if needed, then reconnect to it.
cursor = db1.cursor()
sql = 'CREATE DATABASE  IF NOT EXISTS ' + database_name
print(sql)
cursor.execute(sql)
db1.commit()
db1.close()

db2 = MySQLdb.connect(
    host=dbhost,  # your host
    user=dbuser,  # username
    passwd=dbpass,  # password
    db=database_name)  # name of the database
# Create a Cursor object to execute queries.
cur = db2.cursor()
sql = """CREATE TABLE IF NOT EXISTS  USERS (
         NAME  CHAR(20) NOT NULL,
         EMAIL  CHAR(50) )"""

# Select data from table using SQL query.
cur.execute(sql)

# Consume user-registration messages and insert them into USERS.
# NOTE(review): values are interpolated straight into the SQL string --
# SQL-injection risk if queue payloads are untrusted; parameterize.
# NOTE(review): queue.consume() blocks by default, so the commit/close
# below appear unreachable in normal operation -- confirm.
queue = HotQueue("myqueue", host=redis_host, port=6379, db=0)
for item in queue.consume():
    print(item)
    js = json.loads(item)
    sql = "INSERT INTO USERS(NAME,EMAIL) VALUES ('%s', '%s' )" % (
        js['username'], js['mail'])
    cur.execute(sql)
db2.commit()
db2.close()
Beispiel #43
0
class LocalScheduler(object):
    """In-process job scheduler (Python 2 code).

    Runs a main scheduling loop in a thread (or standalone), executes due
    jobs on a worker thread pool, consumes job add/update/delete events
    from a Redis-backed HotQueue, and reports run statistics through a
    second HotQueue to a JobReporter.
    """

    _stopped = False
    _main_thread = None

    #init worker thread pool,reporter thread,updater thread
    def __init__(self, gconfig={}, **options):
        # NOTE(review): mutable default argument `gconfig={}` is shared
        # across calls; harmless only if configure() never mutates it -- verify.
        self._wakeup = Event()
        self._job_store = None
        self._stat_store = None
        self._jobs     = {}
        self.logger   = None
        self._stats_queue = None
        self._changes_queue = None

        # Per-job locks plus one lock guarding the jobs dict itself.
        self._jobs_locks   = {}
        self._jobs_lock = Lock()
        self._log_queue_lock = Lock()


        self._worker_threadpool = None
        self._reporter_thread   = None
        self._main_thread       = None
        self._updater_thread    = None
        self._monitor_thread    = None

        self.configure(gconfig, **options)

    def configure(self, gconfig={}, **options):
        """Build all collaborators from config; raises
        SchedulerAlreadyRunningError if the scheduler is already running."""
        if self.running:
            raise SchedulerAlreadyRunningError

        # Options prefixed with 'main.' are scheduler-level settings.
        config = combine_opts(gconfig, 'main.', options)
        self._config = config

        self.misfire_grace_time = int(config.pop('misfire_grace_time', 1))
        self.coalesce = asbool(config.pop('coalesce', True))
        self.daemonic = asbool(config.pop('daemonic', True))
        self.standalone = asbool(config.pop('standalone', False))

        # Accept either a tz name string or a tzinfo object; default local.
        timezone = config.pop('timezone', None)
        self.timezone = gettz(timezone) if isinstance(timezone, basestring) else timezone or tzlocal()

        # config threadpool
        threadpool_opts = combine_opts(config, 'threadpool.')
        self._worker_threadpool = ThreadPool(**threadpool_opts)

        # config jobstore
        jobstore_opts = combine_opts(config, 'jobstore.')
        self._job_store = SQLAlchemyJobStore(**jobstore_opts)

        # config syncqueue (job change events arrive here)
        syncqueue_opts = combine_opts(config, 'syncqueue.')
        self._changes_queue = HotQueue(**syncqueue_opts)

        # config statstore
        statstore_opts = combine_opts(config, 'statstore.')
        self._stat_store = JobReporter(**statstore_opts)

        # config statqueue (run statistics are published here)
        statqueue_opts = combine_opts(config, 'statqueue.')
        self._stats_queue = HotQueue(**statqueue_opts)

        # configure logger
        self.logger = logging.getLogger(__name__)
        self.logger.setLevel(logging.DEBUG)



    def start(self):
        """Load jobs and start the main/update/stat threads, or run the
        main loop inline when standalone=True."""
        if self.running:
            raise SchedulerAlreadyRunningError

        self.load_jobs()

        self._stopped = False

        if self.standalone:
            self._main_loop()
        else:
            self._main_thread = Thread(target = self._main_loop, name = 'main')
            self._main_thread.setDaemon(self.daemonic)
            self._main_thread.start()
            print 'main thread is startted'

            self._updater_thread = Thread(target = self._sync_changes, name = 'update')
            self._updater_thread.setDaemon(self.daemonic)
            self._updater_thread.start()
            print 'update thread is started'

            self._stater_thread = Thread(target = self._stat_runs, name = 'stat')
            self._stater_thread.setDaemon(self.daemonic)
            self._stater_thread.start()
            print 'stat thread is started'

    def shutdown(self, shutdown_threadpool=True, close_jobstore=True):
        """Stop the scheduler, wake the main loop so it can exit, and
        optionally tear down the thread pool and job store."""
        if not self.running:
            return 
        self._stopped = True
        self._wakeup.set()

        if shutdown_threadpool:
            self._worker_threadpool.shutdown()

        if self._main_thread:
            self._main_thread.join()

        if close_jobstore:
            self._job_store.close()

    @property
    def running(self):
        # Running means: not flagged stopped AND the main thread exists
        # and is still alive.
        return not self._stopped and self._main_thread and self._main_thread.isAlive()
    

    def now(self):
        """Current time in the scheduler's configured timezone."""
        return datetime.now(self.timezone)

    def set_jobs(self, jobs):
        """Replace/insert jobs in the pool, computing each one's next run."""
        now = self.now()
        with self._jobs_lock:
            for job in jobs:
                job.compute_next_run_time(now)
                self._jobs[job.id] = job
                self._jobs_locks[job.id] = Lock()

    # loads jobs pool from db
    def load_jobs(self):
        jobs = self._job_store.load_jobs()
        now = self.now()
        with self._jobs_lock:
            for job in jobs:
                self._add_job(job)

    def _add_job(self, job):
        """Add one job to the pool if it has a future run time.

        Returns True on success, False on any error."""
        try:
            now = self.now()
            job.compute_next_run_time(now)
            # Jobs with no next run time are never scheduled.
            if job.next_run_time:
                self._jobs[job.id] = job
                self._jobs_locks[job.id] = Lock()
        except:
            # NOTE(review): uses module-level `logger` here but `self.logger`
            # elsewhere in this class -- confirm which is intended.
            logger.exception("add job(id=%d, name=%s) failed" % (job.id, job.name))
            return False

        return True

    def _remove_job(self, job_id):
        """Drop a job and its lock from the pool; returns True/False."""
        try:
            with self._jobs_locks[job_id]:
                del self._jobs[job_id]
            del self._jobs_locks[job_id]
        except:
            logger.exception("remove job(id=%d) failed" % (job_id))
            return False

        return True

    def _main_loop(self):
        """Main scheduling loop: process due jobs, then sleep until the
        next wakeup time (or until another thread sets the wakeup event)."""
        print "get into the main loop"
        self._wakeup.clear()
        while not self._stopped:
            print 'check again'
            now = self.now()
            next_wakeup_time = self._process_jobs(now)
            print "next_wakeup_time:", next_wakeup_time
            if next_wakeup_time is not None:
                wait_seconds = time_difference(next_wakeup_time, now)
                self._wakeup.wait(wait_seconds)
                self._wakeup.clear()
            else:
                # No scheduled job: block until something wakes us up.
                self._wakeup.wait()
                self._wakeup.clear()
        print "get out the main loop"

    def _process_jobs(self, now):
        """Submit every due job to the worker pool and return the earliest
        next run time across the pool (None if nothing is scheduled)."""
        next_wakeup_time = None
        print self._jobs

        for job in self._jobs.values():
            run_time_list = job.get_run_times(now)

            if run_time_list:
                self._worker_threadpool.submit(self._run_job, job, run_time_list)

                # +1 microsecond so the run we just submitted is excluded.
                with self._jobs_locks[job.id]:
                    next_run_time = job.compute_next_run_time(now + timedelta(microseconds=1))

                if not next_run_time:
                    self._remove_job(job.id)

            print 'job.next_run_time:', job.id,  job.next_run_time
            if not next_wakeup_time:
                next_wakeup_time = job.next_run_time
            elif job.next_run_time:
                next_wakeup_time = min(next_wakeup_time, job.next_run_time)

        return next_wakeup_time


    def _run_job(self, job, run_time_list):
        """Execute a job for each pending run time, reporting missed /
        running / succed / failed status through the stats queue."""
        for run_time in run_time_list:
            now = self.now()
            difference = now - run_time
            grace_time = timedelta(seconds=self.misfire_grace_time)
            if difference > grace_time:
                # Too late to run this occurrence: record it as missed.
                self.logger.warning('Run time of job "%s" was missed by %s', job, difference)
                self._put_stat(job.id, 'missed', next_run_time=job.next_run_time)
            else:
                try:
                    # maybe add a timeout handle by join thread. 
                    # t = Thread(job.run); t.start(); t.join(timeout)
                    # refer: http://augustwu.iteye.com/blog/554827
                    self._put_stat(job.id, 'running', next_run_time=job.next_run_time)
                    result = job.run()
                    print 'job runned success'
                    cost = self.now() - now
                    self._put_stat(job.id, 'succed', cost=cost)

                except:
                    self.logger.exception('Job "%s" raised an exception', job)
                    cost = self.now() - now
                    self._put_stat(job.id, 'failed', cost=cost)

            # coalesce=True: run at most one occurrence per batch.
            if self.coalesce:
                break


    def _put_stat(self, job_id, status, next_run_time=None, cost=timedelta(seconds=0)):
        """Publish one status message onto the stats queue; datetimes are
        pickled because HotQueue messages must be serializable."""
        msg = {
            'time': pickle.dumps(self.now()),
            'job_id': job_id,
            'status': status,
            'next_run_time':pickle.dumps(next_run_time),
            # NOTE(review): total_seconds() already includes microseconds,
            # so adding cost.microseconds/1000000 looks double-counted (and
            # under Python 2 this division is integer) -- confirm intent.
            'cost': cost.total_seconds() + cost.microseconds / 1000000
        }
        try:
            self._stats_queue.put(msg)
        except:
            logger.exception('failed to put stat item ' + json.dumps(msg))

    def _stat_runs(self):
        """Stat thread: drain the stats queue and forward to the reporter."""
        while not self._stopped:
            try: 
                msg = self._stats_queue.get(block=True, timeout=1)
            except:
                logger.exception('get stat item failed')
                msg = None

            if not msg:
                continue

            try:
                # Undo the pickling done by _put_stat.
                msg["time"] = pickle.loads(msg['time'])
                msg["next_run_time"] = pickle.loads(msg['next_run_time'])
                self._stat_store.report(**msg)
            except:
                traceback.print_exc()
                logger.exception('report job status failed ' + pickle.dumps(msg))

    def _sync_changes(self):
        """Update thread: apply queued job changes and wake the main loop
        after draining (or after max_items_once changes)."""
        count = 0
        max_items_once = int(self._config.pop('max_items_once', 0))
        while not self._stopped:
            try:
                msg = self._changes_queue.get(block=True, timeout=1)
            except:
                logger.exception('get sync item failed')
                msg = None

            if msg:
                opt_type = msg['opt_type']
                job_id   = msg['job_id']
                if job_id > 0 and isinstance(opt_type, basestring):
                    try:
                        self._apply_change(opt_type, job_id)
                    except:
                        pass
                    self.logger.info('apply change "%s" for job(%d)', opt_type, job_id)
                    count += 1

            # Wake the main loop once the queue is drained or the batch
            # limit is hit, so new next_run_times take effect promptly.
            if not msg or (max_items_once > 0 and count > max_items_once):
                if count > 0:
                    self.logger.info('wakeup main thread by sync thread with %d updates' % count)
                    self._wakeup.set()
                    count = 0


    def _apply_change(self, opt_type, job_id):
            """Apply one add/update/delete/pause change to the jobs pool."""
            if opt_type == 'add' or opt_type == 'update':
                try:
                    job = self._job_store.get_job(job_id)
                except Exception as e:
                    # NOTE(review): if get_job raises, `job` is unbound and
                    # the `if job:` below raises NameError -- confirm.
                    self.logger.exception(e)

                if job:
                    if opt_type == 'add':
                        if not self._jobs.has_key(job_id):
                            self._add_job(job)
                        else:
                            logger.exception("apply channge '%s job(id=%d, name=%s)' failed" % (opt_type, job.id, job.name))
                    else:
                        #!todo check if compute next_run_time again is necessary
                        now = self.now()
                        job.compute_next_run_time(now)
                        with self._jobs_locks[job_id]:
                            self._jobs[job_id] = job

            elif opt_type == 'delete' or opt_type == 'pause':
                self._remove_job(job_id)
            else:
                self.logger.exception('opt %s job(%d) to jobs pool is not supported' % (opt_type, job_id))
Beispiel #44
0
 def setUp(self):
     """Prepare a fresh HotQueue instance for each test."""
     queue_name = 'testqueue'
     self.queue = HotQueue(queue_name)
Beispiel #45
0
class JukeboxUI(cmd.Cmd, threading.Thread):
	"""Interactive command-line UI for a Spotify jukebox (Python 2).

	Runs as a thread: first consumes play requests from the
	'wekkie:play' HotQueue, then drops into a cmd.Cmd command loop.
	"""

	prompt = "jukebox> "
	
	def __init__(self, jukebox):
		cmd.Cmd.__init__(self)
		threading.Thread.__init__(self)
		self.jukebox = jukebox
		self.playlist = None
		self.track = None
		# False = no search done yet; None = search in progress.
		self.results = False
		
		# Remote play requests (spotify URIs) arrive on this queue.
		self.queue_play = HotQueue("wekkie:play")
	
	def run(self):
		"""Consume play-request URIs forever; an empty message stops playback."""
		container_loaded.wait()
		container_loaded.clear()
		
		print "Starting consume loop..."
		for uri in self.queue_play.consume():
			if uri:
				print "Try to play", uri
				link = Link.from_string(uri)
				if link.type() == Link.LINK_TRACK:
					self.jukebox.load_track(link.as_track())
					self.jukebox.play()
				elif link.type() == Link.LINK_ARTIST:
					# Artist link: play a random top-hit track.
					browser = ArtistBrowser(link.as_artist())
					while not browser.is_loaded():
						time.sleep(0.1)
					toptracks = browser.tophit_tracks()
					print toptracks
					#random.shuffle(toptracks)
					track = choice(toptracks)
					self.jukebox.load_track(track)
					self.jukebox.play()
				elif link.type() == 7: #Link.LINK_PLAYLIST:
					# Playlist link: pick a random available starred track.
					playlist = self.jukebox.starred
					track = choice(playlist)

					while track.availability() != 1:
						track = choice(playlist)

					self.jukebox.load_track(track)
					self.jukebox.play()
					
			else:
				self.jukebox.stop()
				
		# NOTE(review): consume() blocks indefinitely, so this command
		# loop appears unreachable -- confirm intended control flow.
		try:
			self.cmdloop()
		finally:
			self.do_quit(None)

	def do_logout(self, line):
		self.jukebox.session.logout()

	def do_quit(self, line):
		self.jukebox.stop()
		self.jukebox.disconnect()
		print "Goodbye!"
		# Returning True terminates the cmd.Cmd loop.
		return True

	def do_list(self, line):
		""" List the playlists, or the contents of a playlist """
		if not line:
			for i, p in enumerate(self.jukebox.ctr):
				if p.is_loaded():
					print "%3d %s" % (i, p.name())
				else:
					print "%3d %s" % (i, "loading...")
			print "%3d Starred tracks" % (i + 1,)

		else:
			try:
				p = int(line)
			except ValueError:
				print "that's not a number!"
				return
			if p < 0 or p > len(self.jukebox.ctr):
				print "That's out of range!"
				return
			print "Listing playlist #%d" % p
			# Index == len(ctr) means the virtual "starred" playlist.
			if p < len(self.jukebox.ctr):
				playlist = self.jukebox.ctr[p]
			else:
				playlist = self.jukebox.starred
			for i, t in enumerate(playlist):
				if t.is_loaded():
					print "%3d %s - %s [%s]" % (
						i, t.artists()[0].name(), t.name(),
						self.pretty_duration(t.duration()))
				else:
					print "%3d %s" % (i, "loading...")

	def pretty_duration(self, milliseconds):
		"""Format a duration in milliseconds as MM:SS."""
		seconds = milliseconds // 1000
		minutes = seconds // 60
		seconds = seconds % 60
		duration = '%02d:%02d' % (minutes, seconds)
		return duration

	def do_play(self, line):
		"""Play: no args resumes; a spotify track URL or
		'[playlist] [track]' / '[playlist]' indices load then play."""
		if not line:
			self.jukebox.play()
			return
		if line.startswith("spotify:"):
			# spotify url
			l = Link.from_string(line)
			if not l.type() == Link.LINK_TRACK:
				print "You can only play tracks!"
				return
			self.jukebox.load_track(l.as_track())
		else:
			try:
				playlist, track = map(int, line.split(' ', 1))
				self.jukebox.load(playlist, track)
			except ValueError:
				try:
					playlist = int(line)
					self.jukebox.load_playlist(playlist)
				except ValueError:
					print("Usage: play [track_link] | "
						  "[playlist] [track] | [playlist]")
					return
		self.jukebox.play()

	def do_browse(self, line):
		"""Browse an album or artist by spotify link."""
		if not line or not line.startswith("spotify:"):
			print "Invalid id provided"
			return
		l = Link.from_string(line)
		if not l.type() in [Link.LINK_ALBUM, Link.LINK_ARTIST]:
			print "You can only browse albums and artists"
			return
		def browse_finished(browser, userdata):
			print "Browse finished, %s" % (userdata)
		self.jukebox.browse(l, browse_finished)

	def print_search_results(self):
		"""Dump artists, albums and tracks of the last search."""
		print "Artists:"
		for a in self.results.artists():
			print "	", Link.from_artist(a), a.name()
		print "Albums:"
		for a in self.results.albums():
			print "	", Link.from_album(a), a.name()
		print "Tracks:"
		for a in self.results.tracks():
			print "	", Link.from_track(a, 0), a.name()
		print self.results.total_tracks() - len(self.results.tracks()), \
			"Tracks not shown"

	def do_search(self, line):
		"""Search: no args reports on the current search; otherwise start
		an asynchronous search whose results print when they arrive."""
		if not line:
			if self.results is False:
				print "No search is in progress"
			elif self.results is None:
				print "Searching is in progress"
			else:
				print "Artists:"
				for a in self.results.artists():
					print "	", Link.from_artist(a), a.name()
				print "Albums:"
				for a in self.results.albums():
					print "	", Link.from_album(a), a.name()
				print "Tracks:"
				for a in self.results.tracks():
					print "	", Link.from_track(a, 0), a.name()
				print self.results.total_tracks() - \
						len(self.results.tracks()), "Tracks not shown"
		else:
			line = line.decode('utf-8')
			self.results = None
			def search_finished(results, userdata):
				print "\nSearch results received"
				self.results = results
				self.print_search_results()
			self.jukebox.search(line, search_finished)

	def do_queue(self, line):
		"""Queue a track: no args lists the queue; 'playlist track' adds."""
		if not line:
			for playlist, track in self.jukebox._queue:
				print playlist, track
			return
		try:
			playlist, track = map(int, line.split(' ', 1))
		except ValueError:
			print "Usage: play playlist track"
			return
		self.jukebox.queue(playlist, track)

	def do_stop(self, line):
		self.jukebox.stop()

	def do_next(self, line):
		self.jukebox.next()

	def emptyline(self):
		# Override cmd.Cmd default (repeat last command) with a no-op.
		pass

	def do_watch(self, line):
		"""Watch a playlist for add/move/remove notifications."""
		if not line:
			print """Usage: watch [playlist]
You will be notified when tracks are added, moved or removed from the
playlist."""
		else:
			try:
				p = int(line)
			except ValueError:
				print "That's not a number!"
				return
			if p < 0 or p >= len(self.jukebox.ctr):
				print "That's out of range!"
				return
			self.jukebox.watch(self.jukebox.ctr[p])

	def do_unwatch(self, line):
		"""Stop watching a playlist."""
		if not line:
			print "Usage: unwatch [playlist]"
		else:
			try:
				p = int(line)
			except ValueError:
				print "That's not a number!"
				return
			if p < 0 or p >= len(self.jukebox.ctr):
				print "That's out of range!"
				return
			self.jukebox.watch(self.jukebox.ctr[p], True)

	def do_toplist(self, line):
		"""Show a toplist, e.g. 'toplist tracks GB'."""
		usage = "Usage: toplist (albums|artists|tracks) (GB|FR|..|all|current)"
		if not line:
			print usage
		else:
			args = line.split(' ')
			if len(args) != 2:
				print usage
			else:
				self.jukebox.toplist(*args)

	def do_shell(self, line):
		self.jukebox.shell()

	def do_add_new_playlist(self, line):
		"""Create a new playlist with the given name."""
		if not line:
			print "Usage: add_new_playlist <name>"
		else:
			new_playlist = self.jukebox.ctr.add_new_playlist(
				line.decode('utf-8'))

	def do_add_to_playlist(self, line):
		"""Insert search-result tracks into a playlist at a position."""
		usage = "Usage: add_to_playlist <playlist_index> <insert_point>" + \
				" <search_result_indecies>"
		if not line:
			print usage
			return
		args = line.split(' ')
		if len(args) < 3:
			print usage
		else:
			if not self.results:
				print "No search results"
			else:
				index = int(args.pop(0))
				insert = int(args.pop(0))
				artists = self.results.artists()
				tracks = self.results.tracks()
				for i in args:
					for a in tracks[int(i)].artists():
						print u'{0}. {1} - {2} '.format(
							i, a.name(), tracks[int(i)].name())
				print u'adding them to {0} '.format(
					self.jukebox.ctr[index].name())
				self.jukebox.ctr[index].add_tracks(
					insert, [tracks[int(i)] for i in args])

	# Command aliases.
	do_ls = do_list
	do_EOF = do_quit
Beispiel #46
0
def clear_queue():
    """Empty both Redis-backed queues and delete the saved crawl state.

    Clears the main queue and its '<home>_sources' companion, then removes
    the pickled state file so the next run starts fresh.
    """
    q = HotQueue(home)
    qf = HotQueue(home + "_sources")
    q.clear()
    qf.clear()
    # On a fresh start the state file may not exist yet; ignore that
    # instead of crashing after the queues were already cleared.
    try:
        os.remove(pfile)
    except OSError:
        pass
Beispiel #47
0
# Per-site working directory and pickled-state filename (Python 2 script).
hdir = "./" + home
if save_files and not os.path.isdir(hdir):
    os.mkdir(hdir)
pfile = hdir[2:] + ".p"


# Resume from the pickled state if it exists; otherwise start fresh and
# clear both Redis queues so no stale work items survive.
try:
    with open(pfile, "r") as f:
        state = cPickle.load(f)
    if args.generic:
        generic = True
        P = GenericParser(name=home,save=True)
except:
    # no pickle file; fresh start
    state = [0, 0] 
    q = HotQueue(home)
    qf = HotQueue(home + "_sources")
    q.clear()
    qf.clear()
    if args.generic:
        generic = True
        P = GenericParser(name=home,save=False)

# --vbulletin overrides the parser choice regardless of resume state.
if args.vbulletin:
    vbulletin = True
    P = vBulletinParser()

init_logger()


# Normalize `home` into an absolute URL for later joins.
temp = urlparse.urljoin("http:////", home)
Beispiel #48
0
from hotqueue import HotQueue

from apscheduler.job import Job
from apscheduler.jobstores.sqlalchemy_store import SQLAlchemyJobStore
from apscheduler.triggers import IntervalTrigger
from apscheduler.scripts import HttpScript


if __name__ == '__main__':
    # Demo (Python 2): ensure job #3 exists in the SQLite job store,
    # change its interval, then notify the scheduler via the
    # 'job_changes' HotQueue so it reloads the job.

    script = HttpScript(url='http://baidu.com')
    local_tz = gettz('Asia/Chongqing')
    defaults = {'timezone': local_tz}
    trigger = IntervalTrigger(defaults, seconds=3)

    store = SQLAlchemyJobStore(url='sqlite:////tmp/task.db', tablename='tasks')
    job   = store.get_job(3)
    if not job:
        # First run: create the job record.
        job = Job(id=3, name='BaiduCheck', script=script, trigger=trigger)
        store.add_job(job)

    print job

    # Re-trigger every 5 seconds instead of 3 and persist the change.
    job.trigger = IntervalTrigger(defaults, seconds=5)
    store.update_job(job)

    # Tell the running scheduler to pick up the updated job.
    queue = HotQueue('job_changes')
    queue.put({'job_id':job.id, 'opt_type':'update'})


Beispiel #49
0
def start_server(host='localhost', port=8081, IPv6=False, timeout=60,
                  handler=ConnectionHandler):
    """Run an intercepting proxy (Python 2) that records sessions per role.

    Spawns a handler thread per accepted connection until num_roles hits
    zero, then dumps the recorded state/transitions to disk and pushes
    them onto a Redis HotQueue for downstream analysis.

    NOTE(review): indentation below mixes tabs and spaces; under Python 2
    tabs count as 8 spaces, so the block structure is fragile -- left
    byte-identical on purpose.
    """
    global stop
    global session
    global sequence
    global login
    global html_response,role_id,authorized,login_url,logout_url,num_roles
    final_sequence=[]
    final_session=[]
    if IPv6==True:
        soc_type=socket.AF_INET6
    else:
        soc_type=socket.AF_INET
    soc = socket.socket(soc_type)
    soc.bind((host, port))
    print "Serving on %s:%d."%(host, port)#debug
    soc.listen(0)
    print 'Please browse like an unauthorized user first'
    while num_roles!=0:
        thread.start_new_thread(handler, soc.accept()+(timeout,))
	#print " Real Stop=",str(stop)
	stop=False
	if stop:
        	#writedata()
		if '' in sequence:
           		del sequence['']
    		#print sequence
    		for x in sequence:
			if x in sequence[x]:
				if sequence[x]==[x]:
					pass
				else:
					sequence[x].remove(x)
		final_session.append(session)
		final_sequence.append(sequence)
		session={}
		sequence={}
                time.sleep(2)
        	ch=raw_input('Press y if you have more roles to explore...,press c to change login url')
        	if ch=='y' or ch=='c':
                 	stop=False
			authorized=True
			role_id+=1
		if ch=='c':
			login_url=raw_input('Please enter the login url:')
    			logout_url=raw_input('Please enter the logout url:') 
                 	
    
    print final_session
    print final_sequence
    print params
    print login
    # Persist every captured HTML response, then the state machine + creds.
    f=open("responses.txt",'w')
    for i in html_response:
	print i
    	f.write(i)
	f.write("\n\n")
	f.write(html_response[i])
    f.close()
    fileObject = open("db",'wb') 
    pickle.dump([state_set,transition_set,login],fileObject) 
    # Hand the captured model to the consumer via Redis.
    queue = HotQueue("myqueue", host="localhost", port=6379, db=0)
    queue.put(state_set) 
    queue.put(transition_set) 
    queue.put(login) 
    queue.put(login_url) 
    queue.put(logout_url)   
    fileObject.close()
Beispiel #50
0
 def get(self):
     """Pop one message from the Redis-backed queue and echo it."""
     work_queue = HotQueue("myqueue", host="localhost", port=6379, db=0)
     message = work_queue.get()
     self.write('Hello from tornado' + str(message))
Beispiel #51
0
class HotQueueTestCase(unittest.TestCase):
    """Unit tests for HotQueue against a live local Redis instance."""

    def setUp(self):
        """Create the queue instance before the test."""
        self.queue = HotQueue('testqueue')

    def tearDown(self):
        """Clear the queue after the test."""
        self.queue.clear()

    def test_arguments(self):
        """Test that HotQueue.__init__ accepts arguments correctly, and that
        the Redis key is correctly formed.
        """
        kwargs = {
            'name': "testqueue",
            'serializer': DummySerializer,
            'host': "localhost",
            'port': 6379,
            'db': 0}
        # Instantiate the HotQueue instance:
        self.queue = HotQueue(**kwargs)
        # Ensure that the properties of the instance are as expected:
        self.assertEqual(self.queue.name, kwargs['name'])
        self.assertEqual(self.queue.key, "hotqueue:%s" % kwargs['name'])
        self.assertEqual(self.queue.serializer, kwargs['serializer'])
        # Instantiate a HotQueue instance with only the required args:
        self.queue = HotQueue(kwargs['name'])
        # Ensure that the properties of the instance are as expected:
        self.assertEqual(self.queue.name, kwargs['name'])
        self.assertEqual(self.queue.key, "hotqueue:%s" % kwargs['name'])
        self.assertTrue(self.queue.serializer is pickle) # Defaults to cPickle
                                                         # or pickle, depending
                                                         # on the platform.

    def test_consume(self):
        """Test the consume generator method."""
        nums = [1, 2, 3, 4, 5, 6, 7, 8]
        # Test blocking with timeout:
        self.queue.put(*nums)
        msgs = []
        for msg in self.queue.consume(timeout=1):
            msgs.append(msg)
        self.assertEqual(msgs, nums)
        # Test non-blocking:
        self.queue.put(*nums)
        msgs = []
        for msg in self.queue.consume(block=False):
            msgs.append(msg)
        self.assertEqual(msgs, nums)

    def test_cleared(self):
        """Test for correct behaviour if the Redis list does not exist."""
        self.assertEqual(len(self.queue), 0)
        self.assertEqual(self.queue.get(), None)

    def test_get_order(self):
        """Test that messages are get in the same order they are put."""
        alphabet = ['abc', 'def', 'ghi', 'jkl', 'mno']
        self.queue.put(alphabet[0], alphabet[1], alphabet[2])
        self.queue.put(alphabet[3])
        self.queue.put(alphabet[4])
        msgs = []
        msgs.append(self.queue.get())
        msgs.append(self.queue.get())
        msgs.append(self.queue.get())
        msgs.append(self.queue.get())
        msgs.append(self.queue.get())
        self.assertEqual(msgs, alphabet)

    def test_length(self):
        """Test that the length of a queue is returned correctly."""
        self.queue.put('a message')
        self.queue.put('another message')
        self.assertEqual(len(self.queue), 2)

    def test_worker(self):
        """Test the worker decorator."""
        colors = ['blue', 'green', 'red', 'pink', 'black']
        # Test blocking with timeout:
        self.queue.put(*colors)
        msgs = []
        @self.queue.worker(timeout=1)
        def appender(msg):
            msgs.append(msg)
        appender()
        self.assertEqual(msgs, colors)
        # Test non-blocking:
        self.queue.put(*colors)
        msgs = []
        @self.queue.worker(block=False)
        def appender(msg):
            msgs.append(msg)
        appender()
        self.assertEqual(msgs, colors)
        # Test decorating a class method:
        self.queue.put(*colors)
        msgs = []
        class MyClass(object):
            @self.queue.worker(block=False)
            def appender(self, msg):
                msgs.append(msg)
        my_instance = MyClass()
        my_instance.appender()
        self.assertEqual(msgs, colors)

    def test_threaded(self):
        """Threaded test of put and consume methods."""
        msgs = []
        def put():
            for num in range(3):
                self.queue.put('message %d' % num)
                sleep(0.1)
        def consume():
            for msg in self.queue.consume(timeout=1):
                msgs.append(msg)
        putter = threading.Thread(target=put)
        consumer = threading.Thread(target=consume)
        putter.start()
        consumer.start()
        for thread in [putter, consumer]:
            thread.join()
        self.assertEqual(msgs, ["message 0", "message 1", "message 2"])

    def test_custom_serializer(self):
        """Test the use of a custom serializer and None as serializer."""
        msg = "my message"
        # Test using None: messages pass through as raw bytes.
        self.queue.serializer = None
        self.queue.put(msg)
        self.assertEqual(self.queue.get().decode(), msg)

        # Non-string payloads are stringified when serializer is None;
        # dict repr differs between Python 2 and 3.
        self.queue.put({"a": 1})
        expected = "{u'a': 1}" if sys.version_info[0] == 2 else "{'a': 1}"
        self.assertEqual(self.queue.get().decode(), expected)  # Should be a string
        # Test using DummySerializer:
        self.queue.serializer = DummySerializer
        self.queue.put(msg)
        self.assertEqual(self.queue.get().decode(), "foo")
Beispiel #52
0
# Pull connection settings from the INI-style config file.
redisdb = config.get('Redis', 'redishost')
apiServer = config.get('API', 'host')
mcserver = config.get('Memcache', 'server')
mckey = config.get('Memcache', 'key')
psource = config.get('Pricing', 'source')
ecapi = config.get('Pricing', 'echost')
e43api = config.get('Pricing', 'e43host')
psqlhost = config.get('Pricing', 'psqlhost')
psqlname = config.get('Pricing', 'psqlname')
psqluser = config.get('Pricing', 'psqluser')
psqlpass = config.get('Pricing', 'psqlpass')
psqlport = config.get('Pricing', 'psqlport')

# Upper bound on concurrently running worker greenlets.
MAX_NUM_POOL_WORKERS = 75

# Work items arrive on this Redis-backed queue.
queue = HotQueue("killboard-API", host=redisdb, port=6379, db=0)

logging.basicConfig(format='%(asctime)s:%(levelname)s:%(message)s', level=logging.DEBUG)

# use a greenlet pool to cap the number of workers at a reasonable level
greenlet_pool = Pool(size=MAX_NUM_POOL_WORKERS)

def main():
    """Consume messages forever, dispatching each to a pooled worker.

    spawn() blocks when the pool is full, providing natural backpressure.
    """
    for message in queue.consume():
        greenlet_pool.spawn(worker, message)

def priceCheck(typeID):
    typeID = int(typeID)
    logging.debug("Updating mineral prices for %i" % (typeID))
    mc = pylibmc.Client([mcserver], binary=True, behaviors={"tcp_nodelay": True, "ketama": True})
    if mckey + "price" + str(typeID) in mc:
Beispiel #53
0
class HotQueueTestCase(unittest.TestCase):
    """Unit tests for HotQueue against a live local Redis instance.

    Uses assertEqual throughout: the assertEquals alias has been
    deprecated since Python 3.2 and was removed in Python 3.12.
    """

    def setUp(self):
        """Create the queue instance before the test."""
        self.queue = HotQueue('testqueue')

    def tearDown(self):
        """Clear the queue after the test."""
        self.queue.clear()

    def test_arguments(self):
        """Test that HotQueue.__init__ accepts arguments correctly, and that
        the Redis key is correctly formed.
        """
        kwargs = {
            'name': "testqueue",
            'serializer': DummySerializer,
            'host': "localhost",
            'port': 6379,
            'db': 0}
        # Instantiate the HotQueue instance:
        self.queue = HotQueue(**kwargs)
        # Ensure that the properties of the instance are as expected:
        self.assertEqual(self.queue.name, kwargs['name'])
        self.assertEqual(self.queue.key, "hotqueue:%s" % kwargs['name'])
        self.assertEqual(self.queue.serializer, kwargs['serializer'])
        # Connection parameters are forwarded to the private redis client:
        self.assertEqual(self.queue._HotQueue__redis.host, kwargs['host'])
        self.assertEqual(self.queue._HotQueue__redis.host, kwargs['host'])
        self.assertEqual(self.queue._HotQueue__redis.port, kwargs['port'])
        self.assertEqual(self.queue._HotQueue__redis.db, kwargs['db'])
        # Instantiate a HotQueue instance with only the required args:
        self.queue = HotQueue(kwargs['name'])
        # Ensure that the properties of the instance are as expected:
        self.assertEqual(self.queue.name, kwargs['name'])
        self.assertEqual(self.queue.key, "hotqueue:%s" % kwargs['name'])
        self.assertTrue(self.queue.serializer is pickle) # Defaults to cPickle or
                                                         # pickle, depending on the
                                                         # platform.

    def test_consume(self):
        """Test the consume generator method."""
        nums = [1, 2, 3, 4, 5, 6, 7, 8]
        # Test blocking with timeout:
        self.queue.put(*nums)
        msgs = []
        for msg in self.queue.consume(timeout=1):
            msgs.append(msg)
        self.assertEqual(msgs, nums)
        # Test non-blocking:
        self.queue.put(*nums)
        msgs = []
        for msg in self.queue.consume(block=False):
            msgs.append(msg)
        self.assertEqual(msgs, nums)

    def test_cleared(self):
        """Test for correct behaviour if the Redis list does not exist."""
        self.assertEqual(len(self.queue), 0)
        self.assertEqual(self.queue.get(), None)

    def test_get_order(self):
        """Test that messages are get in the same order they are put."""
        alphabet = ['abc', 'def', 'ghi', 'jkl', 'mno']
        self.queue.put(alphabet[0], alphabet[1], alphabet[2])
        self.queue.put(alphabet[3])
        self.queue.put(alphabet[4])
        msgs = []
        msgs.append(self.queue.get())
        msgs.append(self.queue.get())
        msgs.append(self.queue.get())
        msgs.append(self.queue.get())
        msgs.append(self.queue.get())
        self.assertEqual(msgs, alphabet)

    def test_length(self):
        """Test that the length of a queue is returned correctly."""
        self.queue.put('a message')
        self.queue.put('another message')
        self.assertEqual(len(self.queue), 2)

    def test_worker(self):
        """Test the worker decorator."""
        colors = ['blue', 'green', 'red', 'pink', 'black']
        # Test blocking with timeout:
        self.queue.put(*colors)
        msgs = []
        @self.queue.worker(timeout=1)
        def appender(msg):
            msgs.append(msg)
        appender()
        self.assertEqual(msgs, colors)
        # Test non-blocking:
        self.queue.put(*colors)
        msgs = []
        @self.queue.worker(block=False)
        def appender(msg):
            msgs.append(msg)
        appender()
        self.assertEqual(msgs, colors)

    def test_threaded(self):
        """Threaded test of put and consume methods."""
        msgs = []
        def put():
            for num in range(3):
                self.queue.put('message %d' % num)
                sleep(0.1)
        def consume():
            for msg in self.queue.consume(timeout=1):
                msgs.append(msg)
        putter = threading.Thread(target=put)
        consumer = threading.Thread(target=consume)
        putter.start()
        consumer.start()
        for thread in [putter, consumer]:
            thread.join()
        self.assertEqual(msgs, ["message 0", "message 1", "message 2"])

    def test_custom_serializer(self):
        """Test the use of a custom serializer."""
        self.queue.serializer = DummySerializer
        phrase = "my message"
        self.queue.put(phrase)
        msg = self.queue.get()
        self.assertEqual(msg, phrase)
Beispiel #54
0
 def setUp(self):
     """Set up: instantiate the HotQueue under test."""
     name = 'testqueue'
     self.queue = HotQueue(name)
Beispiel #55
0
class HotQueueTestCase(unittest.TestCase):
    """Tests for a HotQueue variant with message reservations.

    NOTE(review): in this variant put() returns the message objects it
    stored, and get()/consume() yield objects exposing
    get_reservationId() and get_deliveryCount(); ack()/nack() confirm or
    requeue a reservation. Confirm against the queue implementation
    under test -- upstream hotqueue has no such API.
    """

    def setUp(self):
        """Create the queue instance before the test."""
        self.queue = HotQueue('testqueue')

    def tearDown(self):
        """Clear the queue after the test."""
        self.queue.clear()

    def test_arguments(self):
        """Test that HotQueue.__init__ accepts arguments correctly, and that
        the Redis key is correctly formed.
        """
        kwargs = {
            'name': "testqueue",
            'serializer': DummySerializer,
            'host': "localhost",
            'port': 6379,
            'db': 0}
        # Instantiate the HotQueue instance:
        self.queue = HotQueue(**kwargs)
        # Ensure that the properties of the instance are as expected:
        self.assertEqual(self.queue.name, kwargs['name'])
        self.assertEqual(self.queue.key, "hotqueue:%s" % kwargs['name'])
        self.assertEqual(self.queue.serializer, kwargs['serializer'])
        # Instantiate a HotQueue instance with only the required args:
        self.queue = HotQueue(kwargs['name'])
        # Ensure that the properties of the instance are as expected:
        self.assertEqual(self.queue.name, kwargs['name'])
        self.assertEqual(self.queue.key, "hotqueue:%s" % kwargs['name'])
        self.assertTrue(self.queue.serializer is pickle) # Defaults to cPickle
                                                         # or pickle, depending
                                                         # on the platform.

    def test_consume(self):
        """Test the consume generator method, blocking and non-blocking."""
        nums = [1, 2, 3, 4, 5, 6, 7, 8]
        # Test blocking with timeout:
        nums_added = self.queue.put(*nums)
        msgs = []
        for msg in self.queue.consume(timeout=1):
            msgs.append(msg)
        self.assertEqual(msgs, nums_added)
        # Acknowledge each reservation so the messages are not redelivered.
        for msg in msgs:
            self.queue.ack(msg.get_reservationId())
        # Test non-blocking:
        nums_added = self.queue.put(*nums)
        msgs = []
        for msg in self.queue.consume(block=False):
            msgs.append(msg)
        self.assertEqual(msgs, nums_added)
        for msg in msgs:
            self.queue.ack(msg.get_reservationId())

    def test_nack(self):
        """Test that nack() requeues a message and bumps its delivery count."""
        nums_added = self.queue.put("1")
        msg = self.queue.get() #1
        self.assertEqual(msg, nums_added[0])
        # Reject the first two deliveries; each nack should requeue the
        # message so the next get() returns it again.
        self.queue.nack(msg.get_reservationId())
        msg = self.queue.get() #2
        self.assertEqual(msg, nums_added[0])
        self.queue.nack(msg.get_reservationId())
        msg = self.queue.get() #3
        # NOTE(review): if this third get() returned None, the assertions
        # below the guard would still run and fail/raise -- the guard only
        # skips the ack.
        if msg:
            self.assertEqual(msg, nums_added[0])
            self.queue.ack(msg.get_reservationId())
        self.assertEqual(msg, nums_added[0])
        self.assertEqual(len(self.queue), 0)
        # Two nacks plus the final successful delivery == 3 deliveries.
        self.assertEqual(msg.get_deliveryCount(),3) #3

    def test_cleared(self):
        """Test for correct behaviour if the Redis list does not exist."""
        self.assertEqual(len(self.queue), 0)
        self.assertEqual(self.queue.get(), None)

    def test_get_order(self):
        """Test that messages are get in the same order they are put."""
        alphabet = ['abc', 'def', 'ghi', 'jkl', 'mno']
        msg_added = []
        # Mix one multi-message put with single puts; order must still hold.
        msg_added.extend(self.queue.put(alphabet[0], alphabet[1], alphabet[2]))
        msg_added.extend(self.queue.put(alphabet[3]))
        msg_added.extend(self.queue.put(alphabet[4]))
        msgs = []
        msgs.append(self.queue.get())
        msgs.append(self.queue.get())
        msgs.append(self.queue.get())
        msgs.append(self.queue.get())
        msgs.append(self.queue.get())
        self.assertEqual(msgs, msg_added)
        for msg in msgs:
            self.queue.ack(msg.get_reservationId())

    def test_length(self):
        """Test that the length of a queue is returned correctly."""
        self.queue.put('a message')
        self.queue.put('another message')
        self.assertEqual(len(self.queue), 2)

    def test_worker(self):
        """Test the worker decorator on plain functions and methods."""
        colors = ['blue', 'green', 'red', 'pink', 'black']
        # Test blocking with timeout:
        msg_added = self.queue.put(*colors)
        msgs = []
        @self.queue.worker(timeout=1)
        def appender(msg):
            msgs.append(msg)
        appender()
        self.assertEqual(msgs, msg_added)
        for msg in msgs:
            self.queue.ack(msg.get_reservationId())
        # Test non-blocking:
        msg_added = self.queue.put(*colors)
        msgs = []
        @self.queue.worker(block=False)
        def appender(msg):
            msgs.append(msg)
        appender()
        self.assertEqual(msgs, msg_added)
        for msg in msgs:
            self.queue.ack(msg.get_reservationId())
        # Test decorating a class method:
        msg_added = self.queue.put(*colors)
        msgs = []
        class MyClass(object):
            @self.queue.worker(block=False)
            def appender(self, msg):
                msgs.append(msg)
        my_instance = MyClass()
        my_instance.appender()
        self.assertEqual(msgs, msg_added)
        for msg in msgs:
            self.queue.ack(msg.get_reservationId())

    def test_threaded(self):
        """Threaded test of put and consume methods."""
        msgs = []
        msg_added = []
        def put():
            # Stagger puts so the consumer has to wait between messages.
            for num in range(3):
                msg_added.extend(self.queue.put('message %d' % num))
                sleep(0.1)
        def consume():
            for msg in self.queue.consume(timeout=1):
                msgs.append(msg)
        putter = threading.Thread(target=put)
        consumer = threading.Thread(target=consume)
        putter.start()
        consumer.start()
        for thread in [putter, consumer]:
            thread.join()
        self.assertEqual(msgs, msg_added)
        for msg in msgs:
            self.queue.ack(msg.get_reservationId())
#!/usr/bin/python
#

import re, os
from datetime import datetime, date, time
import redis
# NOTE(review): this import shadows `time` imported from datetime above --
# verify which of the two the rest of the file expects.
import time
import traceback
from hotqueue import HotQueue

# Make Redis connections: parsed-data store on :6380, raw logs on :6379;
# incoming log lines are consumed from the "logqueue" HotQueue on db 1.
r_server = redis.Redis(host='localhost', port=6380, db=0)
r_server_raw = redis.Redis(host='localhost', port=6379, db=0)
queue = HotQueue("logqueue", host="localhost", port=6379, db=1)
lineNo = 0  # running count of processed lines (incremented elsewhere)


def matchmyregex(line):
    """Extract STB identifiers (MAC + server timestamp) from one log line.

    NOTE(review): the function body is truncated at the paste boundary
    below; the regex objects it uses (REGM_oldfw, REGEXmac,
    REGEXdatetimeServer) are defined elsewhere in the original file.
    """
    # Redis key TTLs, in seconds.
    expireParsedLog = 432000  # 5 days
    expireIPtoMAC = 259200  # 3 days
    expireRAWlogs = 95040  # 1.1 days

    # Presumably skips lines matching the old-firmware format -- confirm
    # against the REGM_oldfw pattern definition.
    if not REGM_oldfw.search(line):
        #generate uniq key to combine multiple loglines from same STB (mac + datetime)
        macaddr = REGEXmac.findall(line)
        datetimeUnix = REGEXdatetimeServer.findall(line)[0]

        if macaddr:
            # Normalise the MAC: strip ':' separators and stray spaces.
            macNoDelimtTMP = macaddr[0].replace(':', "")
            macNoDelimt = macNoDelimtTMP.replace(" ", "")
Beispiel #57
0
import sys
import os
from os.path import dirname, join
sys.path.insert(0, dirname(__file__))
activate_this = join(dirname(__file__), '.env', 'bin', 'activate_this.py')
execfile(activate_this, dict(__file__=activate_this))

from web.config import config, r
from hotqueue import HotQueue
from web.job import read_job, JobObj
from time import time, sleep

# start a thread for listing qlog
# Job-log queue shared with the web app; Redis connection parameters come
# from the [redis] section of the project config.
qlog = HotQueue(
    'joblog',
    host=config.get('redis', 'host'),
    port=config.getint('redis', 'port'),
    password=config.get('redis', 'password'),
    db=0)

# NOTE(review): presumably asks the consumer to drop stale state before
# new log messages arrive -- confirm how 'purge' is handled downstream.
qlog.put({'cmd': 'purge'})

def loop_handle(log):
    """Dispatch one message popped from the job-log queue.

    NOTE(review): likely truncated by the paste boundary below -- the
    original file may handle further commands after this point.
    """
    # Ignore messages that carry no command at all.
    if 'cmd' not in log:
        return
    cmd = log['cmd']

    # Any message naming a host doubles as a liveness heartbeat.
    if 'host' in log:
        r.set('host:{}:last_alive'.format(log['host']), time())

    # Record the host's availability state.
    if cmd in ('available', 'busy'):
        r.set('host:{}:status'.format(log['host']), cmd)
Beispiel #58
0
    scout = multiprocessing.Process(target=scout, args=(q, ))
    scout.start()
    print "Starting scout process %s" % str(scout.pid)
    workers.append(scout)
    for i in xrange(num):
        tmp = multiprocessing.Process(target=worker, args=(q, ))
        tmp.start()
        print "Starting worker process %s" % str(tmp.pid)
        workers.append(tmp)
    for worker in workers:
        worker.join()
    print "All workers done."
    return q.empty()


if len(sys.argv) < 2:
    print "Usage: python archives.py link"
    # NOTE(review): no sys.exit() here -- execution falls through and
    # sys.argv[1] below raises IndexError when no link is given.

q = JoinableQueue()
queue = HotQueue("links")
# Site root from the command line; the mirror directory name is derived
# by stripping the http:// scheme.
home = sys.argv[1]
hdir = "./" + re.sub("^http://", "", home)
if not os.path.isdir(hdir):
    os.mkdir(hdir)

# Entry point for the crawl: the forum's archive index page.
archive_link = urlparse.urljoin(home, "/archive/index.php")
print archive_link
#atexit.register(save_state)

# Start one scout plus five worker processes (see init_workers above).
init_workers(5, q)
from __future__ import absolute_import, division, print_function
from builtins import bytes, str, open, super, range, zip, round, input, int, pow, object


import serial
from hotqueue import HotQueue
from datetime import datetime

# Serial link to the device (115200 baud, 1 s read timeout).
ser = serial.Serial("/dev/cu.usbmodem1481", 115200, timeout=1)

# Parsed serial messages are forwarded to consumers via this HotQueue.
queue = HotQueue("serial_message_queue", host="localhost", port=6379, db=0)

# q.put('hello world')

# Parser state for the read loop below: each message begins with the
# timestamp at which its first byte was seen; `number` accumulates the
# digits of the value currently being read.
i = 0
# messages = []
message = [datetime.now()]
number = ""
new_message = False

while True:
    y = ser.read()
    if y == "\r":
        new_message = True
        queue.put(message)
        print(message)
        # messages.append(message)
        message = [datetime.now()]
    elif y == " ":
        if number != "":
            message.append(float(number))