Example #1
def start_sequential_worker(
        r: redis.Redis,
        job_types: Dict[str, Callable[[Dict], Optional[Union[str,
                                                             int]]]]) -> None:
    while True:
        redis_job: RedisJob = RedisJob.from_json(r.blpop('jobs')[1])
        job_id = redis_job.id

        logging.info(f'Picked up {job_id}')
        update_redis_result(r, job_id, status='Picked up', end=False)

        try:
            start_time = time.time()
            rj_result = redis_job.execute(job_types)

            update_redis_result(r,
                                job_id,
                                status='Finished',
                                result='Success',
                                response=rj_result)

            logging.info(
                f'Successfully finished {redis_job.id}\tResult: {rj_result}'
                f'\tTime: {time.time() - start_time} seconds')

        except Exception as exc:
            update_redis_result(r,
                                job_id,
                                status='Finished',
                                result='Failed',
                                response=str(exc))

            logging.error(f'Failed to run {redis_job.id}. Exception: '
                          f'{str(exc)}')
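
The producer side is not shown; a minimal enqueue sketch, assuming jobs are JSON documents that RedisJob.from_json() can parse (the field names below are illustrative, not from the original):

import json
import uuid

import redis

r = redis.Redis()

# RPUSH pairs with the worker's BLPOP on 'jobs' to form a FIFO queue.
r.rpush('jobs', json.dumps({'id': str(uuid.uuid4()),
                            'type': 'resize_image',
                            'kwargs': {'width': 64}}))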
Example #2
class RedisQueue(object):
    def __init__(self, name, namespace='queue', **redis_kwargs):
        self.__db = Redis(**redis_kwargs)
        self.key = "%s:%s" % (namespace, name)

    def qsize(self):
        return self.__db.llen(self.key)

    def empty(self):
        return self.qsize() == 0

    def put(self, item):
        self.__db.rpush(self.key, item)

    def get(self, block=True, timeout=None):
        if block:
            item = self.__db.blpop(self.key, timeout)  # blocks if the queue is empty
        else:
            item = self.__db.lpop(self.key)  # returns None if the queue is empty

        if item:
            item = item[1]
        return item

    def get_nowait(self):
        return self.get(False)

    def clear(self):
        # Delete the whole list key in one call rather than popping item by item.
        self.__db.delete(self.key)

        if self.empty():
            print("queue is empty")
Example #3
class AbstractRobot(object):
    """
    The Robot listens for messages on the Redis Queue and then delegates control to the appropriate handler
    """
    def __init__(self, host='localhost', port=6379):
        self.redis = Redis(host, port)
        if not self.redis.ping():
            raise Exception('Unable to connect to Redis Server {}:{}'.format(host, port))
        self.handlers = self.registerCommandHandlers()
    
    def registerCommandHandlers(self):
        pass
    
    def start(self):
        """
        Start listening for messages
        """
        self.waitForCommands()
       
    def waitForCommands(self):
        # Infinite Loop to wait on messages from the Queue
        while True:
            # Blocking Pop with zero timeout will wait indefinitely
            _, command = self.redis.blpop(COMMAND_QUEUE_NAME)
            self.processCommand(command.split('::'))
    
    def processCommand(self, params):
        commandName = params[0]
        handler = self.handlers.get(commandName, None)
        if handler is not None:
            handler(params[1].split(','))
        else:
            print('Unsupported command: {}'.format(commandName))
Example #4
class RedisQueue(object):
    def __init__(self, name, namespace='queue', **redis_kwargs):
        self.__db = Redis(**redis_kwargs)
        self.key = "%s:%s" % (namespace, name)

    def qsize(self):
        return self.__db.llen(self.key)

    def empty(self):
        return self.qsize() == 0

    def put(self, item):
        self.__db.rpush(self.key, item)

    def get(self, block=True, timeout=None):
        if block:
            item = self.__db.blpop(self.key, timeout)  # blocks if the queue is empty
        else:
            item = self.__db.lpop(self.key)  # returns None if the queue is empty

        if item:
            item = item[1]
        return item

    def get_nowait(self):
        return self.get(False)

    def clear(self):
        # Delete the whole list key in one call rather than popping item by item.
        self.__db.delete(self.key)

        if self.empty():
            print("queue is empty")
Example #5
class RedisQueueProvider(object):
    def __init__(self, queue_name):
        self.queue_name = queue_name
        self.logger = logging.getLogger(__name__)  # used by health() below
        redis_url = urlparse.urlparse(app.config.get('REDIS_URL'))

        self.redis_server = Redis(host=redis_url.hostname,
                                  port=redis_url.port,
                                  password=redis_url.password)

    def add_to_queue(self, data):
        self.redis_server.rpush(self.queue_name, dumps(data))

    def read_from_queue(self):
        return loads(self.redis_server.blpop(self.queue_name)[1])

    def queue_size(self):
        return self.redis_server.llen(self.queue_name)

    def is_empty(self):
        return self.queue_size() == 0

    def health(self):
        try:
            self.redis_server.info()
            return True, "Redis"
        except Exception as e:
            self.logger.error("Healthcheck failed [%s]" % e)
            return False, "Redis"
Example #6
class RedisQueueProvider(object):
    def __init__(self, queue_name):
        self.queue_name = queue_name
        self.logger = logging.getLogger(__name__)  # used by health() below
        redis_url = urlparse.urlparse(app.config.get('REDIS_URL'))

        self.redis_server = Redis(
            host=redis_url.hostname,
            port=redis_url.port,
            password=redis_url.password
        )

    def add_to_queue(self, data):
        self.redis_server.rpush(self.queue_name, dumps(data))

    def read_from_queue(self):
        return loads(self.redis_server.blpop(self.queue_name)[1])

    def queue_size(self):
        return self.redis_server.llen(self.queue_name)

    def is_empty(self):
        return self.queue_size() == 0

    def health(self):
        try:
            self.redis_server.info()
            return True, "Redis"
        except Exception as e:
            self.logger.error("Healthcheck failed [%s]" % e)
            return False, "Redis"
Example #7
class RedisQueue(object):
    def __init__(self, **redis_args):
        self.redis = Redis(**redis_args)

    def put(self, queue: str, message):
        if not queue:
            raise ValueError("'queue' argument cannot be None/empty")
        if not message:
            raise ValueError("'message' argument cannot be None/empty")

        msg_packed = packb(message)
        self.redis.rpush(f"pyredq:{queue}", msg_packed)

    def subscribe(self, queue: str, callback, background: bool = False):
        if not queue:
            raise ValueError("'queue' argument cannot be None/empty")
        if not callback:
            raise ValueError("'callback' function argument cannot be None")

        if background:
            t = threading.Thread(target=self._blocking_pop, args=(queue, callback))
            t.daemon = True
            t.start()
        else:
            self._blocking_pop(queue, callback)

    def _blocking_pop(self, queue: str, callback):
        print(f"pyredq -- Subscribed to messages on '{queue}'", flush=True)
        while True:
            _, msg = self.redis.blpop(f"pyredq:{queue}")
            if msg:
                msg_unpacked = unpackb(msg)
                callback(msg_unpacked)
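
A usage sketch for the queue above, assuming msgpack's packb/unpackb (which the example calls) and a local Redis server:

q = RedisQueue(host='localhost', port=6379)

# Consume in a daemon thread; the callback receives the unpacked message.
q.subscribe('events', callback=print, background=True)

q.put('events', {'kind': 'signup', 'user': 42})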
Example #8
    def run_permutations(self, temp_uuid):
        """Runs permutations and gets significant and suggestive LOD scores"""

        top_lod_scores = []
	
        print("self.num_perm:", self.num_perm)

        for permutation in range(int(self.num_perm)):

            pheno_vector = np.array([val == "x" and np.nan or float(val) for val in self.vals])
            np.random.shuffle(pheno_vector)

            key = "pylmm:input:" + temp_uuid
        
            if self.dataset.group.species == "human":
                p_values, t_stats = self.gen_human_results(pheno_vector, key, temp_uuid)
            else:
                genotype_data = [marker['genotypes'] for marker in self.dataset.group.markers.markers]
                
                no_val_samples = self.identify_empty_samples()
                trimmed_genotype_data = self.trim_genotypes(genotype_data, no_val_samples)
                
                genotype_matrix = np.array(trimmed_genotype_data).T
    
                params = dict(pheno_vector = pheno_vector.tolist(),
                            genotype_matrix = genotype_matrix.tolist(),
                            restricted_max_likelihood = True,
                            refit = False,
                            temp_uuid = temp_uuid,
                            
                            # meta data
                            timestamp = datetime.datetime.now().isoformat(),
                            )
                
                json_params = json.dumps(params)
                Redis.set(key, json_params)
                Redis.expire(key, 60*60)
    
                command = 'python /home/zas1024/gene/wqflask/wqflask/my_pylmm/pyLMM/lmm.py --key {} --species {}'.format(key, "other")
    
                os.system(command)
    
                
                json_results = Redis.blpop("pylmm:results:" + temp_uuid, 45*60)
                results = json.loads(json_results[1])
                p_values = [float(result) for result in results['p_values']]
                
                lowest_p_value = 1
                for p_value in p_values:
                    if p_value < lowest_p_value:
                        lowest_p_value = p_value
                
                print("lowest_p_value:", lowest_p_value)        
                top_lod_scores.append(-math.log10(lowest_p_value))

        print("top_lod_scores:", top_lod_scores)

        self.suggestive = np.percentile(top_lod_scores, 67)
        self.significant = np.percentile(top_lod_scores, 95)
Example #9
    def gen_human_results(self, pheno_vector, key, temp_uuid):
        file_base = os.path.join(webqtlConfig.PYLMM_PATH,
                                 self.dataset.group.name)
        print("file_base:", file_base)

        plink_input = input.plink(file_base, type='b')
        input_file_name = os.path.join(webqtlConfig.SNP_PATH,
                                       self.dataset.group.name + ".snps.gz")

        pheno_vector = pheno_vector.reshape((len(pheno_vector), 1))
        covariate_matrix = np.ones((pheno_vector.shape[0], 1))
        kinship_matrix = np.fromfile(open(file_base + '.kin', 'r'), sep=" ")
        kinship_matrix.resize(
            (len(plink_input.indivs), len(plink_input.indivs)))

        print("Before creating params")

        params = dict(
            pheno_vector=pheno_vector.tolist(),
            covariate_matrix=covariate_matrix.tolist(),
            input_file_name=input_file_name,
            kinship_matrix=kinship_matrix.tolist(),
            refit=False,
            temp_uuid=temp_uuid,

            # meta data
            timestamp=datetime.datetime.now().isoformat(),
        )

        print("After creating params")

        json_params = json.dumps(params)
        Redis.set(key, json_params)
        Redis.expire(key, 60 * 60)

        print("Before creating the command")

        command = PYLMM_COMMAND + ' --key {} --species {}'.format(key, "human")

        print("command is:", command)

        os.system(command)

        json_results = Redis.blpop("pylmm:results:" + temp_uuid, 45 * 60)
        results = json.loads(json_results[1])
        t_stats = results['t_stats']
        p_values = results['p_values']

        #p_values, t_stats = lmm.run_human(key)

        #p_values, t_stats = lmm.run_human(
        #        pheno_vector,
        #        covariate_matrix,
        #        input_file_name,
        #        kinship_matrix,
        #        loading_progress=tempdata
        #    )

        return p_values, t_stats
Example #10
def run_daemon():
    redis = Redis()
    while True:
        try:
            to_do = redis.blpop(['judge_queue'])
            program_id = to_do[1]
            judge_by_id(int(program_id))
        except Exception as e:
            print(e, file=sys.stderr)
Example #11
    def gen_human_results(self, pheno_vector, key, temp_uuid):
        file_base = os.path.join(webqtlConfig.PYLMM_PATH, self.dataset.group.name)
        print("file_base:", file_base)

        plink_input = input.plink(file_base, type='b')
        input_file_name = os.path.join(webqtlConfig.SNP_PATH, self.dataset.group.name + ".snps.gz")

        pheno_vector = pheno_vector.reshape((len(pheno_vector), 1))
        covariate_matrix = np.ones((pheno_vector.shape[0],1))
        kinship_matrix = np.fromfile(open(file_base + '.kin','r'),sep=" ")
        kinship_matrix.resize((len(plink_input.indivs),len(plink_input.indivs)))

        print("Before creating params")

        params = dict(pheno_vector = pheno_vector.tolist(),
                    covariate_matrix = covariate_matrix.tolist(),
                    input_file_name = input_file_name,
                    kinship_matrix = kinship_matrix.tolist(),
                    refit = False,
                    temp_uuid = temp_uuid,
                        
                    # meta data
                    timestamp = datetime.datetime.now().isoformat(),
                    )
        
        print("After creating params")
        
        json_params = json.dumps(params)
        Redis.set(key, json_params)
        Redis.expire(key, 60*60)

        print("Before creating the command")

        command = 'python /home/zas1024/gene/wqflask/wqflask/my_pylmm/pyLMM/lmm.py --key {} --species {}'.format(key, "human")
        
        print("command is:", command)
        
        os.system(command)
        
        json_results = Redis.blpop("pylmm:results:" + temp_uuid, 45*60)
        results = json.loads(json_results[1])
        t_stats = results['t_stats']
        p_values = results['p_values']
        

        #p_values, t_stats = lmm.run_human(key)

        #p_values, t_stats = lmm.run_human(
        #        pheno_vector,
        #        covariate_matrix,
        #        input_file_name,
        #        kinship_matrix,
        #        loading_progress=tempdata
        #    )

        return p_values, t_stats
Example #12
    def run_permutations(self, temp_uuid):
        """Runs permutations and gets significant and suggestive LOD scores"""

        top_lod_scores = []

        #logger.debug("self.num_perm:", self.num_perm)

        for permutation in range(self.num_perm):

            pheno_vector = np.array([val == "x" and np.nan or float(val) for val in self.vals])
            np.random.shuffle(pheno_vector)

            key = "pylmm:input:" + temp_uuid

            if self.dataset.group.species == "human":
                p_values, t_stats = self.gen_human_results(pheno_vector, key, temp_uuid)
            else:
                genotype_data = [marker['genotypes'] for marker in self.dataset.group.markers.markers]

                no_val_samples = self.identify_empty_samples()
                trimmed_genotype_data = self.trim_genotypes(genotype_data, no_val_samples)

                genotype_matrix = np.array(trimmed_genotype_data).T

                params = dict(pheno_vector = pheno_vector.tolist(),
                            genotype_matrix = genotype_matrix.tolist(),
                            restricted_max_likelihood = True,
                            refit = False,
                            temp_uuid = temp_uuid,

                            # meta data
                            timestamp = datetime.datetime.now().isoformat(),
                            )

                json_params = json.dumps(params)
                Redis.set(key, json_params)
                Redis.expire(key, 60*60)

                command = PYLMM_COMMAND + ' --key {} --species {}'.format(key, "other")
                shell(command)

                json_results = Redis.blpop("pylmm:results:" + temp_uuid, 45*60)
                results = json.loads(json_results[1])
                p_values = [float(result) for result in results['p_values']]

                lowest_p_value = 1
                for p_value in p_values:
                    if p_value < lowest_p_value:
                        lowest_p_value = p_value

                #logger.debug("lowest_p_value:", lowest_p_value)
                top_lod_scores.append(-math.log10(lowest_p_value))

        #logger.debug("top_lod_scores:", top_lod_scores)

        self.suggestive = np.percentile(top_lod_scores, 67)
        self.significant = np.percentile(top_lod_scores, 95)
Example #13
def run_daemon():
    redis = Redis()
    while True:
        try:
            to_do = redis.blpop(['judge_queue'])
            program_id = to_do[1]
            judge_by_id(int(program_id))
        except Exception as e:
            print(e, file=sys.stderr)
Example #14
def get_message_response(msg):
    redis = Redis(**bus().redis_args)
    response = redis.blpop(get_redis_queue_name_by_message(msg), timeout=60)
    if response and len(response) > 1:
        response = Message.build(response[1])
    else:
        response = None

    return response
Example #15
class Crawler:
    # class-level counter shared by all instances
    count = 0
    def __init__(self):
        # initialize the Redis connection
        self.r_server = Redis()

    def pullCrawlRequest(self):
        return json.loads(self.r_server.blpop('url_list')[1].decode())
        '''
        while True:
            if not self.r_server.lpop('url_list'):
                return json.loads(self.r_server.lpop('url_list').decode())
            else:
                sleep(5)
        '''

    def pushParserRequest(self, dic):
        self.r_server.rpush('crawl_list', json.dumps(dic))

    def crawl(self):
        while True:
            # pop the next crawl request (URL etc.) from Redis
            dic = self.pullCrawlRequest()

            # fetch the page, then push the result back to Redis
            content = self._get_content(dic['url'], dic['encode'])
            del dic['url']
            dic['content'] = content
            self.pushParserRequest(dic)

            # print the running count
            self.__class__.count += 1
            print(self.__class__.count)

    def isEmpty(self):
        if self.r_server.llen('url_list') == 0:
            return True
        else:
            return False

    def _get_content(self, url, encode, method='GET', params=None):
        while True:
            try:
                if method == 'GET':
                    r = requests.get(url)
                else:
                    r = requests.post(url, data=params)
                r.encoding = encode
                return r.text
            except Exception:
                s = sys.exc_info()
                print(url)
                print('Error %s happened in line %d' % (s[1], s[2].tb_lineno))
                sleep(5)
Example #16
class Orderbook(object):

	def __init__(self, uuid):
		self.uuid = uuid # + uuid4().hex
		self.keymanager = RedisKeyManager(uuid)				
		self.buy_orders = list()
		self.sell_orders = list()
		self.redis = Redis()
		logger.info('Initializing orderbook: %s'%self.uuid)
				
	
	def start_auction(self):
		""" Run this to start the auction. If the auction is already running, nothing happens"""
		if not hasattr(self, 'daemon'):
			self.daemon = Thread(name = 'auction_%s'%self.uuid, target = self.queue_daemon)		
			self.daemon.start()
			# self.osn = Thread(name = 'orderbookstatus_%s'%self.uuid, target = self.orderbook_status_notifier)		
			# self.osn.start()
			

			# Auction.query.filter_by(uuid = self.uuid).update({'running' : True})
			
			logger.info('Started auction for book %s'%self.uuid)
		else:
			logger.info('Auction is already running at book %s'%self.uuid)

	
	def stop_auction(self):
		if hasattr(self, 'daemon'):
			# NOTE: threading.Thread has no terminate(); a stop flag checked in
			# queue_daemon would be needed for a clean shutdown.
			self.daemon.terminate()
			# Auction.query.filter_by(uuid = self.uuid).update({'running' : False})
			del self.daemon
			logger.info('Terminated auction at book %s'%self.uuid)
		else:
			logger.info('Cannot stop auction that is not already running at book %s'%self.uuid)

	
	def queue_daemon(self, rv_ttl=500):
		""" 
		The daemon that listens for incoming orders. Must be run in a separate process. 
		All received orders are stored in the database
		"""
		while True:
			logger.debug('Waiting for orders...')
			order_form_data = self.redis.blpop(prefixed(self.uuid))
			order_form_data = loads(order_form_data[1])
			new_order = Order(**order_form_data)
			self.store_order(new_order)
			try:
				response = self.process_order(new_order)
				logger.debug('Finished processing order.')
			except Exception as e:
				logger.exception(e)
				response = e
Example #17
class Consumer(object):
    def __init__(self):
        self._redis = Redis(host=__conf__.REDIS_HOST, port=__conf__.REDIS_PORT,
                            password=__conf__.REDIS_PASS, db=__conf__.REDIS_DB)

    def consume(self):
        return unpack(self._redis.blpop(__conf__.REDIS_CHANNEL)[1])

    def close(self):
        if hasattr(self, "_redis"):
            self._redis.connection_pool.disconnect()
Example #18
def process_sold_email_queue(conn: Redis):
    while not QUIT_FLAG:
        packed = conn.blpop(["queue:email"], timeout=30)
        if not packed:
            continue
        to_send = json.loads(packed[1])
        try:
            fetch_data_and_send_sold_email(to_send)
        except EmailSendError as err:
            my_logger.error("Failed to send sold email: %s, %s", err, to_send)
        else:
            my_logger.info("Send sold email %s", to_send)
Example #19
def worker_watch_queue_with_priority(conn: Redis, queues, callbacks):
    while not QUIT_FLAG:
        packed = conn.blpop(queues, timeout=30)
        if not packed:
            continue

        name, args = json.loads(packed[1])
        if name not in callbacks:
            my_logger.error("Unknown callback %s", name)
            continue

        callbacks[name](args)
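
BLPOP inspects the given keys in order and pops from the first non-empty one, which is what gives this worker its priority behavior: earlier queues in the list are always drained first. A sketch of enqueueing and starting the worker (queue names and the callback are illustrative):

import json

def send_alert(args):
    print("alert:", args)

queues = ["queue:high", "queue:low"]        # higher priority first
callbacks = {"send_alert": send_alert}

conn.rpush("queue:high", json.dumps(["send_alert", {"msg": "disk full"}]))
# worker_watch_queue_with_priority(conn, queues, callbacks)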
Example #20
class Master(object):
    def __init__(self):
        self.redisCon = Redis(host=conf.REDIS_HOST,
                              port=conf.REDIS_PORT,
                              password=conf.REDIS_PASSWD)
        self.jobQueue = Queue(connection=self.redisCon)
        # iterate eagerly: map() is lazy in Python 3, so its side effects would never run
        for key in self.redisCon.keys():
            if re.search('visit|rq:', key, re.I):
                self.redisCon.delete(key)
        hashData = hashUrl(conf.CRAWL_SITE)
        self.redisCon.lpush('visit', conf.CRAWL_SITE)
        self.redisCon.sadd('visitSet', hashData)

    def start(self):

        initDB()

        countDepth = 0
        countUrls = 0

        while countDepth <= int(conf.CRAWL_DEPTH):

            while True:
                # wait for 10 minutes
                # print 'len visite:', self.redisCon.llen('visit')
                # print 'len visited:', self.redisCon.scard('visited')
                url = self.redisCon.lpop('visit')
                if url:
                    countUrls += 1
                    print('countDepth:', countDepth, 'countUrls:', countUrls)
                    self.jobQueue.enqueue_call(crawl,
                                               args=(url, countDepth,
                                                     countUrls))
                else:
                    self.redisCon.delete('visitSet')
                    break

            while True:
                # wait 30 seconds, if timeout, jobqueue is empty(except failed job)
                keyUrl = self.redisCon.blpop('tmpVisit', timeout=30)
                if keyUrl:
                    url = keyUrl[1]
                    hashData = hashUrl(url)
                    if not self.redisCon.sismember('visited', hashData) and \
                            not self.redisCon.sismember('visitSet', hashData):
                        self.redisCon.lpush('visit', url)
                        self.redisCon.sadd('visitSet', hashData)
                else:
                    break

            countDepth += 1
Example #21
class MessageQueue(BaseQueue):
    def __init__(self, key, host, db=2, port=6379):
        self.server = Redis(host, port, db)
        self.key = key

    def __len__(self):
        return self.server.llen(self.key)

    def pop(self):
        # blocking pop; timeout 0 means wait indefinitely
        return self.server.blpop(self.key, 0)[1]

    def push(self, value):
        # lpush paired with blpop makes this a LIFO stack rather than a FIFO queue
        self.server.lpush(self.key, value)
Example #22
class MessageQueue(BaseQueue):

    def __init__(self, key, host, db=2, port=6379):
        self.server = Redis(host, port, db)
        self.key = key

    def __len__(self):
        return self.server.llen(self.key)

    def pop(self):
        # blocking pop; timeout 0 means wait indefinitely
        return self.server.blpop(self.key, 0)[1]

    def push(self, value):
        # lpush paired with blpop makes this a LIFO stack rather than a FIFO queue
        self.server.lpush(self.key, value)
Example #23
class Master(object):

    def __init__(self):
        self.redisCon = Redis(host=conf.REDIS_HOST,
                              port=conf.REDIS_PORT,
                              password=conf.REDIS_PASSWD)
        self.jobQueue = Queue(connection=self.redisCon)
        # iterate eagerly: map() is lazy in Python 3, so its side effects would never run
        for key in self.redisCon.keys():
            if re.search('visit|rq:', key, re.I):
                self.redisCon.delete(key)
        hashData = hashUrl(conf.CRAWL_SITE)
        self.redisCon.lpush('visit', conf.CRAWL_SITE)
        self.redisCon.sadd('visitSet', hashData)


    def start(self):

        initDB()

        countDepth = 0
        countUrls = 0

        while countDepth <= int(conf.CRAWL_DEPTH):

            while True:
                # wait for 10 minutes
                # print 'len visite:', self.redisCon.llen('visit')
                # print 'len visited:', self.redisCon.scard('visited')
                url = self.redisCon.lpop('visit')
                if url:
                    countUrls += 1
                    print('countDepth:', countDepth, 'countUrls:', countUrls)
                    self.jobQueue.enqueue_call(crawl, args=(url, countDepth, countUrls))
                else:
                    self.redisCon.delete('visitSet')
                    break

            while True:
                # wait 30 seconds, if timeout, jobqueue is empty(except failed job)
                keyUrl = self.redisCon.blpop('tmpVisit', timeout=30)
                if keyUrl:
                    url = keyUrl[1]
                    hashData = hashUrl(url)
                    if not self.redisCon.sismember('visited', hashData) and \
                            not self.redisCon.sismember('visitSet', hashData):
                        self.redisCon.lpush('visit', url)
                        self.redisCon.sadd('visitSet', hashData)
                else:
                    break

            countDepth += 1
Example #24
def process_queue_item(topic=None, persist=True):
    """
    Block-pops items from a Redis queue and processes them with the processor
    defined for the topic. Default behavior is persistent, i.e., the loop runs
    as long as the worker is alive.
    :param topic: string, the topic for which to process queue items
    :param persist: boolean, default=True, whether to keep the process alive indefinitely
    :return: never returns if persistent; otherwise the sid of the last job
    """

    log_level = config.log_level() or logging.DEBUG
    logging.basicConfig(level=log_level)
    logger = logging.getLogger(__name__)

    redis = Redis(config.redis('host'), int(config.redis('port')), int(config.redis('db')), config.redis('password'))
    job_data = None
    job_sid = None

    while True:
        try:
            queue, job_data = redis.blpop('%s.%s' % (topic, 'submitted'))
            # if job_data was found, i.e., there was an item in queue, proceed. If not, wait for the next one
            if job_data:
                job_data = json.loads(job_data)
                job_sid = job_data['sid']
                # insert the record for this job into the pending queue for this topic
                redis.hset('%s.%s' % (topic, 'pending'), job_sid, json.dumps(job_data))
                # attempt to process the message
                final_job_data = config.handler(topic)(job_data)
                # add job to complete queue and remove from pending queue
                redis.hdel('%s.%s' % (topic, 'pending'),
                           job_sid)
                redis.hset('%s.%s' % (topic, 'complete'),
                           job_sid, json.dumps(final_job_data))
        except (ConnectionError, TimeoutError, IndexError, TypeError, KeyError) as e:
            # if the error was not a redis connection or timeout error, log the error to redis
            # if the log to redis fails, let it go
            if type(e) in [KeyError, TypeError]:
                try:
                    timestamp = time.time()
                    date_timestamp = datetime.fromtimestamp(timestamp).strftime('%Y-%m-%d %H:%M:%S')
                    error = {'message': job_data, 'error': str(e)}
                    redis.hset('%s.error' % topic, date_timestamp, error)
                except Exception:
                    pass
            logger.error(e)

        if not persist:
            return job_sid
Example #25
def worker_watch_queue(conn: Redis, queue, callbacks):
    while not QUIT_FLAG:
        # try to pop one task from the queue
        packed = conn.blpop([queue], timeout=30)
        if not packed:
            # queue is empty; retry
            continue

        # decode the task payload
        name, args = json.loads(packed[1])
        if name not in callbacks:
            # no callback registered for this task; log an error and retry
            my_logger.error("Unknown callback %s", name)
            continue

        # run the task
        callbacks[name](args)
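
Tasks are encoded as a JSON [name, args] pair; a minimal enqueue helper that mirrors the worker's decoding (a sketch, not part of the original):

import json

def enqueue_task(conn, queue, name, args):
    # Must match the worker's json.loads(packed[1]) -> (name, args) decoding.
    conn.rpush(queue, json.dumps([name, args]))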
Example #26
class Lock(object):
    """Lock implemented on top of redis."""

    def __init__(self, name, timeout=60, db=0):
        """
        Create, if necessary the lock variable in redis.

        We utilize the ``blpop`` command and its blocking behavior.

        The ``_key`` variable is used to check, whether the mutex exists or not,
        while the ``_mutex`` variable is the actual mutex.
        """
        self._key = 'lock:name:%s' % name
        self._mutex = 'lock:mutex:%s' % name
        self._timeout = timeout
        self._r = Redis(db=db)
        self._init_mutex()

    @property
    def mutex_key(self):
        return self._mutex

    def lock(self):
        """
        Lock and block.

        Raises:
            RuntimeError, in case of synchronization issues.
        """
        res = self._r.blpop(self._mutex, self._timeout)
        if res is None:
            raise RuntimeError

    def unlock(self):
        self._r.rpush(self._mutex, 1)

    def _init_mutex(self):
        """
        Initialize the mutex, if necessary.

        Use a separate key to check for the existence of the "mutex",
        so that we can utilize ``getset``, which is atomic.
        """
        exists = self._r.getset(self._key, 1)
        if exists is None:
            self._r.lpush(self._mutex, 1)
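
A usage sketch for the Lock above, assuming a local Redis server:

lock = Lock('reports', timeout=10)
lock.lock()        # blocks until the mutex token can be popped
try:
    pass           # critical section goes here
finally:
    lock.unlock()  # push the token back so the next waiter unblocks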
Example #27
class RedisBus(Bus):
    """ Overrides the in-process in-memory local bus with a Redis bus """
    _DEFAULT_REDIS_QUEUE = 'platypush/bus'

    def __init__(self,
                 on_message=None,
                 redis_queue=_DEFAULT_REDIS_QUEUE,
                 *args,
                 **kwargs):
        super().__init__(on_message=on_message)

        if not args and not kwargs:
            kwargs = (Config.get('backend.redis') or {}).get('redis_args', {})

        self.redis = Redis(*args, **kwargs)
        self.redis_args = kwargs
        self.redis_queue = redis_queue
        self.on_message = on_message
        self.thread_id = threading.get_ident()

    def get(self):
        """ Reads one message from the Redis queue """
        msg = None

        try:
            msg = self.redis.blpop(self.redis_queue)
            if not msg or msg[1] is None:
                return

            msg = msg[1].decode('utf-8')
            try:
                msg = json.loads(msg)
            except json.decoder.JSONDecodeError:
                msg = ast.literal_eval(msg)

            msg = Message.build(msg)
        except Exception as e:
            logger.exception(e)

        return msg

    def post(self, msg):
        """ Sends a message to the Redis queue """
        return self.redis.rpush(self.redis_queue, str(msg))
Example #28
def main():
    redis = Redis(host="localhost")
    while True:
        _, picklemsg = redis.blpop(REDIS_KEY)
        title = picklemsg.decode("utf-8")
        page = pywikibot.Page(
            site, title)  # this could be the issue, as the page title isn't clean
        #print(page.title())
        #print(str(title))
        #print(type(title))

        #for page in igen:
        mycursor.execute(search_sql, page.title())
        myresult = mycursor.fetchall()
        if len(myresult) > 0:
            print("Already done")
            continue
        else:
            text = page.text
            search_result = re.search("Category:El Paso Daily Times", text)
            if search_result:
                print("Found in page")
                add_to_db(page)
                continue  # already done, move on
        #if counter >4:
        #    print("Done")
        #    break
        #text = page.text
        if not call_home(site):
            raise ValueError(
                "Kill switch on-wiki is false. Terminating program.")
        page.text = page.text + "\n[[Category:El Paso Daily Times]]"
        print("Editing")
        print(len(text))
        print(len(page.text))
        page.save(summary="Adding to [[:Category:El Paso Daily Times]]" +
                  " ([[Commons:Bots/Requests/TheSandBot 3|BRFA]])",
                  minor=True,
                  botflag=True,
                  force=True)
        #counter += 1
        add_to_db(page)
Example #29
def consume_queue(redis_connection: redis.Redis, redis_queue: str) -> None:
    while True:
        try:
            packed = redis_connection.blpop([redis_queue], timeout=30)
        except Exception as error:
            print(f"Consumer failed to connect to Redis: {error}")
            # time.sleep(5000)
            packed = None

        if not packed:
            continue

        message = json.loads(packed[1])

        try:
            send_message(message)
        except Exception as error:
            print(f"Error: {error}. Failed to send message: {message}")
        else:
            print(f"Sent message: {message}")
Example #30
class RedisBus(Bus):
    """ Overrides the in-process in-memory local bus with a Redis bus """
    _DEFAULT_REDIS_QUEUE = 'platypush/bus'

    def __init__(self, *args, on_message=None, redis_queue=None, **kwargs):
        super().__init__(on_message=on_message)

        if not args and not kwargs:
            kwargs = (Config.get('backend.redis') or {}).get('redis_args', {})

        self.redis = Redis(*args, **kwargs)
        self.redis_args = kwargs
        self.redis_queue = redis_queue or self._DEFAULT_REDIS_QUEUE
        self.on_message = on_message
        self.thread_id = threading.get_ident()

    def get(self):
        """ Reads one message from the Redis queue """
        try:
            if self.should_stop():
                return

            msg = self.redis.blpop(self.redis_queue, timeout=1)
            if not msg or msg[1] is None:
                return

            msg = msg[1].decode('utf-8')
            return Message.build(msg)
        except Exception as e:
            logger.exception(e)

    def post(self, msg):
        """ Sends a message to the Redis queue """
        return self.redis.rpush(self.redis_queue, str(msg))

    def stop(self):
        super().stop()
        self.redis.close()
Example #31
class RedisBroker:
    def __init__(self, jukebox):
        # Socket to talk to server
        self.redisClient = Redis(host="redis92559-pyply.j.layershift.co.uk",
                                 password="******")
        self.jukebox = jukebox

    def messageListener(self, partyID):
        while True:
            message = self.redisClient.blpop("party")
            message_parts = message[1].split()
            topic = message[0]
            verb = message_parts[0]
            argument = ""
            if len(message_parts) > 1:
                argument = message_parts[1]
                print(argument)

            if verb == 'loadPlaylist' and argument:
                self.jukebox.setPlaylist(json.loads(argument))
            if verb == 'skipVote':
                self.jukebox.addSkipVote()
            if verb == 'togglePause':
                self.jukebox.togglePausePlayer()
Example #32
def semaphore(
    redis: Redis,
    key: str,
    limit: int,
    expire_in: int = 60,
    blocking: bool = True,
    timeout: int = 0,
    signal_key: Optional[str] = None,
) -> Generator[UUID, None, None]:
    if timeout <= 0 and blocking:
        raise ValueError(
            f"Timeout {timeout} cannot be less than or equal to 0")

    if signal_key is None:
        signal_key = f"signal_key:{key}"
    lock_id = acquire_lock(redis,
                           key=key,
                           signal_key=signal_key,
                           limit=limit,
                           expire_in=expire_in)

    if lock_id is None and blocking:
        if redis.blpop(signal_key, timeout):
            lock_id = acquire_lock(redis,
                                   key=key,
                                   signal_key=signal_key,
                                   limit=limit,
                                   expire_in=expire_in)

    if lock_id is None:
        raise FailedToAcquireLock

    try:
        yield lock_id
    finally:
        clear_lock(redis, key=key, lock_id=lock_id, signal_key=signal_key)
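
Since semaphore() is a generator that yields the lock id once, it is evidently meant to be used as a context manager; a hedged usage sketch (acquire_lock, clear_lock and FailedToAcquireLock come from elsewhere in the source module, and redis_client is assumed):

from contextlib import contextmanager

redis_semaphore = contextmanager(semaphore)

with redis_semaphore(redis_client, key="reports", limit=3, timeout=5) as lock_id:
    # At most `limit` holders execute this block concurrently.
    print("holding", lock_id)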
Example #33
def get_notification(r: Redis, worker_id: int) -> Dict:
    try:
        _, notification = r.blpop(f"notifications:{worker_id}", timeout=1)
    except TypeError:
        # blpop returns None on timeout; unpacking None raises TypeError
        notification = b"{}"
    return json.loads(notification)
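
The producing side would push JSON onto the per-worker list; a one-line sketch (the payload shape is illustrative):

r.rpush(f"notifications:{worker_id}", json.dumps({"event": "job_done"}))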
Example #34
class QBQueue(object):
    def __init__(self, name, namespace='queue', **kwargs):
        self.key = '%s:%s' % (namespace, name)
        self._redis = Redis(**kwargs)
        self.enable_get()
        self.enable_put()

    def __len__(self):
        return self._redis.llen(self.key)

    def empty(self):
        return len(self) == 0

    def clear(self):
        self._redis.delete(self.key)

    def get_status(self):
        if self._redis.get('_get') == b"True":
            return True
        else:
            return False

    def put_status(self):
        if self._redis.get('_put') == b"True":
            return True
        else:
            return False

    def enable_get(self):
        self._redis.set('_get', "True")

    def enable_put(self):
        self._redis.set('_put', "True")

    def disable_get(self):
        self._redis.set('_get', "False")

    def disable_put(self):
        self._redis.set('_put', "False")

    def get(self, block=False, timeout=1):
        # block + timeout: wait up to `timeout` seconds for a task to appear
        task = None
        if self.get_status():
            if block:
                task = self._redis.blpop(self.key, timeout=timeout)
                if task is not None:
                    task = task[1]
            else:
                task = self._redis.lpop(self.key)
            if task is not None:
                task = loads(task)
                task["started"] = datetime.now()
        return task

    def put(self, uuid, data):
        task = None
        if self.put_status():
            time_now = datetime.now()
            data.update({"uuid": uuid})
            task = {
                'jobID': uuid,
                'status': 'wait',
                'created': time_now,
                'started': time_now,
                'finished': time_now,
                'data': data
            }
            self._redis.rpush(self.key, dumps(task))


#queue = QBQueue("analyzer", host="localhost", port=6379, db=0)
#queue.enable_get()
#queue.enable_put()
#print(queue.get_status())
Example #35
    def gen_pylmm_results(self):
        # This function is NOT used. If it is, we should use a shared function with marker_regression.py
        self.trait_results = {}
        for trait_db in self.trait_list:
            this_trait = trait_db[0]
            #this_db = trait_db[1]
            self.dataset.group.get_markers()
            
            this_db_samples = self.dataset.group.samplelist
            this_sample_data = this_trait.data
            #print("this_sample_data", this_sample_data)
            this_trait_vals = []
            for index, sample in enumerate(this_db_samples):
                if sample in this_sample_data:
                    sample_value = this_sample_data[sample].value
                    this_trait_vals.append(sample_value)
                else:
                    this_trait_vals.append("x")
                    
            pheno_vector = np.array([val == "x" and np.nan or float(val) for val in this_trait_vals])
            
            key = "pylmm:input:" + str(self.temp_uuid)
            #print("key is:", pf(key))
            
            genotype_data = [marker['genotypes'] for marker in self.dataset.group.markers.markers]
            
            no_val_samples = self.identify_empty_samples(this_trait_vals)
            trimmed_genotype_data = self.trim_genotypes(genotype_data, no_val_samples)
            
            genotype_matrix = np.array(trimmed_genotype_data).T
            
            #print("genotype_matrix:", str(genotype_matrix.tolist()))
            #print("pheno_vector:", str(pheno_vector.tolist()))
            
            params = dict(pheno_vector = pheno_vector.tolist(),
                        genotype_matrix = genotype_matrix.tolist(),
                        restricted_max_likelihood = True,
                        refit = False,
                        temp_uuid = str(self.temp_uuid),
                        
                        # meta data
                        timestamp = datetime.datetime.now().isoformat(),
                        )
            
            json_params = json.dumps(params)
            #print("json_params:", json_params)
            Redis.set(key, json_params)
            Redis.expire(key, 60*60)
            print("before printing command")
            
            command = PYLMM_COMMAND + ' --key {} --species {}'.format(key, "other")
            print("command is:", command)
            print("after printing command")

            os.system(command)
            
            json_results = Redis.blpop("pylmm:results:" + str(self.temp_uuid), 45*60)
            results = json.loads(json_results[1])
            p_values = [float(result) for result in results['p_values']]
            #print("p_values:", p_values)
            self.dataset.group.markers.add_pvalues(p_values)
            
            self.trait_results[this_trait.name] = []
            for marker in self.dataset.group.markers.markers:
                self.trait_results[this_trait.name].append(marker['lod_score'])
Example #36
class Zone:
    def __init__(self):
        self.id = None
        self.world_path = ""
        self.entities = {}
        self.components = {}
        self.renderer_class = DefaultRenderer
        self.entities_by_component_name = {}
        self.ticking_entities = set()
        self.tick_interval = 1
        self.running = False
        self.redis = None
        self._max_id = 0

    @classmethod
    def from_config(cls, id, world_path):
        self = cls()

        self.id = id
        self.world_path = world_path

        self.load_config()

        return self

    @property
    def tick_key(self):
        return "zone:%s:tick" % self.id

    @property
    def incoming_key(self):
        return "zone:%s:incoming" % self.id

    @staticmethod
    def messages_key(entity_id):
        return "entity:%s:messages" % entity_id

    def next_id(self):
        self._max_id += 1
        return self._max_id

    def load_config(self):
        base_path = os.path.abspath(os.path.expanduser(self.world_path))

        config = None

        for serializer in SERIALIZERS.values():
            config_filename = "config.%s" % serializer.extension
            config_path = os.path.join(base_path, config_filename)

            try:
                with open(config_path) as f:
                    config = serializer.unserialize(f.read())
            except EnvironmentError:
                continue
            except Exception as e:  # TODO: UnserializeError
                fatal("Error while reading %s: %s" % (config_path, e))

        if config is None:
            fatal(
                "Unable to read config.{%s} from %s"
                % (",".join(s.extension for s in SERIALIZERS.values()), base_path)
            )

        if self.id not in config["zones"]:
            fatal("Undefined zone '%s'" % self.id)

        tick_interval = config["zones"][self.id].get("tick", 1)
        self.tick_interval = tick_interval

        # TODO: per-zone persistence settings

        persistence = config.get("persistence")
        if not persistence:
            fatal("Unspecified persistence settings")

        # TODO: alternate persistence modes

        if not persistence["mode"] == "snapshot":
            fatal("Unrecognized persistence mode '%s'" % persistence["mode"])

        self.config = config

        self.redis = Redis(config["redis"]["host"], config["redis"]["port"])

        renderer_name = self.config["world"].get("renderer")
        if renderer_name:
            renderer_module_name, _, renderer_class_name = renderer_name.rpartition(".")
            renderer_module = importlib.import_module(renderer_module_name)
            self.renderer_class = getattr(renderer_module, renderer_class_name)

    @property
    def snapshot_path(self):
        snapshot_path = self.config["persistence"]["file"]
        try:
            snapshot_path = snapshot_path.format(id=self.id)
        except TypeError:
            pass
        return os.path.join(self.world_path, os.path.expanduser(snapshot_path))

    @property
    def snapshot_serializer(self):
        extension = self.config["persistence"].get("format")
        if not extension:
            extension = os.path.splitext(self.snapshot_path)[1][1:]
        return SERIALIZERS[extension]

    def load_snapshot(self):
        if not os.path.exists(self.snapshot_path):
            return False

        log.info("Loading snapshot: %s" % self.snapshot_path)
        with open(self.snapshot_path, "r") as f:
            snapshot = f.read()
            # if self.config['persistence'].get('compressed'):
            #     snapshot = zlib.decompress(snapshot)
            snapshot = self.snapshot_serializer.unserialize(snapshot)

            log.info("Creating entities...")

            for entity_dict in snapshot["entities"]:
                entity = Entity.from_dict(
                    {"id": entity_dict["id"], "hearing": entity_dict["hearing"]}, self
                )
                self._max_id = max(self._max_id, entity.id)

            log.info("Creating components...")

            for entity_dict in snapshot["entities"]:
                entity = self.get(entity_dict["id"])
                entity.attach_from_dict(entity_dict)

        return True

    def save_snapshot(self):
        log.info("Saving snapshot: %s" % self.snapshot_path)
        child_pid = os.fork()

        if not child_pid:
            f = tempfile.NamedTemporaryFile(delete=False)
            snapshot = self.snapshot_serializer.serialize(
                {"entities": [e.to_dict() for e in self.all()]}
            )
            # if self.config['persistence'].get('compressed'):
            #     snapshot = zlib.compress(snapshot)
            f.write(snapshot)
            f.close()
            shutil.move(f.name, self.snapshot_path)
            os._exit(os.EX_OK)

    def _import_subclasses(self, module_name, parent_class):
        module = importlib.import_module(module_name)
        return {
            cls.__name__: cls
            for name, cls in inspect.getmembers(module)
            if inspect.isclass(cls) and issubclass(cls, parent_class)
        }

    def load_modules(self):
        sys.path.append(self.world_path)

        self.components = self._import_subclasses(
            self.config["world"]["components"], Component
        )
        self.modes = self._import_subclasses(self.config["world"]["modes"], Mode)

        log.debug(
            "Loaded %s component(s) and %s mode(s)."
            % (len(self.components), len(self.modes))
        )

    def start(self):
        try:
            self.redis.ping()
        except ConnectionError as e:
            fatal("Redis error: %s" % e.message)

        self.running = True
        log.info("Listening.")

        # Clear any existing tick events
        self.redis.ltrim(self.tick_key, 0, 0)
        try:
            while self.running:
                self.process_one_event()
        except Exception as e:
            log.critical(traceback.format_exc())
        except BaseException as e:
            pass
        finally:
            self.save_snapshot()

    def stop(self):
        self.running = False

    def start_ticker(self):
        log.info("Ticking every %ss." % self.tick_interval)
        tock = False
        while True:
            log.debug("Tock." if tock else "Tick.")
            # TODO: timestamp here instead of 1, for debugging?
            self.redis.rpush(self.tick_key, 1)
            sleep(self.tick_interval)
            tock = not tock

    def send_message(self, entity_id, message):
        self.redis.publish(self.messages_key(entity_id), message)

    def listen(self, entity_id):
        subscription = self.subscribe(entity_id)
        for message in subscription.listen():
            yield message["data"]

    # TODO: Leaky abstraction :\
    def subscribe(self, entity_id):
        subscription = self.redis.pubsub(ignore_subscribe_messages=True)
        subscription.subscribe(self.messages_key(entity_id))
        return subscription

    def process_one_event(self):
        key, value = self.redis.blpop([self.tick_key, self.incoming_key])

        if key.decode("utf-8") == self.tick_key:
            self.perform_tick()
        else:
            entity_id, _, command = value.decode("utf-8").partition(" ")
            self.perform_command(int(entity_id), command)

    def enqueue_command(self, entity_id, command):
        self.redis.rpush(self.incoming_key, " ".join([str(entity_id), command]))

    def perform_command(self, entity_id, command):
        entity = self.get(entity_id)
        log.debug("Processing: [%s] %s" % (entity.id, command))
        entity.perform(command)

    def perform_tick(self):
        for entity in self.ticking_entities:
            # TODO: Somehow iterate over only ticking components
            for component in entity.components:
                if component.ticking:
                    component.tick()

    # Entity helpers

    def get(self, id):
        return self.entities.get(id)

    def all(self):
        return self.entities.values()

    def find(self, component_name):
        if inspect.isclass(component_name):
            component_name = component_name.__name__
        return self.entities_by_component_name.get(component_name, set())

    def spawn(self, components=None, **kwargs):
        entity = Entity(**kwargs)
        self.add(entity)
        if components:
            entity.components.add(components)
        return entity

    def clone(self, entity):
        # TODO FIXME: This is fairly awful
        return Entity.from_dict(entity.to_dict(), self)

    def destroy(self, entity):
        entity.components.purge()
        self.remove(entity)

    def add(self, entity):
        entity.id = self.next_id()
        entity.zone = self
        self.entities[entity.id] = entity

    def remove(self, entity):
        self.entities.pop(entity.id)
        entity.zone = None
Example #37
class HotQueue(object):
    
    """Simple FIFO message queue stored in a Redis list. Example:

    >>> from hotqueue import HotQueue
    >>> queue = HotQueue('myqueue', host='localhost', port=6379, db=0)
    
    :param name: name of the queue
    :param kwargs: additional kwargs to pass to :class:`Redis`, most commonly
        :attr:`host`, :attr:`port`, :attr:`db`
    """
    
    def __init__(self, name, **kwargs):
        self.name = name
        self.__redis = Redis(**kwargs)
    
    def __len__(self):
        return self.__redis.llen(self.key)
    
    def __repr__(self):
        return ('<HotQueue: \'%s\', host=\'%s\', port=%d, db=%d>' %
            (self.name, self.__redis.host, self.__redis.port, self.__redis.db))
    
    @property
    def key(self):
        """Return the key name used to store this queue in Redis, which is
        a concatenation of "hotqueue:" and :attr:`name`.
        """
        return 'hotqueue:%s' % self.name
    
    def clear(self):
        """Clear the queue of all messages, deleting the Redis key."""
        self.__redis.delete(self.key)
    
    def consume(self, **kwargs):
        """Return a generator that yields whenever a message is waiting in the
        queue. Will block otherwise. Example:

        >>> for msg in queue.consume(timeout=1):
        ...     print(msg)
        my message
        another message
        
        :param kwargs: any arguments that :meth:`~hotqueue.HotQueue.get` can
            accept (:attr:`block` will default to ``True`` if not given)
        """
        kwargs.setdefault('block', True)
        try:
            while True:
                msg = self.get(**kwargs)
                if msg is None:
                    break
                yield msg
        except KeyboardInterrupt:
            print(); return
    
    def get(self, block=False, timeout=None):
        """Return a message from the queue. Example:
    
        >>> queue.get()
        'my message'
        >>> queue.get()
        'another message'
        
        :param block: whether or not to wait until a msg is available in
            the queue before returning; ``False`` by default
        :param timeout: when using :attr:`block`, if no msg is available
            for :attr:`timeout` in seconds, give up and return ``None``
        """
        if block:
            if timeout is None:
                timeout = 0
            msg = self.__redis.blpop(self.key, timeout=timeout)
            if msg is not None:
                msg = msg[1]
        else:
            msg = self.__redis.lpop(self.key)
        if msg is not None:
            msg = pickle.loads(msg)
        return msg
    
    def put(self, *msgs):
        """Put one or more messages onto the queue. Example:
    
        >>> queue.put('my message')
        >>> queue.put('another message')
        """
        for msg in msgs:
            msg = pickle.dumps(msg)
            self.__redis.rpush(self.key, msg)
    
    def worker(self, *args, **kwargs):
        """Decorator for using a function as a queue worker. Example:
    
        >>> @queue.worker(timeout=1)
        ... def printer(msg):
        ...     print(msg)
        >>> printer()
        my message
        another message
        
        You can also use it without passing any keyword arguments:
        
        >>> @queue.worker
        ... def printer(msg):
        ...     print(msg)
        >>> printer()
        my message
        another message
        
        :param kwargs: any arguments that :meth:`~hotqueue.HotQueue.get` can
            accept (:attr:`block` will default to ``True`` if not given)
        """
        def decorator(worker):
            @wraps(worker)
            def wrapper():
                for msg in self.consume(**kwargs):
                    worker(msg)
            return wrapper
        if args:
            return decorator(*args)
        return decorator
Example #38
        game_id_of_name[r['business_id']] = r['name']
    return game_id_of_name


t = time.time()
game_id_of_name = get_game_id_of_name()
while True:
    try:
        if time.time() - t > 86400:
            try:
                game_id_of_name = get_game_id_of_name()
                t = time.time()
            except Exception:
                t = time.time()
                continue
        message = json.loads(redis_handle.blpop(message_queue)[1])
        if message['game_id'] in game_id_of_name.keys():
            message['game_name'] = game_id_of_name[message['game_id']]
        else:
            message['game_name'] = 'unknown'
        if int(message['rsync_time']) == 0:
            message['rsync_time'] = int(time.time())
        r = requests.post("%s/uuzubackup/table/" % es_url,
                          data=json.dumps(message))
        key = ':'.join([
            str(message['server_id']),
            str(message['type']),
            str(message['instance'])
        ])
        if 'interval' in message.keys():
            interval = int(message['interval'] + 3000)
Beispiel #39
0
def _record_on_demand_loop(
    bus: redis.Redis,
    camera_name: str,
    # min_interval: datetime.timedelta,
    detections: bool = False,
):

    recordings_stream_key = f"/pintu/camera/{camera_name}/recording"
    stream_key = (f"/pintu/camera/{camera_name}/detection"
                  if detections else f"/pintu/camera/{camera_name}/capture")
    start_time_key = f"/pintu/camera/{camera_name}/record_queue"
    end_time_key = f"/pintu/camera/{camera_name}/record_until"
    start_timestamp = pintu.util.UTC_DATETIME_MIN
    end_timestamp = pintu.util.UTC_DATETIME_MIN
    last_recorded_frame_timestamp = pintu.util.UTC_DATETIME_MIN
    while True:
        # Block until a start time is (r)pushed to the `start_time_key` (list)
        # Then read the latest end time from `end_time_key` (string)

        _, start_binstr = bus.blpop(start_time_key)
        start_timestamp_str = pintu.util.safe_str(start_binstr)
        end_timestamp_str = pintu.util.safe_str(
            bus.getset(end_time_key, pintu.util.UTC_DATETIME_MIN.isoformat()))

        try:
            start_timestamp = max(
                start_timestamp,
                datetime.datetime.fromisoformat(start_timestamp_str),
            )
            end_timestamp = max(
                end_timestamp,
                datetime.datetime.fromisoformat(end_timestamp_str),
            )
        except ValueError as ex:
            log.error("Invalid start and/or stop date/times "
                      f"({start_timestamp_str}-{end_timestamp_str}): "
                      f"{ex}")
            continue

        recording_file = (
            pintu.config.recordings_dir /
            start_timestamp.strftime(pintu.default.RECORDINGS_FILENAME_FORMAT))
        recording_file.parent.mkdir(parents=True, exist_ok=True)
        video_writer = None

        if start_timestamp > last_recorded_frame_timestamp:
            log.info(
                f"Recording camera '{camera_name}' from {start_timestamp.isoformat()} "
                f"to file '{recording_file}'")
            bus.xadd(
                recordings_stream_key,
                fields={
                    "camera": camera_name,
                    "finished": 0,
                    "start": start_timestamp.isoformat(),
                    "end": end_timestamp.isoformat(),
                    "file": str(recording_file),
                },
                id=pintu.util.stream_id(start_timestamp),
            )

        rec_verb = "Scheduling"
        while last_recorded_frame_timestamp < end_timestamp:
            log.info(f"{rec_verb} recording of '{recording_file}' "
                     f"until {end_timestamp.isoformat()}")
            log.debug(f"Current start: {start_timestamp.isoformat()}")
            log.debug(f"Current end:   {end_timestamp.isoformat()}")

            for record in pintu.util.slice_stream(
                    bus,
                    stream_key=stream_key,
                    start_time=max(start_timestamp,
                                   last_recorded_frame_timestamp),
                    end_time=end_timestamp,
            ):
                image_key = record["input"]

                image_bytes = bus.get(image_key)
                if not image_bytes:
                    log.error(f"No data at image key: {image_key}")
                    continue

                frame = pintu.imaging.Frame.decode(image_bytes)

                if not video_writer:
                    video_writer = cv2.VideoWriter(
                        str(recording_file),
                        cv2.VideoWriter_fourcc(*"VP80"),
                        pintu.config.sample_rate,
                        (frame.width, frame.height),
                    )
                    log.debug("Created new recorder")

                video_writer.write(frame.data)
                log.debug(f"Wrote frame {frame} to '{recording_file}'")
                last_recorded_frame_timestamp = frame.timestamp

            rec_verb = "Extending scheduled"
            log.info(f"Finished recording camera '{camera_name}' "
                     f"from {start_timestamp_str} until {end_timestamp_str} "
                     f"to file '{recording_file}'")
            end_timestamp_str = pintu.util.safe_str(
                bus.getset(end_time_key,
                           pintu.util.UTC_DATETIME_MIN.isoformat()))
            try:
                end_timestamp = datetime.datetime.fromisoformat(
                    end_timestamp_str)
            except ValueError as ex:
                log.error("Invalid start and/or stop date/times "
                          f"({start_timestamp_str}-{end_timestamp_str}): "
                          f"{ex}")

        if video_writer:
            video_writer.release()
            bus.xadd(
                recordings_stream_key,
                fields={
                    "camera": camera_name,
                    "finished": 1,
                    "start": start_timestamp.isoformat(),
                    "end": last_recorded_frame_timestamp.isoformat(),
                    "file": str(recording_file),
                },
                id=pintu.util.stream_id(last_recorded_frame_timestamp),
            )
Beispiel #40
0
class HotQueue(object):
    
    """Simple FIFO message queue stored in a Redis list. Example:

    >>> from hotqueue import HotQueue
    >>> queue = HotQueue("myqueue", host="localhost", port=6379, db=0)
    
    :param name: name of the queue
    :param serializer: the class or module to serialize msgs with, must have
        methods or functions named ``dumps`` and ``loads``,
        `pickle <http://docs.python.org/library/pickle.html>`_ is the default,
        use ``None`` to store messages in plain text (suitable for strings,
        integers, etc)
    :param kwargs: additional kwargs to pass to :class:`Redis`, most commonly
        :attr:`host`, :attr:`port`, :attr:`db`
    """
    
    def __init__(self, name, serializer=pickle, **kwargs):
        self.name = name
        self.serializer = serializer
        self.__redis = Redis(**kwargs)
    
    def __len__(self):
        return self.__redis.llen(self.key)
    
    @property
    def key(self):
        """Return the key name used to store this queue in Redis."""
        return key_for_name(self.name)
    
    def clear(self):
        """Clear the queue of all messages, deleting the Redis key."""
        self.__redis.delete(self.key)
    
    def consume(self, **kwargs):
        """Return a generator that yields whenever a message is waiting in the
        queue. Will block otherwise. Example:
        
        >>> for msg in queue.consume(timeout=1):
        ...     print msg
        my message
        another message
        
        :param kwargs: any arguments that :meth:`~hotqueue.HotQueue.get` can
            accept (:attr:`block` will default to ``True`` if not given)
        """
        kwargs.setdefault('block', True)
        try:
            while True:
                msg = self.get(**kwargs)
                if msg is None:
                    break
                yield msg
        except KeyboardInterrupt:
            print; return
    
    def get(self, block=False, timeout=None):
        """Return a message from the queue. Example:
    
        >>> queue.get()
        'my message'
        >>> queue.get()
        'another message'
        
        :param block: whether or not to wait until a msg is available in
            the queue before returning; ``False`` by default
        :param timeout: when using :attr:`block`, if no msg is available
            for :attr:`timeout` in seconds, give up and return ``None``
        """
        if block:
            if timeout is None:
                timeout = 0
            msg = self.__redis.blpop(self.key, timeout=timeout)
            if msg is not None:
                msg = msg[1]
        else:
            msg = self.__redis.lpop(self.key)
        if msg is not None and self.serializer is not None:
            msg = self.serializer.loads(msg)
        return msg
    
    def put(self, *msgs):
        """Put one or more messages onto the queue. Example:
        
        >>> queue.put("my message")
        >>> queue.put("another message")
        
        To put messages onto the queue in bulk, which can be significantly
        faster if you have a large number of messages:
        
        >>> queue.put("my message", "another message", "third message")
        """
        if self.serializer is not None:
            msgs = map(self.serializer.dumps, msgs)
        for msg in msgs:
            self.__redis.rpush(self.key, msg)
    
    def worker(self, *args, **kwargs):
        """Decorator for using a function as a queue worker. Example:
        
        >>> @queue.worker(timeout=1)
        ... def printer(msg):
        ...     print msg
        >>> printer()
        my message
        another message
        
        You can also use it without passing any keyword arguments:
        
        >>> @queue.worker
        ... def printer(msg):
        ...     print msg
        >>> printer()
        my message
        another message
        
        :param kwargs: any arguments that :meth:`~hotqueue.HotQueue.get` can
            accept (:attr:`block` will default to ``True`` if not given)
        """
        def decorator(worker):
            @wraps(worker)
            def wrapper(*args):
                for msg in self.consume(**kwargs):
                    worker(*args + (msg,))
            return wrapper
        if args:
            return decorator(*args)
        return decorator
Beispiel #41
0
def get_keywords(text):
    cleaned_text = remove_punctuations(text)
    r = Rake(min_length=1, max_length=1)
    r.extract_keywords_from_text(cleaned_text)
    keywords = ','.join(r.get_ranked_phrases())
    return keywords


def remove_punctuations(text):
    tokenizer = nltk.RegexpTokenizer(r"\w+")
    cleaned_text = tokenizer.tokenize(text)
    return " ".join(cleaned_text)


while True:
    data = json.loads(redis.blpop("uploads")[1].decode("utf-8"))
    result = []
    upload_path = "../data/uploads/"
    file_name = data['file_name']
    file_path = upload_path + file_name
    dataframe = pd.read_csv(file_path, header=None)
    data_vector = cv.transform(dataframe[0]).toarray()
    data_vector_reduced = fs.transform(data_vector)
    predictions = clf.predict(data_vector_reduced)

    for i in range(len(predictions)):
        if predictions[i] == 1:
            predicted_class = "Positive"
        else:
            predicted_class = "Negative"
Beispiel #42
0
from redis import Redis
from btmoves_redis.btmoves_redis import settings
import json

# Pull data from Redis
rds = Redis(settings.REDIS_HOST, settings.REDIS_PORT)
# client = pymongo.MongoClient(host='47.104.128.150', port=27017)
# db = client['btdy']
# collection = db['movies']
# collection.insert(data)
# break
# MySQL database
conn = pymysql.connect(host='127.0.0.1',
                       user='******',
                       password='******',
                       db='movies',
                       charset='utf8')
cursor = conn.cursor()

# Pop the data from the queue
while True:
    _, item = rds.blpop(settings.REDIS_ITEMS_KEY)
    # 2. Store the popped data in the database
    data = json.loads(item.decode())
    print(data)
    sql = 'insert into moves1(`name`, score, category ) VALUES (%s, %s, %s)'
    cursor.execute(sql, (data['name'], data['score'], data['category']))
    conn.commit()
    # cursor.close()
    # conn.close()
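For context, a minimal producer sketch matching what this consumer expects: a JSON object with name, score and category fields pushed onto the items list. The literal key name stands in for settings.REDIS_ITEMS_KEY and the item data is made up.

import json
from redis import Redis

rds = Redis()
item = {"name": "Example Movie", "score": "8.5", "category": "drama"}  # sample payload
rds.rpush("btmoves:items", json.dumps(item))  # the consumer above BLPOPs this key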
Beispiel #43
0
class KVRedis(KVCache):
    def __init__(self, connection_string: str, identifier: str = None):
        '''
        Constructor

        :param connection_string: format 'host:port:password'
        :param identifier: string to include when formatting the key,
        typically this would be the service_id
        '''

        params = connection_string.split(':', 2)
        self.host = None
        self.port = None
        self.password = None
        if len(params) >= 1:
            self.host = params[0]
        if len(params) >= 2:
            self.port = params[1]
            if not self.port:
                self.port = None
        if len(params) == 3:
            self.password = params[2]
            if not self.password:
                self.password = None

        if not self.host:
            raise ValueError(
                'A Redis host must be specified in the connection_string'
            )

        super().__init__(identifier=identifier)

        self.driver = Redis(
            host=self.host, port=self.port, password=self.password
        )

    def exists(self, key: str) -> bool:
        '''
        Checks if the key exists in the cache
        '''

        key = self.get_annotated_key(key)

        ret = self.driver.exists(key)

        exists = ret != 0

        _LOGGER.debug(f'Does key {key} exist: {exists}')

        return exists

    def get(self, key: str) -> object:
        '''
        Gets the value for the specified key from the cache. If the value
        retrieved on the cache is a string that starts with '{' and ends with
        '}' then an attempt is made to parse the string as JSON. If the parsing
        succeeds, the resulting object is returned.
        '''

        key = self.get_annotated_key(key)

        value = self.driver.get(key)

        _LOGGER.debug(f'Got value {value} for key {key}')
        if isinstance(value, bytes):
            data = value.decode('utf-8')
            _LOGGER.debug(f'Converted data to string: {data}')
            if len(data) > 1 and data[0] == '{' and data[-1] == '}':
                try:
                    _LOGGER.debug('Attempting to deserialize JSON data')
                    data = orjson.loads(value)
                    value = data
                except JSONDecodeError:
                    pass

        return value

    def pos(self, key: str, value: str) -> int:
        '''
        Finds the first occurrence of value in the list for the key
        '''

        key = self.get_annotated_key(key)

        pos = self.driver.lpos(key, value)

        if pos is not None:
            _LOGGER.debug(
                f'Found {value} in position {pos} of list for key {key}'
            )
        else:
            _LOGGER.debug(
                f'Did not find value {value} in the list for key {key}'
            )

        return pos

    def get_next(self, key, timeout: int = 0) -> object:
        '''
        Gets the first item of a list value for the key
        '''

        key = self.get_annotated_key(key)

        value = self.driver.blpop(key, timeout=timeout)

        if isinstance(value, tuple):
            value = value[-1]

        _LOGGER.debug(f'Popped {value} from start of list for key {key}')

        return value

    def set(self, key: str, value: object,
            expiration=DEFAULT_CACHE_EXPIRATION) -> bool:
        '''
        Sets a key to the specified value. If the value is a dict
        then it gets converted to a JSON string
        '''

        key = self.get_annotated_key(key)

        if isinstance(value, dict):
            value = orjson.dumps(value)

        ret = self.driver.set(key, value, ex=expiration)

        _LOGGER.debug(f'Set key {key} to value {value}')

        return ret

    def delete(self, key: str) -> bool:
        '''
        Deletes the key
        '''

        key = self.get_annotated_key(key)

        ret = self.driver.delete(key)

        _LOGGER.debug(f'Deleted key {key}')

        return ret

    def shift_push_list(self, key: str, wait: bool = True, timeout: int = 0):
        '''
        atomically shifts a value from the start of a list 'key' and appends
        it to the end of the list.
        '''

        key = self.get_annotated_key(key)

        if not wait:
            value = self.driver.lmove(key, key, src='LEFT', dest='RIGHT')
        else:
            value = self.driver.blmove(
                key, key, src='LEFT', dest='RIGHT', timeout=timeout
            )

        _LOGGER.debug(
            f'Got moved value {value} from begin to end of key {key}'
        )

        return value

    def get_list(self, key):
        '''
        Gets the list value of a key
        '''

        key = self.get_annotated_key(key)

        ret = self.driver.lrange(key, 0, -1)

        _LOGGER.debug(f'Got list for key {key} with length {len(ret)}')

        return ret

    def shift(self, key: str) -> object:
        '''
        Removes the first item from the list and
        returns it
        '''

        key = self.get_annotated_key(key)

        val = self.driver.blpop(key, timeout=0)

        _LOGGER.debug(f'Shifted value {val} from key {key}')

        return val

    def push(self, key: str, value: object) -> int:
        '''
        Pushes a value to the list specified by 'key'
        '''

        key = self.get_annotated_key(key)

        ret = self.driver.rpush(key, value)

        _LOGGER.debug(f'Pushed value {value} to end of list for key {key}')

        return ret

    def pop(self, key: str) -> object:
        '''
        Pops a value from the list specified by 'key'
        '''

        key = self.get_annotated_key(key)

        val = self.driver.rpop(key)

        _LOGGER.debug(f'Popped value {val} from end of list for key {key}')

        return val
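A usage sketch under the assumptions the class documents: the connection string has the form 'host:port:password' with empty segments treated as None, and get_next() blocks on BLPOP until an item arrives or the timeout expires. The identifier value is a placeholder.

kv = KVRedis('localhost:6379:', identifier='my-service')  # hypothetical identifier
kv.push('jobs', 'job-1')
item = kv.get_next('jobs', timeout=5)  # BLPOP under the hood; None on timeout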
Beispiel #44
0
conn.hexists('hkey', 'key')
conn.hkeys('hkey')
conn.hvals('hkey')
conn.hgetall('hkey')
conn.hincrby('hkey', 'key', 1)
conn.hincrbyfloat('hkey', 'key', 2.3)

# list
conn.rpush('lkey', 1, 2, 3)
conn.lpush('lkey', 1, 2, 3)
conn.lpop('lkey')
conn.rpop('lkey')
conn.lrange('lkey', 0, -1) # return a list
conn.lindex('lkey', 2)
conn.ltrim('lkey', 1, -1)
conn.blpop(['list1', 'list2'], 1)
conn.brpop(['list1', 'list2'], 2)
conn.rpoplpush('list1', 'list2')
conn.brpoplpush('list1', 'list2', 3)

# set
conn.sadd('key', 'item1', 'item2')
conn.srem('key', 'item2')
conn.sismember('key', 'item')
conn.scard('key')
conn.smembers('key')
conn.smove('key1', 'key2', 'item')
conn.sdiff('key1', 'key2', 'key3') # elements in the first set that are in none of the others
conn.sinter('key1', 'key2')
conn.sunion('key1', 'key2')
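Worth noting for the blocking variants above: blpop and brpop take a list of keys plus a timeout in seconds and return a (key, value) tuple, or None when the timeout expires. A small sketch of that return shape, reusing the conn handle from this snippet:

conn.rpush('list1', 'hello')
result = conn.blpop(['list1', 'list2'], 1)  # e.g. (b'list1', b'hello')
if result is not None:
    key, value = result
else:
    print('timed out with no message')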
Beispiel #45
0
        game_id_of_name[r["business_id"]] = r["name"]
    return game_id_of_name


t = time.time()
game_id_of_name = get_game_id_of_name()
while True:
    try:
        if time.time() - t > 86400:
            try:
                game_id_of_name = get_game_id_of_name()
                t = time.time()
            except:
                t = time.time()
                continue
        message = json.loads(redis_handle.blpop(message_queue)[1])
        if message["game_id"] in game_id_of_name.keys():
            message["game_name"] = game_id_of_name[message["game_id"]]
        else:
            message["game_name"] = "unknow"
        if int(message["rsync_time"]) == 0:
            message["rsync_time"] = int(time.time())
        r = requests.post("/uuzubackup/table/" % es_url, data=json.dumps(message))
        key = ":".join([str(message["server_id"]), str(message["type"]), str(message["instance"])])
        if "interval" in message.keys():
            interval = int(message["interval"] + 3000)
            del message["interval"]
        else:
            interval = 6600
        redis_handle_ex.set(key, 0)
        redis_handle_ex.expire(key, interval)
Beispiel #46
0
class HotResque(object):
    
    """Simple FIFO message queue stored in a Redis list. Example:

    >>> from HotResque import HotResque
    >>> queue = HotResque("myqueue", host="localhost", port=6379, db=0)
    
    :param name: name of the queue
    :param serializer: the class or module to serialize msgs with, must have
        methods or functions named ``dumps`` and ``loads``,
        `pickle <http://docs.python.org/library/pickle.html>`_ is the default,
        use ``None`` to store messages in plain text (suitable for strings,
        integers, etc)
    :param kwargs: additional kwargs to pass to :class:`Redis`, most commonly
        :attr:`host`, :attr:`port`, :attr:`db`

    ==============================================
    
    To fetch a migration that is waiting in the queue:

    >>> from hotresque import HotResque
    >>> a = HotResque("queue:migrations")
    >>> a.name_queue = "resque"
    >>> c = a.get() 

    HotResque's get() returns a dictionary containing all the data.

    contents of "c":
    {
        u'port_dest': 443, 
        u'host_dest': u'destino.teste.com', 
        u'username_dest': None,
        u'password_dest': None, 
        u'migration_id': 9, 
        u'port_orig': 443, 
        u'password_orig': u'teste123', 
        u'host_orig': u'origem.teste.com', 
        u'username_orig': u'*****@*****.**'
     }

    >>> c['port_dest']
    443
    >>> c['username_dest']

    To set the migration status:

    >>> import json
    >>> from hotresque import HotResque
    >>> a = HotResque("queue:migrations_report")
    >>> a.name_queue = "resque"
    >>> resp = {"class":"MigrationReport", "args" : [json.dumps({"migration_id" : 5, "state":"ok|error" , "message":"mensagem..."}) ]}
    >>> a.put(resp)

    """
    
    def __init__(self, name, serializer=json, **kwargs):
        self.name = name
        self.serializer = serializer
        self.__redis = Redis(**kwargs)
        self.name_queue = "hotresque"
    
    def __len__(self):
        return self.__redis.llen(self.key)

    def name_queue():
        doc = "The name_queue property."
        def fget(self):
            return self._name_queue
        def fset(self, value):
            self._name_queue = value
        def fdel(self):
            del self._name_queue
        return locals()
    name_queue = property(**name_queue())
    
    @property
    def key(self):
        """Return the key name used to store this queue in Redis."""
        return '%s:%s' % (self.name_queue, self.name)
    
    def clear(self):
        """Clear the queue of all messages, deleting the Redis key."""
        self.__redis.delete(self.key)
    
    def consume(self, **kwargs):
        """Return a generator that yields whenever a message is waiting in the
        queue. Will block otherwise. Example:
        
        >>> for msg in queue.consume(timeout=1):
        ...     print msg
        my message
        another message
        
        :param kwargs: any arguments that :meth:`~HotResque.HotResque.get` can
            accept (:attr:`block` will default to ``True`` if not given)
        """
        kwargs.setdefault('block', True)
        try:
            while True:
                msg = self.get(**kwargs)
                if msg is None:
                    break
                yield msg
        except KeyboardInterrupt:
            print; return
    
    def get(self, block=False, timeout=None):
        """Return a message from the queue. Example:
    
        >>> queue.get()
        'my message'
        >>> queue.get()
        'another message'
        
        :param block: whether or not to wait until a msg is available in
            the queue before returning; ``False`` by default
        :param timeout: when using :attr:`block`, if no msg is available
            for :attr:`timeout` in seconds, give up and return ``None``
        """
        if block:
            if timeout is None:
                timeout = 0
            msg = self.__redis.blpop(self.key, timeout=timeout)
            if msg is not None:
                msg = msg[1]
        else:
            msg = self.__redis.lpop(self.key)
        if msg is not None and self.serializer is not None:
            msg = self.serializer.loads(msg)
            msg = json.loads(msg['args'][0])
        return msg
    
    def put(self, *msgs):
        """Put one or more messages onto the queue. Example:
        
        >>> queue.put("my message")
        >>> queue.put("another message")
        
        To put messages onto the queue in bulk, which can be significantly
        faster if you have a large number of messages:
        
        >>> queue.put("my message", "another message", "third message")
        """
        if self.serializer is not None:
            msgs = map(self.serializer.dumps, msgs)
        self.__redis.rpush(self.key, *msgs)
    
    def worker(self, *args, **kwargs):
        """Decorator for using a function as a queue worker. Example:
        
        >>> @queue.worker(timeout=1)
        ... def printer(msg):
        ...     print msg
        >>> printer()
        my message
        another message
        
        You can also use it without passing any keyword arguments:
        
        >>> @queue.worker
        ... def printer(msg):
        ...     print msg
        >>> printer()
        my message
        another message
        
        :param kwargs: any arguments that :meth:`~HotResque.HotResque.get` can
            accept (:attr:`block` will default to ``True`` if not given)
        """
        def decorator(worker):
            @wraps(worker)
            def wrapper(*args):
                for msg in self.consume(**kwargs):
                    worker(*args + (msg,))
            return wrapper
        if args:
            return decorator(*args)
        return decorator
Beispiel #47
0
class HotQueue(object):
    """Simple FIFO message queue stored in a Redis list. Example:

    >>> from hotqueue import HotQueue
    >>> queue = HotQueue("myqueue", host="localhost", port=6379, db=0)
    
    :param name: name of the queue
    :param serializer: the class or module to serialize msgs with, must have
        methods or functions named ``dumps`` and ``loads``,
        `pickle <http://docs.python.org/library/pickle.html>`_ is the default,
        use ``None`` to store messages in plain text (suitable for strings,
        integers, etc)
    :param kwargs: additional kwargs to pass to :class:`Redis`, most commonly
        :attr:`host`, :attr:`port`, :attr:`db`
    """
    def __init__(self,
                 name,
                 serializer=pickle,
                 max_queue_length=None,
                 **kwargs):
        self.name = name
        self.serializer = serializer
        self.max_queue_length = max_queue_length
        self.__redis = Redis(**kwargs)

    def __len__(self):
        return self.__redis.llen(self.key)

    @property
    def key(self):
        """Return the key name used to store this queue in Redis."""
        return key_for_name(self.name)

    def clear(self):
        """Clear the queue of all messages, deleting the Redis key."""
        self.__redis.delete(self.key)

    def consume(self, **kwargs):
        """Return a generator that yields whenever a message is waiting in the
        queue. Will block otherwise. Example:
        
        >>> for msg in queue.consume(timeout=1):
        ...     print msg
        my message
        another message
        
        :param kwargs: any arguments that :meth:`~hotqueue.HotQueue.get` can
            accept (:attr:`block` will default to ``True`` if not given)
        """
        kwargs.setdefault('block', True)
        try:
            while True:
                msg = self.get(**kwargs)
                if msg is None:
                    break
                yield msg
        except KeyboardInterrupt:
            print
            return

    def get(self, block=False, timeout=None):
        """Return a message from the queue. Example:
    
        >>> queue.get()
        'my message'
        >>> queue.get()
        'another message'
        
        :param block: whether or not to wait until a msg is available in
            the queue before returning; ``False`` by default
        :param timeout: when using :attr:`block`, if no msg is available
            for :attr:`timeout` in seconds, give up and return ``None``
        """
        if block:
            if timeout is None:
                timeout = 0
            msg = self.__redis.blpop(self.key, timeout=timeout)
            if msg is not None:
                msg = msg[1]
        else:
            msg = self.__redis.lpop(self.key)
        if msg is not None and self.serializer is not None:
            msg = self.serializer.loads(msg)
        return msg

    def put(self, *msgs):
        """Put one or more messages onto the queue. Example:
        
        >>> queue.put("my message")
        >>> queue.put("another message")
        
        To put messages onto the queue in bulk, which can be significantly
        faster if you have a large number of messages:
        
        >>> queue.put("my message", "another message", "third message")
        """
        if self.serializer is not None:
            msgs = map(self.serializer.dumps, msgs)
        self.__redis.rpush(self.key, *msgs)

        # Enforce a maximum queue size
        if self.max_queue_length is not None and int(
                self.max_queue_length) > 0:
            self.__redis.ltrim(self.key, 0, int(self.max_queue_length) - 1)

    def worker(self, *args, **kwargs):
        """Decorator for using a function as a queue worker. Example:
        
        >>> @queue.worker(timeout=1)
        ... def printer(msg):
        ...     print msg
        >>> printer()
        my message
        another message
        
        You can also use it without passing any keyword arguments:
        
        >>> @queue.worker
        ... def printer(msg):
        ...     print msg
        >>> printer()
        my message
        another message
        
        :param kwargs: any arguments that :meth:`~hotqueue.HotQueue.get` can
            accept (:attr:`block` will default to ``True`` if not given)
        """
        def decorator(worker):
            @wraps(worker)
            def wrapper(*args):
                for msg in self.consume(**kwargs):
                    worker(*args + (msg, ))

            return wrapper

        if args:
            return decorator(*args)
        return decorator
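One behavioral note on the bounded variant above, with a minimal sketch: put() follows the RPUSH with LTRIM(key, 0, max_queue_length - 1), which keeps the first (oldest) entries, so once the queue is full the newest pushes are silently dropped rather than the oldest messages.

queue = HotQueue("myqueue", max_queue_length=2)  # bounded queue
queue.put("a", "b", "c")  # "c" is trimmed away; the queue holds "a" and "b"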
Beispiel #48
0
    def gen_data(self, temp_uuid):
        """Generates p-values for each marker"""

        pheno_vector = np.array([val == "x" and np.nan or float(val) for val in self.vals])

        #lmm_uuid = str(uuid.uuid4())

        key = "pylmm:input:" + temp_uuid
        print("key is:", pf(key))
        #with Bench("Loading cache"):
        #    result = Redis.get(key)

        if self.dataset.group.species == "human":
            p_values, t_stats = self.gen_human_results(pheno_vector, key, temp_uuid)
            #p_values = self.trim_results(p_values)
            
        else:
            print("NOW CWD IS:", os.getcwd())
            genotype_data = [marker['genotypes'] for marker in self.dataset.group.markers.markers]
            
            no_val_samples = self.identify_empty_samples()
            trimmed_genotype_data = self.trim_genotypes(genotype_data, no_val_samples)
            
            genotype_matrix = np.array(trimmed_genotype_data).T

            #print("pheno_vector: ", pf(pheno_vector))
            #print("genotype_matrix: ", pf(genotype_matrix))
            #print("genotype_matrix.shape: ", pf(genotype_matrix.shape))

            #params = {"pheno_vector": pheno_vector,
            #            "genotype_matrix": genotype_matrix,
            #            "restricted_max_likelihood": True,
            #            "refit": False,
            #            "temp_data": tempdata}
            
            params = dict(pheno_vector = pheno_vector.tolist(),
                        genotype_matrix = genotype_matrix.tolist(),
                        restricted_max_likelihood = True,
                        refit = False,
                        temp_uuid = temp_uuid,
                        
                        # meta data
                        timestamp = datetime.datetime.now().isoformat(),
                        )
            
            json_params = json.dumps(params)
            #print("json_params:", json_params)
            Redis.set(key, json_params)
            Redis.expire(key, 60*60)
            print("before printing command")

            command = 'python /home/zas1024/gene/wqflask/wqflask/my_pylmm/pyLMM/lmm.py --key {} --species {}'.format(key,
                                                                                                                    "other")
            print("command is:", command)
            print("after printing command")

            os.system(command)

            #t_stats, p_values = lmm.run(key)
            #lmm.run(key)
            
            json_results = Redis.blpop("pylmm:results:" + temp_uuid, 45*60)
            results = json.loads(json_results[1])
            p_values = [float(result) for result in results['p_values']]
            print("p_values:", p_values)
            #p_values = self.trim_results(p_values)
            t_stats = results['t_stats']
            
            #t_stats, p_values = lmm.run(
            #    pheno_vector,
            #    genotype_matrix,
            #    restricted_max_likelihood=True,
            #    refit=False,
            #    temp_data=tempdata
            #)
            #print("p_values:", p_values)

        self.dataset.group.markers.add_pvalues(p_values)
        
        #self.get_lod_score_cutoff()
        
        return self.dataset.group.markers.markers
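The responder side of this exchange is not shown; a hedged guess from the key names is that lmm.py reads its input from the pylmm:input: key and RPUSHes a JSON result onto pylmm:results:<uuid>, roughly as below. Note that the 45-minute BLPOP above returns None on timeout, in which case json.loads(json_results[1]) would raise a TypeError that the code does not guard against.

import json
from redis import Redis

r = Redis()
results = {"p_values": [0.01, 0.5], "t_stats": [2.3, 0.6]}  # placeholder numbers
r.rpush("pylmm:results:" + "some-temp-uuid", json.dumps(results))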
Beispiel #49
0
class HotQueue(object):
    
    """Simple FIFO message queue stored in a Redis list. Example:

    >>> from hotqueue import HotQueue
    >>> queue = HotQueue("myqueue", host="localhost", port=6379, db=0)
    
    :param name: name of the queue
    :param serializer: the class or module to serialize msgs with, must have
        methods or functions named ``dumps`` and ``loads``,
        `pickle <http://docs.python.org/library/pickle.html>`_ will be used
        if ``None`` is given
    :param kwargs: additional kwargs to pass to :class:`Redis`, most commonly
        :attr:`host`, :attr:`port`, :attr:`db`
    """
    
    def __init__(self, name, serializer=None, **kwargs):
        self.name = name
        if serializer is not None:
            self.serializer = serializer
        else:
            self.serializer = pickle
        self.__redis = Redis(**kwargs)
    
    def __len__(self):
        return self.__redis.llen(self.key)
    
    def __repr__(self):
        return ('<HotQueue: \'%s\', host=\'%s\', port=%d, db=%d>' %
            (self.name, self.__redis.host, self.__redis.port, self.__redis.db))
    
    @property
    def key(self):
        """Return the key name used to store this queue in Redis."""
        return key_for_name(self.name)
    
    def clear(self):
        """Clear the queue of all messages, deleting the Redis key."""
        self.__redis.delete(self.key)
    
    def consume(self, **kwargs):
        """Return a generator that yields whenever a message is waiting in the
        queue. Will block otherwise. Example:

        >>> for msg in queue.consume(timeout=1):
        ...     print msg
        my message
        another message
        
        :param kwargs: any arguments that :meth:`~hotqueue.HotQueue.get` can
            accept (:attr:`block` will default to ``True`` if not given)
        """
        kwargs.setdefault('block', True)
        try:
            while True:
                msg = self.get(**kwargs)
                if msg is None:
                    break
                yield msg
        except KeyboardInterrupt:
            print; return
    
    def get(self, block=False, timeout=None):
        """Return a message from the queue. Example:
    
        >>> queue.get()
        'my message'
        >>> queue.get()
        'another message'
        
        :param block: whether or not to wait until a msg is available in
            the queue before returning; ``False`` by default
        :param timeout: when using :attr:`block`, if no msg is available
            for :attr:`timeout` in seconds, give up and return ``None``
        """
        if block:
            if timeout is None:
                timeout = 0
            msg = self.__redis.blpop(self.key, timeout=timeout)
            if msg is not None:
                msg = msg[1]
        else:
            msg = self.__redis.lpop(self.key)
        if msg is not None:
            msg = self.serializer.loads(msg)
        return msg
    
    def put(self, *msgs):
        """Put one or more messages onto the queue. Example:
    
        >>> queue.put("my message")
        >>> queue.put("another message")
        """
        for msg in msgs:
            msg = self.serializer.dumps(msg)
            self.__redis.rpush(self.key, msg)
    
    def worker(self, *args, **kwargs):
        """Decorator for using a function as a queue worker. Example:
    
        >>> @queue.worker(timeout=1)
        ... def printer(msg):
        ...     print msg
        >>> printer()
        my message
        another message
        
        You can also use it without passing any keyword arguments:
        
        >>> @queue.worker
        ... def printer(msg):
        ...     print msg
        >>> printer()
        my message
        another message
        
        :param kwargs: any arguments that :meth:`~hotqueue.HotQueue.get` can
            accept (:attr:`block` will default to ``True`` if not given)
        """
        def decorator(worker):
            @wraps(worker)
            def wrapper():
                for msg in self.consume(**kwargs):
                    worker(msg)
            return wrapper
        if args:
            return decorator(*args)
        return decorator
    
    @staticmethod
    def from_key(key, serializer=None, **kwargs):
        """
        Convenience method for getting a queue instance from a Redis key. Example
        
        >>> redis = Redis()
        >>> keys = redis.keys('hotqueue:*')
        >>> for key in keys:
        ...     q = HotQueue.from_key(key)
        ...     # do something with your new queue
        """
        name = key.replace('hotqueue:', '', 1)
        return HotQueue(name, serializer, **kwargs)
Beispiel #50
0
class RedisBackend(Backend):
    """
    Backend that reads messages from a configured Redis queue (default:
    ``platypush_bus_mq``) and posts them to the application bus.  Very
    useful when you have plugin whose code is executed in another process
    and can't post events or requests to the application bus.

    Requires:

        * **redis** (``pip install redis``)
    """
    def __init__(self,
                 queue='platypush_bus_mq',
                 redis_args={},
                 *args,
                 **kwargs):
        """
        :param queue: Queue name to listen on (default: ``platypush_bus_mq``)
        :type queue: str

        :param redis_args: Arguments that will be passed to the redis-py constructor (e.g. host, port, password), see http://redis-py.readthedocs.io/en/latest/
        :type redis_args: dict
        """

        super().__init__(*args, **kwargs)

        self.queue = queue
        self.redis_args = redis_args

        if not redis_args:
            try:
                redis_plugin = get_plugin('redis')
                if redis_plugin and redis_plugin.kwargs:
                    self.redis_args = redis_plugin.kwargs
            except:
                pass

        self.redis = Redis(**self.redis_args)

    def send_message(self, msg, queue_name=None):
        msg = str(msg)
        if queue_name:
            self.redis.rpush(queue_name, msg)
        else:
            self.redis.rpush(self.queue, msg)

    def get_message(self, queue_name=None):
        queue = queue_name or self.queue
        msg = self.redis.blpop(queue)[1].decode('utf-8')

        try:
            msg = Message.build(json.loads(msg))
        except:
            try:
                import ast
                msg = Message.build(ast.literal_eval(msg))
            except:
                try:
                    msg = json.loads(msg)
                except Exception as e:
                    self.logger.exception(e)

        return msg

    def run(self):
        super().run()

        self.logger.info(
            'Initialized Redis backend on queue {} with arguments {}'.format(
                self.queue, self.redis_args))

        while not self.should_stop():
            try:
                msg = self.get_message()
                self.logger.info(
                    'Received message on the Redis backend: {}'.format(msg))
                self.on_message(msg)
            except Exception as e:
                self.logger.exception(e)
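A minimal sketch, assuming the default queue name, of how an external process could hand this backend a message: push a JSON string onto the platypush_bus_mq list and the blocking get_message() above picks it up. The payload shape here is a placeholder, not the real platypush Message schema.

import json
from redis import Redis

Redis().rpush('platypush_bus_mq', json.dumps({'type': 'event', 'args': {}}))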
Beispiel #51
0
class Parser():
    # Read the level-to-function mapping from the config file into self.control
    def configure(self):
        file = open('./parse/spider.xml', 'r')
        content_list = file.readlines()
        content = ''.join(content_list)
        soup = BeautifulSoup(content)
        tags = soup.find_all('parser')
        for tag in tags:
            level = tag.find('level').text
            function = tag.find('function').text
            self.control[level] = function

    # Set up Redis, the prefix and the name, and load the configuration
    def __init__(self, name='result', prefix='prefix', site=''):
        self.r_server = Redis()
        self.prefix = prefix
        self.name = name
        self.control = {}
        self.configure()
        print("初始化配置...")
        print(self.control)

    def pullCrawlRequest(self):
        return json.loads(self.r_server.blpop('crawl_list')[1].decode())

    def push(self, dic, name):
        self.r_server.rpush(name + '_list', json.dumps(dic))

    def parse(self):
        while True:
            try:
                dic = self.pullCrawlRequest()
                if str(dic['level']) in self.control:
                    func = eval("self." + self.control[str(dic['level'])])
                    func(dic)
                    pass
            except:
                # Print the error info
                #s = sys.exc_info()
                #print('Error %s happened in line %d in %s' % (s[1], s[2].tb_lineno, s[0]))
                # On error, push the request to error_list (and optionally back onto crawl_list)
                self.r_server.rpush('error_list', json.dumps(dic))
                #self.r_server.rpush('crawl_list', json.dumps(dic))
            finally:
                # Pop the next dict from crawl_list
                encode_dic = self.r_server.lpop('crawl_list')

    def getFirst(self, dic):
        print('getFirst')
        pass

    def getSecond(self, dic):
        print('getSecond')
        pass

    def getThird(self, dic):
        print('getThird')
        pass

    def getThird2(self, dic):
        pass

    def getWord(self, dic):
        print('getWord')
        pass

    def isEmpty(self):
        if self.r_server.llen('crawl_list') == 0:
            return True
        else:
            return False
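A hedged companion sketch for seeding the queue this parser drains: push a JSON dict carrying the 'level' field that parse() uses for dispatch; the URL and level value are placeholders.

import json
from redis import Redis

Redis().rpush('crawl_list', json.dumps({'level': 1, 'url': 'http://example.com'}))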