Example #1
    def get_menu(cls, parent):
        from uliweb import functions, settings
        from uliweb.utils.common import Serial

        def _f(_p):
            menus = []
            for row in cls.filter(cls.c.parent == _p, cls.c.type == 'M0000',
                                  cls.c.deploy_type == 'F',
                                  cls.c.status == '00001').order_by(
                                      cls.c.order):
                item = row.to_dict()
                item['link'] = row.value
                item['permissions'] = [x.name for x in row.permissions]
                menus.append(item)
                if row.has_children:
                    item['subs'] = _f(row.id)
                else:
                    item['subs'] = []
            return menus

        menus = []
        use_redis = settings.get_var('RESOURCE/USE_REDIS')
        key = 'MENU:{}'.format(parent)
        if use_redis:
            redis = functions.get_redis()
            v = redis.get(key)
            if v:
                menus = Serial.load(v)
        if not menus:
            p = cls.get(cls.c.name == parent)
            menus = _f(p.id)
            if menus and use_redis:
                redis.set(key, Serial.dump(menus))
        return menus
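
A hedged usage sketch for the cached menu above, assuming get_menu lives on a 'resource' model and that 'ROOT' names a root menu node (the model name, the root name, and the view function are illustrative, not part of the original code):

from uliweb import functions

def top_menu(request):
    # 'resource' and 'ROOT' are assumptions for illustration only
    Resource = functions.get_model('resource')
    menus = Resource.get_menu('ROOT')
    return {'menus': menus}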
Example #2
def call(args, options, global_options):
    from uliweb import settings

    signal.signal(signal.SIGINT, handler)
    signal.signal(signal.SIGTERM, handler)

    check_point = settings.get_var('ASYNC_TASKS/check_point')

    log.info('=============================')
    log.info(' Async Task Daemon Starting')
    log.info('=============================')
    log.info('Daemon process %d' % os.getpid())
    log.info('Check point %ds' % check_point)

    Task = functions.get_model('async_tasks')
    query = Task.filter(~Task.c.status.in_([SUCCESS, CANCEL, FAILED])).\
        order_by(Task.c.created_time)

    redis = functions.get_redis()
    while not is_exit:
        Reset()
        for row in query:
            try:
                #log.debug('Ready to process async task {} - {}'.format(row.task_id, row.command_name))
                process_task(row)
            except Exception as e:
                log.exception(e)
        beat = redis.brpop('async_tasks_beat', check_point)
        # sleep briefly in case the database commit is slower than the redis notification
        time.sleep(0.5)
Example #3
def get_redis():
    try:
        redis = functions.get_redis()
    except Exception as e:
        log.exception(e)
        redis = None
    return redis
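
A minimal sketch of how a caller might use this wrapper so a feature degrades gracefully when redis is unavailable (the key name and the creator callable are illustrative):

def get_cached(key, creator):
    # Fall back to computing the value directly when redis cannot be reached.
    redis = get_redis()
    if redis is None:
        return creator()
    value = redis.get(key)
    if value is None:
        value = creator()
        redis.set(key, value)
    return value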
Example #4
def run_command(row):
    import json
    from uliweb import settings

    keys_queue = settings.get_var('ASYNC_TASKS/tasks_keys_queue')

    kw = row.command_info.copy()
    kw['task_id'] = row.task_id
    queue = kw['queue']
    try:
        redis = functions.get_redis()
        if not isinstance(queue, (tuple, list)):
            queue = [queue]
        msg = json.dumps(kw)
        # While pushing the message to the shared queue, also add the task ID to keys_queue to record tasks that are about to run or are currently running
        for q in queue:
            pipe = redis.pipeline()
            name = settings.ASYNC_TASKS_QUEUES.get(q or 'default') or q
            pipe.lpush(name, msg).sadd(keys_queue, row.task_id)
            pipe.execute()
            log.info('Put task_id {} to queue=[{}] and keys_queue=[{}]'.format(
                row.task_id, name, keys_queue))
    except Exception as e:
        _type, value, tb = sys.exc_info()
        txt = ''.join(traceback.format_exception(_type, value, tb))
        handler = row.get_handler(log=log)
        handler.save(status=ERROR,
                     execution_info=txt,
                     finished_time=date.now())
        log.error('Run async task {} failed when push with redis'.format(
            row.task_id))
        log.exception(e)
Example #5
def run_command(row):
    import json
    from uliweb import settings

    keys_queue = settings.get_var('ASYNC_TASKS/tasks_keys_queue')

    kw = row.command_info.copy()
    kw['task_id'] = row.task_id
    queue = kw['queue']
    try:
        redis = functions.get_redis()
        if not isinstance(queue, (tuple, list)):
            queue = [queue]
        msg = json.dumps(kw)
        # While pushing the message to the shared queue, also add the task ID to keys_queue to record tasks that are about to run or are currently running
        for q in queue:
            pipe = redis.pipeline()
            name = settings.ASYNC_TASKS_QUEUES.get(q or 'default') or q
            pipe.lpush(name, msg).sadd(keys_queue, row.task_id)
            pipe.execute()
            log.info('Put task_id {} to queue=[{}] and keys_queue=[{}]'.format(row.task_id, name, keys_queue))
    except Exception as e:
        _type, value, tb = sys.exc_info()
        txt = ''.join(traceback.format_exception(_type, value, tb))
        handler = row.get_handler(log=log)
        handler.save(status=ERROR, execution_info=txt, finished_time=date.now())
        log.error('Run async task {} failed when push with redis'.format(row.task_id))
        log.exception(e)
Example #6
 def post_save(obj, data):
     from uliweb import functions
     from uliweb.utils.common import Serial
     from uliweb.mail import Mail
     
     Topic.filter(Topic.c.id==int(topic_id)).update(num_replies=Topic.c.num_replies+1, last_post_user=request.user.id, last_reply_on=date.now())
     Forum.filter(Forum.c.id==int(forum_id)).update(num_posts=Forum.c.num_posts+1, last_post_user=request.user.id, last_reply_on=date.now())
     self._clear_files(obj.slug, data['content'])
     
     # Handle sending notification emails
     emails = []
     for u_id in Post.filter((Post.c.topic==int(topic_id)) & (Post.c.reply_email==True) & (Post.c.floor<obj.floor)).values(Post.c.posted_by):
         user = User.get(u_id[0])
         if user and user.email and (user.email not in emails) and (user.email!=request.user.email):
             emails.append(user.email)
     
     if not emails:
         return
     
     _type = settings.get_var('PARA/FORUM_REPLY_PROCESS', 'print')
     url = '%s/forum/%s/%s' % (settings.get_var('PARA/DOMAIN'), forum_id, topic_id)
     d = {'url':str(url)}
     mail = {'from_':settings.get_var('PARA/EMAIL_SENDER'), 'to_':emails,
         'subject':settings.get_var('FORUM_EMAIL/FORUM_EMAIL_TITLE'),
         'message':settings.get_var('FORUM_EMAIL/FORUM_EMAIL_TEXT') % d,
         'html':True}
     
     if _type == 'mail':
         Mail().send_mail(**mail)
     elif _type == 'print':
         print(mail)
     elif _type == 'redis':
         redis = functions.get_redis()
         _t = Serial.dump(mail)
         redis.lpush('send_mails', _t)
Example #7
def call(args, options, global_options):
    from uliweb import settings

    signal.signal(signal.SIGINT, handler)
    signal.signal(signal.SIGTERM, handler)

    check_point = settings.get_var('ASYNC_TASKS/check_point')

    log.info('=============================')
    log.info(' Async Task Daemon Starting')
    log.info('=============================')
    log.info('Daemon process %d' % os.getpid())
    log.info('Check point %ds' % check_point)

    Task = functions.get_model('async_tasks')
    query = Task.filter(~Task.c.status.in_([SUCCESS, CANCEL, FAILED])).\
        order_by(Task.c.created_time)

    redis = functions.get_redis()
    while not is_exit:
        Reset()
        for row in query:
            try:
                #log.debug('Ready to process async task {} - {}'.format(row.task_id, row.command_name))
                process_task(row)
            except Exception as e:
                log.exception(e)
        beat = redis.brpop('async_tasks_beat', check_point)
        # sleep briefly in case the database commit is slower than the redis notification
        time.sleep(0.5)
Example #8
        def post_save(obj, data):
            from uliweb import functions
            from uliweb.utils.common import Serial
            from uliweb.mail import Mail

            Post.filter(Post.c.id == int(parent_id)).update(
                num_replies=Post.c.num_replies + 1,
                last_post_user=request.user.id,
                last_reply_on=date.now())
            self._clear_files(obj.slug, data['content'])

            Topic.filter(Topic.c.id == int(topic_id)).update(
                num_replies=Topic.c.num_replies + 1,
                last_post_user=request.user.id,
                last_reply_on=date.now(),
                last_post=obj.id)
            Forum.filter(Forum.c.id == int(forum_id)).update(
                num_posts=Forum.c.num_posts + 1,
                last_post_user=request.user.id,
                last_reply_on=date.now(),
                last_post=obj.id)

            # Handle sending notification emails
            emails = []
            for u_id in Post.filter((Post.c.topic == int(topic_id))
                                    & (Post.c.reply_email == True)
                                    & (Post.c.id == parent_id)).values(
                                        Post.c.posted_by):
                user = User.get(u_id[0])
                if user and user.email and (user.email not in emails) and (
                        user.email != request.user.email):
                    emails.append(user.email)

            if not emails:
                return

            _type = settings.get_var('PARA/FORUM_REPLY_PROCESS', 'print')
            url = '%s/forum/%s/%s' % (settings.get_var('PARA/DOMAIN'),
                                      forum_id, topic_id)
            d = {'url': str(url)}
            mail = {
                'from_': settings.get_var('PARA/EMAIL_SENDER'),
                'to_': emails,
                'subject': settings.get_var('FORUM_EMAIL/FORUM_EMAIL_TITLE'),
                'message':
                settings.get_var('FORUM_EMAIL/FORUM_EMAIL_TEXT') % d,
                'html': True
            }

            if _type == 'mail':
                Mail().send_mail(**mail)
            elif _type == 'print':
                print(mail)
            elif _type == 'redis':
                redis = functions.get_redis()
                _t = Serial.dump(mail)
                redis.lpush('send_mails', _t)
Example #9
def process_task(row):
    from uliweb import settings

    Task = functions.get_model('async_tasks')

    now = date.now()
    #check started status
    if row.status == STARTED:
        #check timeout
        if row.started_time + datetime.timedelta(
                milliseconds=row.timeout) > now:
            return
        else:
            # Check whether the task is still running
            keys_queue = settings.get_var('ASYNC_TASKS/tasks_keys_queue')
            redis = functions.get_redis()
            if redis.sismember(keys_queue, row.task_id):
                return
            row.startup_time = now

    handler = row.get_handler(log=log)
    #check depend task
    depend_task = row.check_depend_tasks(log)
    if depend_task:
        # If the depend task or the parent task was cancelled, cancel the current task as well
        if depend_task.status == CANCEL or depend_task.current_status == CANCEL:
            msg = "Depend task {} has been cancelled.".format(
                depend_task.task_id)
            row.cancel(msg, log=log)
            log.debug(
                "Current task {} cancelled because depend task {} status is CANCEL"
                .format(row.task_id, depend_task.task_id))
        # If the depend task failed, mark the current task as failed too
        elif depend_task.status == FAILED:
            msg = "Current task {} FAILED because depend task {} status is FAILED".format(
                row.task_id, depend_task.task_id)
            handler.save(status=FAILED, finished_time=date.now(), message=msg)
            log.info(msg)
        return
    if row.retry_times >= row.max_retry_times:
        msg = ('Async task {} reaches max retry times, '
               'status changes to FAILED').format(row.task_id)
        handler.save(status=FAILED, finished_time=date.now(), message=msg)
        log.info(msg)
        return
    if not row.startup_time or row.startup_time <= now:
        # Parent node handling: skip execution if current_status is already SUCCESS
        if row.children_count > 0 and row.current_status == SUCCESS:
            return
        handler.save(status=STARTED,
                     started_time=date.now(),
                     retry_times=row.retry_times + 1)
        log.info('Async task {0} [{1}({2!r})] started, retry_times={3}'.format(
            row.task_id, row.command_name,
            row.command_info.get('parameters', ''), row.retry_times))
        run_command(row)
Example #10
def process_task(row):
    from uliweb import settings

    Task = functions.get_model('async_tasks')

    now = date.now()
    #check started status
    if row.status == STARTED:
        #check timeout
        if row.started_time + datetime.timedelta(milliseconds=row.timeout) > now:
            return
        else:
            # Check whether the task is still running
            keys_queue = settings.get_var('ASYNC_TASKS/tasks_keys_queue')
            redis = functions.get_redis()
            if redis.sismember(keys_queue, row.task_id):
                return
            row.startup_time = now

    handler = row.get_handler(log=log)
    #check depend task
    depend_task = row.check_depend_tasks(log)
    if depend_task:
        # If the depend task or the parent task was cancelled, cancel the current task as well
        if depend_task.status == CANCEL or depend_task.current_status == CANCEL:
            msg = "Depend task {} has been cancelled.".format(depend_task.task_id)
            row.cancel(msg, log=log)
            log.debug("Current task {} cancelled because depend task {} status is CANCEL".format(
                row.task_id, depend_task.task_id
            ))
        # If the depend task failed, mark the current task as failed too
        elif depend_task.status == FAILED:
            msg = "Current task {} FAILED because depend task {} status is FAILED".format(
                row.task_id, depend_task.task_id
            )
            handler.save(status=FAILED, finished_time=date.now(), message=msg)
            log.info(msg)
        return
    if row.retry_times >= row.max_retry_times:
        msg = ('Async task {} reaches max retry times, '
                      'status changes to FAILED').format(row.task_id)
        handler.save(status=FAILED, finished_time=date.now(), message=msg)
        log.info(msg)
        return
    if not row.startup_time or row.startup_time <= now:
        # Parent node handling: skip execution if current_status is already SUCCESS
        if row.children_count > 0 and row.current_status == SUCCESS:
            return
        handler.save(status=STARTED, started_time=date.now(),
                     retry_times=row.retry_times+1)
        log.info('Async task {0} [{1}({2!r})] started, retry_times={3}'.format(
            row.task_id,
            row.command_name,
            row.command_info.get('parameters', ''),
            row.retry_times))
        run_command(row)
Example #11
 def get(self, key, creator=None, update=False):
     redis = functions.get_redis(**self.kwargs)
     txt = ''
     if not update:
         txt = redis.get(self.get_key(key))
     if update or not txt:
         txt = creator()
         redis.set(self.get_key(key), txt)
     
     return txt
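
Assuming this method belongs to a small redis-backed cache wrapper (the class name RedisCache, the key, and build_top_menu are illustrative), usage might look like:

cache = RedisCache(prefix='menu')                              # hypothetical wrapper class
html = cache.get('top', creator=build_top_menu)                # computed and stored on first access
html = cache.get('top', creator=build_top_menu, update=True)   # force a refresh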
Example #12
        def post_save(obj, data):
            from uliweb import functions
            from uliweb.utils.common import Serial
            from uliweb.mail import Mail

            Post.filter(Post.c.id == int(parent_id)).update(
                num_replies=Post.c.num_replies + 1, last_post_user=request.user.id, last_reply_on=date.now()
            )
            self._clear_files(obj.slug, data["content"])

            Topic.filter(Topic.c.id == int(topic_id)).update(
                num_replies=Topic.c.num_replies + 1,
                last_post_user=request.user.id,
                last_reply_on=date.now(),
                last_post=obj.id,
            )
            Forum.filter(Forum.c.id == int(forum_id)).update(
                num_posts=Forum.c.num_posts + 1,
                last_post_user=request.user.id,
                last_reply_on=date.now(),
                last_post=obj.id,
            )

            # Handle sending notification emails
            emails = []
            for u_id in Post.filter(
                (Post.c.topic == int(topic_id)) & (Post.c.reply_email == True) & (Post.c.id == parent_id)
            ).values(Post.c.posted_by):
                user = User.get(u_id[0])
                if user and user.email and (user.email not in emails) and (user.email != request.user.email):
                    emails.append(user.email)

            if not emails:
                return

            _type = settings.get_var("PARA/FORUM_REPLY_PROCESS", "print")
            url = "%s/forum/%s/%s" % (settings.get_var("PARA/DOMAIN"), forum_id, topic_id)
            d = {"url": str(url)}
            mail = {
                "from_": settings.get_var("PARA/EMAIL_SENDER"),
                "to_": emails,
                "subject": settings.get_var("FORUM_EMAIL/FORUM_EMAIL_TITLE"),
                "message": settings.get_var("FORUM_EMAIL/FORUM_EMAIL_TEXT") % d,
                "html": True,
            }

            if _type == "mail":
                Mail().send_mail(**mail)
            elif _type == "print":
                print(mail)
            elif _type == "redis":
                redis = functions.get_redis()
                _t = Serial.dump(mail)
                redis.lpush("send_mails", _t)
Example #13
    def clear_menu(self):
        from uliweb import functions

        # Only handle menu root nodes
        if self.type == 'M0000':
            p = self
            while p.parent:
                p = p.parent
            key = 'MENU:{}'.format(p.name)
            redis = functions.get_redis()
            redis.delete(key)
Example #14
 def f(key=key, value=value, expiry_time=expiry_time):
     redis = functions.get_redis()
     while 1:
         v = redis.get(key)
         if v == value:
             redis.set(key, value, ex=expiry_time, xx=True)
             if debug:
                 log.debug('Continue lock')
         else:
             if debug:
                 log.debug('Not acquire lock yet')
         time.sleep(expiry_time/2)
Example #15
 def f(key=key, value=value, expiry_time=expiry_time):
     redis = functions.get_redis()
     while 1:
         v = redis.get(key)
         if v == value:
             redis.set(key, value, ex=expiry_time, xx=True)
             if debug:
                 log.debug('Continue lock')
         else:
             if debug:
                 log.debug('Not acquire lock yet')
         time.sleep(expiry_time / 2)
Example #16
def process():
    from uliweb.utils.common import Serial
    from uliweb.mail import Mail

    redis = functions.get_redis()
    while 1:
        data = redis.brpop("sendmail", 5)
        if data:
            message = Serial.load(data[1])
            log.info(message)
            Mail().send_mail(**message)
        else:
            log.info("no data")
Example #17
def waiting_lock(key, value, expiry_time=60, debug=False):
    redis = functions.get_redis()
    while 1:
        v = redis.get(key)
        if not v:
            flag = redis.set(key, value, ex=expiry_time, nx=True)
            if flag:
                if debug:
                    log.info('Acquired cron lock')
                return True
        elif v == value:
            return True

        time.sleep(expiry_time)
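
A hedged sketch of guarding a cron-style job with waiting_lock so only one node runs it at a time (the key, the value, and run_daily_report are illustrative):

import uuid

node_id = str(uuid.uuid4())
# Blocks until this node either sets the key or finds it already holds the lock.
waiting_lock('cron:daily_report', node_id, expiry_time=60, debug=True)
run_daily_report()   # hypothetical job, reached only once the lock is held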
Example #18
    def __init__(self, topic, delay=3):
        self.delay = delay
        redis = functions.get_redis()
        self.pub = redis.pubsub()
        self.pub.subscribe(topic)

        while 1:
            time.sleep(0.05)
            message = self.pub.get_message()
            if not message:
                break

        signal.signal(signal.SIGALRM, self.raise_exception)
        spawn(self.listen)
Example #19
def waiting_lock(key, value, expiry_time=60, debug=False):
    redis = functions.get_redis()
    while 1:
        v = redis.get(key)
        if not v:
            flag = redis.set(key, value, ex=expiry_time, nx=True)
            if flag:
                if debug:
                    log.info('Acquired cron lock')
                return True
        elif v == value:
            return True

        time.sleep(expiry_time)
Example #20
    def __init__(self, topic, delay=3):
        self.delay = delay
        redis = functions.get_redis()
        self.pub = redis.pubsub()
        self.pub.subscribe(topic)

        while 1:
            time.sleep(0.05)
            message = self.pub.get_message()
            if not message:
                break

        signal.signal(signal.SIGALRM, self.raise_exception)
        spawn(self.listen)
Example #21
    def init(self):
        from uliweb import settings

        self.redis = functions.get_redis()
        self.queue = self.kwargs['queue']
        self.work_queue = self.queue + ':' + self.name
        self.keys_queue = settings.get_var('ASYNC_TASKS/tasks_keys_queue')

        #todo log

        self.log.info(
            '{} {} created with queue={}, work queue={} timeout={} '
            'max_requests={} soft_memory_limit={} hard_memory_limit={}'.format(
                self.name, self.pid, self.queue, self.work_queue, self.timeout,
                self.max_requests, self.soft_memory_limit,
                self.hard_memory_limit))
        self.check_point = settings.get_var('ASYNC_TASKS_WORKERS/check_point')
Example #22
    def on_changed(self, data):
        # When processing finishes, push a beat message so the task daemon wakes up and continues processing
        redis = functions.get_redis()
        redis.lpush('async_tasks_beat', 'start')

        if 'status' in data:
            if data['status'] == SUCCESS:
                self.on_success()
            elif data['status'] == ERROR:
                self.on_error()
            elif data['status'] == FAILED:
                self.on_failure()
            elif data['status'] == CANCEL:
                self.on_cancel()
            elif data['status'] == QUEUED:
                self.on_queued()

        # process the related parent task status and times
        if self.task.parent_task:
            parent = self.task.parent_task

            # Compute the parent node's status from its children's results
            if 'status' in data:
                self._sync_parent(parent)

            # On error, reset the startup time
            if parent.status == ERROR:
                parent.startup_time = (date.now() +
                    datetime.timedelta(milliseconds=parent.retry_time_interval))

            # Handle the started time: keep the earliest one
            if 'started_time' in data and (not parent.started_time or
                    parent.started_time > data['started_time']):
                parent.started_time = data['started_time']

            # Do not set the finished time while the parent is in ERROR or QUEUED state
            if parent.status not in (ERROR, QUEUED) and 'finished_time' in data and parent.status in ('1', 'C', 'F'):
                parent.finished_time = data['finished_time']

            # Save the result
            handler = parent.get_handler(log=self.log)
            handler.save()
Example #23
def process():
    from datetime import timedelta, datetime
    from uliweb.utils.common import Serial
    
    Request = functions.get_model('eventrequest')
    Event = functions.get_model('event')
    User = functions.get_model('user')

    #day = date.today() - timedelta(days=2)
    day = datetime(year=2013, month=3, day=24)
    
    redis = functions.get_redis()
    
    for obj in Request.filter((Request.c.event == Event.c.id)
                              & (Event.c.begin_date == day)
                              & (Request.c.user == User.c.id)).values(
                                  User.c.username, User.c.email, Event.c.title):
        email = {}
        email['from_'] = 'codepark'
        email['to_'] = obj.email
        email['subject'] = u'活动通知'
        email['message'] = u'用户:%s 活动"%s"将于%s开始' % (obj.username, obj.title, date.today())
        message = Serial.dump(email)
        print('xxxxxxxxxxxxx', message)
        redis.lpush('sendmail', message)
Example #24
def call(args, options, global_options):
    from uliweb import settings
    from multiprocessing import cpu_count

    check_point = settings.get_var('ASYNC_TASKS_WORKERS/check_point')
    queues = settings.get_var('ASYNC_TASKS_WORKERS/queues')

    # Clear keys_queue
    keys_queue = settings.get_var('ASYNC_TASKS/tasks_keys_queue')
    redis = functions.get_redis()
    redis.delete(keys_queue)

    n = cpu_count()
    workers = []

    for x, queue in queues.items():
        max_requests = queue.get('max_requests')
        timeout = queue.get('timeout')
        soft_memory_limit = queue.get('soft_memory_limit')
        hard_memory_limit = queue.get('hard_memory_limit')
        number = queue.get('number', n) or n

        for i in range(number):
            workers.append(
                AsyncWorker(kwargs={'queue': x},
                            max_requests=max_requests,
                            timeout=timeout,
                            soft_memory_limit=soft_memory_limit,
                            hard_memory_limit=hard_memory_limit))

    manager = Manager(workers,
                      title='Async Workers Daemon',
                      check_point=check_point,
                      daemon=True)

    manager.start()
Example #25
def refresh_jobs():
    redis = functions.get_redis()
    redis.publish('cron_refresh', 'refresh')
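
A minimal subscriber sketch for the 'cron_refresh' channel published above, assuming the same functions.get_redis() helper; reload_jobs() is a hypothetical callback:

def listen_refresh():
    from uliweb import functions

    redis = functions.get_redis()
    pub = redis.pubsub()
    pub.subscribe('cron_refresh')
    for message in pub.listen():
        if message.get('type') == 'message':
            reload_jobs()   # hypothetical: re-read the cron job definitions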
Example #26
    def process_response(self, request, response):
        from uliweb import settings, functions, json_dumps
        import base64

        begin_datetime = self.access_datetime
        end_datetime = datetime.now()
        # #if not debug status it'll quit
        # if not settings.get_var('GLOBAL/DEBUG'):
        #     return response

        S = functions.get_model('uliwebrecorderstatus')
        s = S.all().one()
        if not s or s.status == 'E':
            return response

        if settings.get_var('ULIWEBRECORDER/response_text'):
            try:
                text = response.data
            except Exception as e:
                text = str(e)
        else:
            text = ''

        # test whether post_data needs to be base64-encoded
        if not request.content_type:
            post_data_is_text = True
        else:
            post_data_is_text = self.test_text(request.content_type)
        if not post_data_is_text:
            post_data = base64.encodestring(request.data)
        else:
            post_data = json_dumps(request.POST.to_dict())

        # test whether response.data needs to be base64-encoded
        response_data_is_text = self.test_text(response.content_type)
        if not response_data_is_text:
            response_data = base64.encodestring(text)
        else:
            response_data = text

        R = functions.get_model('uliwebrecorder')
        if request.user:
            user_id = request.user.id
        else:
            user_id = None
        max_content_length = settings.get_var(
            'ULIWEBRECORDER/max_content_length')
        if len(response_data) > max_content_length:
            msg = "Content length is great than %d so it will be omitted." % max_content_length
            log.info(msg)
            response_data = msg
            response_data_is_text = True
        recorder = R(method=request.method,
                     url=request_url(request),
                     post_data_is_text=post_data_is_text,
                     post_data=post_data,
                     user=user_id,
                     response_data=response_data,
                     response_data_is_text=response_data_is_text,
                     status_code=response.status_code,
                     begin_datetime=begin_datetime,
                     end_datetime=end_datetime,
                     time_used=(end_datetime - begin_datetime).total_seconds())
        recorder_type = settings.get_var('ULIWEBRECORDER/recorder_type')
        if recorder_type == 'db':
            recorder.save()
        elif recorder_type == 'mq':
            mq_name = settings.get_var('ULIWEBRECORDER/mq_name',
                                       default='uliweb_recorder_mq')
            redis = functions.get_redis()
            redis.lpush(mq_name, recorder.dump())
        elif recorder_type == 'stream':
            log.info(recorder.dump())
        return response
Example #27
def refresh_jobs():
    redis = functions.get_redis()
    redis.publish('cron_refresh', 'refresh')
Example #28
 def f(n=len(commands)):
     redis = functions.get_redis()
     redis.lpush('async_tasks_beat', *['start']*n)
Example #29
def get_redis():
    try:
        redis = functions.get_redis()
    except Exception as e:
        log.exception(e)
        redis = None
    return redis
Example #30
def get_redis():
    try:
        redis = functions.get_redis()
    except Exception as e:
        log.exception(e)
        redis = None
    return redis
Example #31
 def delete(self, key):
     redis = functions.get_redis(**self.kwargs)
     redis.delete(self.get_key(key))