Example #1
def sms2gate(request): 

    count = 0
    now = datetime.datetime.now()
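    # fetch at most 1000 messages that have not yet been pushed to the gate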
    sms_list = SmsSend.objects.not_send_yet_sms2gate()[0:1000]
    
    if sms_list:
        #sendclient = send()
        for record in sms_list:

            data = {"id":record.id}
#             payload = urllib.urlencode(data)
            #print payload,"========"
            try:
#                 queue.sms2gate.delay(data)
                queue = TaskQueue('sms2gate')
                queue.add(Task("/backend/taskqueue/sms2gate",payload))
                count+=1
            except:
                st = SmsSendTrans()
                st.sms_send_to_gate(record.id)
            #print record.receive_mobile

    result = "total push message:"+str(count)
    return HttpResponse(result)
Example #2
 def runBatch(self):
     # sqlquery.writeLog('try to run task')
     # try:
     queue = TaskQueue('safecheck')
     queue.add(Task('/safecheckrun', 'safe parm'))
     # except Exception, e:
     #     sqlquery.writeLog("queue error is: " + str(e))
Example #3
def send_staff_unread(request): 
    unread_mentors = unread_mentor()
    unread_waiters = unread_waiter()
    if not unread_mentors and not unread_waiters:
        return HttpResponse('')
    
    staffs = RelevantStaff.objects.exclude(send_mentor=False,send_waiter=False)
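    # staffs: staff members that still have at least one notification channel enabled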
    
    for s in staffs:
        # send SMS (an SMS queue already exists)
        if s.send_mentor and unread_mentors and s.mobile:
            msg = "<" + SITE_INFO.name + ">导师留言后台有" + str(unread_mentors) + "条新客户留言. 请及时登录回复. 登录地址: " + SITE_INFO.domain + reverse('aq')
            helpers.send_staff_mobile(s.mobile,msg)
        if s.send_waiter and unread_waiters and s.mobile:
            msg = "<" + SITE_INFO.name + ">客服后台有" + str(unread_waiters) + "条新客服留言. 请及时登录回复. 登录地址: " + SITE_INFO.domain + reverse('waiter')
            helpers.send_staff_mobile(s.mobile,msg)
    
    for s in staffs:
        # queue the e-mail notifications
        data = {"staff_id":s.id,"unread_mentors":unread_mentors,"unread_waiters":unread_waiters}
        payload = urllib.urlencode(data)
        # perform the conversion
        try:
            queue = TaskQueue('notice2staff')
            queue.add(Task("/backend/taskqueue/notice2staff",payload))
        except:
            st = helpers.StaffTrans()
            st.kinger_notice_to_staff(s.id,unread_mentors,unread_waiters)
  
    return HttpResponse('')
Example #4
def add_task(user_dict,url,user):
    '''
    Add an SAE task queue task to fetch the next page.
    '''
    url=url+'?&from=%s' %user
    queue = TaskQueue('fetch_next')
    queue.add(Task(url))
    return None
Example #5
def add_task(user_dict, url, user):
    '''
    Add an SAE task queue task to fetch the next page.
    '''
    url = url + '?&from=%s' % user
    queue = TaskQueue('fetch_next')
    queue.add(Task(url))
    return None
Example #6
def addQueue(taskid,emailList):
    try:
        logger.info("Add queue,queueName:%s,taskid:%s" % (web.config.queueName,taskid))
        queue = TaskQueue(web.config.queueName)
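        # one task per recipient address, handled by the /job/sendmail endpoint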
        tasks = [Task("/job/sendmail?taskid="+str(taskid)+"&email="+email) for email in emailList]
        queue.add(tasks)
        return 1
    except Exception,e:
        logger.exception(e)
        return -1
Example #7
def sms2send(request): 
    """
    转换并发送
    """
    convert_count = 0
    send_count = 0    

    now = datetime.datetime.now()

    k_sms_list = Sms.objects.not_send_yet_sms()[0:1000]
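    # process at most 1000 unsent messages per request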

    # def get_test_sms(mobile=u'15814020825'):
    #     sms = Sms.objects.all()[0]
    #     sms.mobile = mobile
    #     return [sms]
    # sms_list = get_test_sms()
    
    if k_sms_list:        
        for record in k_sms_list:
            convert_count += 1
            id = record.id
            data = {"sms_id": id}
            payload = urllib.urlencode(data)

            # perform the conversion
            try:
                queue = TaskQueue('sms2send')
                queue.add(Task("/backend/taskqueue/sms2send", payload))
            except:
                st = SmsTrans()
                st.kinger_sms_to_sms(id)

    result = "成功转换 "+str(convert_count)

    return HttpResponse(result)
Example #8
    def GET(self):
        channels = web.input(channel=[], _unicode=False).channel
        queue = TaskQueue('web_content_fetcher')
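        # one shared queue; fetch tasks for every requested channel are pushed onto it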
        ret = []

        for channel in channels:
            newslist = []
            # channel = channel.encode('ascii')
            if channel not in const.databases:
                injectionlog = logging.getLogger('injection_on_dbname')
                injectionlog.warning(channel+' - '+str(web.ctx.environ))
                continue

            if channel=='meiriyiwen':
                link = const.databases[channel]+date.today().strftime(DATE_FORMAT)
                if not self.contains(channel, link):
                    newslist.append({'link': link, 'pubDate': None, 'tbln': channel})

            else:
                url = const.databases[channel]
                #on windows mysql stores tables on filesystem, hence the table name is case-insensitive,
                #but on *nix they are case-sensitive
                # channel = channel.lower()
                pages = PageListParser(url)
                for p in pages:
                    #p is sorted in desc, so if we find one then the rest is ignored
                    if self.contains(channel, p['link']): break
                    dt = dateSniffer(p['title'])
                    if dt:                        
                        p['pubDate'] = dt
                    p['tbln'] = channel
                    newslist.append(p)

            if newslist:
                queue.add((Task('/update', pickle.dumps(news)) for news in newslist))
                queue.add(Task('/feed_generator', channel))
                ret.append('DONE IN DISPATCHING %s' % channel)
        return '\n'.join(ret) if ret else "IT'S UP-TO-DATE"
Example #9
def startQueue(group):
    from sae.taskqueue import TaskQueue,Task
    if group in GROUPS:
        for x in xrange(5):
            html = grabHTML(URL_START[group])
            if html:
                print 'Get html from "%s" success!' % (URL_START[group])
                break
            else:
                if x < 4:
                    print 'Get html from "%s" fail, retrying: %d' % (URL_START[group], x + 1)
                else:
                    print 'Get html from "%s" fail!' % (URL_START[group])
                    return None
        page = etree.HTML(html)
        tq = TaskQueue('crawler')
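        # all crawl tasks for this group go onto the 'crawler' queue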
        if group == 'nmb':
            res = page.xpath(XPATH_START[group])
            paramlist = []
            for r in res:
                para = {'u' : r.find('h4/a').attrib['href'],'s':'qiniu'}
                extra = r.find("p/span")
                if extra is not None:
                    para['e'] = ''.join(extra.itertext())
                    pass
                tq.add(Task('/crawler/info/%s/handler/?%s' % (group,urlencode(para))))
            return '%d tasks added' % len(res)
        if group == 'ske':
            root = page.xpath("//div[@id='sectionMain']")[0]
            count = 0
            para = {'s':'qiniu'}
            for e in root.getchildren():
                if e.tag == "span":
                    para['e'] = e.find("h3").text.encode('utf-8')
                elif e.tag == "ul":
                    for a in e.findall("li/dl/dd/h3/a"):
                        para['u'] = a.attrib.get('href')
                        tq.add(Task('/crawler/info/%s/handler/?%s' % (group,urlencode(para))))
                        count += 1
                    pass
            return '%d tasks added' % count
        else:
            res = page.xpath(XPATH_START[group])
            if res:
                tl = [Task('crawler/info/%s/handler/?%s' % (group,urlencode({'u':r, 's':'qiniu'}))) for r in res]
                tq.add(tl)
            return '%d tasks added' % len(res)
    else:
        return None
    pass
Example #10
 def get(self, request, *args, **kwargs):
     operation = self.get_operation(request)
     tasks = self.as_task(operation)
     TaskQueue(self.QUEUE_NAME).add(tasks)
     content = 'added {0:d} tasks'.format(len(tasks))
     return HttpResponse(content)
Example #11
from models import *
from framework.web import get, post, ctx, view, interceptor, seeother, notfound, unauthorized
from framework.apis import api, Page, APIError, APIValueError, APIPermissionError, APIResourceNotFoundError
import os.path
import os, re, time, base64, hashlib, logging
from config import configs
import sae.storage
import markdown2
from framework import db
import random
import sae.kvdb
from sae.taskqueue import Task, TaskQueue

kv = sae.kvdb.Client()
counter_queue = TaskQueue(configs.taskqueue)
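# module-level task queue; the queue name is taken from configs.taskqueue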

_COOKIE_NAME = 'jblog'
_COOKIE_KEY = configs.session.secret
CHUNKSIZE = 8192
UPLOAD_PATH = 'upload'
SAE_BUCKET = configs.storage['bucket']


def tag_count_add(tag):
    tag.number += 1
    tag.update()


def tag_count_min(tag):
    tag.number -= 1
Example #12
 def post(self, nodeid='1'):
     n_obj = Node.get_by_key('n-'+str(nodeid))
     if not n_obj:
         self.set_status(404)
         self.write('404')
         return
     
     errors = []
     author = str(self.get_cookie('username',''))
     title = self.get_argument('title','')
     content = self.get_argument('content','')
     
     t_obj = TOPIC_DICT.copy()
     if title and content:
         if len(title)<=TITLE_MAX_S and len(content)<=CONTENT_MAX_S:
             int_time = int(time())
             #check spam
             u_topic_time = kv.get('u_topic_time:'+author)
             if u_topic_time:
                 tlist = u_topic_time.split(',')
                 if len(tlist)== MEMBER_RECENT_TOPIC and (int_time-int(tlist[-1])) < 3600:
                     self.write(u'403:不要发帖太频繁了 <a href="/newpost/%s">请返回</a>' % nodeid)
                     return
             
             #check repeat
             content = textilize(content)
             #content = safe_encode(content)
             con_md5 = md5(content.encode('utf-8')).hexdigest()
             if mc.get('c_'+con_md5):
                 self.write(u'403:请勿灌水 <a href="/newpost/%s">请返回</a>' % nodeid)
                 return
             else:
                 mc.set('c_'+con_md5, '1', 36000)
             
             t_obj['title'] = title
             t_obj['nodeid'] = str(nodeid)
             t_obj['nodename'] = n_obj['name']
             t_obj['author'] = author
             t_obj['add'] = int_time
             t_obj['content'] = content
             
             if n_obj['count']:
                 topic_id = int(n_obj['count']) + 1
             else:
                 topic_id = 1
             if Topic.add(topic_id, t_obj):
                 topic_key = 't-%s-%s' % (str(nodeid), str(topic_id))
                 #node count +1
                 n_obj['count'] = str(topic_id)
                 Commomkvdb.save('n-'+str(nodeid), n_obj)
                 
                 #member recent +key
                 #Member.add_key_rencent_topic(author, topic_key)
                 rt_obj = kv.get('topic-'+author)
                 if rt_obj:
                     olist = rt_obj.split(',')
                     if topic_key not in olist:
                         olist.insert(0, topic_key)
                         rt_obj = ','.join(olist[:MEMBER_RECENT_TOPIC])
                         kv.set('topic-'+author, rt_obj)
                 else:
                     rt_obj = topic_key
                     kv.set('topic-'+author, topic_key)
                 
                 #recent in home +key
                 Commomkvdb.add_key_rencent_topic('recent-topic-home', topic_key)
                 #all topic counter +1
                 Count.key_incr('all-topic-num')
                 #hot node
                 tqueue = TaskQueue('default')
                 tqueue.add(Task('/task/hotnode/%s/%s' % ('n-'+str(nodeid), str(topic_id)), delay=5))
                 #notifications
                 mentions = findall_mentions(t_obj['content'], author)
                 if mentions:
                     tqueue.add(Task('/task/mentions/'+topic_key, 'member='+','.join(mentions), delay=8))
                 
                 #set for check spam
                 #u_topic_time = kv.get('u_topic_time:'+author)
                 if u_topic_time:
                     tlist = u_topic_time.split(',')
                     if str(int_time) not in tlist:
                         tlist.insert(0, str(int_time))
                         u_topic_time = ','.join(tlist[:MEMBER_RECENT_TOPIC])
                         kv.set('u_topic_time:'+author, u_topic_time)
                 else:
                     u_topic_time = str(int_time)
                     kv.set('u_topic_time:'+author, u_topic_time)
                 
                 
                 ##set new sr_code
                 cur_user = self.cur_user()
                 code_list = [cur_user['code'],u_topic_time]
                 u_comment_time = kv.get('u_comment_time:'+author)
                 if u_comment_time:
                     code_list.append(u_comment_time)
                 self.set_cookie('usercode', md5(''.join(code_list)).hexdigest(), path="/", expires_days = 365 )
                 
                 
                 #del cache
                 clear_cache_multi(['get_topic_by_keys:recent-topic-home', 'get_topic_by_keys:topic-'+author, 'get_comment_topic_by_keys:recent-topic-home', 'get_comment_topic_by_keys:recent-comment-topic-home', 'cur_user:'+author])
                 self.redirect('/'+topic_key)
                 return
             else:
                 errors.append("服务器出现错误,请稍后再试")
         else:
             t_obj['title'] = title
             t_obj['content'] = content
             errors.append(u"注意标题和内容的最大字数:%s %d" % (len(title), len(content)))
     else:
         errors.append("标题和内容必填")
     self.echo('newpost.html', {
         'title': "发新帖子",
         'errors':errors,
         'n_obj': n_obj,
         't_obj': t_obj,
     }, layout='_layout.html')
Example #13
 def post(self, nodeid, topicid):
     author = str(self.get_cookie('username',''))
     content = self.get_argument('content','')
     
     if author and content and len(content)<=COMMENT_MAX_S:
         int_time = int(time())
         #check spam
         u_comment_time = kv.get('u_comment_time:'+author)
         if u_comment_time:
             tlist = u_comment_time.split(',')
             if len(tlist)== MEMBER_RECENT_TOPIC and (int_time-int(tlist[-1])) < 3600:
                 self.write(u'403:不要回复太频繁了 <a href="/t-%s-%s">请返回</a>' % (nodeid, topicid))
                 return
         
         #check repeat
         content = textilize(content)
         #content = safe_encode(content)
         con_md5 = md5(content.encode('utf-8')).hexdigest()
         if mc.get('c_'+con_md5):
             self.write(u'403:请勿灌水 <a href="/t-%s-%s">请返回</a>' % (nodeid, topicid))
             return
         else:
             mc.set('c_'+con_md5, '1', 36000)
         
         ##
         t_key = 't-%s-%s' % (str(nodeid), str(topicid))
         t_obj = Topic.get_by_key(t_key)
         
         if t_obj['cnum']:
             id_num = int(t_obj['cnum']) + 1
         else:
             id_num = 1
         
         c_key = 't-%s-%s-%d' % (str(nodeid), str(topicid), id_num)
         c_obj = COMMENT_DICT.copy()
         c_obj['author'] = author
         c_obj['add'] = int_time
         c_obj['content'] = content
         
         if Commomkvdb.save(c_key, c_obj):
             #topic commont count +1
             t_obj['cnum'] = id_num
             t_obj['reply'] = author
             t_obj['edit'] = int_time
             Commomkvdb.save(t_key, t_obj)
             
             #member recent +key
             #Member.add_key_rencent_comment_topic(author, t_key)
             rt_obj = kv.get('comment-topic-'+author)
             if rt_obj:
                 olist = rt_obj.split(',')
                 if t_key in olist:
                     olist.remove(t_key)
                 olist.insert(0, t_key)
                 kv.set('comment-topic-'+author, ','.join(olist[:MEMBER_RECENT_TOPIC]))
             else:
                 kv.set('comment-topic-'+author, t_key)
             
             #recent comment in home +key
             Commomkvdb.add_key_rencent_topic('recent-comment-topic-home', t_key)
             #all comment counter +1
             Count.key_incr('all-comment-num')
             #notifications
             if t_obj['author'] != author:
                 mentions = findall_mentions(c_obj['content']+' @%s '%t_obj['author'], author)
             else:
                 mentions = findall_mentions(c_obj['content'], author)
             if mentions:
                 tqueue = TaskQueue('default')
                 tqueue.add(Task('/task/mentions/'+t_key, 'member='+','.join(mentions), delay=5))
             
             #set for check spam
             #u_comment_time = kv.get('u_comment_time:'+author)
             if u_comment_time:
                 tlist = u_comment_time.split(',')
                 if str(int_time) not in tlist:
                     tlist.insert(0, str(int_time))
                     u_comment_time = ','.join(tlist[:MEMBER_RECENT_TOPIC])
                     kv.set('u_comment_time:'+author, u_comment_time)
             else:
                 u_comment_time = str(int_time)
                 kv.set('u_comment_time:'+author, u_comment_time)
             
             ##set new sr_code
             cur_user = self.cur_user()
             code_list = [cur_user['code']]
             u_topic_time = kv.get('u_topic_time:'+author)
             if u_topic_time:
                 code_list.append(u_topic_time)
             if u_comment_time:
                 code_list.append(u_comment_time)
             self.set_cookie('usercode', md5(''.join(code_list)).hexdigest(), path="/", expires_days = 365 )
             
                 
             #del cache
             cachekeys = ['get_topic_by_keys:recent-comment-topic-home', 'get_topic_by_keys:comment-topic-'+author, 'get_comment_topic_by_keys:recent-topic-home', 'get_comment_topic_by_keys:recent-comment-topic-home', 'cur_user:'+author]
             tks = kv.get('recent-topic-home')
             if tks and t_key in tks.split(','):
                 cachekeys.append('get_topic_by_keys:recent-topic-home')
             if id_num<EACH_PAGE_COMMENT_NUM:
                 cachekeys.append('get_comments:%s:1' % t_key)
             else:
                 cachekeys.append('get_comments:%s:%d' % (t_key, [i for i in range(1,id_num,EACH_PAGE_COMMENT_NUM)][-1]))
             clear_cache_multi(cachekeys)
             
             self.redirect('/'+t_key)
             return
     else:
         self.set_status(403)
         self.write('错误: 403 (请返回填写内容 或 内容太长了)')
Example #14
# -*- coding: utf-8 -*-
from flask import url_for
from sae.taskqueue import Task, TaskQueue
import json

queue = TaskQueue('mailq')
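# module-level queue for outgoing mail tasks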

def send_mail(subject='', body='', recipients=None, reply_to='*****@*****.**'):
    queue.add(Task(url_for('util.send_mail'),
        json.dumps({
            'subject': subject,
            'body': body,
            'recipients': recipients,
            'reply_to': reply_to,
            })
        ))
Example #15
def startQueue(group):
    from sae.taskqueue import TaskQueue, Task
    if group in GROUPS:
        for x in xrange(5):
            html = grabHTML(URL_START[group])
            if html:
                print 'Get html from "%s" success!' % (URL_START[group])
                break
            else:
                if x < 4:
                    print 'Get html from "%s" fail, retrying: %d' % (
                        URL_START[group], x + 1)
                else:
                    print 'Get html from "%s" fail!' % (URL_START[group])
                    return None
        page = etree.HTML(html)
        tq = TaskQueue('crawler')
        if group == 'nmb':
            res = page.xpath(XPATH_START[group])
            paramlist = []
            for r in res:
                para = {'u': r.find('h4/a').attrib['href'], 's': 'qiniu'}
                extra = r.find("p/span")
                if extra is not None:
                    para['e'] = ''.join(extra.itertext())
                    pass
                tq.add(
                    Task('/crawler/info/%s/handler/?%s' %
                         (group, urlencode(para))))
            return '%d tasks added' % len(res)
        if group == 'ske':
            root = page.xpath("//div[@id='sectionMain']")[0]
            count = 0
            para = {'s': 'qiniu'}
            for e in root.getchildren():
                if e.tag == "span":
                    para['e'] = e.find("h3").text.encode('utf-8')
                elif e.tag == "ul":
                    for a in e.findall("li/dl/dd/h3/a"):
                        para['u'] = a.attrib.get('href')
                        tq.add(
                            Task('/crawler/info/%s/handler/?%s' %
                                 (group, urlencode(para))))
                        count += 1
                    pass
            return '%d tasks added' % count
        else:
            res = page.xpath(XPATH_START[group])
            if res:
                tl = [
                    Task('crawler/info/%s/handler/?%s' %
                         (group, urlencode({
                             'u': r,
                             's': 'qiniu'
                         }))) for r in res
                ]
                tq.add(tl)
            return '%d tasks added' % len(res)
    else:
        return None
    pass
Example #16
    def post(self, nodeid='1'):
        n_obj = Node.get_by_key('n-' + str(nodeid))
        if not n_obj:
            self.set_status(404)
            self.write('404')
            return

        errors = []
        author = str(self.get_cookie('username', ''))
        title = self.get_argument('title', '')
        content = self.get_argument('content', '')

        t_obj = TOPIC_DICT.copy()
        if title and content:
            if len(title) <= TITLE_MAX_S and len(content) <= CONTENT_MAX_S:
                int_time = int(time())
                #check spam
                u_topic_time = kv.get('u_topic_time:' + author)
                if u_topic_time:
                    tlist = u_topic_time.split(',')
                    if len(tlist) == MEMBER_RECENT_TOPIC and (
                            int_time - int(tlist[-1])) < 3600:
                        self.write(
                            u'403:不要发帖太频繁了 <a href="/newpost/%s">请返回</a>' %
                            nodeid)
                        return

                #check repeat
                content = textilize(content)
                #content = safe_encode(content)
                con_md5 = md5(content.encode('utf-8')).hexdigest()
                if mc.get('c_' + con_md5):
                    self.write(u'403:请勿灌水 <a href="/newpost/%s">请返回</a>' %
                               nodeid)
                    return
                else:
                    mc.set('c_' + con_md5, '1', 36000)

                t_obj['title'] = title
                t_obj['nodeid'] = str(nodeid)
                t_obj['nodename'] = n_obj['name']
                t_obj['author'] = author
                t_obj['add'] = int_time
                t_obj['content'] = content

                if n_obj['count']:
                    topic_id = int(n_obj['count']) + 1
                else:
                    topic_id = 1
                if Topic.add(topic_id, t_obj):
                    topic_key = 't-%s-%s' % (str(nodeid), str(topic_id))
                    #node count +1
                    n_obj['count'] = str(topic_id)
                    Commomkvdb.save('n-' + str(nodeid), n_obj)

                    #member recent +key
                    #Member.add_key_rencent_topic(author, topic_key)
                    rt_obj = kv.get('topic-' + author)
                    if rt_obj:
                        olist = rt_obj.split(',')
                        if topic_key not in olist:
                            olist.insert(0, topic_key)
                            rt_obj = ','.join(olist[:MEMBER_RECENT_TOPIC])
                            kv.set('topic-' + author, rt_obj)
                    else:
                        rt_obj = topic_key
                        kv.set('topic-' + author, topic_key)

                    #recent in home +key
                    Commomkvdb.add_key_rencent_topic('recent-topic-home',
                                                     topic_key)
                    #all topic counter +1
                    Count.key_incr('all-topic-num')
                    #hot node
                    tqueue = TaskQueue('default')
                    tqueue.add(
                        Task('/task/hotnode/%s/%s' %
                             ('n-' + str(nodeid), str(topic_id)),
                             delay=5))
                    #notifications
                    mentions = findall_mentions(t_obj['content'], author)
                    if mentions:
                        tqueue.add(
                            Task('/task/mentions/' + topic_key,
                                 'member=' + ','.join(mentions),
                                 delay=8))

                    #set for check spam
                    #u_topic_time = kv.get('u_topic_time:'+author)
                    if u_topic_time:
                        tlist = u_topic_time.split(',')
                        if str(int_time) not in tlist:
                            tlist.insert(0, str(int_time))
                            u_topic_time = ','.join(
                                tlist[:MEMBER_RECENT_TOPIC])
                            kv.set('u_topic_time:' + author, u_topic_time)
                    else:
                        u_topic_time = str(int_time)
                        kv.set('u_topic_time:' + author, u_topic_time)

                    ##set new sr_code
                    cur_user = self.cur_user()
                    code_list = [cur_user['code'], u_topic_time]
                    u_comment_time = kv.get('u_comment_time:' + author)
                    if u_comment_time:
                        code_list.append(u_comment_time)
                    self.set_cookie('usercode',
                                    md5(''.join(code_list)).hexdigest(),
                                    path="/",
                                    expires_days=365)

                    #del cache
                    clear_cache_multi([
                        'get_topic_by_keys:recent-topic-home',
                        'get_topic_by_keys:topic-' + author,
                        'get_comment_topic_by_keys:recent-topic-home',
                        'get_comment_topic_by_keys:recent-comment-topic-home',
                        'cur_user:' + author,
                    ])
                    self.redirect('/' + topic_key)
                    return
                else:
                    errors.append("服务器出现错误,请稍后再试")
            else:
                t_obj['title'] = title
                t_obj['content'] = content
                errors.append(u"注意标题和内容的最大字数:%s %d" %
                              (len(title), len(content)))
        else:
            errors.append("标题和内容必填")
        self.echo('newpost.html', {
            'title': "发新帖子",
            'errors': errors,
            'n_obj': n_obj,
            't_obj': t_obj,
        },
                  layout='_layout.html')
Example #17
    def post(self, nodeid, topicid):
        author = str(self.get_cookie('username', ''))
        content = self.get_argument('content', '')

        if author and content and len(content) <= COMMENT_MAX_S:
            int_time = int(time())
            #check spam
            u_comment_time = kv.get('u_comment_time:' + author)
            if u_comment_time:
                tlist = u_comment_time.split(',')
                if len(tlist) == MEMBER_RECENT_TOPIC and (
                        int_time - int(tlist[-1])) < 3600:
                    self.write(u'403:不要回复太频繁了 <a href="/t-%s-%s">请返回</a>' %
                               (nodeid, topicid))
                    return

            #check repeat
            content = textilize(content)
            #content = safe_encode(content)
            con_md5 = md5(content.encode('utf-8')).hexdigest()
            if mc.get('c_' + con_md5):
                self.write(u'403:请勿灌水 <a href="/t-%s-%s">请返回</a>' %
                           (nodeid, topicid))
                return
            else:
                mc.set('c_' + con_md5, '1', 36000)

            ##
            t_key = 't-%s-%s' % (str(nodeid), str(topicid))
            t_obj = Topic.get_by_key(t_key)

            if t_obj['cnum']:
                id_num = int(t_obj['cnum']) + 1
            else:
                id_num = 1

            c_key = 't-%s-%s-%d' % (str(nodeid), str(topicid), id_num)
            c_obj = COMMENT_DICT.copy()
            c_obj['author'] = author
            c_obj['add'] = int_time
            c_obj['content'] = content

            if Commomkvdb.save(c_key, c_obj):
                #topic commont count +1
                t_obj['cnum'] = id_num
                t_obj['reply'] = author
                t_obj['edit'] = int_time
                Commomkvdb.save(t_key, t_obj)

                #member recent +key
                #Member.add_key_rencent_comment_topic(author, t_key)
                rt_obj = kv.get('comment-topic-' + author)
                if rt_obj:
                    olist = rt_obj.split(',')
                    if t_key in olist:
                        olist.remove(t_key)
                    olist.insert(0, t_key)
                    kv.set('comment-topic-' + author,
                           ','.join(olist[:MEMBER_RECENT_TOPIC]))
                else:
                    kv.set('comment-topic-' + author, t_key)

                #recent comment in home +key
                Commomkvdb.add_key_rencent_topic('recent-comment-topic-home',
                                                 t_key)
                #all comment counter +1
                Count.key_incr('all-comment-num')
                #notifications
                if t_obj['author'] != author:
                    mentions = findall_mentions(
                        c_obj['content'] + ' @%s ' % t_obj['author'], author)
                else:
                    mentions = findall_mentions(c_obj['content'], author)
                if mentions:
                    tqueue = TaskQueue('default')
                    tqueue.add(
                        Task('/task/mentions/' + t_key,
                             'member=' + ','.join(mentions),
                             delay=5))

                #set for check spam
                #u_comment_time = kv.get('u_comment_time:'+author)
                if u_comment_time:
                    tlist = u_comment_time.split(',')
                    if str(int_time) not in tlist:
                        tlist.insert(0, str(int_time))
                        u_comment_time = ','.join(tlist[:MEMBER_RECENT_TOPIC])
                        kv.set('u_comment_time:' + author, u_comment_time)
                else:
                    u_comment_time = str(int_time)
                    kv.set('u_comment_time:' + author, u_comment_time)

                ##set new sr_code
                cur_user = self.cur_user()
                code_list = [cur_user['code']]
                u_topic_time = kv.get('u_topic_time:' + author)
                if u_topic_time:
                    code_list.append(u_topic_time)
                if u_comment_time:
                    code_list.append(u_comment_time)
                self.set_cookie('usercode',
                                md5(''.join(code_list)).hexdigest(),
                                path="/",
                                expires_days=365)

                #del cache
                cachekeys = [
                    'get_topic_by_keys:recent-comment-topic-home',
                    'get_topic_by_keys:comment-topic-' + author,
                    'get_comment_topic_by_keys:recent-topic-home',
                    'get_comment_topic_by_keys:recent-comment-topic-home',
                    'cur_user:' + author,
                ]
                tks = kv.get('recent-topic-home')
                if tks and t_key in tks.split(','):
                    cachekeys.append('get_topic_by_keys:recent-topic-home')
                if id_num < EACH_PAGE_COMMENT_NUM:
                    cachekeys.append('get_comments:%s:1' % t_key)
                else:
                    cachekeys.append('get_comments:%s:%d' % (t_key, [
                        i for i in range(1, id_num, EACH_PAGE_COMMENT_NUM)
                    ][-1]))
                clear_cache_multi(cachekeys)

                self.redirect('/' + t_key)
                return
        else:
            self.set_status(403)
            self.write('错误: 403 (请返回填写内容 或 内容太长了)')
Example #18
def get_queue(queue_name):
    """ 获取队列
    """
    if queue_name not in CachedQueue:
        CachedQueue[queue_name] = TaskQueue(queue_name)
    return CachedQueue[queue_name]
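
A minimal usage sketch of the helper above, assuming CachedQueue is a module-level dict; the '/task/demo' handler path is hypothetical:

# Usage sketch (assumptions: CachedQueue = {} at module level; '/task/demo' is a hypothetical handler path)
from sae.taskqueue import Task, TaskQueue

CachedQueue = {}

q1 = get_queue('default')
q2 = get_queue('default')   # the second call reuses the cached TaskQueue
assert q1 is q2
q1.add(Task('/task/demo', 'payload=1'))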