def add_task(user_dict, url, user):
    """Queue an SAE 'fetch_next' task that fetches the next page for *user*.

    *user_dict* is accepted for interface compatibility but not used here.
    """
    target = '%s?&from=%s' % (url, user)
    TaskQueue('fetch_next').add(Task(target))
def cron_sync_click():
    """Cron endpoint: verify HTTP Basic credentials, then enqueue the
    click-count sync task.

    Raises:
        unauthorized: when the Authorization header does not carry the
            expected base64 credentials.
    """
    # Header is 'Basic <base64>'; keep only the base64 payload.
    authorization = ctx.request.header('Authorization').split(' ')[1]
    # Fixed: removed the debug print that leaked the Authorization
    # header (credentials) to the application log.
    # NOTE(review): credentials are hard-coded; consider moving to config.
    answer = base64.b64encode('jblog:jblog').decode('utf-8')
    if authorization != answer:
        raise unauthorized()
    counter_queue.add(Task('/tasks/sync-click', "time=1min"))
def send_mail(subject='', body='', recipients=None, reply_to='*****@*****.**'):
    """Enqueue an asynchronous task that delivers a mail via the
    'util.send_mail' endpoint; the payload is passed as JSON."""
    payload = {
        'subject': subject,
        'body': body,
        'recipients': recipients,
        'reply_to': reply_to,
    }
    queue.add(Task(url_for('util.send_mail'), json.dumps(payload)))
def blog(id):
    """Render a single blog entry, bumping its click counter asynchronously."""
    entry = Blog.get(id)
    if not entry:
        raise notfound()
    # Render the stored markdown; "code-friendly" disables intra-word emphasis.
    entry.content = markdown2.markdown(entry.content, extras=["code-friendly"])
    # Outside the SAE runtime (no SERVER_SOFTWARE env var) images are served
    # from a local path.
    if 'SERVER_SOFTWARE' not in os.environ:
        entry.image = '/' + entry.image
    entry.tags = get_tags_from_blog(entry)
    entry.click = kv.get(id)
    if not entry.click:
        kv.add(id, 0)
        entry.click = 0
    # Increment the click counter out-of-band via the task queue.
    counter_queue.add(Task('/tasks/counter', "blog_id=%s" % id))
    return dict(blog=entry, user=ctx.request.user, tags=all_tags())
def post(self, nodeid='1'):
    """Create a new topic under node *nodeid*.

    Validates title/content length, rate-limits the author, rejects
    duplicate content, persists the topic, updates per-member and
    site-wide recency/counter keys, queues hot-node and @mention tasks,
    then redirects to the new topic — or re-renders the form with errors.
    """
    n_obj = Node.get_by_key('n-' + str(nodeid))
    if not n_obj:
        self.set_status(404)
        self.write('404')
        return
    errors = []
    author = str(self.get_cookie('username', ''))
    title = self.get_argument('title', '')
    content = self.get_argument('content', '')
    t_obj = TOPIC_DICT.copy()
    if title and content:
        if len(title) <= TITLE_MAX_S and len(content) <= CONTENT_MAX_S:
            int_time = int(time())
            # Spam check: block a member who already posted
            # MEMBER_RECENT_TOPIC topics within the last hour.
            u_topic_time = kv.get('u_topic_time:' + author)
            if u_topic_time:
                tlist = u_topic_time.split(',')
                if len(tlist) == MEMBER_RECENT_TOPIC and (
                        int_time - int(tlist[-1])) < 3600:
                    self.write(u'403:不要发帖太频繁了 <a href="/newpost/%s">请返回</a>' % nodeid)
                    return
            # Duplicate check: reject identical rendered content seen
            # within the last 10 hours (36000s), keyed by its md5.
            content = textilize(content)
            #content = safe_encode(content)
            con_md5 = md5(content.encode('utf-8')).hexdigest()
            if mc.get('c_' + con_md5):
                self.write(u'403:请勿灌水 <a href="/newpost/%s">请返回</a>' % nodeid)
                return
            else:
                mc.set('c_' + con_md5, '1', 36000)
            t_obj['title'] = title
            t_obj['nodeid'] = str(nodeid)
            t_obj['nodename'] = n_obj['name']
            t_obj['author'] = author
            t_obj['add'] = int_time
            t_obj['content'] = content
            # Topic ids are per-node and monotonically increasing.
            if n_obj['count']:
                topic_id = int(n_obj['count']) + 1
            else:
                topic_id = 1
            if Topic.add(topic_id, t_obj):
                topic_key = 't-%s-%s' % (str(nodeid), str(topic_id))
                # Node topic count +1.
                n_obj['count'] = str(topic_id)
                Commomkvdb.save('n-' + str(nodeid), n_obj)
                # Prepend to the member's recent-topic list (capped at
                # MEMBER_RECENT_TOPIC entries).
                #Member.add_key_rencent_topic(author, topic_key)
                rt_obj = kv.get('topic-' + author)
                if rt_obj:
                    olist = rt_obj.split(',')
                    if topic_key not in olist:
                        olist.insert(0, topic_key)
                        rt_obj = ','.join(olist[:MEMBER_RECENT_TOPIC])
                        kv.set('topic-' + author, rt_obj)
                else:
                    kv.set('topic-' + author, topic_key)
                # Site-wide recent-topic list and global counter.
                Commomkvdb.add_key_rencent_topic('recent-topic-home', topic_key)
                Count.key_incr('all-topic-num')
                # Queue the hot-node ranking task.
                tqueue = TaskQueue('default')
                tqueue.add(Task('/task/hotnode/%s/%s'
                                % ('n-' + str(nodeid), str(topic_id)), delay=5))
                # Queue @mention notifications.
                mentions = findall_mentions(t_obj['content'], author)
                if mentions:
                    tqueue.add(Task('/task/mentions/' + topic_key,
                                    'member=' + ','.join(mentions), delay=8))
                # Record this post's timestamp for the spam check above.
                #u_topic_time = kv.get('u_topic_time:'+author)
                if u_topic_time:
                    tlist = u_topic_time.split(',')
                    if str(int_time) not in tlist:
                        tlist.insert(0, str(int_time))
                        u_topic_time = ','.join(tlist[:MEMBER_RECENT_TOPIC])
                        kv.set('u_topic_time:' + author, u_topic_time)
                else:
                    u_topic_time = str(int_time)
                    kv.set('u_topic_time:' + author, u_topic_time)
                # Refresh the 'usercode' cookie derived from the member's
                # code plus recent activity timestamps.
                cur_user = self.cur_user()
                code_list = [cur_user['code'], u_topic_time]
                u_comment_time = kv.get('u_comment_time:' + author)
                if u_comment_time:
                    code_list.append(u_comment_time)
                self.set_cookie('usercode',
                                md5(''.join(code_list)).hexdigest(),
                                path="/", expires_days=365)
                # Invalidate caches touched by the new topic.
                # NOTE(review): this statement was corrupted in the source
                # by secret-masking ("'cur_user:'******'/' + topic_key)");
                # reconstructed as 'cur_user:' + author plus the redirect,
                # matching the sibling comment handler — confirm against
                # VCS history.
                clear_cache_multi([
                    'get_topic_by_keys:recent-topic-home',
                    'get_topic_by_keys:topic-' + author,
                    'get_comment_topic_by_keys:recent-topic-home',
                    'get_comment_topic_by_keys:recent-comment-topic-home',
                    'cur_user:' + author,
                ])
                self.redirect('/' + topic_key)
                return
            else:
                errors.append("服务器出现错误,请稍后再试")
        else:
            t_obj['title'] = title
            t_obj['content'] = content
            errors.append(u"注意标题和内容的最大字数:%s %d" % (len(title), len(content)))
    else:
        errors.append("标题和内容必填")
    self.echo('newpost.html', {
        'title': "发新帖子",
        'errors': errors,
        'n_obj': n_obj,
        't_obj': t_obj,
    }, layout='_layout.html')
def post(self, nodeid, topicid):
    """Add a comment to topic 't-<nodeid>-<topicid>'.

    Rate-limits the author, rejects duplicate content, persists the
    comment, bumps topic/comment counters and recency keys, queues
    @mention notifications, and redirects back to the topic.
    Responds 403 when the author/content is missing or too long.
    """
    author = str(self.get_cookie('username', ''))
    content = self.get_argument('content', '')
    if author and content and len(content) <= COMMENT_MAX_S:
        int_time = int(time())
        # Spam check: block a member who already commented
        # MEMBER_RECENT_TOPIC times within the last hour.
        u_comment_time = kv.get('u_comment_time:' + author)
        if u_comment_time:
            tlist = u_comment_time.split(',')
            if len(tlist) == MEMBER_RECENT_TOPIC and (
                    int_time - int(tlist[-1])) < 3600:
                self.write(u'403:不要回复太频繁了 <a href="/t-%s-%s">请返回</a>'
                           % (nodeid, topicid))
                return
        # Duplicate check: reject identical rendered content seen
        # within the last 10 hours (36000s), keyed by its md5.
        content = textilize(content)
        #content = safe_encode(content)
        con_md5 = md5(content.encode('utf-8')).hexdigest()
        if mc.get('c_' + con_md5):
            self.write(u'403:请勿灌水 <a href="/t-%s-%s">请返回</a>' % (nodeid, topicid))
            return
        else:
            mc.set('c_' + con_md5, '1', 36000)
        ##
        t_key = 't-%s-%s' % (str(nodeid), str(topicid))
        t_obj = Topic.get_by_key(t_key)
        # Comment ids are per-topic and monotonically increasing.
        if t_obj['cnum']:
            id_num = int(t_obj['cnum']) + 1
        else:
            id_num = 1
        c_key = 't-%s-%s-%d' % (str(nodeid), str(topicid), id_num)
        c_obj = COMMENT_DICT.copy()
        c_obj['author'] = author
        c_obj['add'] = int_time
        c_obj['content'] = content
        if Commomkvdb.save(c_key, c_obj):
            # Topic comment count +1; track last replier and edit time.
            t_obj['cnum'] = id_num
            t_obj['reply'] = author
            t_obj['edit'] = int_time
            Commomkvdb.save(t_key, t_obj)
            # Move the topic to the front of the member's
            # recently-commented list (capped at MEMBER_RECENT_TOPIC).
            #Member.add_key_rencent_comment_topic(author, t_key)
            rt_obj = kv.get('comment-topic-' + author)
            if rt_obj:
                olist = rt_obj.split(',')
                if t_key in olist:
                    olist.remove(t_key)
                olist.insert(0, t_key)
                kv.set('comment-topic-' + author,
                       ','.join(olist[:MEMBER_RECENT_TOPIC]))
            else:
                kv.set('comment-topic-' + author, t_key)
            # Site-wide recently-commented list and global counter.
            Commomkvdb.add_key_rencent_topic('recent-comment-topic-home', t_key)
            Count.key_incr('all-comment-num')
            # @mention notifications; the topic author is implicitly
            # mentioned unless they are commenting on their own topic.
            if t_obj['author'] != author:
                mentions = findall_mentions(
                    c_obj['content'] + ' @%s ' % t_obj['author'], author)
            else:
                mentions = findall_mentions(c_obj['content'], author)
            if mentions:
                tqueue = TaskQueue('default')
                tqueue.add(Task('/task/mentions/' + t_key,
                                'member=' + ','.join(mentions), delay=5))
            # Record this comment's timestamp for the spam check above.
            #u_comment_time = kv.get('u_comment_time:'+author)
            if u_comment_time:
                tlist = u_comment_time.split(',')
                if str(int_time) not in tlist:
                    tlist.insert(0, str(int_time))
                    u_comment_time = ','.join(tlist[:MEMBER_RECENT_TOPIC])
                    kv.set('u_comment_time:' + author, u_comment_time)
            else:
                u_comment_time = str(int_time)
                kv.set('u_comment_time:' + author, u_comment_time)
            # Refresh the 'usercode' cookie derived from the member's
            # code plus recent activity timestamps.
            cur_user = self.cur_user()
            code_list = [cur_user['code']]
            u_topic_time = kv.get('u_topic_time:' + author)
            if u_topic_time:
                code_list.append(u_topic_time)
            if u_comment_time:
                code_list.append(u_comment_time)
            self.set_cookie('usercode',
                            md5(''.join(code_list)).hexdigest(),
                            path="/", expires_days=365)
            # Invalidate caches touched by the new comment.
            # NOTE(review): this span was corrupted in the source by
            # secret-masking ("'cur_user:'******'recent-topic-home')");
            # reconstructed as 'cur_user:' + author plus
            # tks = kv.get('recent-topic-home') — tks is used right
            # below — confirm against VCS history.
            cachekeys = [
                'get_topic_by_keys:recent-comment-topic-home',
                'get_topic_by_keys:comment-topic-' + author,
                'get_comment_topic_by_keys:recent-topic-home',
                'get_comment_topic_by_keys:recent-comment-topic-home',
                'cur_user:' + author,
            ]
            tks = kv.get('recent-topic-home')
            if tks and t_key in tks.split(','):
                cachekeys.append('get_topic_by_keys:recent-topic-home')
            # Only the comment page this comment lands on needs purging.
            if id_num < EACH_PAGE_COMMENT_NUM:
                cachekeys.append('get_comments:%s:1' % t_key)
            else:
                cachekeys.append('get_comments:%s:%d' % (t_key, [
                    i for i in range(1, id_num, EACH_PAGE_COMMENT_NUM)
                ][-1]))
            clear_cache_multi(cachekeys)
            self.redirect('/' + t_key)
            return
    else:
        self.set_status(403)
        self.write('错误: 403 (请返回填写内容 或 内容太长了)')
def startQueue(group): from sae.taskqueue import TaskQueue, Task if group in GROUPS: for x in xrange(5): html = grabHTML(URL_START[group]) if html: print 'Get html from "%s" success!' % (URL_START[group]) break else: if x < 4: print 'Get html from "%s" fail, retrying: %d' % ( URL_START[group], x + 1) else: print 'Get html from "%s" fail!' % (URL_START[group]) return None page = etree.HTML(html) tq = TaskQueue('crawler') if group == 'nmb': res = page.xpath(XPATH_START[group]) paramlist = [] for r in res: para = {'u': r.find('h4/a').attrib['href'], 's': 'qiniu'} extra = r.find("p/span") if extra: para['e'] = ''.join(extra.itertext()) pass tq.add( Task('/crawler/info/%s/handler/?%s' % (group, urlencode(para)))) return '%d tasks added' % len(res) if group == 'ske': root = page.xpath("//div[@id='sectionMain']")[0] count = 0 para = {'s': 'qiniu'} for e in root.getchildren(): if e.tag == "span": para['e'] = e.find("h3").text.encode('utf-8') elif e.tag == "ul": for a in e.findall("li/dl/dd/h3/a"): para['u'] = a.attrib.get('href') tq.add( Task('/crawler/info/%s/handler/?%s' % (group, urlencode(para)))) count += 1 pass return '%d tasks added' % count else: res = page.xpath(XPATH_START[group]) if res: tl = [ Task('crawler/info/%s/handler/?%s' % (group, urlencode({ 'u': r, 's': 'qiniu' }))) for r in res ] tq.add(tl) return '%d tasks added' % len(res) else: return None pass
def as_task(self, **kwargs):
    """Persist this object to memcache and return a queue Task that
    targets its execute URI; *kwargs* are forwarded to Task()."""
    cache_key = self.get_key()
    self.save_to_mc(cache_key)
    return Task(self.get_execute_uri(cache_key), **kwargs)