Example #1
    def post(self, *ar, **kw):
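        # Walk KeywordSearchEngine keys in batches of 100: enqueue one
        # rank-update task per key on the 'search-engine-calls' queue, then
        # re-enqueue this handler on 'default' with the last key as a cursor.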
        time = self.request.get('time')

        today = datetime.date.today()
        if not time or datetime.date.fromtimestamp(float(time)) < today:
            logging.info(
                "Ignoring deprecated task UpdateRanksHandler:post(time = " +
                time + ")")
            return

        last_key = self.request.get('last_key')
        if not last_key:
            query = db.GqlQuery(
                'SELECT __key__ FROM KeywordSearchEngine ORDER BY __key__')
        else:
            query = db.GqlQuery(
                'SELECT __key__ FROM KeywordSearchEngine WHERE __key__ > :last_key ORDER BY __key__',
                last_key=db.Key(last_key))
        entities = query.fetch(100)
        if entities:
            default_queue = taskqueue.Queue("default")
            se_calls_queue = taskqueue.Queue("search-engine-calls")
            for key in entities:
                task = taskqueue.Task(url='/tasks/update_keyword_se_rank',
                                      params={'key': key})
                se_calls_queue.add(task)
                last_key = key
            task = taskqueue.Task(url='/tasks/update_ranks',
                                  params={
                                      'time': time,
                                      'last_key': last_key
                                  })
            default_queue.add(task)
Example #2
 def get(self):
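     # Serve the sitemap assembled from memcached parts; if no parts are
     # cached yet, enqueue a /sitemap.xml/Create task on the 'CreateSitemap'
     # queue and return an empty body for now.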
     totalPart = memcache.get('sitemap.xml_total_part')
     
     if (totalPart is None):
         
         pQueue = taskqueue.Queue(name = 'CreateSitemap')
         taskurl = 'http://' + self.request.host_url
         pTask = taskqueue.Task(url='/sitemap.xml/Create', params=dict(url=taskurl))
     
         pQueue.add(pTask)
         logging.info('Task queue started!')
         
         xml = ''
         
     else:
         partNo = 1
         xml = self.xmlHeader()
         
         while(partNo <= totalPart):
             key = 'sitemap.xml_part' + str(partNo)
             partNo += 1
             
             partBody = memcache.get(key)
             
             if (partBody is None):
                 partBody = ''
             xml += partBody
             
         xml += "</urlset>\n"
     
     self.response.headers['Content-Type'] = 'application/xml'
     self.response.out.write(xml)
Example #3
	def post(self, API_VERSION, ACCOUNT_SID, *args):
		format = response.response_format(self.request.path.split('/')[-1])
		if parameters.required(['From','To','Body'],self.request):
			Message = messages.Message.new(
										To = self.request.get('To'),
										From = self.request.get('From'),
										Body = self.request.get('Body'),
										AccountSid = ACCOUNT_SID,
										Direction = 'outbound-api',
										Status = 'queued'
									)
			if self.request.get('StatusCallback',None) is not None:
				Message.StatusCallback = self.request.get('StatusCallback')
			response_data = Message.get_dict()
			self.response.out.write(response.format_response(response.add_nodes(self,response_data,format),format))
			Message.put()
			#DO SOME THINGS DEPENDING ON ACCOUNT SETTINGS
			#DEFAULT WILL BE TO SEND MESSAGE, CHARGE FOR IT AND UPDATE WHEN SENT
			Message.send()
			#make sure put happens before callback happens
			if Message.StatusCallback is not None:
				taskqueue.Queue('StatusCallbacks').add(taskqueue.Task(url='/Callbacks/SMS', params = {'SmsSid':Message.Sid}))
		else:
			#This should either specify a twilio code either 21603 or 21604
			self.response.out.write(response.format_response(errors.rest_error_response(400,"Missing Parameters",format),format))
Example #4
 def saveBookmark(self):
   bm = ''.join(["saveBookmarks_" , str(time.time())])
   memcache.add(bm, self.tempList, 120)
   self.tempList = []
   queue = taskqueue.Queue("bookmark")
   queue.add(taskqueue.Task(url = "/bookmark/q/put", params = {'key': bm}))
   pass
Example #5
 def post(self, *ar, **kw):
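     # Delete KeywordRankLog entries for this keyword/search-engine pair in
     # batches of 10, re-enqueuing the same task until none are left.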
     key = self.request.get('keyword_se_key')
     batch_size = 10
     query = models.KeywordRankLog.all(keys_only=True).filter(
         'keyword_se = ', db.Key(key))
     count = query.count()
     db.delete(query.fetch(batch_size))
     if count > batch_size:
         queue = taskqueue.Queue("default")
         task = taskqueue.Task(url='/tasks/delete/keyword_rank_logs',
                               params={'keyword_se_key': key})
         queue.add(task)
Example #6
    def post(self):
        memcache.flush_all()
        logging.info('Memcache has all flushed! ')
        db.delete(DBCache().all())

        if (DBCache().all().count() > 1):
            pQueue = taskqueue.Queue(name='DeleteDBCache')
            taskurl = 'http://' + self.request.host_url
            pTask = taskqueue.Task(url='/cacheflush', params=dict(url=taskurl))

            pQueue.add(pTask)
        else:
            logging.info('DBcache has all flushed! ')
Example #7
    def buildXml(self, offSet):
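        # Build one sitemap chunk, cache it in memcache as part N, and either
        # queue the next chunk (a full batch was built) or purge the queue and
        # the temporary counters (the last, partial batch was built).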

        count = self.buildElement(offSet)
        
        partNo =  memcache.get('sitemap.xml_total_part')
        if (partNo is None):
            partNo = 0
        partNo += 1
        memcache.set('sitemap.xml_total_part', partNo, self.sitemapCacheTime)
        
        logging.info('Start caching part ' + str(partNo))
        
        finishedCount = offSet + count

        key = 'sitemap.xml_part' + str(partNo)
        
        memcache.set(key, self.XMLBody, self.sitemapCacheTime)
        memcache.set('tmp_sitemap_finished_count', finishedCount, 60*5)


        if (count == self.PreLoad):
            
            pQueue = taskqueue.Queue(name = 'CreateSitemap')
            taskurl = 'http://' + self.request.host_url
            pTask = taskqueue.Task(url='/sitemap.xml/Create', params=dict(url=taskurl))
        
            pQueue.add(pTask)
            logging.info('Task queue added!')
            
        else:
            pQueue = taskqueue.Queue(name = 'CreateSitemap')
            pQueue.purge()
            
            memcache.delete('tmp_sitemap_maxload')
            memcache.delete('tmp_sitemap_finished_count')
            
            logging.info('Purged all temp values.')
            
        return count
Example #8
 def post(self, *ar, **kw):
     if not config.data_retention_days:
         return
     batch_size = 10
     retention_delta = datetime.timedelta(days=config.data_retention_days)
     date = datetime.datetime.today() - retention_delta
     query = models.KeywordRankLog.all(keys_only=True).filter(
         'date < ', date)
     count = query.count()
     db.delete(query.fetch(batch_size))
     if count > batch_size:
         queue = taskqueue.Queue("default")
         task = taskqueue.Task(url='/tasks/delete/old_logs')
         queue.add(task)
Example #9
 def post(self, *ar, **kw):
     key = self.request.get('account_key')
     batch_size = 10
     query = models.Site.all(keys_only=True).filter('account = ',
                                                    db.Key(key))
     count = query.count()
     site_keys = query.fetch(batch_size)
     for site_key in site_keys:
         models.Site.cascade_delete(site_key)
     db.delete(site_keys)
     if count > batch_size:
         queue = taskqueue.Queue("default")
         task = taskqueue.Task(url='/tasks/delete/sites',
                               params={'account_key': key})
         queue.add(task)
Example #10
 def post(self, *ar, **kw):
     key = self.request.get('keyword_key')
     batch_size = 10
     query = models.KeywordSearchEngine.all(keys_only=True).filter(
         'keyword = ', db.Key(key))
     count = query.count()
     kw_se_keys = query.fetch(batch_size)
     for kw_se_key in kw_se_keys:
         models.KeywordSearchEngine.cascade_delete(kw_se_key)
     db.delete(kw_se_keys)
     if count > batch_size:
         queue = taskqueue.Queue("default")
         task = taskqueue.Task(url='/task/delete/keyword_search_engines',
                               params={'keyword_key': key})
         queue.add(task)
Example #11
	def disconnect(self,StatusCallback = None,StatusCallbackMethod = 'POST'):
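		# Mark the call complete, compute its Duration and a per-second Price
		# by direction, persist it, then enqueue the status callback task.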
		self.Status = 'complete'
		self.EndTime = datetime.datetime.now()
		self.Duration = (self.EndTime - self.StartTime).seconds
		if self.Direction == 'outgoing-api' or self.Direction == 'outbound-dial':
			#should be dependent on country code, but will need more work
			self.Price = self.Duration * (0.02)
		elif self.Direction == 'inbound':
			self.Price = self.Duration * (0.01)
		self.put()
		if StatusCallback is not None:
			try:
				taskqueue.Queue('StatusCallbacks').add(taskqueue.Task(url='/Callbacks/Call', params = {'CallSid':self.Sid,'StatusCallback':StatusCallback,'StatusCallbackMethod':StatusCallbackMethod}))
			except Exception, e:
				pass
Example #12
def do_for_candidacy(candidacy, seat):
    global global_email_delta

    assert seat.frozen_local_issues

    if not candidacy.candidate.validated_email():
        log("Not queueing, invalid email " + str(candidacy.candidate.email) + " for candidacy " + seat.name + ", " + candidacy.candidate.name)
    else:
        global_email_delta += 2 # two seconds between sending each mail, try to keep within GAE limits
        if options.real:
            log(str(global_email_delta) + " queued invite for candidacy " + seat.name + ", " + candidacy.candidate.name + " email: " + candidacy.candidate.email)
            eta = datetime.datetime.utcnow() + datetime.timedelta(seconds=global_email_delta) # AppEngine servers use UTC
            taskqueue.Queue('survey-email').add(taskqueue.Task(url='/task/invite_candidacy_survey/' + str(candidacy.key().name()), eta = eta))
            candidacy.log("Queued task to send survey invite email")
        else:
            log(str(global_email_delta) + " would queue invite for candidacy " + seat.name + ", " + candidacy.candidate.name+ " email: " + candidacy.candidate.email)
Example #13
    def post(self, *ar, **kw):
        if not config.max_inactive_account_days:
            return
        batch_size = 10
        inactivity_delta = datetime.timedelta(
            days=config.max_inactive_account_days)
        date = datetime.datetime.today() - inactivity_delta

        query = models.Account.all(keys_only=True).filter(
            'last_login < ', date)
        count = query.count()
        account_keys = query.fetch(batch_size)
        for account_key in account_keys:
            models.Account.cascade_delete(account_key)
        db.delete(account_keys)
        if count > batch_size:
            queue = taskqueue.Queue("default")
            task = taskqueue.Task(url='/task/delete/inactive_accounts')
            queue.add(task)
Example #14
def add_task(queue_name='default',payload=None,**kwargs):
  # A payload can also be sent
  if kwargs.get('params', None):
    for json_type in ['kwargs', 'entities']:
      if kwargs['params'].get(json_type, None):
        from django.utils import simplejson
        # Serialize structured params ('kwargs', 'entities') to JSON strings
        # before they are URL-encoded into the task.
        kwargs['params'][json_type] = simplejson.dumps(
            kwargs['params'][json_type])

  from google.appengine.api.labs import taskqueue
  queue = taskqueue.Queue(name=queue_name)
  try:
    task = taskqueue.Task(payload=payload, **kwargs)
    logging.info('adding task: %s' % kwargs)
    queue.add(task)
  # TODO: Why don't these exceptions work? 
  except (taskqueue.TaskAlreadyExistsError, taskqueue.TombstonedTaskError):
    logging.warning('unable to create task with name %s' %
                    kwargs.get('name', '(no name provided)'), exc_info=True)
Example #15
    def post(self, page):
        try:

            queue = taskqueue.Queue("import")
            wpfile = page.param('wpfile')
            #global imt
            imt = import_wordpress(wpfile)
            imt.parse()
            #OptionSet.setValue('wpimport_data',imt)

            memcache.set("imt", imt)
            queue.add(taskqueue.Task(url="/admin/wp_import"))
            return self.render_content("wpimport.html", {'postback': True})

        except Exception, e:

            return self.error(
                "Import Error:<p  style='color:red;font-size:11px;font-weight:normal'>%s</p>"
                % e.message)
Example #16
def task_average_response_by_party(request, party_key_name, refined_issue_key_name):
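    # Accumulate a running total/count of survey agreement for this party and
    # issue over successive chunks of candidacies, chaining tasks on the
    # 'average-calc' queue until every candidacy has been processed.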
    party = Party.get_by_key_name(party_key_name)
    refined_issue = RefinedIssue.get_by_key_name(refined_issue_key_name)

    arbp = db.Query(AverageResponseByParty).filter('party =', party).filter('refined_issue =', refined_issue).get()
    if not arbp:
        arbp = AverageResponseByParty(party = party, refined_issue = refined_issue,
                average_agreement = None, processing_running_total = 0, 
                processing_running_count = 0, processing_next_key = None)

    chunk = db.Query(Candidacy).filter('deleted = ', False).filter('survey_filled_in =', True)

    # carry on calculation where we left off
    if arbp.processing_last_candidacy == None:
        assert arbp.processing_running_total == 0
        assert arbp.processing_running_count == 0
    else:
        chunk = chunk.filter('__key__ >', arbp.processing_last_candidacy.key())

    # do 100 candidacies at a time, as too slow otherwise
    candidacies = chunk.fetch(100)

    candidacy = None
    for candidacy in candidacies:
        survey_response = db.Query(SurveyResponse).filter('candidacy =', candidacy).filter('refined_issue =', refined_issue).get()
        if survey_response:
            arbp.processing_running_total += survey_response.agreement
            arbp.processing_running_count += 1
    arbp.processing_last_candidacy = candidacy

    # if we've finished, work out average
    if candidacy == None:
        arbp.average_agreement = float(arbp.processing_running_total) / float(arbp.processing_running_count)

    arbp.put()

    # calculate next chunk
    if candidacy == None:
        return HttpResponse("Calculation complete for " + party.name + " question: " + refined_issue.question)
    else:
        taskqueue.Queue('average-calc').add(taskqueue.Task(url='/task/average_response_by_party/' + party_key_name + "/" + refined_issue_key_name))
        return HttpResponse("Done " + str(arbp.processing_running_count) + ", queued next chunk for " + party.name + " question: " + question)
Example #17
    def buildTask(self, offSet):

        count = self.buildElement(offSet)

        finishedCount = offSet + count
        CacheTempData().save('tmp_sitemap_finished_count', finishedCount)

        pQueue = taskqueue.Queue(name='CreateSitemap')

        if (count == self.PreLoad):
            taskurl = 'http://' + self.request.host_url
            pTask = taskqueue.Task(url='/sitemap.xml/Create',
                                   params=dict(url=taskurl))

            pQueue.add(pTask)
            logging.info('Task queue added!')

        else:
            CacheXMLSiteMap().finish()
            pQueue.purge()

            logging.info('Purged all temp values.')
Example #18
    def get(self):
        totalPart = CacheXMLSiteMap().getCount()

        if (totalPart is None):

            pQueue = taskqueue.Queue(name='CreateSitemap')
            taskurl = 'http://' + self.request.host_url
            pTask = taskqueue.Task(url='/sitemap.xml/Create',
                                   params=dict(url=taskurl))

            pQueue.add(pTask)
            logging.info('Task queue started!')

            xml = ''

        else:
            xml = self.xmlHeader()
            xml += CacheXMLSiteMap().load()
            xml += "</urlset>\n"

        self.response.headers['Content-Type'] = 'application/xml'
        self.response.out.write(xml)
Example #19
import random
from wsgiref.handlers import CGIHandler
from google.appengine.api.labs import taskqueue
from google.appengine.api import memcache
from google.appengine.api.capabilities import CapabilitySet
memcache_service = CapabilitySet('memcache', methods=['set','get'])
hot_handler_queue = taskqueue.Queue(name='hothandler')
HOT_HANDLER_PREFIX = '/_ah/queue/hothandler/'
def wsgi_app(env, res):
    """ visit '/_ah/queue/hothandler/start' as admin to start a task """
    token = env['PATH_INFO'].replace(HOT_HANDLER_PREFIX,'')
    cur_token = memcache.get(HOT_HANDLER_PREFIX)
    if cur_token is None:
        if not memcache_service.is_enabled():
            cur_token = token
    if token in [cur_token, 'start']:
        next_token = str(random.random())
        url = '%s%s'%(HOT_HANDLER_PREFIX, next_token)
        next_task = taskqueue.Task(countdown=10, url=url)
        hot_handler_queue.add(next_task)
        memcache.set(HOT_HANDLER_PREFIX, next_token)
    res('200 OK',[('Content-Type','text/plain')])
    return ['']  # a WSGI app must return an iterable body
Example #20
 def fetch_old(self):
   logging.info("fetch old buzz for %s" % self.username)
   fetch_queue = taskqueue.Queue("fetch-old")
   task = taskqueue.Task(url='/fetchold',params=dict(topic=self.topic)) #self.buzztopic
   fetch_queue.add(task)
Example #21
def startTask(url, queue_name='default', context=None, **kwargs):
  """Adds a new task to one of the queues.
  """

  queue = taskqueue.Queue(name=queue_name)
  return queue.add(taskqueue.Task(url=url, params=context))
Example #22
 def get(self, *ar, **kw):
     default_queue = taskqueue.Queue("default")
     task = taskqueue.Task(url='/tasks/update_ranks',
                           params={'time': time.time()})
     default_queue.add(task)
Example #23
	def post(self):
		if not self.is_login:
			self.redirect(users.create_login_url(self.request.uri))
			return


		try:
				#global imt
				imt=memcache.get("imt")
				#imt=OptionSet.getValue('wpimport_data')
				import_data=imt.pop()
				#if tdata=='men':
				memcache.set('imt',imt)
				#else:
				#	OptionSet.setValue('wpimport_data',imt)
				try:
					cmtimport=memcache.get("cmtimport")
				except:
					cmtimport=False

				if import_data:
					try:
						if import_data[0]=='cat':

							_cat=import_data[1]
							nicename=_cat['slug']
							cat=Category.get_by_key_name(nicename)
							if not cat:
								cat=Category(key_name=nicename)
							cat.name=_cat['name']
							cat.slug=nicename
							cat.put()
						elif import_data[0]=='entry':
							_entry=import_data[1]
							logging.debug('importing:'+_entry['title'])
							hashkey=str(hash(_entry['title']))
							entry=Entry.get_by_key_name(hashkey)
							if not entry:
								entry=Entry(key_name=hashkey)

							entry.title=_entry['title']
							entry.author=self.login_user
							entry.is_wp=True
						   #entry.date=datetime.strptime( _entry['pubDate'],"%a, %d %b %Y %H:%M:%S +0000")
							try:
								entry.date=datetime.strptime( _entry['pubDate'][:-6],"%a, %d %b %Y %H:%M:%S")
							except:
								try:
									entry.date=datetime.strptime( _entry['pubDate'][0:19],"%Y-%m-%d %H:%M:%S")
								except:
									entry.date=datetime.now()
							entry.entrytype=_entry['post_type']
							entry.content=_entry['content']

							entry.excerpt=_entry['excerpt']
							entry.post_id=_entry['post_id']
							entry.slug=urldecode(_entry['post_name'])
							entry.entry_parent=_entry['post_parent']
							entry.menu_order=_entry['menu_order']

							for cat in _entry['categories']:
								c=Category.get_by_key_name(cat['slug'])
								if c:
									entry.categorie_keys.append(c.key())
							entry.settags(','.join(_entry['tags']))
				##				for tag in _entry['tags']:
				##					entry.tags.append(tag)
							if _entry['published']:
								entry.save(True)
							else:
								entry.save()
							if cmtimport:
								for com in _entry['comments']:
										try:
											date=datetime.strptime(com['date'][0:19],"%Y-%m-%d %H:%M:%S")
										except:
											date=datetime.now()
										comment=Comment(author=com['author'],
														content=com['content'],
														entry=entry,
														date=date
														)
										try:
											comment.email=com['email']
											comment.weburl=com['weburl']
										except:
											pass
										try:
											if len(com['ip'])>4:
												comment.ip=com['ip']
										except:
											pass
										comment.store()
					finally:
						queue=taskqueue.Queue("import")
						queue.add(taskqueue.Task( url="/admin/wp_import"))
		except Exception,e :
			logging.info("import error: %s"%e.message)
Example #24
 def get(self, *ar, **kw):
     if not config.max_inactive_account_days:
         return
     default_queue = taskqueue.Queue("default")
     task = taskqueue.Task(url='/tasks/delete/inactive_accounts')
     default_queue.add(task)
Example #25
    def get(self):
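        # Validate the shared key, optionally queue a mail task, fetch the
        # target URL, detect its encoding, extract readable content, and
        # return the result as HTML or as a PDF rendered with pisa.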
        outString=""
        postKey=self.request.get("key")
        postPdf=self.request.get("pdf")
        postClean=self.request.get("clean")
        postFront=self.request.get("front")
        outTitle=''
        if postKey!=userKey:
            outString='It works'
            self.response.out.write(outString)
            return
        else:
            postUrl=self.request.get("url")
            if sendMail==True:
                ddqueue = taskqueue.Queue('default')
                ddqueue.add(taskqueue.Task(url='/mainTask', 
                            params={'key': postKey,'url': postUrl,'pdf': postPdf,'clean': postClean},
                            retry_options=taskqueue.TaskRetryOptions(task_retry_limit=3)))
            if postFront=='0':
                outString='It works'
                self.response.out.write(outString)
                return
            try:
                result= urlfetch.fetch(postUrl)
                self.response.out.write(outString)
            except:
                result=0
            if result!=0:
                if result.status_code== 200:
                    ###this line solve the <!-- tag problem of some webpage(like sina blog) 
                    tmps=result.content.replace('\xe2\x80\x93','--')
                    tdc=chardet.detect(tmps)
                    if tdc.get('confidence')>=0.6:
                        htmlCode=tmps.decode(tdc.get('encoding'),'ignore')
                    else:
                        try:
                            htmlCode=tmps.decode('utf-8')
                        except:
                            try:
                                htmlCode=tmps.decode('gbk')
                            except:
                                try:
                                    htmlCode=tmps.decode('gb2312')
                                except:
                                    htmlCode=tmps.decode(chardet.detect(tmps).get('encoding'),'ignore')

                else:
                    htmlCode=""
                if htmlCode!="":
                    readData = Readability(htmlCode, postUrl)
                    outString=readData.content
                    outTitle=readData.title
                else:
                    outString=''
                    outTitle=''
                if postClean=='0':
                    outString=htmlCode
            else:
                outString=''
                outTitle=''
        if postPdf and postPdf!='0':
            self.response.headers['Content-Type'] = 'application/pdf'
            if postClean!='0':
                outString=u'''<style type="text/css">
                @font-face { 
                font-family: "'''+fontName+'''"; 
                src: url("fonts/'''+fontFile+'''") 
                }
                html { 
                font-family: '''+fontName+'''; 
                } 
</style>'''+u'<html><head><title>'+outTitle+u'</title></head><body>'+u'<h1>'+outTitle+u'</h1>'+outString+u'</body></html>'
            else:
                outString=u'''<style type="text/css">
                @font-face { 
                font-family: "'''+fontName+'''"; 
                src: url("fonts/'''+fontFile+'''") 
                }
                html { 
                font-family: '''+fontName+'''; 
                } 
                </style>'''+outString
            rawData=StringIO(outString.encode('utf-8'))
            output=StringIO()
            pisa.log.setLevel('WARNING') #suppress debug log output
            pdf = pisa.CreatePDF(
            rawData,
            output,
            encoding='utf-8',
            )
            pdfData=pdf.dest.getvalue()        
            self.response.out.write(pdfData)
        else:
            self.response.headers['Content-Type'] = 'text/html'
            if postClean!='0':
                outString=u'<html><head><title>'+outTitle+u'</title></head><body>'+u'<h1>'+outTitle+u'</h1>'+outString+u'</body></html>'
            self.response.out.write(outString)
Example #26
from google.appengine.api.labs import taskqueue
from google.appengine.ext.webapp import template
from google.appengine.ext.webapp.util import run_wsgi_app
import datetime
import feedparser
import fetchpage
import hashlib
import logging
import model
import random
import sys
import time
import urllib
import urlparse
import zrqutil

default_feed_queue = taskqueue.Queue("feed")
add_feed_queue = taskqueue.Queue("add-feed")
HUB_URL = 'http://pubsubhubbub.appspot.com/'
PIPE_OUTPUT_FEED_XML_MEMCACHE_KEY = 'PIPE_OUTPUT_FEED_XML'
INPUT_FEED_XML_MEMCACHE_KEY = 'INPUT_FEED_XML'
PIPE_MEMCACHE_KEY = 'PIPE'


class PublishHandler(BaseRequestHandler):
    def get(self, slug):
        if slug and slug[0] == '/':
            slug = slug[1:]
        is_remove = self.request.get('is_remove')
        if slug:
            if is_remove == '1':
                feed_xml_memcache_key = PIPE_OUTPUT_FEED_XML_MEMCACHE_KEY + '_' + slug
Example #27
from google.appengine.api.labs import taskqueue
from google.appengine.ext import webapp, db
from google.appengine.ext.webapp import template
from google.appengine.ext.webapp.util import run_wsgi_app
from model import FeedEntry
import datetime
import feedparser
import hashlib
import logging
import model
import random
import time
import urllib
import zzzutil

feed_queue = taskqueue.Queue("feed")


class PublishHandler(BaseRequestHandler):
    def get(self, slug):
        if slug and slug[0] == '/':
            slug = slug[1:]
        if slug:
            publish_url = self.request.host_url + "/pipes/" + slug
            post_params = {
                'hub.mode': 'publish',
                'hub.url': publish_url,
            }
            payload = urllib.urlencode(post_params)
            hub_url = 'http://pubsubhubbub.appspot.com/'
            try:
Example #28
 def get(self, *ar, **kw):
     if not config.data_retention_days:
         return
     default_queue = taskqueue.Queue("default")
     task = taskqueue.Task(url='/tasks/delete/old_logs')
     default_queue.add(task)
Example #29
def dispatchBookEvent(book, reader, url, queue_name):

	task = taskqueue.Task(url=url, params={'book' : book.identifier(), 'reader' : reader.identifier()})
	queue = taskqueue.Queue(queue_name)
	
	queue.add(task)
Example #30
def add(url, params, queue='default'):
    q = taskqueue.Queue(queue)
    t = taskqueue.Task(url=url, method='POST', params=params)
    q.add(t)