def defer(obj, *args, **kwargs):
  """Defers a callable for execution later.

  The default deferred URL of /_ah/queue/deferred will be used unless an
  alternate URL is explicitly specified. If you want to use the default URL
  for a queue, specify _url=None. If you specify a different URL, you will
  need to install the handler on that URL (see the module docstring for
  details).

  Args:
    obj: The callable to execute. See module docstring for restrictions.
    _countdown, _eta, _name, _url, _queue: Passed through to the task queue;
      see the task queue documentation for details.
    args: Positional arguments to call the callable with.
    kwargs: Any other keyword arguments are passed through to the callable.
  """
  taskargs = dict((x, kwargs.pop(("_%s" % x), None))
                  for x in ("countdown", "eta", "name"))
  taskargs["url"] = kwargs.pop("_url", _DEFAULT_URL)
  taskargs["headers"] = _TASKQUEUE_HEADERS
  queue = kwargs.pop("_queue", _DEFAULT_QUEUE)
  pickled = serialize(obj, *args, **kwargs)
  try:
    task = taskqueue.Task(payload=pickled, **taskargs)
    task.add(queue)
  except taskqueue.TaskTooLargeError:
    # The payload is too large for a task, so park it in the datastore and
    # enqueue a small task that pulls it back out at execution time.
    key = _DeferredTaskEntity(data=pickled).put()
    pickled = serialize(run_from_datastore, str(key))
    task = taskqueue.Task(payload=pickled, **taskargs)
    task.add(queue)
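
# Hedged usage sketch for defer() above: send_report and the "emails" queue
# are illustrative assumptions, not part of the original module.
def send_report(user_id, verbose=False):
  logging.info('running deferred report for %s (verbose=%s)',
               user_id, verbose)

# _countdown and _queue are consumed by defer() itself; 42 and verbose=True
# are serialized and forwarded to send_report when the task executes.
defer(send_report, 42, verbose=True, _countdown=60, _queue='emails')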
def post(self, *ar, **kw):
  time = self.request.get('time')
  today = datetime.date.today()
  if not time or datetime.date.fromtimestamp(float(time)) < today:
    logging.info('Ignoring deprecated task UpdateRanksHandler:post(time=%s)',
                 time)
    return
  last_key = self.request.get('last_key')
  if not last_key:
    query = db.GqlQuery(
        'SELECT __key__ FROM KeywordSearchEngine ORDER BY __key__')
  else:
    query = db.GqlQuery(
        'SELECT __key__ FROM KeywordSearchEngine '
        'WHERE __key__ > :last_key ORDER BY __key__',
        last_key=db.Key(last_key))
  entities = query.fetch(100)
  if entities:
    default_queue = taskqueue.Queue("default")
    se_calls_queue = taskqueue.Queue("search-engine-calls")
    for key in entities:
      task = taskqueue.Task(url='/tasks/update_keyword_se_rank',
                            params={'key': key})
      se_calls_queue.add(task)
      last_key = key
    task = taskqueue.Task(url='/tasks/update_ranks',
                          params={'time': time, 'last_key': last_key})
    default_queue.add(task)
def join_command(self, msg):
  m = re.match(r'^#(?P<channel>' + Channel.CHANNEL_NAME_REGEX + ')$', msg.arg)
  if not m:
    msg.reply('* Bad /join syntax')
    return
  name = m.group('channel')
  if self.person.channel and (self.person.channel.name == name):
    msg.reply("* You're already in #%s!" % name)
    return
  # Leave the existing channel, and tell them about it.
  if self.person.channel:
    old = self.person.channel
    message = '%s has left %s' % (self.person, old)
    self.Broadcast(old, message, system=True)
    self.Log(old, message, system=True)
    self.person.channel = None
    # Persist the leave now, so the datastore stays consistent with the
    # broadcast even if the join below is refused for being over the limit.
    self.person.put()
    taskqueue.Task(url='/task/update-channel-stats',
                   params={'channel': old.name}).add('stats')
  channel = Channel.ChannelByName(name, create=True)
  if channel.num_members >= self._CHANNEL_SIZE_LIMIT:
    msg.reply('* Sorry, too many people (%d) already in %s'
              % (channel.num_members, channel))
    return
  self.person.channel = channel
  self.person.put()
  msg.reply('* You have joined %s' % channel)
  message = '%s has joined %s' % (self.person, channel)
  self.Broadcast(channel, message, system=True)
  self.Log(channel, message, system=True)
  taskqueue.Task(url='/task/update-channel-stats',
                 params={'channel': channel.name}).add('stats')
def get(self, slug):
  if slug and slug[0] == '/':
    slug = slug[1:]
  logging.debug('the slug is %s', slug)
  if slug == 'allpipes':
    pipes = model.Pipe.gql("WHERE auto_refresh = :1", True).fetch(200)
    logging.debug('fetched %d pipes', len(pipes))
    for pipe in pipes:
      feed_queue.add(
          taskqueue.Task(url="/refreshfeed/" + pipe.pipe_web_address))
    return ['ok']
  if slug:
    pipe = model.Pipe.gql("WHERE pipe_web_address = :1", slug).get()
    if pipe and pipe.auto_refresh:
      feedUrl = pipe.feed_url
      logging.debug('start refresh feed, the feedUrl is %s', feedUrl)
      fetch_count = 0
      resp = None
      while True:
        fetch_count += 1
        if fetch_count > 10:
          break
        try:
          resp = urlfetch.fetch(feedUrl)
          break
        except Exception, e:
          logging.exception(e)
      if resp is None:
        # All fetch attempts failed; give up on this refresh.
        return ['ok']
      feed_xml = resp.content
      logging.debug('we fetched %d times and the feed size is %d',
                    fetch_count, len(feed_xml))
      entries = self.get_entries(feed_xml, pipe)
      logging.debug('we fetched %d entries', len(entries))
      if entries:
        entries_map = {'entries': entries, 'fetch_index': 0, 'put_index': 0}
        random_str = ''.join(
            chr(random.randint(97, 122)) for i in range(16))
        random_key = slug + '_' + random_str
        memcache.set(random_key, entries_map, 120)
        feed_queue.add(
            taskqueue.Task(url='/dealFeedEntry/set',
                           params={'key': random_key,
                                   'pipe_web_address': pipe.pipe_web_address}))
        #db.put(entries)
  return ['ok']
def notify(self, event, emails, phones, week=False):
  count = 0
  util.twit_event(event)
  for email in emails:
    taskqueue.Task(url='/notify',
                   params={'event': event.key(),
                           'email': email.email,
                           'week': week}).add()
    count += 1
  for phone in phones:
    taskqueue.Task(url='/notify',
                   params={'event': event.key(),
                           'phone': phone.phone,
                           'week': week}).add()
    count += 1
  return count
def reschedule(cls, base_path, mapreduce_spec, serial_id, queue_name=None):
  """Schedule new update status callback task.

  Args:
    base_path: mapreduce handlers url base path as string.
    mapreduce_spec: mapreduce specification as MapreduceSpec.
    serial_id: id of the invocation as int.
    queue_name: The queue to schedule this task on. Will use the current
      queue of execution if not supplied.
  """
  task_name = ControllerCallbackHandler.get_task_name(
      mapreduce_spec, serial_id)
  task_params = ControllerCallbackHandler.controller_parameters(
      mapreduce_spec, serial_id)
  if not queue_name:
    queue_name = os.environ.get("HTTP_X_APPENGINE_QUEUENAME", "default")

  controller_callback_task = taskqueue.Task(
      url=base_path + "/controller_callback",
      name=task_name, params=task_params,
      countdown=_CONTROLLER_PERIOD_SEC)

  if not _run_task_hook(mapreduce_spec.get_hooks(),
                        "enqueue_controller_task",
                        controller_callback_task,
                        queue_name):
    try:
      controller_callback_task.add(queue_name)
    except (taskqueue.TombstonedTaskError,
            taskqueue.TaskAlreadyExistsError), e:
      logging.warning("Task %r with params %r already exists. %s: %s",
                      task_name, task_params, e.__class__, e)
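
# Hedged sketch of how the controller callback above might be re-armed each
# cycle, assuming reschedule is a classmethod; the incrementing serial_id is
# an assumption based on the signature, giving the deduplicating task name a
# fresh value each period.
ControllerCallbackHandler.reschedule('/mapreduce', mapreduce_spec,
                                     serial_id + 1)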
def ScheduleCategoryUpdate(result_parent_key):
  """Add a task to update a category's statistics.

  The task is handled by base.admin.UpdateCategory which then calls
  UpdateCategory below.
  """
  # Give the task a name to ensure only one task for each ResultParent.
  result_parent = ResultParent.get(result_parent_key)
  category = result_parent.category
  name = 'categoryupdate-%s' % str(result_parent_key).replace('_', '-under-')
  url = '/_ah/queue/update-category/%s/%s' % (category, result_parent_key)
  task = taskqueue.Task(url=url, name=name, params={
      'category': category,
      'user_agent_key': result_parent.user_agent.key(),
      })
  attempt = 0
  while attempt < 3:
    try:
      task.add(queue_name='update-category')
      break
    except:
      attempt += 1
      logging.info('Cannot add task (attempt %s): %s:%s'
                   % (attempt, sys.exc_type, sys.exc_value))
def UpdateChannelStats(self):
  """Recompute num_members for a channel."""
  channel = Channel.ChannelByName(self.request.get('channel'), create=False)
  if not channel:
    return  # channel became empty?
  num_members = int(self.request.get('num_members', '0'))
  q = Person.all(keys_only=True).filter('channel =', channel).order('__key__')
  start_at = self.request.get('start_at')
  if start_at:
    q.filter('__key__ >', db.Key(start_at))
  people = q.fetch(self._STATS_BATCH)
  if people:
    # More to go.
    num_members += len(people)
    params = {
        'channel': channel.name,
        'num_members': num_members,
        'start_at': str(people[-1]),
    }
    taskqueue.Task(url='/task/update-channel-stats',
                   params=params).add('stats')
    return
  # Finished.
  channel.num_members = num_members
  channel.put()
  logging.debug('%s now has %d members.' % (channel, num_members))
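
# Hedged sketch of kicking off the chained count above: the first hop sends
# only the channel name ('lobby' is a made-up example), so num_members and
# start_at take their defaults and counting starts at the top of the key
# range.
taskqueue.Task(url='/task/update-channel-stats',
               params={'channel': 'lobby'}).add('stats')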
def Rss(self):
  message = 'rss summary'
  params = {
      'channel': 'rss',
      'message': message,
  }
  taskqueue.Task(url='/task/broadcast', params=params).add('chats')
def Dummy(self):
  params = {
      'channel': 'rss',
      'message': 'Hi!',
  }
  taskqueue.Task(url='/task/broadcast', params=params).add('chats')
  print 'hi!'
def post(self, API_VERSION, ACCOUNT_SID, *args):
  format = response.response_format(self.request.path.split('/')[-1])
  if parameters.required(['From', 'To', 'Body'], self.request):
    Message = messages.Message.new(
        To=self.request.get('To'),
        From=self.request.get('From'),
        Body=self.request.get('Body'),
        AccountSid=ACCOUNT_SID,
        Direction='outbound-api',
        Status='queued')
    if self.request.get('StatusCallback', None) is not None:
      Message.StatusCallback = self.request.get('StatusCallback')
    response_data = Message.get_dict()
    self.response.out.write(response.format_response(
        response.add_nodes(self, response_data, format), format))
    Message.put()
    # Do some things depending on account settings. The default is to send
    # the message, charge for it, and update the status when sent.
    Message.send()
    # Make sure the put happens before the callback fires.
    if Message.StatusCallback is not None:
      taskqueue.Queue('StatusCallbacks').add(
          taskqueue.Task(url='/Callbacks/SMS',
                         params={'SmsSid': Message.Sid}))
  else:
    # This should specify a Twilio error code, either 21603 or 21604.
    self.response.out.write(response.format_response(
        errors.rest_error_response(400, "Missing Parameters", format),
        format))
def post(self):
  entities = self._GetQuery().fetch(50)
  if not entities:
    return
  db.delete(entities)
  task = taskqueue.Task(url=self._GetTaskUrl())
  task.add(queue_name="cleanup")
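
# Hedged sketch of a concrete subclass of the self-rescheduling cleanup
# handler above. The CleanupHandler base class name, the ExpiredSession
# model, and the task URL are all illustrative assumptions.
class CleanupExpiredSessionsHandler(CleanupHandler):

  def _GetQuery(self):
    # keys_only keeps each 50-item batch cheap; db.delete accepts keys as
    # well as entities.
    return ExpiredSession.all(keys_only=True)

  def _GetTaskUrl(self):
    # The handler re-enqueues itself at this URL until the query is drained.
    return '/tasks/cleanup_expired_sessions'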
def get(self):
  totalPart = memcache.get('sitemap.xml_total_part')
  if totalPart is None:
    pQueue = taskqueue.Queue(name='CreateSitemap')
    # host_url already includes the scheme (e.g. "http://example.com"), so
    # it is used as-is rather than prefixed with another "http://".
    taskurl = self.request.host_url
    pTask = taskqueue.Task(url='/sitemap.xml/Create', params=dict(url=taskurl))
    pQueue.add(pTask)
    logging.info('Task queue started!')
    xml = ''
  else:
    partNo = 1
    xml = self.xmlHeader()
    while partNo <= totalPart:
      key = 'sitemap.xml_part' + str(partNo)
      partNo += 1
      partBody = memcache.get(key)
      if partBody is None:
        partBody = ''
      xml += partBody
    xml += "</urlset>\n"
  self.response.headers['Content-Type'] = 'application/xml'
  self.response.out.write(xml)
def get(self):  # pylint: disable-msg=C6409
  """Handle initial request and spawn a fetch worker."""
  now = datetime.datetime.now(pytz.utc)
  use_force = bool(self.request.get('use_force', False))
  is_cron = self.request.headers.get('X-Appengine-Cron') == 'true'
  logging.info('Headers %s', self.request.headers)
  user = users.get_current_user()
  try:
    offset = int(self.request.get('offset', default_value=0))
  except ValueError:
    offset = 0
  params = {
      'offset': offset,
      'utc_reset_day': now.weekday(),
      'utc_reset_hour': now.hour,
  }
  if use_force:
    params['use_force'] = '1'
  if is_cron:
    params['is_cron'] = '1'
  if user:
    params['email'] = user.email()
  logging.info('Starting fetch chain with params %s', params)
  fetch_task = taskqueue.Task(url='/report/fetch', params=params)
  fetch_task.add(queue_name='snippet-fetch-queue')
  if not is_cron:
    return self.redirect('/?msg=Snippets+sent.')
def UpdateStatsCache(request):
  """Load rankers into memcache."""
  category = request.REQUEST.get('category')
  browsers_str = request.REQUEST.get('browsers')
  is_uncached_update = request.REQUEST.get('is_uncached_update')
  if not category:
    logging.info('UpdateStatsCache: Must set category')
    return http.HttpResponseServerError('Must set "category".')
  logging.info('UpdateStatsCache: category=%s, browsers=%s',
               category, browsers_str)
  if not browsers_str:
    logging.info('UpdateStatsCache: Must set "browsers".')
    return http.HttpResponseServerError('Must set "browsers".')
  browsers = browsers_str.split(',')
  if is_uncached_update:
    num_checked_browsers = len(browsers)
    browsers = result_stats.CategoryStatsManager.FindUncachedStats(
        category, browsers)
    logging.debug("Uncached '%s' stats (count: %s out of %s): %s",
                  category, len(browsers), num_checked_browsers, browsers)
  # Only process one browser in each task.
  if len(browsers) > 1:
    attempt = 0
    while attempt < 3:
      try:
        taskqueue.Task(params={
            'category': category,
            'browsers': ','.join(browsers[1:]),
            }).add(queue_name='update-stats-cache')
        break
      except:
        attempt += 1
  result_stats.CategoryStatsManager.UpdateStatsCache(category, browsers[:1])
  return http.HttpResponse('Success.')
def saveBookmark(self):
  bm = ''.join(["saveBookmarks_", str(time.time())])
  memcache.add(bm, self.tempList, 120)
  self.tempList = []
  queue = taskqueue.Queue("bookmark")
  queue.add(taskqueue.Task(url="/bookmark/q/put", params={'key': bm}))
def post(self, slug):
  random_key = self.request.get('key')
  pipe_web_address = self.request.get('pipe_web_address')
  entries_map = memcache.get(random_key)
  if not entries_map:
    return
  entries = entries_map['entries']
  fetch_index = entries_map['fetch_index']
  put_index = entries_map['put_index']
  while fetch_index < len(entries):
    try:
      entries[fetch_index].content = zzzutil.fetch_page(
          entries[fetch_index].link,
          '<!content_tag txt>', '<!/content_tag txt>')
      fetch_index += 1
    except runtime.DeadlineExceededError, e:
      # Out of request time: checkpoint progress to memcache and hand the
      # remaining entries to a fresh task.
      logging.exception(e)
      logging.debug('we have fetched %d contents', fetch_index)
      entries_map['fetch_index'] = fetch_index
      entries_map['entries'] = entries
      memcache.set(random_key, entries_map, 120)
      feed_queue.add(
          taskqueue.Task(url="/dealFeedEntry/set/",
                         params={'key': random_key}))
      return
    except Exception, e:
      # Assumed recovery for a failed content fetch: log it and skip the
      # entry so the loop cannot spin on it forever.
      logging.exception(e)
      fetch_index += 1
def schedule_mapreduce(state, mapper_input_readers, eta, countdown,
                       base_path, queue_name):
  # base_path and queue_name are taken as parameters here; in the original
  # nested definition they are captured from the enclosing scope.
  state.put()
  readers_json = [reader.to_json_str() for reader in mapper_input_readers]
  taskqueue.Task(
      url=base_path + "/kickoffjob_callback",
      params={"mapreduce_spec": state.mapreduce_spec.to_json_str(),
              "input_readers": simplejson.dumps(readers_json)},
      eta=eta,
      countdown=countdown).add(queue_name, transactional=True)
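
# Hedged usage sketch: because the kickoff task is added with
# transactional=True, schedule_mapreduce must run inside a datastore
# transaction so the task is enqueued only if state.put() commits. The
# argument values shown are assumptions.
db.run_in_transaction(schedule_mapreduce, state, mapper_input_readers,
                      None, 0, '/mapreduce', 'default')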
def post(self, kml_id):
  importer = taskqueue.Task(url='/tasks/import_kml_layer',
                            params={'kml_id': kml_id,
                                    'layer': self.request.form['layer']})
  importer.add()
  return "That's it! It may take a few minutes for mills to be updated."
def UpdateAllStatsCache(request, batch_size=UPDATE_ALL_BATCH_SIZE,
                        is_uncached_update=False):
  categories_str = request.REQUEST.get('categories')
  if categories_str:
    categories = categories_str.split(',')
  else:
    categories = [s.category for s in all_test_sets.GetVisibleTestSets()]
  if not categories:
    return http.HttpResponseServerError('No categories given.')
  elif len(categories) > 1:
    # Fan out: queue one task per category; each task re-enters this handler
    # with a single category.
    for category in categories:
      attempt = 0
      while attempt < 3:
        try:
          task = taskqueue.Task(url=request.path,
                                params={'categories': category})
          task.add(queue_name='update-stats-cache')
          break
        except:
          attempt += 1
    return http.HttpResponse('Queued stats cache update for categories: %s'
                             % categories)
  category = categories[0]
  test_set = all_test_sets.GetTestSet(category)
  browsers = result_stats.CategoryBrowserManager.GetAllBrowsers(category)
  logging.info('Update all stats cache: %s', category)
  for i in range(0, len(browsers), batch_size):
    params = {
        'category': category,
        'browsers': ','.join(browsers[i:i + batch_size]),
    }
    if is_uncached_update:
      params['is_uncached_update'] = 1
    attempt = 0
    while attempt < 3:
      try:
        taskqueue.Task(params=params).add(queue_name='update-stats-cache')
        break
      except:
        attempt += 1
    logging.info('Added task for browsers %s to %s.', i, i + batch_size)
  return http.HttpResponse('Done creating update tasks.')
def runSchoolTypeUpdate(request, *args, **kwargs):
  """Appengine Task that adds school_type as University for existing
  Student entities in batches.

  Addition of the required school_type property to the Student model requires
  adding a corresponding value to all existing Student entities in the
  datastore. Since this property was introduced during GSoC 2009, all
  students should be University students, so this task sets school_type to
  "University" on every existing entity.

  Args:
    request: Django Request object
  """

  from soc.logic.models.student import logic as student_logic

  fields = {}

  post_dict = request.POST

  start_key = post_dict.get('start_key')

  if start_key:
    # retrieve the last student entity that was converted
    start = student_logic.getFromKeyName(start_key)

    if not start:
      # invalid starting student key specified, log and return OK
      return error_handler.logErrorAndReturnOK(
          'Invalid Student Key specified: %s' % (start_key))

    fields['__key__ >'] = start.key()

  # get the first batch_size number of Student entities
  entities = student_logic.getForFields(fields, limit=DEF_BATCH_SIZE)

  for entity in entities:
    entity.school_type = 'University'

  db.put(entities)

  if len(entities) == DEF_BATCH_SIZE:
    # spawn a new task starting from the last converted Student
    new_start = entities[DEF_BATCH_SIZE - 1].key().id_or_name()

    # pass along these params as POST to the new task
    task_params = {'start_key': new_start}
    new_task = taskqueue.Task(params=task_params,
                              url=request.META['PATH_INFO'])
    new_task.add()

  # task completed, return OK
  return HttpResponse('OK')
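
# Hedged kickoff sketch for the conversion chain above. The mount point
# /tasks/update_school_type is a made-up URL; with no start_key in the POST
# body, the first batch starts at the beginning of the Student key range and
# later batches chain themselves.
taskqueue.Task(url='/tasks/update_school_type').add()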
def post(self, *ar, **kw):
  key = self.request.get('keyword_se_key')
  batch_size = 10
  query = models.KeywordRankLog.all(keys_only=True).filter(
      'keyword_se = ', db.Key(key))
  count = query.count()
  db.delete(query.fetch(batch_size))
  if count > batch_size:
    queue = taskqueue.Queue("default")
    task = taskqueue.Task(url='/tasks/delete/keyword_rank_logs',
                          params={'keyword_se_key': key})
    queue.add(task)
def put_state(state, spec):
  # spec is taken as a parameter here; in the original nested definition it
  # is captured from the enclosing scope.
  state.put()
  done_callback = spec.params.get(
      model.MapreduceSpec.PARAM_DONE_CALLBACK)
  if done_callback:
    taskqueue.Task(
        url=done_callback,
        headers={"Mapreduce-Id": spec.mapreduce_id}).add(
            spec.params.get(
                model.MapreduceSpec.PARAM_DONE_CALLBACK_QUEUE,
                "default"),
            transactional=True)
def post(self):
  memcache.flush_all()
  logging.info('Memcache has been flushed!')
  db.delete(DBCache.all())
  if DBCache.all().count() > 1:
    pQueue = taskqueue.Queue(name='DeleteDBCache')
    # host_url already includes the scheme, so no extra "http://" prefix.
    taskurl = self.request.host_url
    pTask = taskqueue.Task(url='/cacheflush', params=dict(url=taskurl))
    pQueue.add(pTask)
  else:
    logging.info('DBCache has been flushed!')
def spawnUpdateTask(entity):
  """Spawns a task to update the state of the task.
  """
  update_params = {
      'ghop_task_key': entity.key().name(),
  }
  update_url = '/tasks/ghop/task/update'

  new_task = taskqueue.Task(eta=entity.deadline,
                            params=update_params,
                            url=update_url)
  new_task.add('ghop-update')
def _editRecordPost(self, request, params, context, template, record_entity):
  """Handles the POST request for editing a GradingRecord.

  Args:
    request: a Django Request object
    params: the params for this view
    context: the context for the webpage
    template: the location of the template used for this view
    record_entity: a GradingRecord entity
  """
  from google.appengine.api.labs import taskqueue

  from soc.modules.gsoc.logic.models.student_project import logic as \
      student_project_logic

  survey_logic = params['logic']
  record_logic = survey_logic.getRecordLogic()

  post_dict = request.POST

  form = params['record_edit_form'](post_dict)

  if not form.is_valid():
    return self._constructResponse(request, record_entity, context,
                                   form, params)

  _, fields = forms_helper.collectCleanedFields(form)

  record_entity = record_logic.updateEntityProperties(record_entity, fields)

  if 'save_update' in post_dict:
    # also update the accompanying StudentProject
    student_project_logic.updateProjectsForGradingRecords([record_entity])
  elif 'save_update_mail' in post_dict:
    # update the StudentProject and send an email about the result
    student_project_logic.updateProjectsForGradingRecords([record_entity])

    # pass along these params as POST to the new task
    task_params = {'record_key': record_entity.key().id_or_name()}
    task_url = '/tasks/grading_survey_group/mail_result'

    mail_task = taskqueue.Task(params=task_params, url=task_url)
    mail_task.add('mail')

  # redirect to the same page
  redirect = request.META['HTTP_REFERER']
  return http.HttpResponseRedirect(redirect)
def leave_command(self, msg):
  if not self.person.channel:
    msg.reply("* Hey, you aren't in a channel!")
  else:
    message = '%s has left %s' % (self.person, self.person.channel)
    self.Broadcast(self.person.channel, message, system=True)
    self.Log(self.person.channel, message, system=True)
    name = self.person.channel.name
    self.person.channel = None
    self.person.put()
    msg.reply('* You have left #%s' % name)
    taskqueue.Task(url='/task/update-channel-stats',
                   params={'channel': name}).add('stats')
def post(self, *ar, **kw):
  if not config.data_retention_days:
    return
  batch_size = 10
  retention_delta = datetime.timedelta(days=config.data_retention_days)
  date = datetime.datetime.today() - retention_delta
  query = models.KeywordRankLog.all(keys_only=True).filter('date < ', date)
  count = query.count()
  db.delete(query.fetch(batch_size))
  if count > batch_size:
    queue = taskqueue.Queue("default")
    task = taskqueue.Task(url='/tasks/delete/old_logs')
    queue.add(task)
def _onCreate(self, entity):
  """Updates all the tasks the student has claimed or that are awaiting
  registration.
  """
  task_params = {
      'student_key': entity.key().id_or_name(),
  }
  task_url = '/tasks/ghop/task/update/student_status'

  new_task = taskqueue.Task(params=task_params, url=task_url)
  new_task.add('ghop-update')

  super(Logic, self)._onCreate(entity)
def startUpdateWithUrl(request, task_url):
  """Spawns an update task for the given task URL.

  Args:
    request: Django Request object
    task_url: The URL used to run this update task

  Returns:
    True iff the new task is successfully added to the Task Queue API
  """
  new_task = taskqueue.Task(url=task_url)
  new_task.add()

  return True