def store_all_schedules(league=None, division=None, gender=None, age=None):
    q = opl_db.Division.all()
    if league:
        q.filter("league =", league)
    if division:
        q.filter("division =", division)
    if gender:
        q.filter("gender =", gender)
    if age:
        q.filter("age =", age)
    for r in q.run():
        logging.debug('R: %s %s', r.league, r.gender)
        if len(r.sched_urls) > 0:
            for u in r.sched_urls:
                parms = {'u': u, 'l': r.league, 'g': r.gender,
                         'd': r.division, 'a': r.age}
                t = Task(method='GET',
                         url='/store-schedule?' + urllib.urlencode(parms))
                t.add()
                # fetch_schedule_results(u, r.league, r.division, r.gender, r.age)
        else:
            # fetch_schedule_results(r.url, r.league, r.division, r.gender, r.age)
            parms = {'u': r.url, 'l': r.league, 'g': r.gender,
                     'd': r.division, 'a': r.age}
            t = Task(method='GET',
                     url='/store-schedule?' + urllib.urlencode(parms))
            t.add()
    return 'done'
def _EnqueueUserRetrievalTasks(self, message_criteria, owner_email):
    """Efficiently add tasks to enumerate domain users as a list (bulk add).

    Bulk add() saves roundtrips (rpc calls).

    Args:
      message_criteria: String criteria (message-id) to recall.
      owner_email: String email address of user running this recall.
    """
    user_retrieval_tasks = []
    # Use countdown to space out these requests a little.
    # There is a 15 request/s quota on the Admin SDK API.
    # Integer division means each group of 15 tasks shares the same
    # one-second countdown step.
    limit_requests_s = 15
    for prefix_counter, email_prefix in enumerate(PartitionEmailPrefixes()):
        user_retrieval_tasks.append(
            Task(countdown=(prefix_counter / limit_requests_s),
                 name='%s_%s_%s' % (
                     view_utils.CreateSafeUserEmailForTaskName(owner_email),
                     email_prefix,
                     view_utils.GetCurrentDateTimeForTaskName()),
                 params={'email_prefix': email_prefix,
                         'message_criteria': message_criteria,
                         'owner_email': owner_email,
                         'task_key_id': self._task_key_id},
                 target='recall-backend',
                 url='/backend/retrieve_domain_users'))
    self._AddUserRetrievalTask(task=user_retrieval_tasks)
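# A minimal sketch of the bulk add that _AddUserRetrievalTask presumably wraps;
# the helper's body and queue name are not shown above, so both are assumptions.
# Passing the whole list to a single Queue.add() call issues one RPC instead of
# one RPC per task, which is the roundtrip saving the docstring describes.
from google.appengine.api.taskqueue import Queue

def _add_user_retrieval_tasks_sketch(tasks):
    # Queue.add() accepts a single Task or an iterable of up to 100 Tasks.
    Queue('retrieve-users-queue').add(tasks)  # hypothetical queue name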
def reset_endpoints():
    """Handler which creates reset endpoint tasks for selected endpoints/orgs."""
    endpoint_indexes = request.form.getlist('endpoint_index')
    org_uid = request.form.get('org_uid')

    if not endpoint_indexes:
        flash("At least one endpoint is required")
        return render_template('select_endpoints.html',
                               endpoints=ENDPOINTS,
                               org_uid=org_uid), 200

    if org_uid:
        taskqueue.add(target='admin',
                      url='/admin/reset_endpoints_task/{}'.format(org_uid),
                      params={'endpoint_index': endpoint_indexes})
        flash("Kicked off reset of {} endpoints for {}".format(
            len(endpoint_indexes), org_uid))
        return redirect(prefix('/'))
    else:
        count = query_to_tasks(
            query=Org.query(Org.status == CONNECTED),
            queue=Queue('admin'),
            task_generator=lambda key: Task(
                url='/admin/reset_endpoints_task/{}'.format(key.string_id()),
                params={'endpoint_index': endpoint_indexes}))
        flash("Kicked off reset of {} endpoints for {} orgs".format(
            len(endpoint_indexes), count))
        return redirect(prefix('/commands'))
def update_all_feeds(interval_id):
    """Update all feeds for a specific interval"""
    if request.headers.get('X-Appengine-Cron') != 'true':
        raise ndb.Return(jsonify_error(message='Not a cron call'))

    for feed_type, feed_class in FEED_TYPE_TO_CLASS.iteritems():
        feeds = Feed.for_interval(interval_id)
        success = 0
        more = True
        cursor = None
        futures = []
        while more:
            feeds_to_fetch, cursor, more = yield feeds.fetch_page_async(
                BATCH_SIZE, start_cursor=cursor)
            # Only poll feeds that live in the default polling bucket.
            feeds_to_fetch = filter(
                lambda x: getattr(x, 'external_polling_bucket',
                                  DEFAULT_POLLING_BUCKET) == DEFAULT_POLLING_BUCKET,
                feeds_to_fetch)
            keys = ','.join([x.key.urlsafe() for x in feeds_to_fetch])
            if not keys:
                continue

            futures.append(Queue('poll').add_async(
                Task(url=url_for('tq_feed_poll-canonical'),
                     method='POST',
                     params={'keys': keys})))
            success += len(feeds_to_fetch)

    for future in futures:
        yield future

    logger.info('queued poll for %d feeds at interval_id=%s', success, interval_id)
    raise ndb.Return(jsonify(status='ok'))
def post_all_feeds():
    """Post all new items for feeds for a specific interval"""
    if request.headers.get('X-Appengine-Cron') != 'true':
        raise ndb.Return(jsonify_error(message='Not a cron call'))

    logger.info('Starting a post job')
    futures = []
    for feed_type, feed_class in FEED_TYPE_TO_CLASS.iteritems():
        feeds = feed_class.query(feed_class.is_dirty == True)
        logger.info("Got some feeds_count: %s feeds_type: %s",
                    feeds.count(), feed_type)
        success = 0
        more = True
        cursor = None
        while more:
            feeds_to_fetch, cursor, more = yield feeds.fetch_page_async(
                BATCH_SIZE, start_cursor=cursor)
            keys = ','.join([x.key.urlsafe() for x in feeds_to_fetch])
            if not keys:
                continue

            futures.append(Queue().add_async(
                Task(url=url_for('tq_feed_post-canonical'),
                     method='POST',
                     params={'keys': keys})))
            success += len(feeds_to_fetch)

        logger.info('queued post for %d feeds feed_type:%s', success, feed_type)

    for future in futures:
        yield future

    logger.info('Finished Post Job')
    yield write_epoch_to_stat(Stat, 'post_job')
    raise ndb.Return(jsonify(status='ok'))
def instagram_push_update():
    data = request.stream.read()
    instagram_client_secret = Configuration.value_for_name('instagram_client_secret')

    server_signature = request.headers.get('X-Hub-Signature', None)
    signature = hmac.new(str(instagram_client_secret), data,
                         digestmod=hashlib.sha1).hexdigest()
    if server_signature != signature:
        logger.warn('Got PuSH subscribe POST from instagram w/o valid signature: '
                    'sent=%s != expected=%s', server_signature, signature)
        raise ndb.Return('')

    logger.info('Got PuSH body: %s', data)
    logger.info('Got PuSH headers: %s', request.headers)

    parsed_feed = json.loads(data)
    user_ids = [int(x.get('object_id')) for x in parsed_feed]
    feeds = InstagramFeed.query(InstagramFeed.user_id.IN(user_ids))

    cursor = None
    more = True
    keys = []
    while more:
        feed_keys, cursor, more = feeds.fetch_page(BATCH_SIZE, keys_only=True,
                                                   start_cursor=cursor)
        keys += feed_keys

    keys = ','.join([x.urlsafe() for x in keys])
    if keys:
        yield Queue('poll').add_async(
            Task(url=url_for('tq_feed_poll-canonical'), method='POST',
                 params={'keys': keys}))
    raise ndb.Return('ok')
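# A hedged side note on the signature check above: comparing HMAC digests with
# != can leak timing information. On Python 2.7.7+, hmac.compare_digest() does
# a constant-time comparison; a drop-in variant of the check might look like:
import hashlib
import hmac

def push_signature_valid(client_secret, body, server_signature):
    expected = hmac.new(str(client_secret), body,
                        digestmod=hashlib.sha1).hexdigest()
    # compare_digest avoids revealing how many leading characters matched.
    return hmac.compare_digest(str(server_signature or ''), expected)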
def _EnqueueMasterRecallTask(self, owner_email, message_criteria, task_key_id):
    """Add master recall task with error handling.

    Args:
      owner_email: String email address of user running this recall.
      message_criteria: String criteria (message-id) to recall.
      task_key_id: Int unique id of the parent task.

    Raises:
      re-raises any task queue errors.
    """
    task_name = '%s_%s' % (
        view_utils.CreateSafeUserEmailForTaskName(owner_email),
        view_utils.GetCurrentDateTimeForTaskName())
    master_task = Task(name=task_name,
                       params={'owner_email': owner_email,
                               'task_key_id': task_key_id,
                               'message_criteria': message_criteria},
                       target='0.recall-backend',
                       url='/backend/recall_messages')
    try:
        master_task.add(queue_name='recall-messages-queue')
    except TaskQueueError:
        view_utils.FailRecallTask(
            task_key_id=task_key_id,
            reason_string='Failed to enqueue master task.')
        raise
def _EnqueueUserRecallTasks(self, message_criteria, owner_email):
    """Efficiently add tasks for each user to recall messages (bulk add).

    Bulk add() saves roundtrips (rpc calls).

    Args:
      message_criteria: String criteria (message-id) to recall.
      owner_email: String email address of user running this recall.
    """
    if recall_task.RecallTaskModel.IsTaskAborted(self._task_key_id):
        return
    cursor = None
    while True:
        user_recall_tasks = []
        results, cursor, unused_more = (
            domain_user.DomainUserToCheckModel.FetchOnePageOfActiveUsersForTask(
                task_key_id=self._task_key_id, cursor=cursor))
        for user in results:
            user_recall_tasks.append(Task(
                name='%s_%s_%s' % (
                    view_utils.CreateSafeUserEmailForTaskName(owner_email),
                    view_utils.CreateSafeUserEmailForTaskName(user.user_email),
                    view_utils.GetCurrentDateTimeForTaskName()),
                params={'message_criteria': message_criteria,
                        'task_key_id': self._task_key_id,
                        'user_email': user.user_email,
                        'user_key_id': user.key.id()},
                target='recall-backend',
                url='/backend/recall_user_messages'))
        if not user_recall_tasks:
            break
        self._AddUserRecallTasks(user_recall_tasks=user_recall_tasks)
def store_all_division_standings(league, division, gender, age, agegroup):
    q = opl_db.Division.all()
    if league:
        q.filter("league =", league)
    if division:
        q.filter("division =", division)
    if gender:
        q.filter("gender =", gender)
    if age:
        q.filter("age =", age)
    if agegroup:
        q.filter("agegroup =", agegroup)
    for r in q.run():
        parms = {'l': r.league, 'u': r.url, 'a': r.age,
                 'ag': r.agegroup, 'g': r.gender, 'd': r.division}
        url = '/store-division-standings?' + urllib.urlencode(parms)
        logging.debug('URL:' + url)
        t = Task(payload=None, method='GET', url=url)
        t.add()
    return 'Done'
def MakeFollows(self):
    """
    # ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
    ADD FOLLOWS FOR ADMIN USERS
    # ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
    """
    nextURL = None
    firstURL = self.request.get('nexturl')
    query = PointRoot.query().order(PointRoot.url)
    if firstURL:
        query = query.filter(PointRoot.url >= firstURL)
    # Fetch one extra row to detect whether another batch remains.
    pointRoots = query.fetch(11)
    if len(pointRoots) == 11:
        nextURL = pointRoots[-1].url
        pointRootsToReview = pointRoots[:10]
    else:
        pointRootsToReview = pointRoots

    i = 0
    for pointRoot in pointRootsToReview:
        pointRootKey = pointRoot.key
        followers = {}
        versions = pointRoot.getAllVersions()
        for point in versions:
            if point.version == 1:
                followers[point.authorURL] = 'created'
            elif point.authorURL not in followers:
                followers[point.authorURL] = 'edited'
        for comment in pointRoot.getComments():
            if comment.userUrl not in followers:
                followers[comment.userUrl] = 'commented'
        logging.info('ROOT: %s FOLLOWERS: %s' % (pointRoot.url, str(followers)))

        for url, followType in followers.iteritems():
            previousNamespace = namespace_manager.get_namespace()
            if previousNamespace and previousNamespace != '':
                namespace_manager.set_namespace('')  # DEFAULT NAMESPACE
                usr = WhysaurusUser.getByUrl(url)
                namespace_manager.set_namespace(previousNamespace)
            else:
                usr = WhysaurusUser.getByUrl(url)
            logging.info('Trying to follow for U:%s, R:%s, T:%s' %
                         (url, pointRoot.url, followType))
            f = Follow.createFollow(usr.key, pointRootKey, followType)
            if f:
                i += 1
                logging.info('ADDED follow for U:%s, R:%s, T:%s' %
                             (url, pointRoot.url, followType))

    logging.info('Added %d follows' % i)
    if nextURL:
        t = Task(url="/MakeFollows", params={'nexturl': nextURL})
        t.add(queue_name="notifications")
        logging.info('Requeuing MakeFollows task to start at url %s' % nextURL)
def schedule_task_for_each_table(self, project_id, dataset_id, page_token=None):
    list_response = self.big_query.list_tables(project_id, dataset_id,
                                               page_token=page_token)
    if 'tables' in list_response:
        table_id_list = [table['tableReference']['tableId']
                         for table in list_response['tables']]
        tasks = self.create_table_tasks(project_id, dataset_id, table_id_list)
        Tasks.schedule(queue_name='bigquery-tables', tasks=tasks)
    else:
        logging.info("Dataset %s.%s is empty", project_id, dataset_id)
        return

    if 'nextPageToken' in list_response:
        url = '/bigQuery/project/%s/dataset/%s?pageToken=%s' % (
            project_id, dataset_id, list_response['nextPageToken'])
        task_name = '%s-%s-%s-%s' % (project_id, dataset_id,
                                     list_response['nextPageToken'],
                                     datetime.utcnow().strftime("%Y%m%d"))
        next_task = Task(method='GET', url=url, name=task_name)
        Tasks.schedule(queue_name='bigquery-list', tasks=[next_task])
    else:
        logging.info("There are no more tables in this dataset")
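# A minimal sketch of the continuation handler the next_task above targets; the
# handler class, route wiring, and scheduler lookup are assumptions, since only
# the scheduling method is shown. The queued GET task re-enters
# schedule_task_for_each_table with the pageToken it carried.
import webapp2

class DatasetTablesHandlerSketch(webapp2.RequestHandler):  # hypothetical
    def get(self, project_id, dataset_id):
        scheduler = self.app.config['table_scheduler']  # hypothetical wiring
        scheduler.schedule_task_for_each_table(
            project_id, dataset_id,
            page_token=self.request.get('pageToken') or None)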
def create(cls, **kwargs):
    '''
    Creates an Interest based on the arguments passed in kwargs.
    Once an interest is created, it is added to the matchmaker queue.
    :param cls:
    :param kwargs:
    :return:
    '''
    eventThrottler = EventThrottler(kwargs['username'])
    # Set the # of events to 3 instead of 5, and at join_flex_interest
    # (for consistency). Throttler at 10 minutes.
    if eventThrottler.number_of_cached_events() > 3:
        return False, "You have too many concurrent events.", None

    event = Event(category=kwargs['category'],
                  date_entered=datetime.utcnow(),
                  type=Event.EVENT_TYPE_FLEX_INTEREST,
                  username=kwargs['username'],
                  building_name=kwargs['building_name'])
    if 'expiration' in kwargs and kwargs['expiration'] != "":
        event.expiration = kwargs['expiration']
    if 'start_time' in kwargs and kwargs['start_time'] != "":
        event.start_time = kwargs['start_time']
    if 'duration' in kwargs and kwargs['duration'] != "":
        event.duration = kwargs['duration']
    if ('min_number_of_people_to_join' in kwargs
            and kwargs['min_number_of_people_to_join'] != 'None'
            and kwargs['min_number_of_people_to_join'] != ""):
        event.min_number_of_people_to_join = kwargs['min_number_of_people_to_join']
        # A minimum headcount makes this a specific interest rather than a flex one.
        event.type = Event.EVENT_TYPE_SPECIFIC_INTEREST
    if ('max_number_of_people_to_join' in kwargs
            and kwargs['max_number_of_people_to_join'] != 'None'
            and kwargs['max_number_of_people_to_join'] != ""):
        event.max_number_of_people_to_join = kwargs['max_number_of_people_to_join']
    if 'note' in kwargs and kwargs['note'] != "":
        event.note = kwargs['note']
    if 'meeting_place' in kwargs and kwargs['meeting_place'] != "":
        event.meeting_place = kwargs['meeting_place']
    if 'activity_location' in kwargs and kwargs['activity_location'] != "":
        event.activity_location = kwargs['activity_location']
    event.put()

    if os.environ.get('ENV_TYPE') is None:
        if event.type == Event.EVENT_TYPE_FLEX_INTEREST:
            task = Task(url='/match_maker/', method='GET',
                        params={'interest': event.key.urlsafe()})
        else:
            task = Task(url='/match_maker/', method='GET',
                        params={'activity': event.key.urlsafe()})
        task.add('matchmaker')
        logging.info('event created')
        logging.info('match maker task queued')

    eventThrottler.increment_activity_count()
    return True, 'success', event
def queue_token_run(token):
    cls = token.prosthetic.classname
    logging.info("Queueing run task for token %s for %s on %s" %
                 (token.oauth_key, token.weavr_name, cls))
    task = Task(url='/runner/run_task/', method='POST',
                params={'token': token.oauth_key})
    task.add('default')
def __init__(self, queue_name, url, **kwargs):
    '''
    kwargs are the same used on the Task class
    (https://developers.google.com/appengine/docs/python/taskqueue/tasks#Task)
    '''
    super(TaskQueueCommand, self).__init__()
    self._task = Task(url=url, **kwargs)
    self._queue_name = queue_name
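# A minimal usage sketch, assuming the surrounding command framework exposes an
# execute()-style method (not shown above) that submits the wrapped Task to the
# named queue:
from google.appengine.api.taskqueue import Queue

class TaskQueueCommandSketch(TaskQueueCommand):
    def execute(self):  # hypothetical method name
        Queue(self._queue_name).add(self._task)

# cmd = TaskQueueCommandSketch('emails', '/tasks/send_email',
#                              params={'user_id': '42'})
# cmd.execute()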
def queueNightlyTask(self):
    now = PST.convert(datetime.datetime.now())
    tomorrow = now + datetime.timedelta(days=1)
    # Task ETAs are interpreted as UTC; 07:30 UTC is roughly half past
    # midnight Pacific (the exact wall time depends on DST).
    half_past_midnight = datetime.time(hour=7, minute=30)  # PST
    taskTime = datetime.datetime.combine(tomorrow, half_past_midnight)
    t = Task(url='/job/DBIntegrityCheck', method="GET", eta=taskTime)
    t.add(queue_name="dbchecker")
def get(self, model=""): url = '/droptable/%s' % model logging.debug('getting ready to start task to drop the %s table' % model) task = Task(url=url, params={}) task.add('crawler') self.response.out.write( 'got it. started the background task to delete all %s entities' % model)
def send(self):
    user = self.current_user
    message = self.request.get('message')
    t = Task(url='/broadcastChatroom',
             params={'userName': user.name, 'message': message})
    t.add(queue_name="notifications")
def QueueTask(self):
    taskurl = self.request.get('task')
    if taskurl:
        fullurl = '/' + taskurl
        t = Task(url=fullurl)
        t.add(queue_name="notifications")
        self.response.out.write('OK I wrote %s to the notifications queue' % fullurl)
    else:
        self.response.out.write('Need a task URL parameter')
def test_schedule_can_work_with_array_of_tasks(self):
    # given
    task1 = Task(url='/example/task1')
    task2 = Task(url='/example/task2')

    # when
    Tasks.schedule("default", [task1, task2])

    # then
    executed_tasks = self.taskqueue_stub.get_filtered_tasks(
        queue_names="default")
    self.assertEqual(len(executed_tasks), 2,
                     "Should create two tasks in queue")
    self.assertEqual(executed_tasks[0].url, '/example/task1')
    self.assertEqual(executed_tasks[1].url, '/example/task2')
def get(self):
    """Queue run tasks for each registered Weavr"""
    logging.info("Running cron job. Queueing run tasks.")
    for prosthetic in ProstheticData.all(keys_only=True):
        logging.info("Queueing run task for %s" % str(prosthetic))
        task = Task(url='/runner/prosthetic_task/', method='GET',
                    params={'key': str(prosthetic)})
        task.add('default')
    logging.info("Finished running cron job.")
def createTask(phone, msg, sec):
    logging.debug("Creating new task to fire in %s minutes" % str(int(sec) / 60))
    # countdown expects a number of seconds; coerce in case sec arrives as a string.
    task = Task(url='/reminder',
                params={'phone': phone, 'msg': msg},
                countdown=int(sec))
    task.add('reminders')
def create(cls, **kwargs):
    corr_id = request_correlation_id.get()
    if corr_id:
        if 'headers' in kwargs:
            kwargs['headers'][request_correlation_id.HEADER_NAME] = corr_id
        else:
            kwargs['headers'] = {request_correlation_id.HEADER_NAME: corr_id}
    return Task(**kwargs)
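# A hedged usage sketch of the factory above (the enclosing class name is an
# assumption): any task built through create() carries the current request's
# correlation id as an HTTP header, so the task queue worker request can be
# tied back to the originating request in logs.
#
# task = TaskFactory.create(url='/tasks/example', method='GET')  # hypothetical class name
# Queue('default').add(task)
# # The worker then reads the id back from
# # self.request.headers[request_correlation_id.HEADER_NAME].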
def queueEventRecord(cls, userKeyUrlsafe, entityKey1Urlsafe, entityKey2Urlsafe,
                     eventName):
    taskParams = {'userKeyUrlsafe': userKeyUrlsafe, 'eventName': eventName}
    if entityKey1Urlsafe:
        taskParams['entityKey1Urlsafe'] = entityKey1Urlsafe
    if entityKey2Urlsafe:
        taskParams['entityKey2Urlsafe'] = entityKey2Urlsafe
    t = Task(url='/recordEvent', params=taskParams)
    t.add(queue_name="recordEvents")
def _on_event_formation(self):
    # Queue it for life cycle management
    if os.environ.get('ENV_TYPE') is None:
        if self._event.start_time is not None and self._event.start_time != "":
            task_execution_time = self._event.start_time - timedelta(minutes=5)
        if self._event.expiration is not None and self._event.expiration != "":
            task_execution_time = datetime.utcnow() + timedelta(minutes=5)
            self._event.start_time = datetime.utcnow() + timedelta(minutes=10)
            self._event.put()
        goTask = Task(eta=task_execution_time,
                      url='/activity_life_cycle/',
                      method='GET',
                      params={'activity': self._event.key.urlsafe()})
        goTask.add('activityLifeCycle')
def _start_activity_closure_process(self, activity):
    if os.environ.get('ENV_TYPE') is None:
        # Countdown in seconds: 30 minutes on the dev server, 3 days in production.
        if os.environ.get('SERVER_SOFTWARE', '').startswith('Development'):
            eta = 1800
        else:
            eta = 259200
        task = Task(url='/activity_closure/', method='GET',
                    params={'activity_key': activity.key.urlsafe()},
                    countdown=eta)
        task.add('activityClosure')
def _start_post_activity_completion_process(self, activity):
    if os.environ.get('ENV_TYPE') is None:
        # Countdown in seconds: 7 minutes on the dev server, 35 minutes in production.
        if os.environ.get('SERVER_SOFTWARE', '').startswith('Development'):
            eta = 420
        else:
            eta = 2100
        task = Task(url='/post_activity_completion/', method='GET',
                    params={'activity_key': activity.key.urlsafe()},
                    countdown=eta)
        task.add('postActivityCompletion')
def _queue_tasks(self):
    scanners = QuestionsScanner.all()
    tasks = []
    for scanner in scanners:
        domain = scanner.key().name()
        task = Task(url='/tasks/scan_new_questions/%s' % (domain,))
        tasks.append(task)
    if len(tasks) > 0:
        queue = Queue(name="scannewquestions")
        queue.add(tasks)
def addTasks(self):
    q = Comic.all()
    queue = Queue(name='update-queue')
    tasks = []
    for comic in q:
        tasks.append(Task(url='/tasks/update/' + str(comic.id)))
        # Flush in batches of 100, the per-call limit for Queue.add().
        if len(tasks) >= 100:
            queue.add(tasks)
            tasks = []
    # Flush any remaining tasks.
    if tasks:
        queue.add(tasks)
    self.response.out.write("OK")
def test_items_to_tasks(self):
    """ Verifies that multiple pages of tasks get queued up properly. """
    count = task_utils.items_to_tasks(
        items=[1, 2, 3],
        queue=Queue('adapter-update'),
        task_generator=lambda item: Task(url='/something/{}'.format(item)))

    self.assertEqual(count, 3)

    task_count = len(self.taskqueue.get_filtered_tasks())
    self.assertEqual(task_count, 3)
def get(self, routeID=""): # create a new task with this link #crawlURL = "http://webwatch.cityofmadison.com/webwatch/Ada.aspx" crawlURL = URLBASE + 'r=' + routeID task = Task(url='/crawl/routelist/crawlingtask', params={ 'crawl': crawlURL, 'routeID': '00' }) task.add('crawler') logging.info("Added new task for %s" % crawlURL) return