def search_fb(fbl, style_name):
    """Fan out one keyword-search task per chunk of the given style.

    Each chunk is JSON-serialized into the task URL and handled by
    /tools/search_fb_for_events_for_chunk on the 'keyword-search' queue.
    """
    uid = fbl.fb_uid or 'random'
    for piece in get_chunks(style_name):
        query = urls.urlencode({'user_id': uid, 'chunk': json.dumps(piece)})
        taskqueue.add(
            method='GET',
            url='/tools/search_fb_for_events_for_chunk?' + query,
            queue_name='keyword-search',
        )
def load_users(user_ids, allow_cache=True, **kwargs):
    """Enqueue one /tasks/load_users task per user id.

    Args:
        user_ids: iterable of Facebook user ids to (re)load.
        allow_cache: when False, the task URL carries allow_cache=0 to
            force a fresh fetch.
        **kwargs: forwarded to taskqueue.add (e.g. countdown, eta, queue_name).
    """
    # Conditional expression instead of the fragile `cond and a or b` idiom.
    allow_cache_arg = '1' if allow_cache else '0'
    for fb_uid in user_ids:
        # user_id and user_ids both carry the same id here — presumably the
        # former is the request's auth/lookup context; confirm against handler.
        query = urls.urlencode(dict(user_id=fb_uid, user_ids=fb_uid, allow_cache=allow_cache_arg))
        taskqueue.add(
            method='GET',
            url='/tasks/load_users?' + query,
            **kwargs
        )
def load_users(user_ids, allow_cache=True, **kwargs):
    """Kick off a background /tasks/load_users fetch for every given user id."""
    if allow_cache:
        cache_flag = '1'
    else:
        cache_flag = '0'
    for uid in user_ids:
        qs = urls.urlencode(dict(user_id=uid, user_ids=uid, allow_cache=cache_flag))
        taskqueue.add(method='GET', url='/tasks/load_users?' + qs, **kwargs)
def search_fb(fbl, style_name):
    """Enqueue a keyword-search task for each chunk belonging to style_name."""
    chunk_list = get_chunks(style_name)
    for chunk in chunk_list:
        params = dict(
            user_id=fbl.fb_uid or 'random',
            chunk=json.dumps(chunk),
        )
        task_url = '/tools/search_fb_for_events_for_chunk?' + urls.urlencode(params)
        taskqueue.add(method='GET', url=task_url, queue_name='keyword-search')
def create_user_with_fbuser(fb_uid, fb_user, access_token, access_token_expires, location, ip, send_email=True, referer=None, client=None, send_new_user_email=True):
    """Create and persist a new User from Facebook signup data, then kick off follow-up work.

    Side effects, in order: saves the User entity, enqueues a friend-tracking
    task, backgrounds a potential-events load, and optionally sends a
    new-user email. Returns the saved User.
    """
    user = users.User(id=fb_uid)
    user.ip = ip
    user.fb_access_token = access_token
    user.fb_access_token_expires = access_token_expires
    user.expired_oauth_token = False
    user.expired_oauth_token_reason = None
    user.location = location
    # grab the cookie to figure out who referred this user
    logging.info("Referer was: %s", referer)
    if referer:
        #STR_ID_MIGRATE
        user.inviting_fb_uid = long(referer)
    user.clients = [client]
    user.send_email = send_email
    # Default search/notification preferences for a brand-new account.
    user.distance = '50'
    user.distance_units = 'miles'
    user.min_attendees = 0
    user.creation_time = datetime.datetime.now()
    user.login_count = 1
    user.last_login_time = user.creation_time
    user.compute_derived_properties(fb_user)
    logging.info("Saving user with name %s", user.full_name)
    user.put()
    logging.info("Requesting background load of user's friends")
    # Must occur after User is put with fb_access_token
    taskqueue.add(method='GET', url='/tasks/track_newuser_friends?' + urls.urlencode({'user_id': fb_uid}), queue_name='slow-queue')
    # Now load their potential events, to make "add event page" faster (and let us process/scrape their events)
    backgrounder.load_potential_events_for_users([fb_uid])
    fbl = fb_api.FBLookup(fb_uid, user.fb_access_token)
    if send_new_user_email:
        try:
            new_user_email.email_for_user(user, fbl, should_send=True)
        except new_user_email.NoEmailException as e:
            # Best-effort: a missing email address should not fail signup.
            logging.info('Not sending new-user email due to: %s', e)
    return user
def load_sources(fb_source_ids, allow_cache=True, fb_uid='random', **kwargs):
    """Enqueue /sources/scrape tasks on the slow-queue, batching 10 source ids per task.

    Args:
        fb_source_ids: list of source ids to scrape.
        allow_cache: when False, the task URL carries allow_cache=0 to force
            a fresh fetch.
        fb_uid: user context for the scrape; defaults to 'random'.
        **kwargs: forwarded to taskqueue.add (e.g. countdown, eta).
    """
    task_size = 10
    # Conditional expression instead of the fragile `cond and a or b` idiom.
    allow_cache_arg = '1' if allow_cache else '0'
    for i in range(0, len(fb_source_ids), task_size):
        batch = fb_source_ids[i:i + task_size]
        taskqueue.add(
            method='GET',
            url='/sources/scrape?' + urls.urlencode(
                dict(user_id=fb_uid, source_ids=','.join(batch), allow_cache=allow_cache_arg)),
            queue_name='slow-queue',
            **kwargs
        )
def load_events(fb_event_ids, allow_cache=True, fb_uid='701004', **kwargs):
    """Enqueue /tasks/load_events tasks on the slow-queue, batching 10 event ids per task.

    Args:
        fb_event_ids: list of event ids to load.
        allow_cache: when False, the task URL carries allow_cache=0 to force
            a fresh fetch.
        fb_uid: user context for the load; defaults to '701004' — presumably
            a known-good service account, verify against handler expectations.
        **kwargs: forwarded to taskqueue.add (e.g. countdown, eta).
    """
    task_size = 10
    # Conditional expression instead of the fragile `cond and a or b` idiom.
    allow_cache_arg = '1' if allow_cache else '0'
    for i in range(0, len(fb_event_ids), task_size):
        batch = fb_event_ids[i:i + task_size]
        taskqueue.add(
            method='GET',
            url='/tasks/load_events?' + urls.urlencode(
                dict(user_id=fb_uid, event_ids=','.join(batch), allow_cache=allow_cache_arg)),
            queue_name='slow-queue',
            **kwargs
        )
def load_sources(fb_source_ids, allow_cache=True, fb_uid='random', **kwargs):
    """Schedule background scrapes for the given sources, 10 ids per task."""
    batch_size = 10
    cache_flag = (allow_cache and '1' or '0')
    for start in range(0, len(fb_source_ids), batch_size):
        ids = fb_source_ids[start:start + batch_size]
        qs = urls.urlencode(dict(
            user_id=fb_uid,
            source_ids=','.join(ids),
            allow_cache=cache_flag,
        ))
        taskqueue.add(
            method='GET',
            url='/sources/scrape?' + qs,
            queue_name='slow-queue',
            **kwargs
        )
def load_events(fb_event_ids, allow_cache=True, fb_uid='701004', **kwargs):
    """Schedule background loads for the given events, 10 ids per task."""
    batch_size = 10
    cache_flag = (allow_cache and '1' or '0')
    for start in range(0, len(fb_event_ids), batch_size):
        ids = fb_event_ids[start:start + batch_size]
        qs = urls.urlencode(dict(
            user_id=fb_uid,
            event_ids=','.join(ids),
            allow_cache=cache_flag,
        ))
        taskqueue.add(
            method='GET',
            url='/tasks/load_events?' + qs,
            queue_name='slow-queue',
            **kwargs
        )
def load_potential_events_for_users(fb_uids, allow_cache=True, **kwargs):
    """Enqueue a /tasks/load_potential_events_for_user task per user id.

    Args:
        fb_uids: list of Facebook user ids.
        allow_cache: when False, the task URL carries allow_cache=0 to force
            a fresh fetch.
        **kwargs: forwarded to taskqueue.add (e.g. countdown, eta).
    """
    #OPT: support more than one fbuser context per request in BaseTaskFacebookRequestHandler.initialize()
    task_size = 1
    # Conditional expression instead of the fragile `cond and a or b` idiom.
    allow_cache_arg = '1' if allow_cache else '0'
    for i in range(0, len(fb_uids), task_size):
        # Hoist the joined id list: the original computed it twice per iteration.
        joined_ids = ','.join(fb_uids[i:i + task_size])
        taskqueue.add(
            method='GET',
            url='/tasks/load_potential_events_for_user?' + urls.urlencode(
                dict(user_id=joined_ids, user_ids=joined_ids, allow_cache=allow_cache_arg)),
            queue_name='slow-queue',
            **kwargs
        )
def load_potential_events_for_users(fb_uids, allow_cache=True, **kwargs):
    """Schedule a potential-events load task for each user id (one id per task)."""
    #OPT: support more than one fbuser context per request in BaseTaskFacebookRequestHandler.initialize()
    batch_size = 1
    cache_flag = (allow_cache and '1' or '0')
    for start in range(0, len(fb_uids), batch_size):
        id_csv = ','.join(fb_uids[start:start + batch_size])
        params = dict(user_id=id_csv, user_ids=id_csv, allow_cache=cache_flag)
        taskqueue.add(
            method='GET',
            url='/tasks/load_potential_events_for_user?' + urls.urlencode(params),
            queue_name='slow-queue',
            **kwargs
        )
def setup_reminders(fb_uid, fb_user_events):
    """Schedule per-event reminder notifications for a user's upcoming dance events.

    For each of the user's FB events that exists in our DB, enqueues a named
    task on 'mobile-notify-queue' with an ETA ~70 minutes before the event
    starts. Task names dedupe repeat runs; already-existing/tombstoned tasks
    are logged and skipped.
    """
    event_results_json = fb_user_events['events']['data']
    event_ids = [x['id'] for x in event_results_json]
    # Filter down to events we actually track (get_by_ids returns None for misses).
    existing_events = [x.string_id() for x in eventdata.DBEvent.get_by_ids(event_ids, keys_only=True) if x]
    logging.info("For user %s's %s events, %s are real dance events", fb_uid, len(event_ids), len(existing_events))
    for event in event_results_json:
        if event['id'] not in existing_events:
            continue
        logging.info("%s is dance event, checking dates..", event['id'])
        # NOTE(review): assumes start_time is an ISO-ish string with tz info — confirm FB API format.
        start_time = parser.parse(event['start_time'])
        # Otherwise it's at a specific time (we need the time with the timezone info included)
        # Also try to get it ten minutes before the Facebook event comes in, so that we aren't seen as the "dupe".
        start_notify_window = start_time - datetime.timedelta(hours=1, minutes=10)
        # I think 30 days is the limit for appengine tasks with ETA set, but it gets trickier with all the timezones.
        # And really, we run this code multiple times a day, so don't need to plan out more than a day or two.
        now = datetime.datetime.now(start_time.tzinfo)
        future_cutoff = now + datetime.timedelta(days=2)
        # Any time after start_notify_window, we could have sent a notification.
        # If we try to re-add the taskqueue after that timestamp has passed,
        # we may re-add a task that has already completed, and re-notify a user.
        # Double-check in our logs that this does not occur....(it shouldn't!)
        end_notify_window = start_notify_window
        # Ignore events that started in the past
        if end_notify_window < now:
            continue
        # And ignore events that are too far in the future to care about yet
        if start_notify_window > future_cutoff:
            continue
        logging.info("For event %s, sending notifications at %s", event['id'], start_notify_window)
        try:
            taskqueue.add(
                method='GET',
                # Named task: name acts as a dedupe key per (user, event).
                name='notify_user-%s-%s' % (fb_uid, event['id']),
                queue_name='mobile-notify-queue',
                eta=start_notify_window,
                url='/tasks/remind_user?' + urls.urlencode(dict(user_id=fb_uid, event_id=event['id'])),
            )
        except (taskqueue.TaskAlreadyExistsError, taskqueue.TombstonedTaskError) as e:
            # Expected on re-runs: the named task was already scheduled (or already ran).
            logging.info("Error adding task: %s", e)
def wrapped_func(*args, **kwargs):
    """Invoke func; on any failure, log the request details, enqueue a retry in one hour, and re-raise."""
    try:
        return func(*args, **kwargs)
    except Exception as e:
        # Wrapped callables are handler methods, so args[0] is the request handler.
        handler = args[0]
        logging.exception("API retry failure")
        retry_url = handler.request.path
        retry_body = handler.request.body
        logging.error(e)
        logging.error("Retrying URL %s", retry_url)
        logging.error("With Payload %r", retry_body)
        taskqueue.add(method='POST', url=retry_url, payload=retry_body, countdown=60 * 60)
        raise
def create_user_with_fbuser(
    fb_uid, fb_user, access_token, access_token_expires, location, ip, send_email=True, referer=None, client=None, send_new_user_email=True
):
    """Build, save, and bootstrap a new User record from Facebook signup data.

    After the datastore put, enqueues a friend-tracking task, backgrounds a
    potential-events load, and (optionally) sends the new-user email.
    Returns the saved User entity.
    """
    user = users.User(id=fb_uid)
    user.ip = ip
    user.fb_access_token = access_token
    user.fb_access_token_expires = access_token_expires
    user.expired_oauth_token = False
    user.expired_oauth_token_reason = None
    user.location = location
    # grab the cookie to figure out who referred this user
    logging.info("Referer was: %s", referer)
    if referer:
        #STR_ID_MIGRATE
        user.inviting_fb_uid = long(referer)
    user.clients = [client]
    user.send_email = send_email
    # New accounts start with these search/notification defaults.
    user.distance = '50'
    user.distance_units = 'miles'
    user.min_attendees = 0
    user.creation_time = datetime.datetime.now()
    user.login_count = 1
    user.last_login_time = user.creation_time
    user.compute_derived_properties(fb_user)
    logging.info("Saving user with name %s", user.full_name)
    user.put()
    logging.info("Requesting background load of user's friends")
    # Must occur after User is put with fb_access_token
    taskqueue.add(method='GET', url='/tasks/track_newuser_friends?' + urls.urlencode({'user_id': fb_uid}), queue_name='slow-queue')
    # Now load their potential events, to make "add event page" faster (and let us process/scrape their events)
    backgrounder.load_potential_events_for_users([fb_uid])
    fbl = fb_api.FBLookup(fb_uid, user.fb_access_token)
    if send_new_user_email:
        try:
            new_user_email.email_for_user(user, fbl, should_send=True)
        except new_user_email.NoEmailException as e:
            # Missing email is non-fatal; signup still succeeds.
            logging.info('Not sending new-user email due to: %s', e)
    return user