def get_login_url(self):
    """Build the /login redirect URL, preserving the current page as 'next'.

    Side effect: sets self.debug_list from the comma-separated 'deb' query
    argument (empty list when absent), and forwards 'deb' to the login URL.
    """
    next_url = '%s?%s' % (self.request.path, urls.urlencode(self.request.GET))
    params = {'next': next_url}
    if 'deb' in self.request.arguments():
        deb_value = self.request.get('deb')
        params['deb'] = deb_value
        self.debug_list = deb_value.split(',')
    else:
        self.debug_list = []
    logging.info("Debug list is %r", self.debug_list)
    return '/login?%s' % urls.urlencode(params)
def get_login_url(self):
    """Build the /login redirect URL, preserving the current page as 'next'.

    Side effect: sets self.debug_list from the comma-separated 'deb' GET
    parameter (empty list when absent), and forwards 'deb' to the login URL.
    """
    next_url = '%s?%s' % (self.request.path, urls.urlencode(self.request.GET))
    params = {'next': next_url}
    if 'deb' in self.request.GET:
        deb_value = self.request.GET['deb']
        params['deb'] = deb_value
        self.debug_list = deb_value.split(',')
    else:
        self.debug_list = []
    logging.info("Debug list is %r", self.debug_list)
    return '/login?%s' % urls.urlencode(params)
def post(self, path, args, post_args):
    """POST to the Facebook Graph API at `path` and return the decoded JSON.

    The access token (if any) is attached to post_args when a POST body is
    being sent, otherwise to the query-string args.
    """
    if not args:
        args = {}
    token = self.random_access_token()
    if token:
        # Attach the token to whichever argument set is actually transmitted.
        target = post_args if post_args is not None else args
        target["access_token"] = token
    post_data = urls.urlencode(post_args) if post_args is not None else None
    url = "https://graph.facebook.com/" + path + "?" + urls.urlencode(args)
    response = urllib.urlopen(url, post_data)
    return json.loads(response.read())
def load_targeting_key(cursor, geoname): geo_search = { 'location_types': 'city,region', 'country_code': geoname.country_code, 'q': get_query(geoname), 'type': 'adgeolocation', 'access_token': get_config()['app_access_token'], 'locale': 'en_US', # because app_access_token is locale-less and seems to use a IP-locale fallback } cursor.execute( 'select data from AdGeoLocation where q = ? and country_code = ?', (geo_search['q'], geo_search['country_code'])) result = cursor.fetchone() if not result: result = urllib.urlopen('https://graph.facebook.com/v2.9/search?%s' % urls.urlencode(geo_search)).read() result_json = json.loads(result) array_json = json.dumps(result_json['data']) data = { 'geonameid': data['geonameid'], 'q': geo_search['q'], 'country_code': geo_search['country_code'], 'data': array_json, } sqlite_db.insert_record(cursor, 'AdGeoLocation', data) print '\n'.join('- ' + str(x) for x in result_json['data'])
def url(cls, path, fields=None, **kwargs):
    """Return the versioned API path, appending fields/kwargs as a query string
    when `fields` is non-empty."""
    if not fields:
        return '/%s/%s' % (cls.version, path)
    query = urls.urlencode(dict(fields=','.join(fields), **kwargs))
    return '/%s/%s?%s' % (cls.version, path, query)
def handle_alternate_login(self, request):
    """Log in a user arriving with uid/access_token(_md5) URL parameters.

    Returns a redirect URL (with the auth params stripped) when a 'uid' was
    supplied, otherwise False. Setting the login cookie is a side effect.
    """
    # If the mobile app sent the user to a /....?uid=XX&access_token_md5=YY URL,
    # then let's verify the parameters, and log the user in as that user
    if request.get('uid'):
        if request.get('access_token'):
            # Verify the token by asking Facebook who it belongs to.
            fbl = fb_api.FBLookup(request.get('uid'), request.get('access_token'))
            fb_user = fbl.get(fb_api.LookupUser, 'me')
            logging.info("Requested /me with given access_token, got %s", fb_user)
            if fb_user['profile']['id'] == request.get('uid'):
                # Token checks out: log in using the md5 of our stored token.
                user = users.User.get_by_id(request.get('uid'))
                access_token_md5 = hashlib.md5(user.fb_access_token).hexdigest()
                self.set_login_cookie(request.get('uid'), access_token_md5=access_token_md5)
        if request.get('access_token_md5'):
            user = users.User.get_by_id(request.get('uid'))
            if user and request.get('access_token_md5') == hashlib.md5(user.fb_access_token).hexdigest():
                # Authenticated! Now save cookie so subsequent requests can trust that this user is authenticated.
                # The subsequent request will see a valid user_login param (though without an fb_cookie_uid)
                self.set_login_cookie(request.get('uid'), access_token_md5=self.request.get('access_token_md5'))
        # But regardless of whether the token was correct, let's redirect and get rid of these url params.
        current_url_args = {}
        for arg in sorted(self.request.GET):
            if arg in ['uid', 'access_token', 'access_token_md5']:
                continue
            # getall preserves repeated query parameters; doseq re-expands them below.
            current_url_args[arg] = self.request.GET.getall(arg)
        final_url = self.request.path + '?' + urls.urlencode(current_url_args, doseq=True)
        # Make sure we immediately stop running the initialize() code if we return a URL here
        return final_url
    else:
        return False
def check_language(text):
    """Detect the language of `text` via the Google Translate detect API.

    Returns the detected language code, or None when the API's confidence
    is at or below 0.10. Raises on a non-200 urlfetch response.
    """
    if len(text) > MAX_LENGTH:
        logging.info("trimming text from %s to %s", len(text), MAX_LENGTH)
        text = text[:MAX_LENGTH]
    base_url = 'https://www.googleapis.com/language/translate/v2/detect'
    form_data = urls.urlencode({'key': API_KEY, 'q': text})
    # POST with a method-override header so long texts don't overflow GET limits.
    if urllib2_fallback:
        request = urllib2.Request(base_url, form_data, {'X-HTTP-Method-Override': 'GET'})
        response_content = urllib2.urlopen(request).read()
    else:
        result = urlfetch.fetch(
            url=base_url,
            payload=form_data,
            method=urlfetch.POST,
            headers={'X-HTTP-Method-Override': 'GET'},
        )
        if result.status_code != 200:
            error = "result status code is %s for content %s" % (result.status_code, result.content)
            logging.error(error)
            raise Exception("Error in translation: %s" % error)
        response_content = result.content
    detection = json.loads(response_content)['data']['detections'][0][0]
    logging.info("text classification returned %s", detection)
    if detection['confidence'] > 0.10:
        return detection['language']
    else:
        return None
def url(cls, path, fields=None, **kwargs):
    """Return the versioned API path with an optional query string.

    `fields` must be a list (not a string); it is joined into the 'fields'
    query parameter alongside any other kwargs.
    """
    if fields:
        if isinstance(fields, basestring):
            raise ValueError('Must pass in a list to fields: %r' % fields)
        kwargs['fields'] = ','.join(fields)
    base = '/%s/%s' % (cls.version, path)
    if kwargs:
        return '%s?%s' % (base, urls.urlencode(kwargs))
    return base
def get_json(self, **kwargs):
    """Fetch and decode a geocoding API response.

    When self.use_private_key is set, the request is signed with an
    HMAC-SHA1 signature over the unsigned path (Maps for Work style);
    otherwise the plain server API key is appended.
    """
    if self.use_private_key:
        kwargs['client'] = 'free-dancedeets'
    # Note: 'client' (when present) must be part of the signed path.
    unsigned_url_path = "%s?%s" % (self.path, urls.urlencode(kwargs))
    if self.use_private_key:
        decoded_key = base64.urlsafe_b64decode(google_maps_private_key)
        signature = hmac.new(decoded_key, unsigned_url_path, hashlib.sha1)
        encoded_signature = base64.urlsafe_b64encode(signature.digest())
        url = "%s%s&signature=%s" % (self.protocol_host, unsigned_url_path, encoded_signature)
    else:
        url = "%s%s&key=%s" % (self.protocol_host, unsigned_url_path, google_server_key)
    logging.info('geocoding url: %s', url)
    result = urllib.urlopen(url).read()
    logging.info('geocoding results: %s', result)
    return json.loads(result)
def load_users(user_ids, allow_cache=True, **kwargs):
    """Enqueue one /tasks/load_users task per user id on the slow queue."""
    cache_flag = '1' if allow_cache else '0'
    for fb_uid in user_ids:
        query = urls.urlencode(dict(user_id=fb_uid, user_ids=fb_uid, allow_cache=cache_flag))
        taskqueue.add(
            method='GET',
            url='/tasks/load_users?' + query,
            queue_name='slow-queue',
            **kwargs
        )
def load_sources(fb_source_ids, allow_cache=True, fb_uid='random', **kwargs):
    """Enqueue /sources/scrape tasks, batching source ids in groups of 10."""
    batch_size = 10
    cache_flag = '1' if allow_cache else '0'
    for start in range(0, len(fb_source_ids), batch_size):
        batch = fb_source_ids[start:start + batch_size]
        query = urls.urlencode(dict(user_id=fb_uid, source_ids=','.join(batch), allow_cache=cache_flag))
        taskqueue.add(
            method='GET',
            url='/sources/scrape?' + query,
            queue_name='slow-queue',
            **kwargs
        )
def load_events(fb_event_ids, allow_cache=True, fb_uid='701004', **kwargs):
    """Enqueue /tasks/load_events tasks, batching event ids in groups of 10."""
    batch_size = 10
    cache_flag = '1' if allow_cache else '0'
    for start in range(0, len(fb_event_ids), batch_size):
        batch = fb_event_ids[start:start + batch_size]
        query = urls.urlencode(dict(user_id=fb_uid, event_ids=','.join(batch), allow_cache=cache_flag))
        taskqueue.add(
            method='GET',
            url='/tasks/load_events?' + query,
            queue_name='slow-queue',
            **kwargs
        )
def load_potential_events_for_users(fb_uids, allow_cache=True, **kwargs):
    """Enqueue one /tasks/load_potential_events_for_user task per user id."""
    #OPT: support more than one fbuser context per request in BaseTaskFacebookRequestHandler.initialize()
    batch_size = 1
    cache_flag = '1' if allow_cache else '0'
    for start in range(0, len(fb_uids), batch_size):
        joined_ids = ','.join(fb_uids[start:start + batch_size])
        query = urls.urlencode(dict(user_id=joined_ids, user_ids=joined_ids, allow_cache=cache_flag))
        taskqueue.add(
            method='GET',
            url='/tasks/load_potential_events_for_user?' + query,
            queue_name='slow-queue',
            **kwargs
        )
def _create_rpc_for_batch(self, batch_list, use_access_token):
    """Start an async urlfetch POSTing a Graph API batch request.

    Returns (rpc, token) where token is the user token used, or None when
    the app token fallback was used. Increments self.fb_fetches by the
    number of batched requests.
    """
    token = self.random_access_token()
    if use_access_token and token:
        access_token = token
    else:
        # Fall back to the app access token ("app_id|secret") form.
        access_token = '%s|%s' % (facebook.FACEBOOK_CONFIG['app_id'], facebook.FACEBOOK_CONFIG['secret_key'])
    post_args = {
        'batch': json.dumps(batch_list),
        'access_token': access_token,
        'include_headers': False,  # Don't need to see all the caching headers per response
    }
    post_data = urls.urlencode(post_args)
    rpc = urlfetch.create_rpc(deadline=DEADLINE)
    urlfetch.make_fetch_call(rpc, "https://graph.facebook.com/", post_data, "POST")
    self.fb_fetches += len(batch_list)
    return rpc, token
def setup_reminders(fb_uid, fb_user_events):
    """Schedule mobile-notify reminder tasks for a user's upcoming dance events.

    Only events already known to our DB are considered; a named task is
    enqueued with an ETA ~70 minutes before each event's start time.
    Duplicate/tombstoned task names are logged and ignored, which makes
    re-running this function idempotent.
    """
    event_results_json = fb_user_events['events']['data']
    event_ids = [x['id'] for x in event_results_json]
    # Keep only events that exist in our DB (i.e. real dance events).
    existing_events = [
        x.string_id() for x in eventdata.DBEvent.get_by_ids(event_ids, keys_only=True) if x
    ]
    logging.info("For user %s's %s events, %s are real dance events", fb_uid, len(event_ids), len(existing_events))
    for event in event_results_json:
        if event['id'] not in existing_events:
            continue
        logging.info("%s is dance event, checking dates..", event['id'])
        start_time = parser.parse(event['start_time'])
        # Otherwise it's at a specific time (we need the time with the timezone info included)
        # Also try to get it ten minutes before the Facebook event comes in, so that we aren't seen as the "dupe".
        start_notify_window = start_time - datetime.timedelta(hours=1, minutes=10)
        # I think 30 days is the limit for appengine tasks with ETA set, but it gets trickier with all the timezones.
        # And really, we run this code multiple times a day, so don't need to plan out more than a day or two.
        now = datetime.datetime.now(start_time.tzinfo)
        future_cutoff = now + datetime.timedelta(days=2)
        # Any time after start_notify_window, we could have sent a notification.
        # If we try to re-add the taskqueue after that timestamp has passed,
        # we may re-add a task that has already completed, and re-notify a user.
        # Double-check in our logs that this does not occur....(it shouldn't!)
        end_notify_window = start_notify_window
        # Ignore events that started in the past
        if end_notify_window < now:
            continue
        # And ignore events that are too far in the future to care about yet
        if start_notify_window > future_cutoff:
            continue
        logging.info("For event %s, sending notifications at %s", event['id'], start_notify_window)
        try:
            # Named task: taskqueue de-dupes on name, so each (user, event)
            # pair gets at most one reminder.
            taskqueue.add(
                method='GET',
                name='notify_user-%s-%s' % (fb_uid, event['id']),
                queue_name='mobile-notify-queue',
                eta=start_notify_window,
                url='/tasks/remind_user?' + urls.urlencode(dict(user_id=fb_uid, event_id=event['id'])),
            )
        except (taskqueue.TaskAlreadyExistsError, taskqueue.TombstonedTaskError) as e:
            logging.info("Error adding task: %s", e)
def create_user_with_fbuser(fb_uid, fb_user, access_token, access_token_expires, location, send_email=False, referer=None, client=None):
    """Create and persist a new User record from a Facebook login.

    Sets defaults (distance, units, email preference), records the referer
    as the inviting user, saves the entity, then enqueues background tasks
    to track the user's friends and load their potential events.
    Returns the saved User.
    """
    user = users.User(id=fb_uid)
    user.fb_access_token = access_token
    user.fb_access_token_expires = access_token_expires
    user.expired_oauth_token = False
    user.expired_oauth_token_reason = None
    user.location = location
    # grab the cookie to figure out who referred this user
    logging.info("Referer was: %s", referer)
    if referer:
        #STR_ID_MIGRATE
        user.inviting_fb_uid = long(referer)
    user.clients = [client]
    user.send_email = send_email
    user.distance = '50'
    user.distance_units = 'miles'
    user.min_attendees = 0
    user.creation_time = datetime.datetime.now()
    user.login_count = 1
    user.last_login_time = user.creation_time
    user.compute_derived_properties(fb_user)
    logging.info("Saving user with name %s", user.full_name)
    user.put()
    logging.info("Requesting background load of user's friends")
    # Must occur after User is put with fb_access_token
    taskqueue.add(method='GET', url='/tasks/track_newuser_friends?' + urls.urlencode({'user_id': fb_uid}), queue_name='slow-queue')
    # Now load their potential events, to make "add event page" faster (and let us process/scrape their events)
    #potential_events_reloading.load_potential_events_for_user_ids(fbl, [fb_uid])
    backgrounder.load_potential_events_for_users([fb_uid])
    return user
def setup_reminders(fb_uid, fb_user_events):
    """Schedule mobile-notify reminder tasks for a user's upcoming dance events.

    Only events already known to our DB are considered; a named task is
    enqueued with an ETA ~70 minutes before each event's start time.
    Duplicate/tombstoned task names are logged and ignored, which makes
    re-running this function idempotent.
    """
    event_results_json = fb_user_events['events']['data']
    event_ids = [x['id'] for x in event_results_json]
    # Keep only events that exist in our DB (i.e. real dance events).
    existing_events = [x.string_id() for x in eventdata.DBEvent.get_by_ids(event_ids, keys_only=True) if x]
    logging.info("For user %s's %s events, %s are real dance events", fb_uid, len(event_ids), len(existing_events))
    for event in event_results_json:
        if event['id'] not in existing_events:
            continue
        logging.info("%s is dance event, checking dates..", event['id'])
        start_time = parser.parse(event['start_time'])
        # Otherwise it's at a specific time (we need the time with the timezone info included)
        # Also try to get it ten minutes before the Facebook event comes in, so that we aren't seen as the "dupe".
        start_notify_window = start_time - datetime.timedelta(hours=1, minutes=10)
        # I think 30 days is the limit for appengine tasks with ETA set, but it gets trickier with all the timezones.
        # And really, we run this code multiple times a day, so don't need to plan out more than a day or two.
        now = datetime.datetime.now(start_time.tzinfo)
        future_cutoff = now + datetime.timedelta(days=2)
        # Any time after start_notify_window, we could have sent a notification.
        # If we try to re-add the taskqueue after that timestamp has passed,
        # we may re-add a task that has already completed, and re-notify a user.
        # Double-check in our logs that this does not occur....(it shouldn't!)
        end_notify_window = start_notify_window
        # Ignore events that started in the past
        if end_notify_window < now:
            continue
        # And ignore events that are too far in the future to care about yet
        if start_notify_window > future_cutoff:
            continue
        logging.info("For event %s, sending notifications at %s", event['id'], start_notify_window)
        try:
            # Named task: taskqueue de-dupes on name, so each (user, event)
            # pair gets at most one reminder.
            taskqueue.add(
                method='GET',
                name='notify_user-%s-%s' % (fb_uid, event['id']),
                queue_name='mobile-notify-queue',
                eta=start_notify_window,
                url='/tasks/remind_user?' + urls.urlencode(dict(user_id=fb_uid, event_id=event['id'])),
            )
        except (taskqueue.TaskAlreadyExistsError, taskqueue.TombstonedTaskError) as e:
            logging.info("Error adding task: %s", e)
def get_targeting_data(fbl, db_event):
    """Build FB feed_targeting for an event: a country and optionally a city key.

    Returns a dict with 'countries' and/or 'cities' keys (possibly empty).
    """
    short_country = None
    city_key = None
    venue_id = db_event.venue.get('id')
    # Only venue-backed events get city-level lookup; venue_id itself is
    # used purely as a gate here.
    if venue_id:
        # Target to people in the same country as the event. Should improve signal/noise ratio.
        if db_event.country:
            country = db_event.country.upper()
            if country in iso3166.countries_by_name:
                short_country = iso3166.countries_by_name[country].alpha2
        city_state_country_list = [
            db_event.city,
            db_event.state
        ]
        city_state_country = ', '.join(x for x in city_state_country_list if x)
        kw_params = {
            'q': city_state_country,
            'country_code': short_country,
        }
        # Look up FB's ad-geolocation key for the city; only keys that
        # support city-level targeting are usable.
        geo_target = fbl.get(LookupGeoTarget, urls.urlencode(kw_params))
        good_targets = [x for x in geo_target['search']['data'] if x['supports_city']]
        if good_targets:
            # Is usually an integer, but in the case of HK and SG (city/country combos), it can be a string
            city_key = good_targets[0]['key']
            # if we want state-level targeting, 'region_id' would be the city's associated state
    if not short_country:
        # Fall back to the geocoded country when the event record lacks one.
        geocode = db_event.get_geocode()
        if geocode:
            short_country = geocode.country()
    feed_targeting = {}
    if short_country:
        feed_targeting['countries'] = [short_country]
    if city_key:
        feed_targeting['cities'] = [city_key]
    return feed_targeting
def get_targeting_data(fbl, db_event):
    """Build FB geo_locations targeting for an event: by city when we can
    resolve one, otherwise by country.

    Returns {'geo_locations': {...}} where the inner dict may be empty.
    """
    city_key = None
    short_country = db_event.country
    if short_country:
        city_state = ', '.join(
            part for part in [db_event.city, db_event.state] if part
        )
        geo_search = {
            'type': 'adgeolocation',
            'location_types': 'city',
            'country_code': db_event.country,
            'q': city_state,
        }
        geo_target = fbl.get(LookupGeoTarget, urls.urlencode(geo_search), allow_cache=False)
        matches = geo_target['search']['data']
        if matches:
            # Is usually an integer, but in the case of HK and SG (city/country combos), it can be a string
            city_key = matches[0]['key']
            # if we want state-level targeting, 'region_id' would be the city's associated state
    if not short_country:
        # Fall back to the geocoded country when the event record lacks one.
        geocode = db_event.get_geocode()
        if geocode:
            short_country = geocode.country()
    # Target by city if we can, otherwise use the country
    feed_targeting = {}
    if city_key:
        feed_targeting['cities'] = [{'key': city_key}]
    elif short_country:
        feed_targeting['countries'] = [short_country]
    return {'geo_locations': feed_targeting}
def fql_url(cls, fql):
    """Return the versioned FQL endpoint path for the given query string."""
    query_string = urls.urlencode({'q': fql})
    return "/%s/fql?%s" % (cls.version, query_string)
def handle_search(self, form):
    """Run an event search from `form` and populate self.display, then render.

    When the 'calendar' request param is set, the search itself is skipped
    and only the tab/title/url display values are filled in.
    NOTE(review): indentation reconstructed from a flattened source line —
    verify the 'calendar' guard boundary against the original file.
    """
    validated = form.validate()
    if not validated:
        for field, errors in form.errors.items():
            for error in errors:
                self.add_error(u"%s error: %s" % (
                    getattr(form, field).label.text,
                    error
                ))
    if not self.request.get('calendar'):
        search_query = None
        search_results = []
        sponsored_studios = {}
        onebox_links = []
        if validated:
            search_query = form.build_query()
            if self.indexing_bot:
                # Crawlers get the full-event results, filtered to indexable events.
                search_results = self.search_class(search_query).get_search_results(full_event=True)
                search_results = [x for x in search_results if x.db_event.is_indexable()]
            else:
                search_results = self.search_class(search_query).get_search_results()
            if 'class' in form.deb.data:
                # Debug flag: mix class-search results into the event results.
                from classes import class_index
                class_results = class_index.ClassSearch(search_query).get_search_results()
                for result in class_results:
                    sponsored_studios.setdefault(result.sponsor, set()).add(result.actual_city_name)
                search_results += class_results
                search_results.sort(key=lambda x: (x.start_time, x.actual_city_name, x.name))
            onebox_links = onebox.get_links_for_query(search_query)
        # We can probably speed this up 2x by shrinking the size of the fb-event-attending objects. a list of {u'id': u'100001860311009', u'name': u'Dance InMinistry', u'rsvp_status': u'attending'} is 50% overkill.
        a = time.time()
        friends.decorate_with_friends(self.fbl, search_results)
        logging.info("Decorating with friends-attending took %s seconds", time.time() - a)
        a = time.time()
        rsvp.decorate_with_rsvps(self.fbl, search_results)
        logging.info("Decorating with personal rsvp data took %s seconds", time.time() - a)
        past_results, present_results, grouped_results = search.group_results(search_results)
        if search_query and search_query.time_period in search_base.TIME_ALL_FUTURE:
            # Future-only searches fold "past" into "present/ongoing".
            present_results = past_results + present_results
            past_results = []
        self.display['num_upcoming_results'] = sum([len(x.results) for x in grouped_results]) + len(present_results)
        self.display['past_results'] = past_results
        self.display['ongoing_results'] = present_results
        self.display['grouped_upcoming_results'] = grouped_results
        self.display['sponsored_studios'] = sponsored_studios
        self.display['onebox_links'] = onebox_links
    if form.time_period.data == search_base.TIME_PAST:
        self.display['selected_tab'] = 'past'
    elif self.request.get('calendar'):
        self.display['selected_tab'] = 'calendar'
    else:
        self.display['selected_tab'] = 'present'
    self.display['form'] = form
    if form.location.data and form.keywords.data:
        self.display['result_title'] = '%s dance events near %s' % (form.keywords.data, form.location.data)
    elif form.location.data:
        self.display['result_title'] = '%s dance events' % form.location.data
    elif form.keywords.data:
        self.display['result_title'] = '%s dance events' % form.keywords.data
    else:
        self.display['result_title'] = 'Dance events'
    request_params = form.url_params()
    self.display['past_view_url'] = '/events/relevant?past=1&%s' % urls.urlencode(request_params)
    self.display['upcoming_view_url'] = '/events/relevant?%s' % urls.urlencode(request_params)
    self.display['calendar_view_url'] = '/events/relevant?calendar=1&%s' % urls.urlencode(request_params)
    self.display['calendar_feed_url'] = '/calendar/feed?%s' % urls.urlencode(request_params)
    self.jinja_env.globals['CHOOSE_RSVPS'] = rsvp.CHOOSE_RSVPS
    self.render_template(self.template_name)
def handle_search(self, form):
    """Run an event search from `form` and populate self.display, then render.

    When the 'calendar' request param is set, the search itself is skipped
    and only the tab/title/url display values are filled in.
    NOTE(review): indentation reconstructed from a flattened source line —
    verify the 'calendar' guard boundary against the original file.
    """
    validated = form.validate()
    if not validated:
        for field, errors in form.errors.items():
            for error in errors:
                self.add_error(u"%s error: %s" % (
                    getattr(form, field).label.text,
                    error
                ))
    if not self.request.get('calendar'):
        search_query = None
        search_results = []
        sponsored_studios = {}
        onebox_links = []
        if validated:
            search_query = form.build_query()
            search_results = self.search_class(search_query).get_search_results()
            if 'class' in form.deb.data:
                # Debug flag: mix class-search results into the event results.
                from classes import class_index
                class_results = class_index.ClassSearch(search_query).get_search_results()
                for result in class_results:
                    sponsored_studios.setdefault(result.sponsor, set()).add(result.actual_city_name)
                search_results += class_results
                search_results.sort(key=lambda x: (x.start_time, x.actual_city_name, x.name))
            onebox_links = onebox.get_links_for_query(search_query)
        # We can probably speed this up 2x by shrinking the size of the fb-event-attending objects. a list of {u'id': u'100001860311009', u'name': u'Dance InMinistry', u'rsvp_status': u'attending'} is 50% overkill.
        a = time.time()
        friends.decorate_with_friends(self.fbl, search_results)
        logging.info("Decorating with friends-attending took %s seconds", time.time() - a)
        a = time.time()
        rsvp.decorate_with_rsvps(self.fbl, search_results)
        logging.info("Decorating with personal rsvp data took %s seconds", time.time() - a)
        past_results, present_results, grouped_results = search.group_results(search_results)
        if search_query and search_query.time_period in search_base.TIME_ALL_FUTURE:
            # Future-only searches fold "past" into "present/ongoing".
            present_results = past_results + present_results
            past_results = []
        self.display['num_upcoming_results'] = sum([len(x.results) for x in grouped_results]) + len(present_results)
        self.display['past_results'] = past_results
        self.display['ongoing_results'] = present_results
        self.display['grouped_upcoming_results'] = grouped_results
        self.display['sponsored_studios'] = sponsored_studios
        self.display['onebox_links'] = onebox_links
    if form.time_period.data == search_base.TIME_PAST:
        self.display['selected_tab'] = 'past'
    elif self.request.get('calendar'):
        self.display['selected_tab'] = 'calendar'
    else:
        self.display['selected_tab'] = 'present'
    self.display['form'] = form
    if form.location.data and form.keywords.data:
        self.display['result_title'] = '%s dance events near %s' % (form.keywords.data, form.location.data)
    elif form.location.data:
        self.display['result_title'] = '%s dance events' % form.location.data
    elif form.keywords.data:
        self.display['result_title'] = '%s dance events' % form.keywords.data
    else:
        self.display['result_title'] = 'Dance events'
    request_params = form.url_params()
    self.display['past_view_url'] = '/events/relevant?past=1&%s' % urls.urlencode(request_params)
    self.display['upcoming_view_url'] = '/events/relevant?%s' % urls.urlencode(request_params)
    self.display['calendar_view_url'] = '/events/relevant?calendar=1&%s' % urls.urlencode(request_params)
    self.display['calendar_feed_url'] = '/calendar/feed?%s' % urls.urlencode(request_params)
    self.jinja_env.globals['CHOOSE_RSVPS'] = rsvp.CHOOSE_RSVPS
    self.render_template(self.template_name)