def get_json(self, **kwargs):
    """Issue a request against this Google Maps API endpoint and return parsed JSON.

    Returns None when the response body is not valid JSON.
    """
    if mr:
        mr.increment('gmaps-api-%s' % self.name)
    if self.use_private_key:
        # Signed (Maps-for-Work style) request: HMAC-SHA1 over the path+query,
        # with the signature appended as a url parameter.
        kwargs['client'] = 'free-dancedeets'
        path_and_query = "%s?%s" % (self.path, urls.urlencode(kwargs))
        raw_key = base64.urlsafe_b64decode(google_maps_private_key)
        digest = hmac.new(raw_key, path_and_query, hashlib.sha1).digest()
        url = "%s%s&signature=%s" % (self.protocol_host, path_and_query, base64.urlsafe_b64encode(digest))
    else:
        # Plain API-key request.
        path_and_query = "%s?%s" % (self.path, urls.urlencode(kwargs))
        url = "%s%s&key=%s" % (self.protocol_host, path_and_query, google_server_key)
    logging.info('geocoding url: %s', url)
    response_body = urllib.urlopen(url).read()
    logging.info('geocoding results: %s', response_body)
    try:
        return json.loads(response_body)
    except ValueError:
        return None
def get_login_url(self):
    """Build the /login URL that will bounce the user back to the current page.

    Also propagates any deb= debug flags, and records them on self.debug_list.
    """
    return_to = self.request.path + '?' + urls.urlencode(self.request.GET)
    params = dict(next=return_to)
    if 'deb' in self.request.GET:
        params['deb'] = self.request.GET['deb']
        self.debug_list = self.request.GET['deb'].split(',')
    else:
        self.debug_list = []
    logging.info("Debug list is %r", self.debug_list)
    return '/login?%s' % urls.urlencode(params)
def post(self, path, args, post_args):
    """POST to the Facebook Graph API and return the decoded JSON response.

    An available access token is attached to post_args when post_args was
    given, otherwise to the query-string args.
    """
    if not args:
        args = {}
    token = self.random_access_token()
    if token:
        # Prefer carrying the token in the POST body when one exists.
        target = args if post_args is None else post_args
        target["access_token"] = token
    payload = urls.urlencode(post_args) if post_args is not None else None
    response = urllib.urlopen("https://graph.facebook.com/" + path + "?" + urls.urlencode(args), payload)
    return json.loads(response.read())
def check_language(text):
    """Detect the language of `text` via the Google Translate v2 detect API.

    Returns the detected language code, or None when confidence <= 0.10.
    Raises Exception on a non-200 urlfetch response.
    """
    if len(text) > MAX_LENGTH:
        logging.info("trimming text from %s to %s", len(text), MAX_LENGTH)
        text = text[:MAX_LENGTH]
    detect_url = 'https://www.googleapis.com/language/translate/v2/detect'
    form_data = urls.urlencode({'key': API_KEY, 'q': text})
    # POST the (possibly long) query, but ask for GET semantics via the
    # X-HTTP-Method-Override header.
    if urllib2_fallback:
        req = urllib2.Request(detect_url, form_data, {'X-HTTP-Method-Override': 'GET'})
        response_content = urllib2.urlopen(req).read()
    else:
        fetched = urlfetch.fetch(url=detect_url, payload=form_data, method=urlfetch.POST, headers={'X-HTTP-Method-Override': 'GET'})
        if fetched.status_code != 200:
            error = "result status code is %s for content %s" % (fetched.status_code, fetched.content)
            logging.error(error)
            raise Exception("Error in translation: %s" % error)
        response_content = fetched.content
    detection = json.loads(response_content)['data']['detections'][0][0]
    logging.info("text classification returned %s", detection)
    if detection['confidence'] > 0.10:
        return detection['language']
    return None
def load_targeting_key(cursor, geoname): geo_search = { 'location_types': 'city,region', 'country_code': geoname.country_code, 'q': get_query(geoname), 'type': 'adgeolocation', 'access_token': get_config()['app_access_token'], 'locale': 'en_US', # because app_access_token is locale-less and seems to use a IP-locale fallback } cursor.execute( 'select data from AdGeoLocation where q = ? and country_code = ?', (geo_search['q'], geo_search['country_code'])) result = cursor.fetchone() if not result: result = urllib.urlopen('https://graph.facebook.com/v2.9/search?%s' % urls.urlencode(geo_search)).read() result_json = json.loads(result) array_json = json.dumps(result_json['data']) data = { 'q': geo_search['q'], 'country_code': geo_search['country_code'], 'data': array_json, } sqlite_db.insert_record(cursor, 'AdGeoLocation', data) print '\n'.join('- ' + str(x) for x in result_json['data'])
def handle_alternate_login(self, request):
    """Log a user in from uid/access_token(_md5) url parameters, if present.

    Returns the cleaned-up redirect URL (with the auth params stripped) when
    a uid parameter was supplied, or False when there was nothing to handle.
    """
    # If the mobile app sent the user to a /....?uid=XX&access_token_md5=YY URL,
    # then let's verify the parameters, and log the user in as that user
    if request.get('uid'):
        if request.get('access_token'):
            # Verify the raw access token by asking FB who it belongs to.
            fbl = fb_api.FBLookup(request.get('uid'), request.get('access_token'))
            fb_user = fbl.get(fb_api.LookupUser, 'me')
            logging.info("Requested /me with given access_token, got %s", fb_user)
            if fb_user['profile']['id'] == request.get('uid'):
                user = users.User.get_by_id(request.get('uid'))
                access_token_md5 = hashlib.md5(user.fb_access_token).hexdigest()
                self.set_login_cookie(request.get('uid'), access_token_md5=access_token_md5)
        if request.get('access_token_md5'):
            # Verify the md5 of the token against our stored token for that user.
            user = users.User.get_by_id(request.get('uid'))
            if user and request.get('access_token_md5') == hashlib.md5(user.fb_access_token).hexdigest():
                # Authenticated! Now save cookie so subsequent requests can trust that this user is authenticated.
                # The subsequent request will see a valid user_login param (though without an fb_cookie_uid)
                # NOTE(review): this reads self.request.get(...) where the rest of the
                # branch uses request.get(...) — presumably the same object; confirm.
                self.set_login_cookie(request.get('uid'), access_token_md5=self.request.get('access_token_md5'))
        # But regardless of whether the token was correct, let's redirect and get rid of these url params.
        current_url_args = {}
        for arg in sorted(self.request.GET):
            if arg in ['uid', 'access_token', 'access_token_md5']:
                continue
            current_url_args[arg] = self.request.GET.getall(arg)
        final_url = self.request.path + '?' + urls.urlencode(current_url_args, doseq=True)
        # Make sure we immediately stop running the initialize() code if we return a URL here
        return final_url
    else:
        return False
def load_users(user_ids, allow_cache=True, **kwargs):
    """Enqueue a /tasks/load_users task for each of the given user ids."""
    cache_flag = '1' if allow_cache else '0'
    for uid in user_ids:
        query = urls.urlencode(dict(user_id=uid, user_ids=uid, allow_cache=cache_flag))
        taskqueue.add(method='GET', url='/tasks/load_users?' + query, **kwargs)
def search_fb(fbl, style_name):
    """Fan out one keyword-search task per chunk of `style_name`."""
    for piece in get_chunks(style_name):
        params = dict(user_id=fbl.fb_uid or 'random', chunk=json.dumps(piece))
        taskqueue.add(
            method='GET',
            url='/tools/search_fb_for_events_for_chunk?' + urls.urlencode(params),
            queue_name='keyword-search',
        )
def load_users(user_ids, allow_cache=True, **kwargs):
    """Kick off a background /tasks/load_users fetch for every user id."""
    flag = allow_cache and '1' or '0'
    for user_id in user_ids:
        args = {'user_id': user_id, 'user_ids': user_id, 'allow_cache': flag}
        taskqueue.add(method='GET', url='/tasks/load_users?' + urls.urlencode(args), **kwargs)
def search_fb(fbl, style_name):
    """Queue a keyword-search task for each chunk derived from `style_name`."""
    uid = fbl.fb_uid or 'random'
    for chunk in get_chunks(style_name):
        taskqueue.add(
            method='GET',
            url='/tools/search_fb_for_events_for_chunk?' + urls.urlencode(dict(user_id=uid, chunk=json.dumps(chunk))),
            queue_name='keyword-search',
        )
def create_user_with_fbuser(
    fb_uid, fb_user, access_token, access_token_expires, location, ip, send_email=True, referer=None, client=None, send_new_user_email=True
):
    """Create and persist a new User from Facebook data, then kick off background loads.

    Returns the saved users.User entity.
    """
    user = users.User(id=fb_uid)
    user.ip = ip
    user.fb_access_token = access_token
    user.fb_access_token_expires = access_token_expires
    user.expired_oauth_token = False
    user.expired_oauth_token_reason = None
    user.location = location
    # The referer cookie tells us which user invited this one.
    logging.info("Referer was: %s", referer)
    if referer:
        #STR_ID_MIGRATE
        user.inviting_fb_uid = long(referer)
    user.clients = [client]
    user.send_email = send_email
    user.distance = '50'
    user.distance_units = 'miles'
    user.min_attendees = 0
    user.creation_time = datetime.datetime.now()
    user.login_count = 1
    user.last_login_time = user.creation_time
    user.compute_derived_properties(fb_user)
    logging.info("Saving user with name %s", user.full_name)
    user.put()
    logging.info("Requesting background load of user's friends")
    # Must run only after put(), since the task reads the stored fb_access_token.
    taskqueue.add(
        method='GET',
        url='/tasks/track_newuser_friends?' + urls.urlencode({'user_id': fb_uid}),
        queue_name='slow-queue'
    )
    # Pre-load potential events to speed up the "add event page" and let us
    # process/scrape their events.
    backgrounder.load_potential_events_for_users([fb_uid])
    fbl = fb_api.FBLookup(fb_uid, user.fb_access_token)
    if send_new_user_email:
        try:
            new_user_email.email_for_user(user, fbl, should_send=True)
        except new_user_email.NoEmailException as e:
            logging.info('Not sending new-user email due to: %s', e)
    return user
def load_sources(fb_source_ids, allow_cache=True, fb_uid='random', **kwargs):
    """Enqueue /sources/scrape tasks for the sources, batched 10 ids per task."""
    batch_size = 10
    cache_flag = allow_cache and '1' or '0'
    for start in range(0, len(fb_source_ids), batch_size):
        batch = fb_source_ids[start:start + batch_size]
        taskqueue.add(
            method='GET',
            url='/sources/scrape?' + urls.urlencode(dict(user_id=fb_uid, source_ids=','.join(batch), allow_cache=cache_flag)),
            queue_name='slow-queue',
            **kwargs
        )
def load_events(fb_event_ids, allow_cache=True, fb_uid='701004', **kwargs):
    """Enqueue /tasks/load_events tasks, 10 event ids per task."""
    batch = 10
    cache_flag = '1' if allow_cache else '0'
    for start in range(0, len(fb_event_ids), batch):
        ids = ','.join(fb_event_ids[start:start + batch])
        taskqueue.add(
            method='GET',
            url='/tasks/load_events?' + urls.urlencode(dict(user_id=fb_uid, event_ids=ids, allow_cache=cache_flag)),
            queue_name='slow-queue',
            **kwargs
        )
def load_events(fb_event_ids, allow_cache=True, fb_uid='701004', **kwargs):
    """Schedule background loads of the given FB events in batches of 10."""
    chunk_size = 10
    cache_value = allow_cache and '1' or '0'
    for offset in range(0, len(fb_event_ids), chunk_size):
        params = dict(
            user_id=fb_uid,
            event_ids=','.join(fb_event_ids[offset:offset + chunk_size]),
            allow_cache=cache_value,
        )
        taskqueue.add(method='GET', url='/tasks/load_events?' + urls.urlencode(params), queue_name='slow-queue', **kwargs)
def load_sources(fb_source_ids, allow_cache=True, fb_uid='random', **kwargs):
    """Batch the source ids (10 per task) into /sources/scrape task-queue entries."""
    step = 10
    cache_value = allow_cache and '1' or '0'
    for offset in range(0, len(fb_source_ids), step):
        query = urls.urlencode(
            dict(user_id=fb_uid, source_ids=','.join(fb_source_ids[offset:offset + step]), allow_cache=cache_value)
        )
        taskqueue.add(method='GET', url='/sources/scrape?' + query, queue_name='slow-queue', **kwargs)
def url(cls, path, fields=None, **kwargs):
    """Build a versioned Graph API request path.

    Args:
        path: the endpoint path; must be non-empty.
        fields: optional list of field names to request (a bare string is rejected).
        **kwargs: additional query parameters to encode.

    Returns:
        '/<version>/<path>' with any query parameters appended.

    Raises:
        ValueError: if path is missing/empty, or fields is a bare string.
    """
    # The message has always promised a *non-empty* path, but the old check
    # only rejected None — an empty string slipped through and produced a
    # malformed '/<version>/' URL. Reject all falsy paths.
    if not path:
        raise ValueError('Must pass non-empty path argument')
    if fields:
        if isinstance(fields, basestring):
            raise ValueError('Must pass in a list to fields: %r' % fields)
        kwargs['fields'] = ','.join(fields)
    if kwargs:
        # Append with '&' when the caller's path already embeds a query string.
        delimiter = '&' if '?' in path else '?'
        return '/%s/%s%s%s' % (cls.version, path, delimiter, urls.urlencode(kwargs))
    else:
        return '/%s/%s' % (cls.version, path)
def load_potential_events_for_users(fb_uids, allow_cache=True, **kwargs):
    """Enqueue one /tasks/load_potential_events_for_user task per user id."""
    #OPT: support more than one fbuser context per request in BaseTaskFacebookRequestHandler.initialize()
    per_task = 1
    cache_flag = '1' if allow_cache else '0'
    for start in range(0, len(fb_uids), per_task):
        joined = ','.join(fb_uids[start:start + per_task])
        taskqueue.add(
            method='GET',
            url='/tasks/load_potential_events_for_user?' + urls.urlencode(dict(user_id=joined, user_ids=joined, allow_cache=cache_flag)),
            queue_name='slow-queue',
            **kwargs
        )
def load_potential_events_for_users(fb_uids, allow_cache=True, **kwargs):
    """Queue a potential-events load for each user id (one uid per task)."""
    #OPT: support more than one fbuser context per request in BaseTaskFacebookRequestHandler.initialize()
    batch = 1
    cache_value = allow_cache and '1' or '0'
    for offset in range(0, len(fb_uids), batch):
        uid_csv = ','.join(fb_uids[offset:offset + batch])
        query = urls.urlencode(dict(user_id=uid_csv, user_ids=uid_csv, allow_cache=cache_value))
        taskqueue.add(method='GET', url='/tasks/load_potential_events_for_user?' + query, queue_name='slow-queue', **kwargs)
def setup_reminders(fb_uid, fb_user_events):
    """Schedule mobile-notification reminder tasks for a user's upcoming dance events."""
    events = fb_user_events['events']['data']
    event_ids = [e['id'] for e in events]
    dance_event_ids = [x.string_id() for x in eventdata.DBEvent.get_by_ids(event_ids, keys_only=True) if x]
    logging.info("For user %s's %s events, %s are real dance events", fb_uid, len(event_ids), len(dance_event_ids))
    for event in events:
        if event['id'] not in dance_event_ids:
            continue
        logging.info("%s is dance event, checking dates..", event['id'])
        # start_time is a specific moment, with timezone info included.
        start_time = parser.parse(event['start_time'])
        # Notify an hour before the event; the extra ten minutes keeps us ahead
        # of Facebook's own reminder so we aren't seen as the "dupe".
        start_notify_window = start_time - datetime.timedelta(hours=1, minutes=10)
        # Appengine tasks with an ETA max out around 30 days, and timezones make
        # that trickier; since this runs several times a day, a two-day horizon
        # is plenty.
        now = datetime.datetime.now(start_time.tzinfo)
        future_cutoff = now + datetime.timedelta(days=2)
        # Any time after start_notify_window the notification may already have
        # fired; re-adding the task after that timestamp could re-notify a user.
        # Double-check in our logs that this does not occur....(it shouldn't!)
        end_notify_window = start_notify_window
        if end_notify_window < now:
            # The notify window already passed.
            continue
        if start_notify_window > future_cutoff:
            # Too far in the future to care about yet.
            continue
        logging.info("For event %s, sending notifications at %s", event['id'], start_notify_window)
        try:
            taskqueue.add(
                method='GET',
                name='notify_user-%s-%s' % (fb_uid, event['id']),
                queue_name='mobile-notify-queue',
                eta=start_notify_window,
                url='/tasks/remind_user?' + urls.urlencode(dict(user_id=fb_uid, event_id=event['id'])),
            )
        except (taskqueue.TaskAlreadyExistsError, taskqueue.TombstonedTaskError) as e:
            logging.info("Error adding task: %s", e)
def search(**params):
    """Query the FB Graph v2.9 search endpoint, with a read-through cache keyed on params.

    Returns the decoded JSON response.

    Raises:
        ValueError: when no type= argument was given.
    """
    cached_result = _get_cache(params)
    if cached_result is not None:
        return json.loads(cached_result)
    new_params = params.copy()
    if 'type' not in new_params:
        # Fixed typo in the error message ("Most pass" -> "Must pass").
        raise ValueError('Must pass type= argument')
    # The access token is added after the cache lookup, so cache keys stay token-free.
    new_params['access_token'] = get_config()['app_access_token']
    result = urllib.urlopen('https://graph.facebook.com/v2.9/search?%s' % urls.urlencode(new_params)).read()
    _set_cache(params, result)
    return json.loads(result)
def _create_rpc_for_batch(self, batch_list, use_access_token):
    """Start an async POST of a Graph API batch request.

    Returns a (rpc, token) pair; token is the user token used, or None/unused
    when falling back to the app access token.
    """
    token = self.random_access_token()
    post_args = {'batch': json.dumps(batch_list)}
    if use_access_token and token:
        post_args["access_token"] = token
    else:
        # Fall back to the app access token form: app_id|secret.
        post_args["access_token"] = '%s|%s' % (facebook.FACEBOOK_CONFIG['app_id'], facebook.FACEBOOK_CONFIG['secret_key'])
    # Don't need to see all the caching headers per response.
    post_args["include_headers"] = False
    post_data = urls.urlencode(post_args)
    try:
        rpc = urlfetch.create_rpc(deadline=DEADLINE)
        urlfetch.make_fetch_call(rpc, "https://graph.facebook.com/", post_data, "POST")
    except AssertionError:
        # Outside the appengine sandbox, urlfetch asserts — fall back to urllib2.
        rpc = urllib2.urlopen("https://graph.facebook.com/", data=post_data, timeout=DEADLINE)
    self.fb_fetches += len(batch_list)
    return rpc, token
def create_user_with_fbuser(
    fb_uid, fb_user, access_token, access_token_expires, location, ip, send_email=True, referer=None, client=None, send_new_user_email=True
):
    """Build, save, and bootstrap a new User record from their FB profile data."""
    user = users.User(id=fb_uid)
    # Identity / auth state.
    user.ip = ip
    user.fb_access_token = access_token
    user.fb_access_token_expires = access_token_expires
    user.expired_oauth_token = False
    user.expired_oauth_token_reason = None
    user.location = location
    # The referer cookie identifies who invited this user.
    logging.info("Referer was: %s", referer)
    if referer:
        #STR_ID_MIGRATE
        user.inviting_fb_uid = long(referer)
    # Defaults for a brand-new account.
    user.clients = [client]
    user.send_email = send_email
    user.distance = '50'
    user.distance_units = 'miles'
    user.min_attendees = 0
    user.creation_time = datetime.datetime.now()
    user.login_count = 1
    user.last_login_time = user.creation_time
    user.compute_derived_properties(fb_user)
    logging.info("Saving user with name %s", user.full_name)
    user.put()
    logging.info("Requesting background load of user's friends")
    # Must happen only after put(), as the task needs the persisted fb_access_token.
    taskqueue.add(method='GET', url='/tasks/track_newuser_friends?' + urls.urlencode({'user_id': fb_uid}), queue_name='slow-queue')
    # Warm their potential events so the "add event page" is fast and their events get scraped.
    backgrounder.load_potential_events_for_users([fb_uid])
    fbl = fb_api.FBLookup(fb_uid, user.fb_access_token)
    if send_new_user_email:
        try:
            new_user_email.email_for_user(user, fbl, should_send=True)
        except new_user_email.NoEmailException as e:
            logging.info('Not sending new-user email due to: %s', e)
    return user
def check_language(text):
    """Return the Google-detected language code for `text`, or None if low-confidence."""
    # The detect API has input limits, so clip overly long text first.
    if len(text) > MAX_LENGTH:
        logging.info("trimming text from %s to %s", len(text), MAX_LENGTH)
        text = text[:MAX_LENGTH]
    base_url = 'https://www.googleapis.com/language/translate/v2/detect'
    encoded = urls.urlencode({'key': API_KEY, 'q': text})
    if urllib2_fallback:
        # GET semantics over POST, via the method-override header.
        response_content = urllib2.urlopen(
            urllib2.Request(base_url, encoded, {'X-HTTP-Method-Override': 'GET'})
        ).read()
    else:
        result = urlfetch.fetch(url=base_url, payload=encoded, method=urlfetch.POST, headers={'X-HTTP-Method-Override': 'GET'})
        if result.status_code != 200:
            error = "result status code is %s for content %s" % (result.status_code, result.content)
            logging.error(error)
            raise Exception("Error in translation: %s" % error)
        response_content = result.content
    top_detection = json.loads(response_content)['data']['detections'][0][0]
    logging.info("text classification returned %s", top_detection)
    return top_detection['language'] if top_detection['confidence'] > 0.10 else None
def fql_url(cls, fql):
    """Return the versioned Graph API path for an FQL query."""
    query_string = urls.urlencode(dict(q=fql))
    return "/%s/fql?%s" % (cls.version, query_string)