def runTest(self):
    self.assertEqual('US', gmaps_api.lookup_address('San Francisco').country())
    self.assertEqual('JP', gmaps_api.lookup_address('Tokyo').country())
    # Really long unicode address string
    self.assertEqual(
        'RU',
        gmaps_api.lookup_address(
            u"г.Сочи , ул.Навагинская 9 / 3 этаж...Молодёжный Творческий Центр им.Артура Тумасяна, Творческий Клуб \" Чип Бар \""
        ).country()
    )

def compute_derived_properties(self, fb_source_common, fb_source_data):
    if fb_source_common['empty']:  # only update these when we have feed data
        self.fb_info = {}
    else:
        self.fb_info = fb_source_data['info']
    # LookupThing* (and all fb_info dependencies). Only used for /search_pages functionality
    self.graph_type = _type_for_fb_source(fb_source_common)
    if 'name' not in fb_source_common['info']:
        logging.error('cannot find name for fb event data: %s, cannot update source data...', fb_source_common)
        return
    self.name = fb_source_common['info']['name']
    self.emails = fb_source_data['info'].get('emails', [])
    if not self.emails:
        pass  # TODO: trigger basic crawl of website to search for emails
    feed = fb_source_common['feed']['data']
    if len(feed):
        dt = datetime.datetime.strptime(feed[-1]['created_time'], '%Y-%m-%dT%H:%M:%S+0000')
        td = datetime.datetime.now() - dt
        total_seconds = td.seconds + td.days * 24 * 3600
        self.feed_history_in_seconds = total_seconds
        #logging.info('feed time delta is %s', self.feed_history_in_seconds)
    else:
        self.feed_history_in_seconds = 0
    location = fb_source_data['info'].get('location')
    if location:
        if location.get('latitude'):
            self.latitude = float(location.get('latitude'))
            self.longitude = float(location.get('longitude'))
        else:
            component_names = ['street', 'city', 'state', 'zip', 'region', 'country']
            components = [location.get(x) for x in component_names if location.get(x)]
            address = ', '.join(components)
            geocode = gmaps_api.lookup_address(address)
            if geocode:
                self.latitude, self.longitude = geocode.latlng()

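# (Illustrative aside, not part of the original function.)
# The hand-rolled "td.seconds + td.days * 24 * 3600" above is the classic
# pre-total_seconds() idiom; on Python 2.7+ it matches
# datetime.timedelta.total_seconds(), ignoring microseconds. A minimal check:
import datetime

_td = datetime.timedelta(days=2, seconds=30)
assert _td.seconds + _td.days * 24 * 3600 == int(_td.total_seconds())
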
def runTest(self):
    for address, final_address in formatting_reg_data.iteritems():
        logging.info('%s should be formatted as %s', address, final_address)
        formatted_address = formatting.format_geocode(gmaps_api.lookup_address(address), include_neighborhood=True)
        if formatted_address != final_address:
            logging.error('formatted address for %r is %r, should be %r', address, formatted_address, final_address)
            logging.error('%s', gmaps_api._fetch_geocode_as_json(address=address))
        self.assertEqual(final_address, formatted_address)

def runTest(self):
    for addresses, reformatted_addresses in grouping_lists:
        logging.info("Formatting addresses: %s", addresses)
        logging.info("Intended reformatted addresses: %r", reformatted_addresses)
        geocodes = [gmaps_api.lookup_address(address) for address in addresses]
        reformatted_parts = formatting.format_geocodes(geocodes, include_neighborhood=True)
        logging.info("Reformatted addresses: %r", reformatted_parts)
        self.assertEqual(reformatted_parts, reformatted_addresses)

def preput(self):
    self.location_country = None
    self.geoname_id = None
    self.city_name = None
    if self.location:
        geocode = gmaps_api.lookup_address(self.location)
        if geocode:
            self.location_country = geocode.country()
            city = cities_db.get_nearby_city(geocode.latlng(), country=geocode.country())
            if city:
                self.geoname_id = city.geoname_id
                self.city_name = city.display_name()

def build_query(self, start_end_query=False):
    bounds = None
    country_code = None
    if self.location.data:
        geocode = gmaps_api.lookup_address(self.location.data, language=self.locale.data)
        if geocode.is_country_geocode():
            country_code = geocode.country()
        else:
            bounds = math.expand_bounds(geocode.latlng_bounds(), self.distance_in_km())
    keywords = _get_parsed_keywords(self.keywords.data)
    common_fields = dict(
        bounds=bounds,
        min_attendees=self.min_attendees.data,
        min_worth=self.min_worth.data,
        keywords=keywords,
        country_code=country_code,
    )
    query = SearchQuery(start_date=self.start.data, end_date=self.end.data, **common_fields)
    return query

def promote_events_to_user(user):
    # TODO: Adjust when we have iphone notifications
    if not android.can_notify(user):
        return
    logging.info("Promoting new events to user %s", user.fb_uid)
    # Only send notifications for Mike for now
    user = users.User.get_by_id(user.fb_uid)
    if not user:
        logging.error("No user found: %s", user.fb_uid)
        return
    if user.expired_oauth_token:
        logging.info("User has expired token, aborting: %s", user.fb_uid)
        return
    user_location = user.location
    if not user_location:
        return
    distance_in_km = user.distance_in_km()
    min_attendees = user.min_attendees
    # search for relevant events
    geocode = gmaps_api.lookup_address(user_location)
    if not geocode:
        return None
    bounds = math.expand_bounds(geocode.latlng_bounds(), distance_in_km)
    query = search_base.SearchQuery(time_period=search_base.TIME_UPCOMING, bounds=bounds, min_attendees=min_attendees)
    one_day_ago = time.mktime((datetime.datetime.now() - datetime.timedelta(hours=24)).timetuple())
    search_query = search.Search(query)
    search_query.extra_fields = ['creation_time']
    search_results = search_query._get_candidate_doc_events()
    # TODO: can we move this filter into the search query itself??
    recent_events = [x.doc_id for x in search_results if x.field('creation_time').value > one_day_ago]
    logging.info("Found %s search_results, %s new events", len(search_results), len(recent_events))
    for event_id in recent_events:
        if android.add_notify(user, event_id):
            logging.info("Sent notification!")

def get(self):
    event_id = None
    if self.request.get('event_url'):
        event_id = urls.get_event_id_from_url(self.request.get('event_url'))
    elif self.request.get('event_id'):
        event_id = self.request.get('event_id')
    self.finish_preload()
    fb_event = get_fb_event(self.fbl, event_id)
    if not fb_event:
        logging.error('No fetched data for %s, showing error page', event_id)
        return self.show_barebones_page(event_id, "No fetched data")
    e = eventdata.DBEvent.get_by_id(event_id)
    if not fb_events.is_public_ish(fb_event):
        if e:
            fb_event = e.fb_event
        else:
            self.add_error('Cannot add secret/closed events to dancedeets!')
    self.errors_are_fatal()
    owner_location = None
    if 'owner' in fb_event['info']:
        owner_id = fb_event['info']['owner']['id']
        location = (
            self._get_location(owner_id, fb_api.LookupProfile, 'profile') or
            self._get_location(owner_id, fb_api.LookupThingPage, 'info')
        )
        if location:
            owner_location = event_locations.city_for_fb_location(location)
    self.display['owner_location'] = owner_location
    display_event = search.DisplayEvent.get_by_id(event_id)
    # Don't insert object until we're ready to save it...
    if e and e.creating_fb_uid:  #STR_ID_MIGRATE
        creating_user = self.fbl.get(fb_api.LookupProfile, str(e.creating_fb_uid))
        if creating_user.get('empty'):
            logging.warning(
                'Have creating-user %s...but it is not publicly visible, so treating as None: %s',
                e.creating_fb_uid, creating_user
            )
            creating_user = None
    else:
        creating_user = None
    potential_event = potential_events.make_potential_event_without_source(event_id)
    a = time.time()
    classified_event = event_classifier.get_classified_event(fb_event, potential_event.language)
    timelog.log_time_since('Running BasicText Classifier', a)
    self.display['classified_event'] = classified_event
    dance_words_str = ', '.join(list(classified_event.dance_matches()))
    if classified_event.is_dance_event():
        event_words_str = ', '.join(list(classified_event.event_matches()))
    else:
        event_words_str = 'NONE'
    self.display['classifier_dance_words'] = dance_words_str
    self.display['classifier_event_words'] = event_words_str
    self.display['creating_user'] = creating_user
    self.display['potential_event'] = potential_event
    self.display['display_event'] = display_event
    start = time.time()
    add_result = event_auto_classifier.is_auto_add_event(classified_event)
    notadd_result = event_auto_classifier.is_auto_notadd_event(classified_event, auto_add_result=add_result)
    timelog.log_time_since('Running Text Classifier', start)
    auto_classified = ''
    if add_result.is_good_event():
        auto_classified += 'add: %s.\n' % add_result
    if notadd_result[0]:
        auto_classified += 'notadd: %s.\n' % notadd_result[1]
    self.display['auto_classified_add'] = add_result
    self.display['auto_classified_notadd'] = notadd_result
    styles = categories.find_styles(fb_event)
    event_types = styles + categories.find_event_types(fb_event)
    self.display['auto_categorized_types'] = ', '.join(x.public_name for x in event_types)
    a = time.time()
    fb_event_attending_maybe = get_fb_event(self.fbl, event_id, lookup_type=fb_api.LookupEventAttendingMaybe)
    timelog.log_time_since('Loading FB Event Attending Data', a)
    a = time.time()
    location_info = event_locations.LocationInfo(
        fb_event, fb_event_attending_maybe=fb_event_attending_maybe, db_event=e, debug=True
    )
    self.display['location_info'] = location_info
    if location_info.fb_address:
        fb_geocode = gmaps_api.lookup_address(location_info.fb_address)
        self.display['fb_geocoded_address'] = formatting.format_geocode(fb_geocode)
    else:
        self.display['fb_geocoded_address'] = ''
    city_name = 'Unknown'
    if location_info.geocode:
        city = cities_db.get_nearby_city(location_info.geocode.latlng(), country=location_info.geocode.country())
        if city:
            city_name = city.display_name()
    self.display['ranking_city_name'] = city_name
    person_ids = fb_events.get_event_attendee_ids(fb_event_attending_maybe)
    if location_info.geocode:
        data = person_city.get_data_fields(person_ids, location_info.geocode.latlng())
        self.display['attendee_distance_info'] = data
    else:
        self.display['attendee_distance_info'] = 'Unknown'
    matcher = event_attendee_classifier.get_matcher(
        self.fbl, fb_event, fb_event_attending_maybe=fb_event_attending_maybe, classified_event=classified_event
    )
    timelog.log_time_since('Running Attendee Classifier', a)
    # print '\n'.join(matcher.results)
    sorted_matches = sorted(matcher.matches, key=lambda x: -len(x.overlap_ids))
    matched_overlap_ids = sorted_matches[0].overlap_ids if matcher.matches else []
    self.display['auto_add_attendee_ids'] = sorted(matched_overlap_ids)
    self.display['overlap_results'] = ['%s %s: %s' % (x.top_n, x.name, x.reason) for x in sorted_matches]
    self.display['overlap_attendee_ids'] = sorted(matcher.overlap_ids)
    if matcher.matches:
        attendee_ids_to_admin_hash_and_event_ids = sorted_matches[0].get_attendee_lookups()
        self.display['attendee_ids_to_admin_hash_and_event_ids'] = attendee_ids_to_admin_hash_and_event_ids
    self.display['event'] = e
    self.display['event_id'] = event_id
    self.display['fb_event'] = fb_event
    self.jinja_env.filters['highlight_keywords'] = event_classifier.highlight_keywords
    self.display['track_analytics'] = False
    self.render_template('admin_edit')

def build_search_results_api(form, search_query, search_results, version, need_full_event, geocode, distance, partial_results=False, skip_people=False):
    if geocode:
        center_latlng, southwest, northeast = search_base.get_center_and_bounds(geocode, distance)
    else:
        southwest = None
        northeast = None
    onebox_links = []
    if search_query:
        onebox_links = onebox.get_links_for_query(search_query)
    json_results = []
    if search_results:
        effective_end_date = search_results[-1].start_time.date() if partial_results else search_query.end_date
    for result in search_results:
        try:
            if need_full_event:
                json_result = canonicalize_event_data(result.db_event, version, event_keywords=result.event_keywords)
            else:
                json_result = canonicalize_search_event_data(result, version)
            # Make sure we trim the event_times to the search window
            if json_result['event_times']:
                event_times = []
                for event_time in json_result['event_times']:
                    if search_query.start_date:
                        end_date = _parse_fb_time(event_time.get('end_time', event_time['start_time']))
                        if search_query.start_date > end_date:
                            continue
                    if effective_end_date:
                        start_date = _parse_fb_time(event_time['start_time'])
                        if effective_end_date < start_date:
                            continue
                    event_times.append(event_time)
                json_result['event_times'] = event_times
            json_results.append(json_result)
        except Exception as e:
            logging.exception("Error processing event %s: %s" % (result.event_id, e))
    real_featured_infos = []
    try:
        featured_infos = featured.get_featured_events_for(southwest, northeast)
        for featured_info in featured_infos:
            try:
                featured_info['event'] = canonicalize_event_data(featured_info['event'], version)
                if featured_info.get('manualImage'):
                    featured_info['event']['picture']['source'] = featured_info['event']['picture']['source'] + '/manual'
                    featured_info['event']['picture']['width'] = 1656
                    featured_info['event']['picture']['height'] = 630
                real_featured_infos.append(featured_info)
            except Exception as e:
                # Note: 'result' here still refers to the last search result from the loop above,
                # so this log line mislabels which featured event failed.
                logging.exception("Error processing event %s: %s" % (result.event_id, e))
    except Exception as e:
        logging.exception("Error building featured event listing: %s", e)
    groupings = people_groupings(geocode, distance, skip_people=skip_people)
    query = {}
    if form:
        for field in form:
            query[field.name] = getattr(field, '_value', lambda: field.data)()
    if geocode:
        address_geocode = gmaps_api.lookup_address(geocode.formatted_address())
    else:
        address_geocode = None
    json_response = {
        'results': json_results,
        'onebox_links': onebox_links,
        'location': geocode.formatted_address() if geocode else None,
        'address': address.get_address_from_geocode(address_geocode),
        'query': query,
    }
    if groupings is not None:
        json_response['people'] = groupings
    if version <= (1, 3):
        json_response['featured'] = [x['event'] for x in real_featured_infos]
    else:
        json_response['featuredInfos'] = real_featured_infos
    if southwest and northeast:
        json_response['location_box'] = {
            'southwest': {
                'latitude': southwest[0],
                'longitude': southwest[1],
            },
            'northeast': {
                'latitude': northeast[0],
                'longitude': northeast[1],
            },
        }
    return json_response

def _geocodable_location(form, field):
    if field.data:
        geocode = gmaps_api.lookup_address(field.data)
        if not geocode:
            raise wtforms.ValidationError("Did not understand location: %s" % field.data)

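# (Illustrative sketch, not part of the original code.)
# _geocodable_location follows the standard wtforms inline-validator protocol:
# it is called with (form, field) and raises ValidationError on bad input.
# A hypothetical form wiring it up might look like this; SearchForm and its
# fields are assumptions for illustration only.
import wtforms

class SearchForm(wtforms.Form):
    location = wtforms.StringField('Location', validators=[_geocodable_location])
    keywords = wtforms.StringField('Keywords')
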
def update_mailchimp(user):
    ctx = context.get()
    mailchimp_list_id = -1
    if ctx:
        params = ctx.mapreduce_spec.mapper.params
        mailchimp_list_id = params.get('mailchimp_list_id', mailchimp_list_id)
    if mailchimp_list_id == -1:
        mailchimp_list_id = mailchimp_api.LIST_ID
    trimmed_locale = user.locale or ''
    if '_' in trimmed_locale:
        trimmed_locale = trimmed_locale.split('_')[0]
    if not user.email:
        mr.increment('mailchimp-error-no-email')
        logging.info('No email for user %s: %s', user.fb_uid, user.full_name)
        return
    if user.mailchimp_email != user.email:
        # When some old users are saved, their mailchimp email will be None,
        # so we don't really need to worry about them here.
        logging.info('Updating user email to %s with old mailchimp email %s', user.email, user.mailchimp_email)
        if user.mailchimp_email != None:
            mr.increment('mailchimp-update-email-error-response')
            try:
                user_data = mailchimp_api.update_email(mailchimp_api.LIST_ID, user.mailchimp_email, user.email)
            except mailchimp_api.UserNotFound:
                mr.increment('mailchimp-update-email-error-not-found')
                logging.error('Updating user %s email to mailchimp, returned not found', user.fb_uid)
            else:
                logging.info('Result: %s', user_data)
                if user_data['email_address'] == user.email:
                    logging.info('Updating user %s email to mailchimp, returned OK', user.fb_uid)
                else:
                    mr.increment('mailchimp-update-email-error-response')
                    logging.error('Updating user %s email to mailchimp, returned %s', user.fb_uid, user_data)
        # Mark our current mailchimp_email down, so we can update it properly later if desired.
        user.mailchimp_email = user.email
    # Now that Mailchimp knows about our new user email,
    # we can update/reference it using the normal add_members() below.
    member = {
        'email_address': user.email,
        # Mailchimp is the official store of 'are they subscribed', so let's not overwrite it here
        'status_if_new': 'subscribed',
        'language': trimmed_locale,
        'merge_fields': {
            'USER_ID': user.fb_uid,  # necessary so we can update our local datastore on callbacks
            'FIRSTNAME': user.first_name or '',
            'LASTNAME': user.last_name or '',
            'FULLNAME': user.full_name or '',
            'NAME': user.first_name or user.full_name or '',
            'WEEKLY': unicode(user.send_email),
            'EXPIRED': unicode(user.expired_oauth_token),
            'LASTLOGIN': user.last_login_time.strftime('%Y-%m-%d') if user.last_login_time else '',
        },
        'timestamp_signup': user.creation_time.strftime('%Y-%m-%dT%H:%M:%S'),
        'timestamp_opt': user.creation_time.strftime('%Y-%m-%dT%H:%M:%S'),
    }
    if user.location:
        geocode = gmaps_api.lookup_address(user.location)
        if geocode:
            user_latlong = geocode.latlng()
            member['location'] = {
                'latitude': user_latlong[0],
                'longitude': user_latlong[1],
            }
        else:
            logging.warning('User %s (%s) had un-geocodable address: %s', user.fb_uid, user.full_name, user.location)
    if not runtime.is_prod_appengine():
        mr.increment('mailchimp-api-call')
        result = mailchimp_api.add_members(mailchimp_list_id, [member])
        if result['errors']:
            mr.increment('mailchimp-error-response')
            logging.error('Writing user %s to mailchimp returned %s on input: %s', user.fb_uid, result['errors'], member)
        else:
            logging.info('Writing user %s to mailchimp returned OK', user.fb_uid)