def test_json(self):
    """Round-trip self.string through the JSON serializer and verify fidelity."""
    # Deserialize and spot-check a couple of known fields.
    parsed = json_serializer.load_string(self.string)
    self.assertEqual(parsed['hello'], 'world')
    self.assertEqual(parsed['int'], 1234)
    # Serializing back must reproduce the original string exactly.
    self.assertEqual(self.string, json_serializer.dump_string(parsed))
def get_photo_album_detail_fragment(db_session, album_se, author):
    """Build the detail fragment for a photo album service event.

    Returns a dict with 'photo_count' and 'cover_photos'. For Facebook
    albums the album JSON's 'cover_photo' id is used to look up the cover
    event; otherwise (no cover id, no matching row, or a non-Facebook
    service) the top album photos are used instead.

    NOTE(review): the `author` parameter is unused in this body — confirm
    whether callers still need to pass it.
    """
    # select the first 3 photos in the album for the cover art
    # TODO complete
    cover_photos = []
    facebook_service_id = data_access.service.name_to_id('facebook')
    if album_se.service_id == facebook_service_id:
        json_obj = load_string(album_se.json)
        # get the cover photo if it exists otherwise get top photos
        photo_id = json_obj.get('cover_photo')
        if photo_id:
            try:
                # look up the ServiceEvent row for the cover photo id
                photo_se = db_session.query(ServiceEvent). \
                    filter(and_(ServiceEvent.service_id == facebook_service_id,
                                ServiceEvent.event_id == photo_id)). \
                    one()
                photo = get_photo_detail_fragment(photo_se)
                if photo:
                    cover_photos.append(photo)
            except NoResultFound:
                # cover photo referenced by the album is not in our store;
                # fall through with an empty cover list
                pass
        else:
            cover_photos = _get_top_album_photos(db_session, album_se)
    else:
        cover_photos = _get_top_album_photos(db_session, album_se)
    return {'photo_count': _get_album_count(db_session, album_se),
            'cover_photos': cover_photos}
def fromJSONFields(self, fromJSON, auxData=None):
    """Populate this event from JSON, then enrich it with the connection's profile.

    Calls the superclass loader first, then — unless the connection is
    'private' — requests headline/summary/picture-url for the first
    connection in the update and stores them on self.

    Returns self (fluent style).
    """
    super(LinkedInConnectEvent, self).fromJSONFields(fromJSON, auxData)
    # query for the connection's profile image and bio
    connection = self.raw_json['updateContent']['person']['connections']['values'][0]
    if connection['id'] != 'private':
        url = 'http://api.linkedin.com/v1/people/id=%s:(headline,summary,picture-url)' % connection['id']
        # BUG FIX: initialize before the try so the except handler can log it
        # even when make_request itself raises (previously a NameError would
        # mask the real failure).
        contentJSON = None
        try:
            # request the user's updates
            contentJSON = make_request(auxData, url, {'x-li-format': 'json'})
            contentObj = json_serializer.load_string(contentJSON)
            self.headline = contentObj.get('headline')
            self.summary = contentObj.get('summary')
            self.photo = contentObj.get('pictureUrl')
        except urllib2.URLError:
            # best-effort enrichment: log and continue without the profile data
            self.log.error('***ERROR*** parse error')
            self.log.error(contentJSON)
    return self
def fetch(self, service_id, service_author_id, service_event_id, callback): asm = self.get_author_service_map(service_author_id) # TODO - temporary until we figure out a better solution for # not over-driving Twitter with un-authenticated events if not asm.access_token: return if asm.access_token: consumer = oauth.Consumer(self.oauth_config['key'], self.oauth_config['secret']) token = oauth.Token(asm.access_token, asm.access_token_secret) client = oauth.Client(consumer, token) args = {'id': service_event_id, 'include_entities': '1', 'trim_user': '******'} # if not authenticated provide the user_id query arg if not asm.access_token: args['user_id'] = asm.service_author_id url = TWEET_STATUS % (self.oauth_config['endpoint'], urllib.urlencode(args)) # TODO - remove the try/except once figure out a better solution for not # exceeding Twitter's rate limits try: json_obj = json_serializer.load_string(make_request(client, url)) if asm.access_token \ else json_serializer.load(urllib2.urlopen(url)) except urllib2.URLError, e: logging.error('ERROR REQUEST URL: {0}'.format(url)) logging.error('ERROR REASON: {0}, {1}'.format(e.code, e.read())) raise
def post_facebook_feed(request):
    """Handle a Facebook realtime-update callback.

    Parses the notification payload from the request body, converts it
    into internal events, and forwards them to the message client.
    """
    notification = json_serializer.load_string(request.body)
    converted = convert_facebook_notification(notification)
    send_messages(request.message_client, converted)
    return Response()
def test_correlate_twitter_event(self):
    """Correlating a Twitter status must yield the expected hash id and expanded URL."""
    # Raw Twitter REST API status payload (an @nytimes tweet with a t.co
    # link whose expanded_url points at nyti.ms) captured as a fixture.
    event = '{"created_at":"Mon Jun 25 02:00:33 +0000 2012","id":217074771807059968,"id_str":"217074771807059968","text":"Second Time Around, Hope for Gay Marriage in Maine http:\/\/t.co\/YFT32DLX","source":"web","truncated":false,"in_reply_to_status_id":null,"in_reply_to_status_id_str":null,"in_reply_to_user_id":null,"in_reply_to_user_id_str":null,"in_reply_to_screen_name":null,"user":{"id":807095,"id_str":"807095","name":"The New York Times","screen_name":"nytimes","location":"New York, NY","description":"Where the Conversation Begins. Follow for breaking news, NYTimes.com home page articles, special features and RTs of our journalists. ","url":"http:\/\/www.nytimes.com\/","protected":false,"followers_count":5414039,"friends_count":637,"listed_count":104350,"created_at":"Fri Mar 02 20:41:42 +0000 2007","favourites_count":3,"utc_offset":-18000,"time_zone":"Eastern Time (US & Canada)","geo_enabled":false,"verified":true,"statuses_count":85325,"lang":"en","contributors_enabled":true,"is_translator":false,"profile_background_color":"FFFFFF","profile_background_image_url":"http:\/\/a0.twimg.com\/profile_background_images\/4432187\/twitter_post.png","profile_background_image_url_https":"https:\/\/si0.twimg.com\/profile_background_images\/4432187\/twitter_post.png","profile_background_tile":true,"profile_image_url":"http:\/\/a0.twimg.com\/profile_images\/2044921128\/finals_normal.png","profile_image_url_https":"https:\/\/si0.twimg.com\/profile_images\/2044921128\/finals_normal.png","profile_link_color":"004276","profile_sidebar_border_color":"323232","profile_sidebar_fill_color":"E7EFF8","profile_text_color":"000000","profile_use_background_image":true,"show_all_inline_media":false,"default_profile":false,"default_profile_image":false,"following":null,"follow_request_sent":null,"notifications":null},"geo":null,"coordinates":null,"place":null,"contributors":null,"retweet_count":92,"entities":{"hashtags":[],"urls":[{"url":"http:\/\/t.co\/YFT32DLX","expanded_url":"http:\/\/nyti.ms\/KHpcpD","display_url":"nyti.ms\/KHpcpD","indices":[51,71]}],"user_mentions":[]},"favorited":false,"retweeted":false,"possibly_sensitive":false}'
    event = json_serializer.load_string(event)
    # No author_service_map / oauth_config needed for correlation.
    interpreter = event_interpreter.create_event_interpreter("twitter", event, None, None)
    hash_id, uri = event_correlator._correlate_event(interpreter)
    # Expected hash of the canonical URI, and the fully resolved
    # (shortener-expanded) article URL.
    self.assertEqual(hash_id, "UaMGQijlLKVWm-7OBQwUWv7pyakgAoHwvvTUvgtz7is")
    self.assertEqual(
        uri,
        "http://www.nytimes.com/2012/06/25/us/politics/second-time-around-hope-for-gay-marriage-in-maine.html"
    )
def fetch(self, service_id, service_author_id, service_event_id, callback):
    """Re-fetch a stored LinkedIn event and enrich it with comments/likes.

    Loads the event's JSON from the database, fetches comments if the
    event is commentable and likes if it is likable, merges both into the
    event object, and hands the result to `callback` as a new event.
    """
    asm = self.get_author_service_map(service_author_id)
    consumer = oauth.Consumer(self.oauth_config['key'], self.oauth_config['secret'])
    token = oauth.Token(asm.access_token, asm.access_token_secret)
    client = oauth.Client(consumer, token)
    # check if this event isCommentable or isLikable
    # (single-column query; trailing comma unpacks the 1-tuple row)
    event_json, = db.Session().query(ServiceEvent.json). \
        filter(and_(ServiceEvent.author_service_map_id == asm.id,
                    ServiceEvent.event_id == service_event_id)).one()
    event_obj = json_serializer.load_string(event_json)
    update_obj = None
    if event_obj.get("isCommentable", False):
        url = UPDATE_COMMENTS % (self.oauth_config['endpoint'], service_event_id)
        update_obj = json_serializer.load_string(make_request(client, url, {'x-li-format': 'json'}))
    likes_obj = None
    if event_obj.get("isLikable", False):
        url = UPDATE_LIKES % (self.oauth_config['endpoint'], service_event_id)
        likes_obj = json_serializer.load_string(make_request(client, url, {'x-li-format': 'json'}))
    # merge update and likes together into one object
    if update_obj or likes_obj:
        if update_obj:
            event_obj['updateComments'] = update_obj
        if likes_obj:
            event_obj['isLiked'] = likes_obj['_total'] > 0
            event_obj['numLikes'] = likes_obj['_total']
            event_obj['likes'] = likes_obj
    interpreter = LinkedinEventInterpreter(event_obj, asm, self.oauth_config)
    callback(create_linkedin_event(asm.author_id, CURRENT_STATE, service_author_id,
                                   interpreter.get_id(), event_obj))
def get_location_fragment(se):
    """Return the raw 'location' object from an Instagram event, or None.

    Only Instagram events carrying JSON are considered; everything else
    yields None.
    """
    if se.service_id != data_access.service.name_to_id('instagram') or not se.json:
        return None
    json_obj = load_string(se.json)
    # TODO this needs to be normalized to thisis.me's structure using the event interpreter
    return json_obj.get('location')
def get_photo_detail_fragment(se):
    """Build a list of image renditions for a photo event, ordered by pixel area.

    Supports Facebook (list of 'images' with 'source') and Instagram
    (dict of 'images' with 'url'). Returns a list of
    {'image_url', 'width', 'height'} dicts sorted smallest-to-largest,
    or None for a Facebook event with no images. Non-photo services
    yield an empty list.
    """
    size_ordered_images = {}
    if se.service_id == data_access.service.name_to_id('facebook'):
        json_obj = load_string(se.json)
        # for some reason not all facebook photo events have an image property;
        # if it doesn't, skip it
        if 'images' not in json_obj:
            logging.warning('Skipping Facebook event with no images')
            return None
        for candidate in json_obj.get('images', []):
            size = candidate.get('width', 0) * candidate.get('height', 0)
            size_ordered_images[size] = {'image_url': candidate['source'],
                                         'width': candidate['width'],
                                         'height': candidate['height']}
    elif se.service_id == data_access.service.name_to_id('instagram'):
        json_obj = load_string(se.json)
        for candidate in json_obj['images'].itervalues():
            size = candidate.get('width', 0) * candidate.get('height', 0)
            size_ordered_images[size] = {'image_url': candidate['url'],
                                         'width': candidate['width'],
                                         'height': candidate['height']}
    # BUG FIX: sort by the pixel-area key. The original used
    # key=lambda x: x[1], which compared the image *dicts* themselves —
    # an arbitrary ordering in Python 2 — defeating the size ordering.
    return [image for _size, image in sorted(size_ordered_images.iteritems())]
def _create_shared_services(correlated_events):
    """Summarize each correlated service event as a 'share' source record.

    For every event, an interpreter is built from its JSON to resolve the
    event's public URL; the result is a list of dicts describing where the
    content was shared.
    """
    sources = []
    for se in correlated_events:
        service_name = service.id_to_service[se.service_id].service_name
        interpreter = event_interpreter.create_event_interpreter(
            service_name, json_serializer.load_string(se.json), None, None)
        sources.append({
            'service_name': service_name,
            'event_id': se.id,
            'service_event_id': se.event_id,
            'service_event_url': interpreter.url(),
            'author_id': se.author_id,
            'service_id': se.service_id,
        })
    return sources
def init_connection(self):
    """Populate headline/summary/photo from the first connection's profile.

    Skips the lookup (setting all three fields to None) when the
    connection is private or a client has already been bound.
    """
    connection = self.json["updateContent"]["person"]["connections"]["values"][0]
    if connection["id"] == "private" or self._client is not None:
        self.headline = None
        self.summary = None
        self.photo = None
        return
    url = "%speople/id=%s:(headline,summary,picture-url)" % (
        self.oauth_config["endpoint"], connection["id"])
    # request the user's updates
    profile = json_serializer.load_string(
        make_request(self.get_oauth_client(), url, {"x-li-format": "json"}))
    self.headline = profile.get("headline")
    self.summary = profile.get("summary")
    self.photo = profile.get("pictureUrl")
def test_twitter_event(self):
    """Interpreting a Twitter status must expose every derived field correctly."""
    # Raw Twitter REST API status payload: a tweet containing both a
    # t.co link (expanded to a Wikipedia URL) and an attached photo.
    event = '{"created_at":"Fri Jun 22 22:44:42 +0000 2012","id":216300709476433920,"id_str":"216300709476433920","text":"And a link: http:\/\/t.co\/6fgHbu70 http:\/\/t.co\/OVJDl4UA","source":"web","truncated":false,"in_reply_to_status_id":null,"in_reply_to_status_id_str":null,"in_reply_to_user_id":null,"in_reply_to_user_id_str":null,"in_reply_to_screen_name":null,"user":{"id":569626455,"id_str":"569626455","name":"Jos\u00e9 Garc\u00eda Sancio","screen_name":"jagsancio","location":"","description":"","url":null,"protected":false,"followers_count":1,"friends_count":8,"listed_count":0,"created_at":"Thu May 03 00:26:13 +0000 2012","favourites_count":0,"utc_offset":null,"time_zone":null,"geo_enabled":false,"verified":false,"statuses_count":5,"lang":"en","contributors_enabled":false,"is_translator":false,"profile_background_color":"C0DEED","profile_background_image_url":"http:\/\/a0.twimg.com\/images\/themes\/theme1\/bg.png","profile_background_image_url_https":"https:\/\/si0.twimg.com\/images\/themes\/theme1\/bg.png","profile_background_tile":false,"profile_image_url":"http:\/\/a0.twimg.com\/sticky\/default_profile_images\/default_profile_6_normal.png","profile_image_url_https":"https:\/\/si0.twimg.com\/sticky\/default_profile_images\/default_profile_6_normal.png","profile_link_color":"0084B4","profile_sidebar_border_color":"C0DEED","profile_sidebar_fill_color":"DDEEF6","profile_text_color":"333333","profile_use_background_image":true,"show_all_inline_media":false,"default_profile":true,"default_profile_image":true,"following":null,"follow_request_sent":null,"notifications":null},"geo":null,"coordinates":null,"place":null,"contributors":null,"retweet_count":0,"entities":{"hashtags":[],"urls":[{"url":"http:\/\/t.co\/6fgHbu70","expanded_url":"http:\/\/en.wikipedia.org\/wiki\/World","display_url":"en.wikipedia.org\/wiki\/World","indices":[12,32]}],"user_mentions":[],"media":[{"id":216300709480628224,"id_str":"216300709480628224","indices":[33,53],"media_url":"http:\/\/p.twimg.com\/AwB0WWlCAAAUkEj.jpg","media_url_https":"https:\/\/p.twimg.com\/AwB0WWlCAAAUkEj.jpg","url":"http:\/\/t.co\/OVJDl4UA","display_url":"pic.twitter.com\/OVJDl4UA","expanded_url":"http:\/\/twitter.com\/jagsancio\/status\/216300709476433920\/photo\/1","type":"photo","sizes":{"large":{"w":800,"h":450,"resize":"fit"},"small":{"w":340,"h":191,"resize":"fit"},"thumb":{"w":150,"h":150,"resize":"crop"},"medium":{"w":600,"h":337,"resize":"fit"}}}]},"favorited":false,"retweeted":false,"possibly_sensitive":false}'
    event = json_serializer.load_string(event)
    interpreted = event_interpreter.create_event_interpreter('twitter', event, self.author_service_map, self.oauth_config)
    # Field-by-field checks of what the interpreter derives from the payload.
    self.assertEqual(interpreted.event_type(), models.ServiceObjectType.STATUS_TYPE)
    self.assertEqual(interpreted.event_id(), '216300709476433920')
    self.assertEqual(interpreted.created_time(), datetime.datetime(2012, 6, 22, 22, 44, 42))
    self.assertEqual(interpreted.updated_time(), None)
    self.assertEqual(interpreted.headline(), None)
    self.assertEqual(interpreted.tagline(), 'And a link: http://t.co/6fgHbu70 http://t.co/OVJDl4UA')
    self.assertEqual(interpreted.content(), 'And a link: http://t.co/6fgHbu70 http://t.co/OVJDl4UA')
    self.assertEqual(interpreted.photo(), 'http://p.twimg.com/AwB0WWlCAAAUkEj.jpg')
    self.assertEqual(interpreted.url(), None)
    self.assertEqual(interpreted.auxiliary_content(), None)
    self.assertEqual(interpreted.origin(), 'web')
    # the first entity URL's expanded form
    self.assertEqual(interpreted.original_content_uri(), 'http://en.wikipedia.org/wiki/World')
def get_author_profile(self, service_author_id, asm):
    """Fetch a Twitter user's profile and map it to the internal profile dict.

    Uses an OAuth-signed request when the author has an access token,
    otherwise falls back to an unauthenticated urlopen of the same
    endpoint. Returns a dict with any of: name, location, picture_url,
    headline, and always public_profile_url.
    """
    asm = self.fetch_begin(service_author_id, asm)
    args = {'user_id': asm.service_author_id, 'include_entities': True}
    # Create our OAuth consumer instance
    if asm.access_token:
        consumer = oauth.Consumer(self.oauth_config['key'], self.oauth_config['secret'])
        token = oauth.Token(key=asm.access_token, secret=asm.access_token_secret)
        client = oauth.Client(consumer, token)
    url = '%s%s?%s' % (self.oauth_config['endpoint'], USER_INFO, urllib.urlencode(args))
    # request the user's profile (authenticated when possible)
    json_obj = json_serializer.load_string(make_request(client, url)) if asm.access_token \
        else json_serializer.load(urllib2.urlopen(url))
    profile_json = {}
    # copy over only the fields Twitter actually returned
    if 'name' in json_obj:
        profile_json['name'] = json_obj['name']
    if 'location' in json_obj:
        profile_json['location'] = json_obj['location']
    if 'profile_image_url' in json_obj:
        profile_json['picture_url'] = json_obj['profile_image_url']
    if 'description' in json_obj:
        profile_json['headline'] = json_obj['description']
    # NOTE(review): unlike the fields above, 'screen_name' is accessed
    # unconditionally — confirm Twitter always returns it here.
    profile_json['public_profile_url'] = 'https://twitter.com/#!/%s' % json_obj['screen_name']
    return profile_json
def lineReceived(self, line):
    """Parse one line from the Twitter stream as JSON and dispatch it.

    Empty (keep-alive) lines are ignored. Any failure while parsing or
    handling an event is logged with its traceback and swallowed so a
    single bad event cannot take down the stream.
    """
    if line:
        try:
            self.handler.handle(json_serializer.load_string(line))
        # BUG FIX: was a bare 'except:' — narrowed to Exception so
        # SystemExit/KeyboardInterrupt still propagate; the deliberate
        # best-effort logging behavior is unchanged.
        except Exception:
            logging.exception('Error handling twitter event: %s', line)
def _analyze_correlated_events(uri, correlated_events):
    """Derive the origin/shares summary for a set of correlated events.

    Picks a "source" event: the event whose service matches the URI's
    service if present ('known' origin), otherwise the highest-priority
    event per _priority ('unknown' origin, annotated with the URI's
    domain and favicon). Also folds the min create time and max modify
    time across all events.

    Returns (event_dict_or_None, source_event, created_time, modified_time).
    """
    source_service_name = _service_name_from_uri(uri)
    source_service_object = service.name_to_service.get(source_service_name)
    shares = _create_shared_services(correlated_events)
    # for now lets use the "source" event to generate the json
    event = None
    source_event = None
    # seed the fold: created shrinks from "now", modified grows from 2000
    created_time = datetime.datetime.utcnow()
    modified_time = datetime.datetime(2000, 1, 1)
    # Lets see if we can find the original source
    found_source = False
    for service_event in correlated_events:
        # figure out the source event
        if source_service_object and service_event.service_id == source_service_object.id:
            # exact service match wins and locks in the choice
            source_event = service_event
            found_source = True
        elif not found_source:
            if source_event:
                # keep whichever candidate has the higher configured priority
                source_priority = _priority[source_service_name]
                event_priority = _priority[service.id_to_service[service_event.service_id].service_name]
                if event_priority > source_priority:
                    source_event = service_event
                    source_service_name = service.id_to_service[service_event.service_id].service_name
            else:
                source_event = service_event
                source_service_name = service.id_to_service[service_event.service_id].service_name
        created_time = min(created_time, service_event.create_time)
        modified_time = max(modified_time, service_event.modify_time)
    if source_event:
        source_event_interpreter = event_interpreter.create_event_interpreter(
            source_service_name, json_serializer.load_string(source_event.json), None, None)
        if found_source:
            origin = {'type': 'known',
                      'known': {'event_id': source_event.id,
                                'service_event_id': source_event.event_id,
                                'service_event_url': source_event_interpreter.url(),
                                'service_name': source_service_name}}
        else:
            # unknown origin: describe it by the URI's host and favicon
            parsed_uri = urlparse.urlparse(uri)
            favicon_uri = urlparse.urlunparse((
                parsed_uri[0], parsed_uri[1], 'favicon.ico', '', '', ''))
            origin = {'type': 'unknown',
                      'unknown': {'event_id': source_event.id,
                                  'service_event_id': source_event.event_id,
                                  'service_event_url': source_event_interpreter.url(),
                                  'service_name': source_service_name,
                                  'domain': parsed_uri.netloc,
                                  'small_icon': favicon_uri,
                                  'url': uri}}
        event = {'origin': origin, 'shares': shares}
    else:
        logging.error(
            'Could not create correlation event for url: %s with: %s',
            uri, correlated_events)
    return (event, source_event, created_time, modified_time)
def fetch(self, service_author_id, callback):
    """Collect an author's recent tweets, paging backwards through the timeline.

    Pages via max_id until a tweet older than NEW_LOOKBACK_WINDOW is
    seen, an empty page is returned, or the API errors. Each tweet that
    passes screen_event is handed to `callback`. Works authenticated
    (OAuth) or unauthenticated (plain urlopen with user_id).
    """
    super(TwitterEventCollector, self).fetch(service_author_id, callback)
    state = self.fetch_begin(service_author_id)
    self.fetch_log_info(state)
    asm = state['asm']
    args = {'include_rts': 1, 'include_entities': 1, 'trim_user': 1, 'count': 200}
    # use authenticated access if we can
    if asm.access_token:
        consumer = oauth.Consumer(self.oauth_config['key'], self.oauth_config['secret'])
        token = oauth.Token(asm.access_token, asm.access_token_secret)
        client = oauth.Client(consumer, token)
    else:
        args['user_id'] = asm.service_author_id
    # only ask for tweets newer than the last one we collected
    if asm.most_recent_event_id:
        args['since_id'] = asm.most_recent_event_id
    # API endpoint for getting user timeline
    url = '%s%s?%s' % (self.oauth_config['endpoint'], USER_TIMELINE, urllib.urlencode(args))
    min_age = datetime.utcnow() - self.NEW_LOOKBACK_WINDOW
    last_id = None
    while True:
        try:
            raw_json = json_serializer.load_string(make_request(client, url)) if asm.access_token \
                else json_serializer.load(urllib2.urlopen(url))
        except urllib2.URLError, e:
            # NOTE(review): e.code/e.read() are HTTPError attributes — confirm
            # a plain URLError cannot reach this handler.
            logging.error('ERROR REQUEST URL: {0}'.format(url))
            logging.error('ERROR REASON: {0}, {1}'.format(e.code, e.read()))
            raise
        # check if nothing returned and terminate loop if so
        if len(raw_json) == 0:
            break
        for post in raw_json:
            # process the item
            #print json.dumps(post, sort_keys=True, indent=2)
            interpreter = TwitterEventInterpreter(post, asm, self.oauth_config)
            last_id = interpreter.get_id()
            # terminate fetching any more events if we've gone beyond the lookback window
            if interpreter.get_create_time() < min_age:
                url = None
                break
            if self.screen_event(interpreter, state):
                callback(create_twitter_event(asm.author_id, CURRENT_STATE, service_author_id,
                                              interpreter.get_id(), post))
        if not url:
            break
        # setup for the next page (if any): everything strictly older
        # than the last tweet we saw
        args['max_id'] = long(last_id) - 1
        url = '%s%s?%s' % (self.oauth_config['endpoint'], USER_TIMELINE, urllib.urlencode(args))
def get_author_profile(self, service_author_id, asm):
    """Fetch a LinkedIn member's profile and map it to the internal profile dict.

    Signs the request with the member's token when present, otherwise
    with the configured fallback ("user1") token against the public
    profile endpoint. Copies over only the fields LinkedIn returned,
    including a flattened list of positions.
    """
    asm = self.fetch_begin(service_author_id, asm)
    # setup what we need for oauth
    consumer = oauth.Consumer(self.oauth_config['key'], self.oauth_config['secret'])
    if asm.access_token:
        token = oauth.Token(key=asm.access_token, secret=asm.access_token_secret)
    else:
        # fall back to the configured application-level token
        token = oauth.Token(self.oauth_config['user1_access_token'],
                            self.oauth_config['user1_access_token_secret'])
    client = oauth.Client(consumer, token)
    # authenticated members use the private profile resource; others the public one
    url_path = self.PROFILE_INFO if asm.access_token \
        else self.PUBLIC_PROFILE_INFO.format(urllib.quote(asm.service_author_id, ''))
    url = '%s%s' % (self.oauth_config['endpoint'], url_path)
    # request the user's profile
    json_obj = json_serializer.load_string(make_request(client, url, {'x-li-format': 'json'}))
    profile_json = {}
    firstName = lastName = ''
    if 'firstName' in json_obj:
        firstName = profile_json['first_name'] = json_obj['firstName']
    if 'lastName' in json_obj:
        lastName = profile_json['last_name'] = json_obj['lastName']
    # if we have a non-empty string add it to the json
    name = ('%s %s' % (firstName, lastName)).strip()
    if len(name) > 0:
        profile_json['name'] = name
    if 'industry' in json_obj:
        profile_json['industry'] = json_obj['industry']
    if 'headline' in json_obj:
        profile_json['headline'] = json_obj['headline']
    if 'pictureUrl' in json_obj:
        profile_json['picture_url'] = json_obj['pictureUrl']
    if 'location' in json_obj and 'name' in json_obj['location']:
        profile_json['location'] = json_obj['location']['name']
    if 'summary' in json_obj:
        profile_json['summary'] = json_obj['summary']
    if 'specialties' in json_obj:
        profile_json['specialties'] = json_obj['specialties']
    if 'publicProfileUrl' in json_obj:
        profile_json['public_profile_url'] = json_obj['publicProfileUrl']
    # flatten each position into {company, industry, summary, title}
    if 'positions' in json_obj and 'values' in json_obj['positions']:
        positions = []
        for position in json_obj['positions']['values']:
            position_json = {}
            if 'company' in position:
                if 'name' in position['company']:
                    position_json['company'] = position['company']['name']
                if 'industry' in position['company']:
                    position_json['industry'] = position['company']['industry']
            if 'summary' in position:
                position_json['summary'] = position['summary']
            if 'title' in position:
                position_json['title'] = position['title']
            positions.append(position_json)
        profile_json['positions'] = positions
    return profile_json
def fetch(self, service_author_id, callback):
    """Collect an author's LinkedIn network updates, paging via start/count.

    Requests updates newer than the last collection (or the lookback
    window on first run), dispatches each supported updateType to
    `callback` after screening, and stops when the lookback window is
    exceeded or all updates have been paged through.

    NOTE(review): the nesting of the per-connection / per-group inner
    loops below was reconstructed from collapsed source — confirm the
    dispatch statements are intended to run once per connection/group.
    """
    super(LinkedinEventCollector, self).fetch(service_author_id, callback)
    state = self.fetch_begin(service_author_id)
    self.fetch_log_info(state)
    asm = state['asm']
    # if this author has no access_token they are unauthorized and we
    # don't collect LinkedIn events for them
    if not asm.access_token:
        return
    service_author_id = asm.service_author_id
    min_age = datetime.utcnow() - self.NEW_LOOKBACK_WINDOW
    # setup what we need for oauth
    consumer = oauth.Consumer(self.oauth_config['key'], self.oauth_config['secret'])
    token = oauth.Token(key=asm.access_token, secret=asm.access_token_secret)
    client = oauth.Client(consumer, token)
    args = {'scope': 'self', 'count': self.PAGE_SIZE}
    # get only events since last update or past year depending on if this
    # is the first collection of not ('after' is epoch milliseconds)
    if asm.most_recent_event_timestamp:
        after = calendar.timegm((asm.most_recent_event_timestamp - self.MOST_RECENT_OVERLAP).utctimetuple()) * 1000
    else:
        after = calendar.timegm((datetime.utcnow() - self.NEW_LOOKBACK_WINDOW).utctimetuple()) * 1000
    args['after'] = after
    offset = 0
    args['start'] = offset
    url = '%s%s?%s' % (self.oauth_config['endpoint'], UPDATE_RESOURCE, urllib.urlencode(args, True))
    total_count = 0
    while url:
        # request the user's updates
        raw_json = json_serializer.load_string(make_request(client, url, {'x-li-format': 'json'}))
        if raw_json == None or raw_json.get('_total', 0) == 0:
            url = None
            break
        for post in raw_json.get('values', []):
            update_type = post['updateType']
            if update_type in self.SUPPORTED_TYPES:
                if update_type == 'CONN' and post['updateContent']['person']['id'] == service_author_id:
                    # the response can contain multiple connections that the
                    # member has made. We'll separate them into individual responses
                    postClone = copy.deepcopy(post)
                    for connection in post['updateContent']['person']['connections']['values']:
                        postClone['updateContent']['person']['connections'] = {"_total": 1,
                                                                               "values": [copy.deepcopy(connection)]}
                        interpreter = LinkedinEventInterpreter(postClone, asm, self.oauth_config)
                        # stop paging once we are past the lookback window
                        if interpreter.get_create_time() < min_age:
                            url = None
                            break
                        if self.screen_event(interpreter, state):
                            callback(create_linkedin_event(asm.author_id, CURRENT_STATE, service_author_id,
                                                           interpreter.get_id(), postClone))
                elif (update_type == 'PREC' or update_type == 'SVPR') and post['updateContent']['person']['id'] == service_author_id:
                    interpreter = LinkedinEventInterpreter(post, asm, self.oauth_config)
                    if interpreter.get_create_time() < min_age:
                        url = None
                        break
                    if self.screen_event(interpreter, state):
                        callback(create_linkedin_event(asm.author_id, CURRENT_STATE, service_author_id,
                                                       interpreter.get_id(), post))
                elif update_type == 'SHAR':
                    interpreter = LinkedinEventInterpreter(post, asm, self.oauth_config)
                    if interpreter.get_create_time() < min_age:
                        url = None
                        break
                    if self.screen_event(interpreter, state):
                        callback(create_linkedin_event(asm.author_id, CURRENT_STATE, service_author_id,
                                                       interpreter.get_id(), post))
                elif update_type == 'MSFC' and post['updateContent']['companyPersonUpdate']['person']['id'] == service_author_id:
                    interpreter = LinkedinEventInterpreter(post, asm, self.oauth_config)
                    if interpreter.get_create_time() < min_age:
                        url = None
                        break
                    if self.screen_event(interpreter, state):
                        callback(create_linkedin_event(asm.author_id, CURRENT_STATE, service_author_id,
                                                       interpreter.get_id(), post))
                elif update_type == 'JOBP' and post['updateContent']['job']['jobPoster']['id'] == service_author_id:
                    interpreter = LinkedinEventInterpreter(post, asm, self.oauth_config)
                    if interpreter.get_create_time() < min_age:
                        url = None
                        break
                    if self.screen_event(interpreter, state):
                        callback(create_linkedin_event(asm.author_id, CURRENT_STATE,
                                                       service_author_id, interpreter.get_id(), post))
                elif update_type == 'JGRP' and post['updateContent']['person']['id'] == service_author_id:
                    # the response can contain multiple groups that the member has joined. We'll
                    # separate them into individual responses
                    postClone = copy.deepcopy(post)
                    for group in post['updateContent']['person']['memberGroups']['values']:
                        postClone['updateContent']['person']['memberGroups'] = {"_total": 1,
                                                                                "values": [copy.deepcopy(group)]}
                        interpreter = LinkedinEventInterpreter(postClone, asm, self.oauth_config)
                        if interpreter.get_create_time() < min_age:
                            url = None
                            break
                        if self.screen_event(interpreter, state):
                            callback(create_linkedin_event(asm.author_id, CURRENT_STATE, service_author_id,
                                                           interpreter.get_id(), postClone))
                elif update_type == 'STAT' and post['updateContent']['person']['id'] == service_author_id:
                    interpreter = LinkedinEventInterpreter(post, asm, self.oauth_config)
                    if interpreter.get_create_time() < min_age:
                        url = None
                        break
                    if self.screen_event(interpreter, state):
                        callback(create_linkedin_event(asm.author_id, CURRENT_STATE, service_author_id,
                                                       interpreter.get_id(), post))
            else:
                if not update_type in self.IGNORED_TYPES:
                    logging.warning('???? skipping linkedIn event: %s' % update_type)
            # if the url is None stop
            if not url:
                break
        # if the url is None stop
        if not url:
            break
        total_count = total_count + raw_json['_count'] if '_count' in raw_json else raw_json['_total']
        # all pages consumed
        if raw_json['_total'] == total_count:
            url = None
            break
        # advance to the next page
        offset = offset + self.PAGE_SIZE
        args['start'] = offset
        url = '%s%s?%s' % (self.oauth_config['endpoint'], UPDATE_RESOURCE, urllib.urlencode(args, True))
    # NOTE(review): leftover debug print — consider removing
    print total_count
    # terminate the fetch
    self.fetch_end(state)