def __init__(self, username, password):
    '''Set up twitter protocol state, wire event handlers, and create the SocialFeed.'''
    self.username = username
    self.password = password
    self.recent_timeline = []
    self.self_tweet = None
    self.trends = {}
    self.feeds = []
    self.feeds_by_name = {}
    self.unread_counts = []

    # One Delegate per event name; the handlers below subscribe to the
    # events this protocol cares about.
    e = self.events = Storage((name, Delegate()) for name in self.event_names)
    e.following += self.on_following
    e.trends += self.on_trends
    e.on_unread_counts += self.on_unread_counts
    e.recent_timeline += self.on_recent_timeline
    e.self_tweet += self.on_self_tweet
    e.on_feeds += self.on_feeds
    e.on_change_view += self.on_change_view
    e.on_view += self.on_view_changed

    def render_tweets(tweets, render_context):
        return htmlize_tweets(self, tweets)

    # BUGFIX: the dirty callback used to be
    #   lambda: self.account.set_infobox_dirty
    # which returned the bound method without invoking it, so the infobox
    # was never marked dirty. Call the method instead.
    self.social_feed = SocialFeed('twitter_' + self.username,
                                  'twitter_' + self.username,
                                  self.get_tweet_feed,
                                  render_tweets,
                                  lambda: self.account.set_infobox_dirty())
def __init__(self, acct):
    '''Wire up the protocol for one Facebook account and create its SocialFeed.'''
    self.stream_request_outstanding = True
    self.acct = acct
    self._init_apis()
    self.last_stream = True
    self.last_filter_key = self.filter_key
    EventMixin.__init__(self)

    feed_id = 'facebook_' + self.acct.username
    self.social_feed = SocialFeed(feed_id,
                                  'newsfeed',
                                  self.get_post_feed,
                                  self.htmlize_posts,
                                  self.set_infobox_dirty)
def __init__(self, **options):
    '''Create the LinkedIn account. The password is unused (OAuth) and forced to None.'''
    self.time_offset = None
    options['password'] = None
    oauth_util.OAuthAccountBase.__init__(self, **options)
    self.connection = None
    self._dirty = False
    SocialNetwork.__init__(self, **options)
    # BUGFIX: the dirty callback used to be "lambda: self.set_infobox_dirty",
    # which returned the bound method without invoking it, so the infobox
    # never refreshed. Pass the bound method itself instead.
    self.social_feed = SocialFeed('linkedin_' + self.username,
                                  'linkedin_' + self.username,
                                  self.get_newsfeed_iter,
                                  self.generate_newsfeed_html,
                                  self.set_infobox_dirty)
def __init__(self, username, password):
    '''Set up twitter protocol state, wire event handlers, and create the SocialFeed.'''
    self.username = username
    self.password = password
    self.recent_timeline = []
    self.self_tweet = None
    self.trends = {}
    self.feeds = []
    self.feeds_by_name = {}
    self.unread_counts = []

    # One Delegate per event name; subscribe the protocol's handlers.
    e = self.events = Storage((name, Delegate()) for name in self.event_names)
    e.following += self.on_following
    e.trends += self.on_trends
    e.on_unread_counts += self.on_unread_counts
    e.recent_timeline += self.on_recent_timeline
    e.self_tweet += self.on_self_tweet
    e.on_feeds += self.on_feeds
    e.on_change_view += self.on_change_view
    e.on_view += self.on_view_changed

    def render_tweets(tweets, render_context):
        return htmlize_tweets(self, tweets)

    # BUGFIX: previously "lambda: self.account.set_infobox_dirty" returned
    # the bound method without calling it, so the infobox was never marked
    # dirty. Invoke the method.
    self.social_feed = SocialFeed('twitter_' + self.username,
                                  'twitter_' + self.username,
                                  self.get_tweet_feed,
                                  render_tweets,
                                  lambda: self.account.set_infobox_dirty())
def __init__(self, tweets):
    '''Build a fake twitter protocol around a fixed list of tweet dicts.'''
    self.recent_timeline = tweets

    from twitter.twitter import htmlize_tweets
    screen_name = '******'
    # Minimal stand-in for a real protocol object; htmlize_tweets only
    # needs these attributes.
    fake_protocol = S(username=screen_name,
                      trends=[],
                      unread_counts=[],
                      self_tweet=None)

    self.social_feed = SocialFeed(
        'twitter_' + screen_name,
        'twitter_feed',
        self.get_tweet_feed,
        lambda tweets, ctx: htmlize_tweets(fake_protocol, tweets))

    self.social_feed.new_ids([t['id'] for t in self.recent_timeline])
class MockTwitter(object):
    '''Stand-in for the twitter protocol: serves a canned timeline to a SocialFeed.'''

    def __init__(self, tweets):
        self.recent_timeline = tweets

        from twitter.twitter import htmlize_tweets
        screen_name = '******'
        # htmlize_tweets only reads these attributes off the protocol.
        fake_protocol = S(username=screen_name,
                          trends=[],
                          unread_counts=[],
                          self_tweet=None)

        def render(tweets, ctx):
            return htmlize_tweets(fake_protocol, tweets)

        self.social_feed = SocialFeed('twitter_' + screen_name,
                                      'twitter_feed',
                                      self.get_tweet_feed,
                                      render)
        self.social_feed.new_ids([t['id'] for t in self.recent_timeline])

    def get_tweet_feed(self):
        # Feed source callback handed to SocialFeed above.
        return self.recent_timeline
def __init__(self, *a, **k):
    '''Initialize the MySpace account: mixin state, OAuth, feed filters, and social feed.'''
    common.Protocol.StateMixin.__init__(self)
    oauth_util.OAuthAccountBase.__init__(self, **k)
    self.count = 0
    self.connection = None
    self._dirty = False

    # Per-account filter toggles; default every known key to enabled.
    filters = k.pop('filters', {})
    indicator_flags = filters.get('indicators', [True] * len(self.indicators_keys))
    feed_flags = filters.get('feed', [True] * len(self.feed_keys))
    self.filters = dict(
        indicators=dict(zip(self.indicators_keys, indicator_flags)),
        feed=dict(zip(self.feed_keys, feed_flags)),
    )

    self.header_funcs = (
        ('Home', self.openurl_Home),
        # ('Profile', self.openurl_Profile),
        ('Inbox', self.openurl_Inbox),
        ('Friends', self.openurl_Friends),
        ('Blog', self.openurl_Blog),
        ('Post Bulletin', self.openurl_Post),
    )

    k.setdefault('password', None)
    social.network.__init__(self, *a, **k)

    # OAuth accounts don't keep a password around.
    self._remove_password = not self.protocol_info()['needs_password']
    if self._remove_password:
        self.password = None

    from social.network import SocialFeed
    self.social_feed = SocialFeed('myspace_' + self.username,
                                  'activities',
                                  lambda: iter(self.connection.combined_feed()),
                                  self.htmlize_activities,
                                  self._set_dirty)
def __init__(self, **options):
    '''Create the LinkedIn account. The password is unused (OAuth) and forced to None.'''
    self.time_offset = None
    options["password"] = None
    oauth_util.OAuthAccountBase.__init__(self, **options)
    self.connection = None
    self._dirty = False
    SocialNetwork.__init__(self, **options)
    # BUGFIX: the dirty callback used to be "lambda: self.set_infobox_dirty",
    # which returned the bound method without invoking it, so the infobox
    # never refreshed. Pass the bound method itself instead.
    self.social_feed = SocialFeed(
        "linkedin_" + self.username,
        "linkedin_" + self.username,
        self.get_newsfeed_iter,
        self.generate_newsfeed_html,
        self.set_infobox_dirty,
    )
def __init__(self, *a, **k):
    '''Initialize the MySpace account: mixin state, OAuth, filter prefs, social feed.'''
    common.Protocol.StateMixin.__init__(self)
    oauth_util.OAuthAccountBase.__init__(self, **k)
    self.count = 0
    self.connection = None
    self._dirty = False

    # Build the filter dicts, defaulting every key to True when the
    # caller supplied no explicit flags.
    filters = k.pop('filters', {})
    self.filters = dict(
        indicators=dict(zip(self.indicators_keys,
                            filters.get('indicators',
                                        [True] * len(self.indicators_keys)))),
        feed=dict(zip(self.feed_keys,
                      filters.get('feed', [True] * len(self.feed_keys)))),
    )

    self.header_funcs = (
        ('Home', self.openurl_Home),
        # ('Profile', self.openurl_Profile),
        ('Inbox', self.openurl_Inbox),
        ('Friends', self.openurl_Friends),
        ('Blog', self.openurl_Blog),
        ('Post Bulletin', self.openurl_Post),
    )

    if 'password' not in k:
        k['password'] = None
    social.network.__init__(self, *a, **k)

    # OAuth accounts don't store a password.
    self._remove_password = not self.protocol_info()['needs_password']
    if self._remove_password:
        self.password = None

    from social.network import SocialFeed
    self.social_feed = SocialFeed(
        'myspace_' + self.username,
        'activities',
        lambda: iter(self.connection.combined_feed()),
        self.htmlize_activities,
        self._set_dirty)
class FacebookProtocol(EventMixin):
    '''Fetches the Facebook newsfeed via FQL multiquery and feeds it to a SocialFeed.'''

    events = EventMixin.events | set([
        'stream_requested',
        'not_logged_in',
        'got_stream',
        'status_updated',
        'conn_error',
        'infobox_dirty',
    ])

    def __init__(self, acct):
        self.stream_request_outstanding = True
        self.acct = acct
        self._init_apis()
        self.last_stream = True
        self.last_filter_key = self.filter_key
        EventMixin.__init__(self)
        self.social_feed = SocialFeed('facebook_' + self.acct.username,
                                      'newsfeed',
                                      self.get_post_feed,
                                      self.htmlize_posts,
                                      self.set_infobox_dirty)

    def set_infobox_dirty(self):
        self.event('infobox_dirty')

    def htmlize_posts(self, posts, stream_context):
        '''Convert one facebook newsfeed post into infobox HTML.'''
        t = FBIB(self.acct)
        # CAS: pull out the context stuff; the default FBIB grabs
        # self.last_stream, not the one we have context for!
        return t.get_html(None, set_dirty=False,
                          file='posts.py.xml',
                          dir=t.get_context()['app'].get_res_dir('base'),
                          context=S(posts=posts))

    def get_post_feed(self):
        # TODO bring back feed context.
        return iter(self.last_stream.posts)

    @property
    def filter_key(self):
        # Maps the account's preferred filter index onto an FQL filter key.
        return ['nf', 'lf', 'h'][self.acct.preferred_filter_key]

    @property
    def hidden_posts(self):
        # FQL fragment appended to the posts query.
        return "and is_hidden=0" if self.acct.show_hidden_posts else ''

    def get_stream(self):
        self.stream_request_outstanding = True
        self.do_get_stream()

    def _init_apis(self):
        self._init_digsby()

    def _init_digsby(self, session_key='', secret='', uid=None):
        access_token = getattr(self.acct, 'access_token', None)
        # BUGFIX: a trailing comma here previously turned uid into a
        # 1-tuple, which was then passed to the REST API as the uid.
        uid = getattr(self.acct, 'uid', None)
        self.digsby = graphapi.LegacyRESTAPI(access_token, uid=uid)

    def do_get_stream(self, num_tries=0):
        '''Kick off the FQL multiquery; full refresh if the filter pref changed.'''
        from util import default_timer
        self.start_get_stream = default_timer()
        if not self.digsby.logged_in:
            return self.event('not_logged_in')

        # Refresh the full stream if the filter pref has changed.
        prev_filter_key, self.last_filter_key = self.last_filter_key, self.filter_key
        if not isinstance(self.last_stream, dict) or prev_filter_key != self.filter_key:
            kw = dict(success=lambda *a: self.get_stream_success(*a, num_tries=num_tries),
                      error=lambda *a: self.get_stream_error(num_tries, *a))
            updated_time = 0
        else:
            # Incremental update: only ask for posts newer than what we have.
            kw = dict(success=self.update_stream_success,
                      error=lambda *a: self.get_stream_error(num_tries, *a))
            updated_time = max(self.last_stream.posts + [S(updated_time=0)],
                               key=lambda v: v.updated_time).updated_time

        # birthdays/now queries are currently disabled.
        self.last_run_multi = dict(
            latest_posts=LATEST_POSTS_QUERY % (self.filter_key, self.hidden_posts),
            posts=UPDATED_POSTS_QUERY % (('%d' % updated_time) + '+0'),
            events=EVENTS_QUERY,
            status=STATUS_QUERY,
            notifications=NOTIFICATIONS_QUERY,
            apps=APP_QUERY,
            post_filter_keys=POST_FILTER_KEY_QUERY,
            filter_keys=FILTER_KEY_QUERY,
            **UPDATE_STREAM_QUERY)
        self.digsby.fql.multiquery(queries=self.last_run_multi, **kw)

    def update_status(self):
        self.digsby.query(STATUS_QUERY, success=self.status_updated)

    def status_updated(self, status):
        # NOTE(review): nesting reconstructed from a collapsed source line;
        # confirm whether the event should also fire when status is None.
        status = status[0]
        if status is not None:
            status['uid'] = self.digsby.uid
            self.last_status = status
            self.event('status_updated')

    def update_stream_success(self, value):
        return self.get_stream_success(value, update=True)

    def get_stream_success(self, value, update=False, num_tries=0):
        '''Normalize the multiquery result into last_stream and notify listeners.'''
        from util import default_timer
        self.end_get_stream = default_timer()
        log.debug('stream get took %f seconds',
                  self.end_get_stream - self.start_get_stream)
        stream = value
        self.last_alerts = Alerts(self.acct)
        from facebookapi import simplify_multiquery
        try:
            new_stream = simplify_multiquery(
                stream,
                keys={'posts': None,
                      'latest_posts': None,
                      'profiles': 'id',
                      'events': list,
                      'status': None,
                      'notifications': None,
                      'apps': 'app_id',
                      'post_filter_keys': None,
                      'filter_keys': 'filter_key'})
            import util.primitives.funcs as funcs
            new_stream['comments'] = {}
            new_stream['post_ids'] = post_ids = {}

            # Fill in missing filter-key names, then force our synthetic keys.
            for k, v in new_stream['filter_keys'].iteritems():
                if not v.get('name'):
                    v['name'] = KNOWN_APPS_LOOKUP.get(k, v.get('name'))
            new_stream['filter_keys'].update(
                [(k, dict(name=d['name'],
                          icon_url=skin.get(d['icon_url']).path.url()))
                 for k, d in FORCED_KEYS.items()])

            new_stream['post_filter_keys'] = dict(
                (post_id, structures.oset(p['filter_key'] for p in vals))
                for post_id, vals in funcs.groupby(new_stream['post_filter_keys'],
                                                   lambda x: x['post_id']))

            for post in new_stream['posts']:
                post['comments']['count'] = int(post['comments']['count'])

            # App ids arrive as strings; index them by int.
            new_stream['apps'], apps_str = {}, new_stream['apps']
            for app_id, app_dict in apps_str.items():
                new_stream['apps'][int(app_id)] = app_dict

            try:
                new_stream['now'] = new_stream['now'][0].values()[0]
            except (IndexError, KeyError) as _e:
                # 'now' query is disabled; fall back to local time.
                import time
                new_stream['now'] = time.time()

            self.last_alerts.event_invites &= set(new_stream['events'])
            self.last_status = (new_stream['status'][:1] or
                                [Ostorage([('message', ''),
                                           ('status_id', 0),
                                           ('time', 0)])])[0]
            self.last_status['uid'] = self.digsby.uid

            if not isinstance(new_stream['posts'], list):
                log.error('stream: %r', stream)
                raise ValueError('Facebook returned type=%r of posts'
                                 % type(new_stream['posts']))

            for post in new_stream['posts']:  # get the new ones
                post_ids[post['post_id']] = post

            if 'notifications' in new_stream:
                import lxml
                for notification in new_stream['notifications']:
                    title_html = notification.get('title_html', None)
                    if title_html is None:
                        continue
                    s = lxml.html.fromstring(title_html)
                    s.make_links_absolute('http://www.facebook.com',
                                          resolve_base_href=False)
                    for a in s.findall('a'):
                        a.tag = 'span'
                        a.attrib['class'] = 'link notification_link'
                    for el in s.findall(".//*[@data-hovercard]"):
                        el.attrib.pop("data-hovercard", None)
                    notification['title_html'] = lxml.etree.tostring(s)
                self.last_alerts.update_notifications(new_stream['notifications'])

            if update:
                # Merge the freshly-fetched posts with what we already had,
                # capped at POSTS_LIMIT.
                latest_posts = filter(
                    None,
                    (post_ids.get(post_id, self.last_stream.post_ids.get(post_id))
                     for post_id in structures.oset(
                         [post['post_id'] for post in new_stream['latest_posts']] +
                         [post['post_id'] for post in self.last_stream.posts]
                     )))[:POSTS_LIMIT]
                new_stream['posts'] = latest_posts
                for post in new_stream['posts']:  # update the dict with the combined list
                    post_ids[post['post_id']] = post
                for key in self.last_stream.comments:
                    if key in post_ids and key not in new_stream.comments:
                        new_stream.comments[key] = self.last_stream.comments[key]
                for key in self.last_stream.profiles:
                    if key not in new_stream.profiles:
                        new_stream.profiles[key] = self.last_stream.profiles[key]

            trim_profiles(new_stream)
            for p in new_stream.posts:
                p.id = p.post_id  # compatability hack for ads
            self.last_stream = new_stream
            self.social_feed.new_ids([p['post_id'] for p in self.last_stream.posts])
        except Exception as e:
            traceback.print_exc()
            return self.get_stream_error(num_tries=num_tries, error=e)
        self.event('got_stream')
class FacebookProtocol(EventMixin):
    '''Fetches the Facebook newsfeed via FQL multiquery and feeds it to a SocialFeed.'''

    events = EventMixin.events | set([
        'stream_requested',
        'not_logged_in',
        'got_stream',
        'status_updated',
        'conn_error',
        'infobox_dirty',
    ])

    def __init__(self, acct):
        self.stream_request_outstanding = True
        self.acct = acct
        self._init_apis()
        self.last_stream = True
        self.last_filter_key = self.filter_key
        EventMixin.__init__(self)
        self.social_feed = SocialFeed('facebook_' + self.acct.username,
                                      'newsfeed',
                                      self.get_post_feed,
                                      self.htmlize_posts,
                                      self.set_infobox_dirty)

    def set_infobox_dirty(self):
        self.event('infobox_dirty')

    def htmlize_posts(self, posts, stream_context):
        '''Convert one facebook newsfeed post into infobox HTML.'''
        t = FBIB(self.acct)
        # CAS: pull out the context stuff; the default FBIB grabs
        # self.last_stream, not the one we have context for!
        return t.get_html(None, set_dirty=False,
                          file='posts.py.xml',
                          dir=t.get_context()['app'].get_res_dir('base'),
                          context=S(posts=posts))

    def get_post_feed(self):
        # TODO bring back feed context.
        return iter(self.last_stream.posts)

    @property
    def filter_key(self):
        # Maps the account's preferred filter index onto an FQL filter key.
        return ['nf', 'lf', 'h'][self.acct.preferred_filter_key]

    @property
    def hidden_posts(self):
        # FQL fragment appended to the posts query.
        return "and is_hidden=0" if self.acct.show_hidden_posts else ''

    def get_stream(self):
        self.stream_request_outstanding = True
        self.do_get_stream()

    def _init_apis(self):
        self._init_digsby()

    def _init_digsby(self, session_key='', secret='', uid=None):
        access_token = getattr(self.acct, 'access_token', None)
        # BUGFIX: a trailing comma here previously turned uid into a
        # 1-tuple, which was then passed to the REST API as the uid.
        uid = getattr(self.acct, 'uid', None)
        self.digsby = graphapi.LegacyRESTAPI(access_token, uid=uid)

    def do_get_stream(self, num_tries=0):
        '''Kick off the FQL multiquery; full refresh if the filter pref changed.'''
        from util import default_timer
        self.start_get_stream = default_timer()
        if not self.digsby.logged_in:
            return self.event('not_logged_in')

        # Refresh the full stream if the filter pref has changed.
        prev_filter_key, self.last_filter_key = self.last_filter_key, self.filter_key
        if not isinstance(self.last_stream, dict) or prev_filter_key != self.filter_key:
            kw = dict(success=lambda *a: self.get_stream_success(*a, num_tries=num_tries),
                      error=lambda *a: self.get_stream_error(num_tries, *a))
            updated_time = 0
        else:
            # Incremental update: only ask for posts newer than what we have.
            kw = dict(success=self.update_stream_success,
                      error=lambda *a: self.get_stream_error(num_tries, *a))
            updated_time = max(self.last_stream.posts + [S(updated_time=0)],
                               key=lambda v: v.updated_time).updated_time

        # birthdays/now queries are currently disabled.
        self.last_run_multi = dict(
            latest_posts=LATEST_POSTS_QUERY % (self.filter_key, self.hidden_posts),
            posts=UPDATED_POSTS_QUERY % (('%d' % updated_time) + '+0'),
            events=EVENTS_QUERY,
            status=STATUS_QUERY,
            notifications=NOTIFICATIONS_QUERY,
            apps=APP_QUERY,
            post_filter_keys=POST_FILTER_KEY_QUERY,
            filter_keys=FILTER_KEY_QUERY,
            **UPDATE_STREAM_QUERY)
        self.digsby.fql.multiquery(queries=self.last_run_multi, **kw)

    def update_status(self):
        self.digsby.query(STATUS_QUERY, success=self.status_updated)

    def status_updated(self, status):
        # NOTE(review): nesting reconstructed from a collapsed source line;
        # confirm whether the event should also fire when status is None.
        status = status[0]
        if status is not None:
            status['uid'] = self.digsby.uid
            self.last_status = status
            self.event('status_updated')

    def update_stream_success(self, value):
        return self.get_stream_success(value, update=True)

    def get_stream_success(self, value, update=False, num_tries=0):
        '''Normalize the multiquery result into last_stream and notify listeners.'''
        from util import default_timer
        self.end_get_stream = default_timer()
        log.debug('stream get took %f seconds',
                  self.end_get_stream - self.start_get_stream)
        stream = value
        self.last_alerts = Alerts(self.acct)
        from facebookapi import simplify_multiquery
        try:
            new_stream = simplify_multiquery(
                stream,
                keys={'posts': None,
                      'latest_posts': None,
                      'profiles': 'id',
                      'events': list,
                      'status': None,
                      'notifications': None,
                      'apps': 'app_id',
                      'post_filter_keys': None,
                      'filter_keys': 'filter_key'})
            import util.primitives.funcs as funcs
            new_stream['comments'] = {}
            new_stream['post_ids'] = post_ids = {}

            # Fill in missing filter-key names, then force our synthetic keys.
            for k, v in new_stream['filter_keys'].iteritems():
                if not v.get('name'):
                    v['name'] = KNOWN_APPS_LOOKUP.get(k, v.get('name'))
            new_stream['filter_keys'].update(
                [(k, dict(name=d['name'],
                          icon_url=skin.get(d['icon_url']).path.url()))
                 for k, d in FORCED_KEYS.items()])

            new_stream['post_filter_keys'] = dict(
                (post_id, structures.oset(p['filter_key'] for p in vals))
                for post_id, vals in funcs.groupby(new_stream['post_filter_keys'],
                                                   lambda x: x['post_id']))

            for post in new_stream['posts']:
                post['comments']['count'] = int(post['comments']['count'])

            # App ids arrive as strings; index them by int.
            new_stream['apps'], apps_str = {}, new_stream['apps']
            for app_id, app_dict in apps_str.items():
                new_stream['apps'][int(app_id)] = app_dict

            try:
                new_stream['now'] = new_stream['now'][0].values()[0]
            except (IndexError, KeyError) as _e:
                # 'now' query is disabled; fall back to local time.
                import time
                new_stream['now'] = time.time()

            self.last_alerts.event_invites &= set(new_stream['events'])
            self.last_status = (new_stream['status'][:1] or
                                [Ostorage([('message', ''),
                                           ('status_id', 0),
                                           ('time', 0)])])[0]
            self.last_status['uid'] = self.digsby.uid

            if not isinstance(new_stream['posts'], list):
                log.error('stream: %r', stream)
                raise ValueError('Facebook returned type=%r of posts'
                                 % type(new_stream['posts']))

            for post in new_stream['posts']:  # get the new ones
                post_ids[post['post_id']] = post

            if 'notifications' in new_stream:
                import lxml
                for notification in new_stream['notifications']:
                    title_html = notification.get('title_html', None)
                    if title_html is None:
                        continue
                    s = lxml.html.fromstring(title_html)
                    s.make_links_absolute('http://www.facebook.com',
                                          resolve_base_href=False)
                    for a in s.findall('a'):
                        a.tag = 'span'
                        a.attrib['class'] = 'link notification_link'
                    for el in s.findall(".//*[@data-hovercard]"):
                        el.attrib.pop("data-hovercard", None)
                    notification['title_html'] = lxml.etree.tostring(s)
                self.last_alerts.update_notifications(new_stream['notifications'])

            if update:
                # Merge the freshly-fetched posts with what we already had,
                # capped at POSTS_LIMIT.
                latest_posts = filter(
                    None,
                    (post_ids.get(post_id, self.last_stream.post_ids.get(post_id))
                     for post_id in structures.oset(
                         [post['post_id'] for post in new_stream['latest_posts']] +
                         [post['post_id'] for post in self.last_stream.posts]
                     )))[:POSTS_LIMIT]
                new_stream['posts'] = latest_posts
                for post in new_stream['posts']:  # update the dict with the combined list
                    post_ids[post['post_id']] = post
                for key in self.last_stream.comments:
                    if key in post_ids and key not in new_stream.comments:
                        new_stream.comments[key] = self.last_stream.comments[key]
                for key in self.last_stream.profiles:
                    if key not in new_stream.profiles:
                        new_stream.profiles[key] = self.last_stream.profiles[key]

            trim_profiles(new_stream)
            for p in new_stream.posts:
                p.id = p.post_id  # compatability hack for ads
            self.last_stream = new_stream
            self.social_feed.new_ids([p['post_id'] for p in self.last_stream.posts])
        except Exception as e:
            traceback.print_exc()
            return self.get_stream_error(num_tries=num_tries, error=e)
        self.event('got_stream')
class TwitterProtocol(object):
    '''Drives the webkit-based twitter client and exposes its timeline as a SocialFeed.'''

    event_names = '''
        state_changed following reply trends
        on_unread_counts on_feeds on_edit_feed on_view on_change_view
        status_update_clicked recent_timeline self_tweet on_corrupted_database
        update_social_ids received_whole_update
    '''.split()

    def __init__(self, username, password):
        self.username = username
        self.password = password
        self.recent_timeline = []
        self.self_tweet = None
        self.trends = {}
        self.feeds = []
        self.feeds_by_name = {}
        self.unread_counts = []

        # One Delegate per event name; subscribe this protocol's handlers.
        e = self.events = Storage((name, Delegate()) for name in self.event_names)
        e.following += self.on_following
        e.trends += self.on_trends
        e.on_unread_counts += self.on_unread_counts
        e.recent_timeline += self.on_recent_timeline
        e.self_tweet += self.on_self_tweet
        e.on_feeds += self.on_feeds
        e.on_change_view += self.on_change_view
        e.on_view += self.on_view_changed

        def render_tweets(tweets, render_context):
            return htmlize_tweets(self, tweets)

        # BUGFIX: previously "lambda: self.account.set_infobox_dirty"
        # returned the bound method without calling it, so the infobox was
        # never marked dirty. Invoke the method.
        self.social_feed = SocialFeed('twitter_' + self.username,
                                      'twitter_' + self.username,
                                      self.get_tweet_feed,
                                      render_tweets,
                                      lambda: self.account.set_infobox_dirty())

    def _get_database_path(self):
        webview = self.webkitcontroller.webview
        return webview.GetDatabasePath('digsbysocial_' + self.username)

    def set_options(self, options):
        guithread(lambda: self.webkitcontroller.JSCall('setAccountOptions', **options))

    def on_change_view(self, feed_name):
        '''Switch an existing feed window to feed_name, or open a new one.'''
        log.info('on_change_view %r', feed_name)
        window = self.webkitcontroller.FeedWindow
        if window is not None:
            log.info(' found a window, calling switch_to_view')
            window.switch_to_view(feed_name)
            tlw = window.Top
            if tlw.IsIconized():
                tlw.Iconize(False)
            window.Top.Raise()
        else:
            log.info(' no window found, calling open_timeline_window')
            self.webkitcontroller.open_timeline_window(feed_name)

    def on_view_changed(self, feed_name):
        # Count views of saved searches for statistics.
        feed = self.feeds_by_name.get(feed_name, None)
        if feed is not None and feed.get('query', None) is not None \
                and feed.get('save', False):
            hooks.notify('digsby.statistics.twitter.viewed_search')

    def on_feeds(self, feeds):
        self.feeds = feeds
        self.feeds_by_name = dict((f['name'], f) for f in feeds)
        self.feeds_by_name.update(
            favorites=dict(name='favorites', label=_('Favorites')),
            history=dict(name='history', label=_('History')))
        import twitter_notifications as tnots
        tnots._update_notifications(self, feeds)
        self._save_feeds(feeds)

    def _save_feeds(self, feeds):
        '''Persist the feed list to prefs, excluding unsaved searches.'''
        def should_save(f):
            return f['type'] not in ('search', 'user') or f.get('save', False)

        feeds_pref = filter(should_save, deepcopy(feeds))

        # Don't serialize certain attributes out to prefs.
        # BUGFIX: use pop's default so feeds missing 'count'/'label'
        # don't raise KeyError.
        for feed in feeds_pref:
            for attr in ('count', 'label'):
                feed.pop(attr, None)

        self.set_account_pref('feeds', feeds_pref)

    @property
    def account_prefix(self):
        return 'twitter.' + self.username

    def account_pref_key(self, name):
        return _account_pref_key(self.username, name)

    def set_account_pref(self, name, value):
        from common import setpref
        value = simplejson.dumps(value)
        setpref(self.account_pref_key(name), value)

    def get_account_pref(self, name, default):
        return _get_account_pref(self.username, name, default)

    def on_unread_counts(self, opts):
        self.unread_counts = opts.get('feeds')
        self.unread_total = opts.get('total')

    def on_recent_timeline(self, tweets):
        self.recent_timeline = [to_storage(t) for t in tweets]
        self.recent_timeline.reverse()
        self.events.update_social_ids()

    def update_social_ids(self):
        '''Push timeline ids into the social feed, debounced by a 1s timer.'''
        try:
            t = self._socialtimer
        except AttributeError:
            def later():
                ids = [p['id'] for p in self.recent_timeline]
                self.social_feed.new_ids(ids)
            t = self._socialtimer = wx.PyTimer(later)
        if not t.IsRunning():
            t.StartOneShot(1000)

    def on_self_tweet(self, tweet):
        self.self_tweet = to_storage(tweet)

    def on_following(self, ids):
        # TODO: stop should actually do something
        if hasattr(self, 'stream'):
            self.stream.stop()
        if common.pref('twitter.streaming', default=False):
            from twitterstream import TwitterStream
            self.stream = TwitterStream(self.username, self.password, ids)
            self.stream.on_tweet += self.on_stream_tweet
            self.stream.start()

    def on_trends(self, trends):
        # TODO: store trends over time?
        trends = trends['trends']
        self.trends = trends[trends.keys()[0]]

    def on_stream_tweet(self, tweet):
        if self.webkitcontroller is not None:
            wx.CallAfter(self.webkitcontroller.realtime_tweet, tweet)

    def connect(self, accountopts):
        @guithread
        def later():
            self.webkitcontroller = TwitterWebKitController(self)
            self.webkitcontroller.initialize(self.username, self.password,
                                             self.get_user_feeds(), accountopts)
            self.init_webkit_methods()

    def _verify_databases(self):
        '''Check (and if corrupt, delete) the sqlite databases webkit uses.

        webkit doesn't release file locks for corrupted databases, so verify
        integrity up front; any database that errors is removed.
        '''
        import sqlite3

        def try_query_remove_on_error(dbpath, query):
            '''try a query on database dbpath. dbpath is deleted on any exception.'''
            dbpath = path(dbpath)
            log.info('verifying db %r', dbpath)
            if not dbpath.isfile():
                log.info('not a file')
                return
            try:
                conn = sqlite3.connect(dbpath)
                with conn:
                    conn.execute(query)
                conn.close()
            except Exception:
                traceback.print_exc()
                with traceguard:
                    log.warning('exception encountered, removing %r', dbpath)
                    dbpath.remove()
                    log.warning('remove completed')

        # Check the "index" database webkit uses to track each site's database.
        try_query_remove_on_error(
            path(self.webkitcontroller.webview.GetDatabaseDirectory()) / 'Databases.db',
            'select * from Databases limit 1')

        # Calling window.openDatabase once is necessary for the
        # _get_database_path() call below to work.
        self.webkitcontroller.webview.RunScript(
            '''var test_db = window.openDatabase('_test_db_', "1.0", "test db", 1024);''')

        # Ensure the twitter database is okay.
        try_query_remove_on_error(
            self._get_database_path(),
            'create table if not exists _test (foo int)')

    def get_user_feeds(self):
        '''Return the user's feed list from prefs, reverting to defaults if invalid.'''
        def deffeed(n):
            return dict(name=n, type=n)

        default_feeds = [deffeed(n) for n in ('timeline', 'mentions', 'directs')]
        userfeeds = self.get_account_pref('feeds', default_feeds)

        def revert():
            log.warning('REVERTING user feeds, was %r:', userfeeds)
            self.set_account_pref('feeds', default_feeds)
            return default_feeds

        from pprint import pprint
        pprint(userfeeds)
        if not isinstance(userfeeds, list):
            return revert()
        try:
            if userfeeds is not default_feeds:
                # Every default feed type must still be present.
                for feed in default_feeds:
                    for ufeed in userfeeds:
                        if feed['type'] == ufeed['type']:
                            break
                    else:
                        return revert()
        except Exception:
            traceback.print_exc()
            return revert()
        return userfeeds

    def init_webkit_methods(self):
        # Forward these methods to the webkitcontroller.
        for method_name in '''
                open_timeline_window clear_cache update on_status
                on_status_with_error_popup add_feed edit_feed delete_feed
                set_feeds add_group get_users get_prefs'''.split():
            setattr(self, method_name, getattr(self.webkitcontroller, method_name))

    def json(self, *a, **k):
        self.webkitcontroller.json(*a, **k)

    def disconnect(self):
        self.webkitcontroller.disconnect()

    def mark_all_as_read(self):
        self.webkitcontroller.evaljs('markAllAsRead();')

    def on_reply(self, id, screen_name, text):
        from .twitter_gui import TwitterFrame
        TwitterFrame.Reply(id, screen_name, text)

    def on_retweet(self, id, screen_name, text):
        from .twitter_gui import TwitterFrame
        TwitterFrame.Retweet(id, screen_name, text)

    def on_direct(self, screen_name):
        from .twitter_gui import TwitterFrame
        TwitterFrame.Direct(screen_name)

    def mark_feed_as_read(self, feed_name):
        self.webkitcontroller.JSCall('markFeedAsRead', feedName=feed_name)

    def toggle_addstocount(self, feed_name):
        self.webkitcontroller.JSCall('toggleAddsToCount', feedName=feed_name)

    def get_ids_and_context(self, _feed_context):
        # _feed_context ?= tab
        return list(t['id'] for t in self.get_tweet_feed()), self.recent_timeline

    def get_tweet_feed(self):
        '''Yield timeline tweets, skipping the user's own status tweet.'''
        self_id = self.self_tweet['id'] if self.self_tweet is not None else None
        for tweet in self.recent_timeline:
            if self_id is None or self_id != tweet['id']:
                yield tweet
def test_feed_item_deleted(self):
    '''Ads should track item deletions: move with new ids, cap at two per feed.'''
    first_ad_spot = 2
    with fakeads(first_ad_index=first_ad_spot):
        ids = None

        def get_content():
            return [S(id=c) for c in ids]

        def getitem(t, ctx):
            return t

        sf = SocialFeed('foo_test', 'feed', get_content, getitem)

        def show():
            # print '$$$$ feed:
            from pprint import pprint
            pprint(feed)

        # ensure ads are inserted
        ids_1 = [1, 2, 3, 4, 5]
        sf.new_ids(ids_1)
        ids = ids_1
        feed = list(sf.get_iterator())
        show()
        assert feed_trends.isad(feed[2]), feed[2]

        # ensure the ad moves as new items come in
        ids_2 = [6, 7, 8, 1, 2, 4, 5]
        sf.new_ids(ids_2)
        ids = ids_2
        feed = list(sf.get_iterator())
        show()
        assert feed_trends.isad(feed[5])

        # ensure that new ads appear
        ids_3 = [11, 12, 13, 14, 15, 16, 6, 7, 8, 1, 2, 4, 5]
        sf.new_ids(ids_3)
        ids = ids_3
        feed = list(sf.get_iterator())
        show()
        assert feed_trends.isad(feed[1]), feed[1]
        assert feed_trends.isad(feed[-3])

        # no more than 2 ads will ever appear
        ids_4 = [101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111] + ids_3
        sf.new_ids(ids_4)
        ids = ids_4
        feed = list(sf.get_iterator())
        self.assertEquals(2, sum(1 if feed_trends.isad(a) else 0 for a in feed))
class LinkedInAccount(SocialNetwork, oauth_util.OAuthAccountBase, jsonrpc.RPCClient):
    '''LinkedIn social-network account.

    Combines the generic SocialNetwork machinery with OAuth authentication
    and a JSON-RPC bridge used by the infobox webview.

    NOTE(review): this file contains two near-identical definitions of
    LinkedInAccount; at import time the later definition wins.
    '''

    # Maps RPC action names (sent from the infobox webview) to handler
    # method names on this class.
    # NOTE(review): 'newsfeed_more_content' has no matching method below --
    # the handler is named more_content; confirm the dispatch mechanism or
    # the method name.
    _rpc_handlers = {
        'post_comment': 'newsfeed_post_comment',
        'next_item': 'newsfeed_next_item',
        'initialize_feed': 'newsfeed_initialize_feed',
        'near_bottom': 'newsfeed_more_content',
        #'load_comments' : 'newsfeed_load_comments',
        'do_like': 'newsfeed_do_like',
        'do_dislike': 'newsfeed_do_dislike',
    }

    updatefreq = 10 * 60  # ten minutes

    class Statuses(SocialNetwork.Statuses):
        CHECKING = _('Checking now...')

    BORDER = '0'
    service = protocol = 'linkedin'
    AuthClass = oauth_util.InternalBrowserAuthenticator
    #AuthClass = oauth_util.UserBrowserAuthenticator
    webroot = 'https://www.linkedin.com'

    def weblink(self, resource=''):
        # Absolute URL for a path on linkedin.com.
        return net.httpjoin(self.webroot, resource)

    @property
    def header_funcs(self):
        # (label, url) pairs for the infobox header links.
        return (
            (_('Home'), self.weblink()),
            (_('Inbox'), self.weblink('inBox')),
            (_('Contacts'), self.weblink('connections')),
            (_('Profile'), self.weblink('myprofile')),
        )

    def _get_inbox_count_str(self):
        # e.g. " (3)" when there are unread items, else "".
        if self.count:
            return ' (%d)' % self.count
        else:
            return ''

    def __init__(self, **options):
        self.time_offset = None
        # OAuth accounts do not store a password locally.
        options['password'] = None
        oauth_util.OAuthAccountBase.__init__(self, **options)
        self.connection = None
        self._dirty = False
        SocialNetwork.__init__(self, **options)
        # NOTE(review): the lambda returns the bound method without calling
        # it -- if SocialFeed invokes this callback expecting the infobox to
        # be dirtied, it is a no-op (compare FacebookAccount, which passes
        # set_infobox_dirty directly). Confirm SocialFeed's contract.
        self.social_feed = SocialFeed('linkedin_' + self.username,
                                      'linkedin_' + self.username,
                                      self.get_newsfeed_iter,
                                      self.generate_newsfeed_html,
                                      lambda: self.set_infobox_dirty)

    def set_dirty(self):
        log.info("set dirty")
        self._dirty = True

    def _cache_data(self, api_data):
        self.api_data = api_data

    # Disk-cached copy of API results (per-user).
    api_data = cacheable.cproperty({}, user=True)

    def get_newsfeed_iter(self):
        '''Iterator over current newsfeed updates; empty when disconnected.'''
        return iter(getattr(getattr(self, 'connection', None), 'updates', []))

    def set_infobox_dirty(self):
        self._dirty = True
        self.notify('dirty')

    @property
    def cache_path(self):
        return os.path.join(self.protocol, self.name, 'api-results.dat')

    def get_options(self):
        '''Serializable account options (network + OAuth), with defaults.'''
        opts = {'informed_ach': True, 'post_ach_all': False}
        opts.update(SocialNetwork.get_options(self))
        opts.update(oauth_util.OAuthAccountBase.get_options(self))
        return opts

    def Connect(self):
        self.error_txt = None
        log.info('Connect called for %r', self)
        self._update_now()

    def _update_now(self):
        # Only poll when the account is enabled; otherwise go offline quietly.
        if self.enabled:
            self.update_now()
        else:
            self.set_offline(self.Reasons.NONE)

    def _connect(self):
        self.connection.connect()

    def create_connection(self):
        '''Create the protocol object and bind its events; raises if one
        already exists.'''
        if self.connection is not None:
            raise Exception('Already have a connection')
        import LinkedInProtocol as LIP
        self.connection = LIP.LinkedInProtocol(self.username, self.oauth_token,
                                               self.api_data, self.filters,
                                               time_offset=self.time_offset)
        self.bind_events()

    def connect_failed(self, e):
        self._dirty_error = True
        self.Disconnect(reason=self.Reasons.CONN_FAIL)

    def Disconnect(self, reason=None):
        '''Tear down the connection and go offline with the given reason.'''
        if reason is None:
            reason = self.Reasons.NONE
        self.unbind_events()
        self.connection = None
        self.set_offline(reason)
        common.UpdateMixin.disconnect(self)

    def handle_rate_limited(self):
        log.info("rate limited!")
        self.error_txt = _("API request limit has been reached.")
        self.handle_update_failed('RATE_LIMIT')

    def handle_update_failed(self, _reason):
        # _reason is a Reasons attribute name; unknown names fall back to
        # CONN_FAIL.
        reason = getattr(self.Reasons, _reason, self.Reasons.CONN_FAIL)
        self.Disconnect(reason=reason)

    def update_item_to_notification(self, item):
        '''Build a popup-notification Storage record for a newsfeed item.'''
        import weakref
        import gui.browser.webkit.imageloader as imageloader
        default_icon = skin.get('BuddiesPanel.BuddyIcons.NoIcon', None)
        if hasattr(item, 'content_body'):
            body = item.content_body()
        else:
            # No plain-text body: render the item and strip the markup.
            html = self.generate_item_html(item)
            body = util.strip_html(html).strip()
        n = util.Storage(acct=weakref.ref(self),
                         icon=imageloader.LazyWebKitImage(
                             item.person.picture_url, default_icon),
                         body=body,
                         title=item.person.name,
                         url=getattr(item, 'url', item.person.profile_url),
                         post_id=item.id)
        return n

    def do_notifications(self, updates):
        '''Fire one paged popup for a batch of newsfeed updates.'''
        if not updates:
            return
        items = []
        for item in updates:
            try:
                items.append(self.update_item_to_notification(item))
            except Exception:
                # Best-effort: a single bad item must not suppress the rest.
                traceback.print_exc()
        common.fire('linkedin.newsfeed',
                    items=items,
                    popupid='%d.linkedin' % id(self),
                    update='paged',
                    badge=skin.get('serviceicons.linkedin', None),
                    buttons=self.get_popup_buttons,
                    onclick=self.on_popup_click)

    def get_popup_buttons(self, item):
        '''Buttons for a newsfeed popup: currently just "Comment (n)" when
        the post supports comments.'''
        self._last_popup_item = item
        buttons = []
        my_item = self.connection.get_post_by_id(item.item.post_id)
        if my_item is None:
            return buttons

        def count_str(count):
            return (' (%s)' % count) if count and count != 0 else ''

        if my_item.supports_comments:
            buttons.append(
                (_("Comment") + count_str(len(my_item.get_comments())),
                 input_callback(self.on_popup_comment)))
        return buttons

    def on_popup_comment(self, item, control, text, options):
        # Post the typed comment, then refresh the popup's buttons.
        post_id = options['item'].post_id
        self._post_comment(
            post_id, text,
            success=(lambda *a, **k: (self.set_dirty(),
                                      control.update_buttons())))
    # NOTE(review): flag presumably consumed by the popup framework -- confirm.
    on_popup_comment.takes_popup_control = True

    def on_popup_click(self, item):
        import wx
        url = getattr(item, 'url', None)
        if url:
            import wx  # redundant re-import; harmless
            wx.LaunchDefaultBrowser(url)

    def bind_events(self):
        conn = oauth_util.OAuthAccountBase.bind_events(self)
        bind = conn.bind
        bind('on_rate_limit', self.handle_rate_limited)
        bind('update_error', self.handle_update_failed)
        bind('newsfeed_updates', self.do_notifications)

    def unbind_events(self):
        conn = oauth_util.OAuthAccountBase.unbind_events(self)
        if conn is None:
            return
        unbind = conn.unbind
        unbind('on_rate_limit', self.handle_rate_limited)
        unbind('update_error', self.handle_update_failed)
        unbind('newsfeed_updates', self.do_notifications)

    @common.action()
    def SetStatus(self):
        # Only the global status dialog is supported for this account type.
        if common.pref('social.use_global_status', default=False, type=bool):
            import wx
            wx.GetApp().SetStatusPrompt([self])
        else:
            log.error(
                "No alternative to global status dialog for new linked in account!"
            )

    DefaultAction = OpenHomeURL = SetStatus

    @callbacks.callsback
    def _set_status(self, new_message, callback=None, **k):
        '''Set the user's LinkedIn status; on success, update the local user
        object, mark dirty, and notify hooks.'''
        # callback.success += lambda * a: self.update_now()
        if new_message:
            callback.success += lambda *a: setattr(
                self.connection.users[self.connection.userid],
                'status', new_message)
        callback.success += lambda *a: self.set_dirty()
        callback.success += lambda *a: hooks.notify(
            'digsby.linkedin.status_updated', self, new_message, *a)
        self.connection.set_status(new_message, callback=callback)

    SetStatusMessage = _set_status

    def disconnect(self, *a, **k):
        pass

    # Observer API intentionally unimplemented for this account type.
    # NOTE(review): these return the NotImplemented constant rather than
    # raising NotImplementedError.
    def observe_count(self, callback):
        return NotImplemented

    def observe_state(self, callback):
        return NotImplemented

    def unobserve_count(self, callback):
        return NotImplemented

    def unobserve_state(self, callback):
        return NotImplemented

    def OnClickHomeURL(self):
        return self.weblink()

    def launchbrowser(self, what):
        import wx
        wx.LaunchDefaultBrowser(self.weblink(what))

    @common.action()
    def openurl_Home(self):
        self.launchbrowser('')

    @common.action()
    def openurl_Inbox(self):
        self.launchbrowser('inBox')

    @common.action()
    def openurl_Friends(self):
        self.launchbrowser('connections')

    @common.action()
    def openurl_Profile(self):
        self.launchbrowser('myprofile')

    # Action is enabled only while online and the social-update pref allows it.
    @common.action(lambda self: ((self.state == self.Statuses.ONLINE) and
                                 common.pref('can_has_social_update', False))
                   or None)
    def update_now(self):
        '''Poll LinkedIn now, (re)connecting first if necessary.'''
        log.info('updating... %r', self)
        self.start_timer()
        log.info("current state: %r", self.state)
        if self.state == self.Statuses.OFFLINE or self.connection is None:
            self.change_state(self.Statuses.CONNECTING)
            try:
                self.create_connection()
                self._connect()
            except Exception:
                traceback.print_exc()
                self.Disconnect(self.Reasons.CONN_FAIL)
                return
        self._update()

    def request_status(self):
        self.set_waiting('status')
        self.connection.request_status()

    def _on_protocol_connect(self):
        log.info("connection ready")

    def handle_connect(self):
        self.change_state(self.Statuses.AUTHENTICATING)

    def handle_status(self, status_info=None):
        log.info('Got status info: %r', status_info)

    # def update_info(self, **info):
    #     return SocialNetwork.update_info(self, **info)

    def on_feed_invalidated(self):
        # Push the current set of post ids into the social feed.
        self.social_feed.new_ids([p.id for p in self.connection.updates])

    def _handle_oauth_error(self, details):
        '''Handle an OAuth failure: drop the token and disconnect as a
        bad-password condition (with a clock hint for timestamp_refused).'''
        log.error('oauth error occurred: %r', details)
        problem = net.WebFormData.parse(details.get('oauth_problem', ''))
        self.clear_oauth_token()
        if 'timestamp_refused' in problem:
            self.error_txt = _(
                "Please set your computer clock to the correct time / timezone."
            )
        self.Disconnect(self.Reasons.BAD_PASSWORD)

    def get_authenticator(self, url_generator):
        # Builds the browser-based OAuth authenticator for this account.
        AuthClass = self._get_auth_class(prefkey='linkedin.authenticator')
        return AuthClass(
            self.username, url_generator,
            '/linkedin/{username}/oauth'.format(username=self.username),
            'LinkedIn Login - %s' % self.username,
            'http://www.digsby.com/myspace',
            'serviceicons.linkedin')

    def _authenticate_post(self):
        log.info("authenticated successfully!")
        oauth_util.OAuthAccountBase._authenticate_post(self)
        self.update_now()

    @callbacks.callsback
    def newsfeed_post_comment(self, rpc, webview, id, comment, post_id,
                              append=True, callback=None):
        '''RPC handler: post a comment; on success optionally append the
        refreshed comments into the webview.'''
        if append:
            callback.success += lambda *a: self.append_comments(
                webview, id, post_id)
        callback.error += lambda error_obj=None: self.Dexcept(
            webview, id, error_obj=error_obj)
        self._post_comment(post_id, comment, callback=callback)

    @callbacks.callsback
    def _post_comment(self, post_id, comment, callback=None):
        # Optimistically append the comment locally, then send it upstream.
        self.connection.get_post_by_id(post_id).comments.append(
            LIO.LinkedInComment(
                sequence_number=-1, text=comment,
                person=self.connection.users[self.connection.userid]))
        self.connection.post_comment(post_id, comment, callback=callback)

    def append_comments(self, webview, id, post_id):
        '''Render the post's comment list and comment link, and deliver both
        to the webview RPC callback.'''
        hooks.notify('digsby.linkedin.comment_added', {})
        t = LinkedInIB(self)
        context = {}
        context['item'] = self.connection.get_post_by_id(post_id)
        comments_html = t.get_html(
            None, set_dirty=False,
            file='comments_list.tenjin',
            dir=t.get_context()['app'].get_res_dir('base'),
            context=context)
        comment_link_html = t.get_html(
            None, set_dirty=False,
            file='comment_link.tenjin',
            dir=t.get_context()['app'].get_res_dir('base'),
            context=context)
        log.debug("comments_html = %r; comment_link_html = %r",
                  comments_html, comment_link_html)
        self.Dsuccess(webview, id, comments_html=comments_html,
                      comment_link_html=comment_link_html)

    def generate_newsfeed_html(self, items, _context_id=None, do_comments=True):
        '''Render a list of newsfeed items through the items.tenjin template.'''
        t = LinkedInIB(self)
        context = {}
        context['items'] = items
        context['conn'] = self.connection
        context['do_comments'] = do_comments
        html = t.get_html(None, set_dirty=False, file='items.tenjin',
                          dir=t.get_context()['app'].get_res_dir('base'),
                          context=context)
        return html

    def generate_item_html(self, item):
        # Template name is chosen by the item's type, e.g. "<type>.tenjin".
        t = LinkedInIB(self)
        context = {}
        context['item'] = item
        context['conn'] = self.connection
        context['friend'] = item.person
        html = t.get_html(None, set_dirty=False,
                          file='%s.tenjin' % item.type,
                          dir=t.get_context()['app'].get_res_dir('base'),
                          context=context)
        return html

    def _update_post(self):
        super(LinkedInAccount, self)._update_post()

    def newsfeed_next_item(self, rpc, webview, id, *extra, **params):
        return self.social_feed.jscall_next_item(webview, id)

    def more_content(self, rpc, webview, id, **params):
        '''RPC handler: render the next chunk of updates past current_posts,
        or signal an error when there is nothing more.'''
        current_posts, _last_post_id = params.get('current_posts', 0), params.get(
            'last_post_id', None)
        t = LinkedInIB(self)
        if current_posts < len(self.connection.updates):
            items = self.connection.updates[current_posts:current_posts + 1]
            items_html = self.generate_newsfeed_html(items)
            self.Dsuccess(webview, id, contents=items_html)
        else:
            self.Derror(webview, id)

    def newsfeed_initialize_feed(self, rpc, webview, id, *extra, **params):
        return self.social_feed.jscall_initialize_feed(webview, id)

    def newsfeed_do_dislike(self, rpc, webview, id, post_id, **kwds):
        '''RPC handler: record a "dislike" as a special comment, unless the
        user already disliked the post.'''
        log.info("do dislike: kwds = %r", kwds)
        item = self.connection.get_post_by_id(post_id)
        if item is None:
            log.error("%r: no post for post_id %r", self, post_id)
            return
        if item.user_dislikes(self.connection.userid):
            log.info("user already dislikes this post")
            return
        self.newsfeed_post_comment(
            rpc, webview, id, LIO.LinkedInComment.DISLIKE, post_id,
            append=False,
            success=lambda *a, **k: self.dislike_added(webview, id, post_id))

    def dislike_added(self, webview, id, post_id):
        self.refresh_likes(webview, id, post_id, True)
        hooks.notify('digsby.linkedin.dislike_added', post_id)

    def newsfeed_do_like(self, rpc, webview, id, post_id, **kwds):
        '''RPC handler: record a "like" as a special comment, unless the user
        already liked the post.'''
        log.info("do like: kwds = %r", kwds)
        item = self.connection.get_post_by_id(post_id)
        if item is None:
            log.error("%r: no post for post_id %r", self, post_id)
            return
        if item.user_likes(self.connection.userid):
            log.info("user already likes this post")
            return
        self.newsfeed_post_comment(
            rpc, webview, id, LIO.LinkedInComment.LIKE, post_id,
            append=False,
            success=lambda *a, **k: self.like_added(webview, id, post_id))

    def like_added(self, webview, id, post_id):
        self.refresh_likes(webview, id, post_id, True)
        hooks.notify('digsby.linkedin.like_added', post_id)

    def refresh_likes(self, webview, id, post_id, dis=False):
        log.info("refreshing item: %r", post_id)
        #regen likes block, regen likes link block, send to callback
        #regen cached post html
        item_html = self.generate_newsfeed_html(
            [self.connection.get_post_by_id(post_id)], None)
        self.Dsuccess(webview, id, item_html=item_html)

    def _create_activity(self, body):
        self.connection.create_activity(body=body)
class LinkedInAccount(SocialNetwork, oauth_util.OAuthAccountBase, jsonrpc.RPCClient):
    """LinkedIn social-network account.

    Combines the generic SocialNetwork machinery with OAuth authentication
    and a JSON-RPC bridge used by the infobox webview.

    NOTE(review): this file contains two near-identical definitions of
    LinkedInAccount; at import time the later definition wins.
    """

    # Maps RPC action names (sent from the infobox webview) to handler
    # method names on this class.
    # NOTE(review): 'newsfeed_more_content' has no matching method below --
    # the handler is named more_content; confirm the dispatch mechanism or
    # the method name.
    _rpc_handlers = {
        "post_comment": "newsfeed_post_comment",
        "next_item": "newsfeed_next_item",
        "initialize_feed": "newsfeed_initialize_feed",
        "near_bottom": "newsfeed_more_content",
        #'load_comments' : 'newsfeed_load_comments',
        "do_like": "newsfeed_do_like",
        "do_dislike": "newsfeed_do_dislike",
    }

    updatefreq = 10 * 60  # ten minutes

    class Statuses(SocialNetwork.Statuses):
        CHECKING = _("Checking now...")

    BORDER = "0"
    service = protocol = "linkedin"
    AuthClass = oauth_util.InternalBrowserAuthenticator
    # AuthClass = oauth_util.UserBrowserAuthenticator
    webroot = "https://www.linkedin.com"

    def weblink(self, resource=""):
        # Absolute URL for a path on linkedin.com.
        return net.httpjoin(self.webroot, resource)

    @property
    def header_funcs(self):
        # (label, url) pairs for the infobox header links.
        return (
            (_("Home"), self.weblink()),
            (_("Inbox"), self.weblink("inBox")),
            (_("Contacts"), self.weblink("connections")),
            (_("Profile"), self.weblink("myprofile")),
        )

    def _get_inbox_count_str(self):
        # e.g. " (3)" when there are unread items, else "".
        if self.count:
            return " (%d)" % self.count
        else:
            return ""

    def __init__(self, **options):
        self.time_offset = None
        # OAuth accounts do not store a password locally.
        options["password"] = None
        oauth_util.OAuthAccountBase.__init__(self, **options)
        self.connection = None
        self._dirty = False
        SocialNetwork.__init__(self, **options)
        # NOTE(review): the lambda returns the bound method without calling
        # it -- if SocialFeed invokes this callback expecting the infobox to
        # be dirtied, it is a no-op (compare FacebookAccount, which passes
        # set_infobox_dirty directly). Confirm SocialFeed's contract.
        self.social_feed = SocialFeed(
            "linkedin_" + self.username,
            "linkedin_" + self.username,
            self.get_newsfeed_iter,
            self.generate_newsfeed_html,
            lambda: self.set_infobox_dirty,
        )

    def set_dirty(self):
        log.info("set dirty")
        self._dirty = True

    def _cache_data(self, api_data):
        self.api_data = api_data

    # Disk-cached copy of API results (per-user).
    api_data = cacheable.cproperty({}, user=True)

    def get_newsfeed_iter(self):
        """Iterator over current newsfeed updates; empty when disconnected."""
        return iter(getattr(getattr(self, "connection", None), "updates", []))

    def set_infobox_dirty(self):
        self._dirty = True
        self.notify("dirty")

    @property
    def cache_path(self):
        return os.path.join(self.protocol, self.name, "api-results.dat")

    def get_options(self):
        """Serializable account options (network + OAuth), with defaults."""
        opts = {"informed_ach": True, "post_ach_all": False}
        opts.update(SocialNetwork.get_options(self))
        opts.update(oauth_util.OAuthAccountBase.get_options(self))
        return opts

    def Connect(self):
        self.error_txt = None
        log.info("Connect called for %r", self)
        self._update_now()

    def _update_now(self):
        # Only poll when the account is enabled; otherwise go offline quietly.
        if self.enabled:
            self.update_now()
        else:
            self.set_offline(self.Reasons.NONE)

    def _connect(self):
        self.connection.connect()

    def create_connection(self):
        """Create the protocol object and bind its events; raises if one
        already exists."""
        if self.connection is not None:
            raise Exception("Already have a connection")
        import LinkedInProtocol as LIP

        self.connection = LIP.LinkedInProtocol(
            self.username, self.oauth_token, self.api_data, self.filters, time_offset=self.time_offset
        )
        self.bind_events()

    def connect_failed(self, e):
        self._dirty_error = True
        self.Disconnect(reason=self.Reasons.CONN_FAIL)

    def Disconnect(self, reason=None):
        """Tear down the connection and go offline with the given reason."""
        if reason is None:
            reason = self.Reasons.NONE
        self.unbind_events()
        self.connection = None
        self.set_offline(reason)
        common.UpdateMixin.disconnect(self)

    def handle_rate_limited(self):
        log.info("rate limited!")
        self.error_txt = _("API request limit has been reached.")
        self.handle_update_failed("RATE_LIMIT")

    def handle_update_failed(self, _reason):
        # _reason is a Reasons attribute name; unknown names fall back to
        # CONN_FAIL.
        reason = getattr(self.Reasons, _reason, self.Reasons.CONN_FAIL)
        self.Disconnect(reason=reason)

    def update_item_to_notification(self, item):
        """Build a popup-notification Storage record for a newsfeed item."""
        import weakref
        import gui.browser.webkit.imageloader as imageloader

        default_icon = skin.get("BuddiesPanel.BuddyIcons.NoIcon", None)
        if hasattr(item, "content_body"):
            body = item.content_body()
        else:
            # No plain-text body: render the item and strip the markup.
            html = self.generate_item_html(item)
            body = util.strip_html(html).strip()
        n = util.Storage(
            acct=weakref.ref(self),
            icon=imageloader.LazyWebKitImage(item.person.picture_url, default_icon),
            body=body,
            title=item.person.name,
            url=getattr(item, "url", item.person.profile_url),
            post_id=item.id,
        )
        return n

    def do_notifications(self, updates):
        """Fire one paged popup for a batch of newsfeed updates."""
        if not updates:
            return
        items = []
        for item in updates:
            try:
                items.append(self.update_item_to_notification(item))
            except Exception:
                # Best-effort: a single bad item must not suppress the rest.
                traceback.print_exc()
        common.fire(
            "linkedin.newsfeed",
            items=items,
            popupid="%d.linkedin" % id(self),
            update="paged",
            badge=skin.get("serviceicons.linkedin", None),
            buttons=self.get_popup_buttons,
            onclick=self.on_popup_click,
        )

    def get_popup_buttons(self, item):
        """Buttons for a newsfeed popup: currently just "Comment (n)" when
        the post supports comments."""
        self._last_popup_item = item
        buttons = []
        my_item = self.connection.get_post_by_id(item.item.post_id)
        if my_item is None:
            return buttons

        def count_str(count):
            return (" (%s)" % count) if count and count != 0 else ""

        if my_item.supports_comments:
            buttons.append(
                (_("Comment") + count_str(len(my_item.get_comments())), input_callback(self.on_popup_comment))
            )
        return buttons

    def on_popup_comment(self, item, control, text, options):
        # Post the typed comment, then refresh the popup's buttons.
        post_id = options["item"].post_id
        self._post_comment(post_id, text, success=(lambda *a, **k: (self.set_dirty(), control.update_buttons())))

    # NOTE(review): flag presumably consumed by the popup framework -- confirm.
    on_popup_comment.takes_popup_control = True

    def on_popup_click(self, item):
        import wx

        url = getattr(item, "url", None)
        if url:
            import wx  # redundant re-import; harmless

            wx.LaunchDefaultBrowser(url)

    def bind_events(self):
        conn = oauth_util.OAuthAccountBase.bind_events(self)
        bind = conn.bind
        bind("on_rate_limit", self.handle_rate_limited)
        bind("update_error", self.handle_update_failed)
        bind("newsfeed_updates", self.do_notifications)

    def unbind_events(self):
        conn = oauth_util.OAuthAccountBase.unbind_events(self)
        if conn is None:
            return
        unbind = conn.unbind
        unbind("on_rate_limit", self.handle_rate_limited)
        unbind("update_error", self.handle_update_failed)
        unbind("newsfeed_updates", self.do_notifications)

    @common.action()
    def SetStatus(self):
        # Only the global status dialog is supported for this account type.
        if common.pref("social.use_global_status", default=False, type=bool):
            import wx

            wx.GetApp().SetStatusPrompt([self])
        else:
            log.error("No alternative to global status dialog for new linked in account!")

    DefaultAction = OpenHomeURL = SetStatus

    @callbacks.callsback
    def _set_status(self, new_message, callback=None, **k):
        """Set the user's LinkedIn status; on success, update the local user
        object, mark dirty, and notify hooks."""
        # callback.success += lambda * a: self.update_now()
        if new_message:
            callback.success += lambda *a: setattr(self.connection.users[self.connection.userid], "status", new_message)
        callback.success += lambda *a: self.set_dirty()
        callback.success += lambda *a: hooks.notify("digsby.linkedin.status_updated", self, new_message, *a)
        self.connection.set_status(new_message, callback=callback)

    SetStatusMessage = _set_status

    def disconnect(self, *a, **k):
        pass

    # Observer API intentionally unimplemented for this account type.
    # NOTE(review): these return the NotImplemented constant rather than
    # raising NotImplementedError.
    def observe_count(self, callback):
        return NotImplemented

    def observe_state(self, callback):
        return NotImplemented

    def unobserve_count(self, callback):
        return NotImplemented

    def unobserve_state(self, callback):
        return NotImplemented

    def OnClickHomeURL(self):
        return self.weblink()

    def launchbrowser(self, what):
        import wx

        wx.LaunchDefaultBrowser(self.weblink(what))

    @common.action()
    def openurl_Home(self):
        self.launchbrowser("")

    @common.action()
    def openurl_Inbox(self):
        self.launchbrowser("inBox")

    @common.action()
    def openurl_Friends(self):
        self.launchbrowser("connections")

    @common.action()
    def openurl_Profile(self):
        self.launchbrowser("myprofile")

    # Action is enabled only while online and the social-update pref allows it.
    @common.action(
        lambda self: ((self.state == self.Statuses.ONLINE) and common.pref("can_has_social_update", False)) or None
    )
    def update_now(self):
        """Poll LinkedIn now, (re)connecting first if necessary."""
        log.info("updating... %r", self)
        self.start_timer()
        log.info("current state: %r", self.state)
        if self.state == self.Statuses.OFFLINE or self.connection is None:
            self.change_state(self.Statuses.CONNECTING)
            try:
                self.create_connection()
                self._connect()
            except Exception:
                traceback.print_exc()
                self.Disconnect(self.Reasons.CONN_FAIL)
                return
        self._update()

    def request_status(self):
        self.set_waiting("status")
        self.connection.request_status()

    def _on_protocol_connect(self):
        log.info("connection ready")

    def handle_connect(self):
        self.change_state(self.Statuses.AUTHENTICATING)

    def handle_status(self, status_info=None):
        log.info("Got status info: %r", status_info)

    # def update_info(self, **info):
    #     return SocialNetwork.update_info(self, **info)

    def on_feed_invalidated(self):
        # Push the current set of post ids into the social feed.
        self.social_feed.new_ids([p.id for p in self.connection.updates])

    def _handle_oauth_error(self, details):
        """Handle an OAuth failure: drop the token and disconnect as a
        bad-password condition (with a clock hint for timestamp_refused)."""
        log.error("oauth error occurred: %r", details)
        problem = net.WebFormData.parse(details.get("oauth_problem", ""))
        self.clear_oauth_token()
        if "timestamp_refused" in problem:
            self.error_txt = _("Please set your computer clock to the correct time / timezone.")
        self.Disconnect(self.Reasons.BAD_PASSWORD)

    def get_authenticator(self, url_generator):
        # Builds the browser-based OAuth authenticator for this account.
        AuthClass = self._get_auth_class(prefkey="linkedin.authenticator")
        return AuthClass(
            self.username,
            url_generator,
            "/linkedin/{username}/oauth".format(username=self.username),
            "LinkedIn Login - %s" % self.username,
            "http://www.digsby.com/myspace",
            "serviceicons.linkedin",
        )

    def _authenticate_post(self):
        log.info("authenticated successfully!")
        oauth_util.OAuthAccountBase._authenticate_post(self)
        self.update_now()

    @callbacks.callsback
    def newsfeed_post_comment(self, rpc, webview, id, comment, post_id, append=True, callback=None):
        """RPC handler: post a comment; on success optionally append the
        refreshed comments into the webview."""
        if append:
            callback.success += lambda *a: self.append_comments(webview, id, post_id)
        callback.error += lambda error_obj=None: self.Dexcept(webview, id, error_obj=error_obj)
        self._post_comment(post_id, comment, callback=callback)

    @callbacks.callsback
    def _post_comment(self, post_id, comment, callback=None):
        # Optimistically append the comment locally, then send it upstream.
        self.connection.get_post_by_id(post_id).comments.append(
            LIO.LinkedInComment(sequence_number=-1, text=comment, person=self.connection.users[self.connection.userid])
        )
        self.connection.post_comment(post_id, comment, callback=callback)

    def append_comments(self, webview, id, post_id):
        """Render the post's comment list and comment link, and deliver both
        to the webview RPC callback."""
        hooks.notify("digsby.linkedin.comment_added", {})
        t = LinkedInIB(self)
        context = {}
        context["item"] = self.connection.get_post_by_id(post_id)
        comments_html = t.get_html(
            None,
            set_dirty=False,
            file="comments_list.tenjin",
            dir=t.get_context()["app"].get_res_dir("base"),
            context=context,
        )
        comment_link_html = t.get_html(
            None,
            set_dirty=False,
            file="comment_link.tenjin",
            dir=t.get_context()["app"].get_res_dir("base"),
            context=context,
        )
        log.debug("comments_html = %r; comment_link_html = %r", comments_html, comment_link_html)
        self.Dsuccess(webview, id, comments_html=comments_html, comment_link_html=comment_link_html)

    def generate_newsfeed_html(self, items, _context_id=None, do_comments=True):
        """Render a list of newsfeed items through the items.tenjin template."""
        t = LinkedInIB(self)
        context = {}
        context["items"] = items
        context["conn"] = self.connection
        context["do_comments"] = do_comments
        html = t.get_html(
            None, set_dirty=False, file="items.tenjin", dir=t.get_context()["app"].get_res_dir("base"), context=context
        )
        return html

    def generate_item_html(self, item):
        # Template name is chosen by the item's type, e.g. "<type>.tenjin".
        t = LinkedInIB(self)
        context = {}
        context["item"] = item
        context["conn"] = self.connection
        context["friend"] = item.person
        html = t.get_html(
            None,
            set_dirty=False,
            file="%s.tenjin" % item.type,
            dir=t.get_context()["app"].get_res_dir("base"),
            context=context,
        )
        return html

    def _update_post(self):
        super(LinkedInAccount, self)._update_post()

    def newsfeed_next_item(self, rpc, webview, id, *extra, **params):
        return self.social_feed.jscall_next_item(webview, id)

    def more_content(self, rpc, webview, id, **params):
        """RPC handler: render the next chunk of updates past current_posts,
        or signal an error when there is nothing more."""
        current_posts, _last_post_id = params.get("current_posts", 0), params.get("last_post_id", None)
        t = LinkedInIB(self)
        if current_posts < len(self.connection.updates):
            items = self.connection.updates[current_posts : current_posts + 1]
            items_html = self.generate_newsfeed_html(items)
            self.Dsuccess(webview, id, contents=items_html)
        else:
            self.Derror(webview, id)

    def newsfeed_initialize_feed(self, rpc, webview, id, *extra, **params):
        return self.social_feed.jscall_initialize_feed(webview, id)

    def newsfeed_do_dislike(self, rpc, webview, id, post_id, **kwds):
        """RPC handler: record a "dislike" as a special comment, unless the
        user already disliked the post."""
        log.info("do dislike: kwds = %r", kwds)
        item = self.connection.get_post_by_id(post_id)
        if item is None:
            log.error("%r: no post for post_id %r", self, post_id)
            return
        if item.user_dislikes(self.connection.userid):
            log.info("user already dislikes this post")
            return
        self.newsfeed_post_comment(
            rpc,
            webview,
            id,
            LIO.LinkedInComment.DISLIKE,
            post_id,
            append=False,
            success=lambda *a, **k: self.dislike_added(webview, id, post_id),
        )

    def dislike_added(self, webview, id, post_id):
        self.refresh_likes(webview, id, post_id, True)
        hooks.notify("digsby.linkedin.dislike_added", post_id)

    def newsfeed_do_like(self, rpc, webview, id, post_id, **kwds):
        """RPC handler: record a "like" as a special comment, unless the user
        already liked the post."""
        log.info("do like: kwds = %r", kwds)
        item = self.connection.get_post_by_id(post_id)
        if item is None:
            log.error("%r: no post for post_id %r", self, post_id)
            return
        if item.user_likes(self.connection.userid):
            log.info("user already likes this post")
            return
        self.newsfeed_post_comment(
            rpc,
            webview,
            id,
            LIO.LinkedInComment.LIKE,
            post_id,
            append=False,
            success=lambda *a, **k: self.like_added(webview, id, post_id),
        )

    def like_added(self, webview, id, post_id):
        self.refresh_likes(webview, id, post_id, True)
        hooks.notify("digsby.linkedin.like_added", post_id)

    def refresh_likes(self, webview, id, post_id, dis=False):
        log.info("refreshing item: %r", post_id)
        # regen likes block, regen likes link block, send to callback
        # regen cached post html
        item_html = self.generate_newsfeed_html([self.connection.get_post_by_id(post_id)], None)
        self.Dsuccess(webview, id, item_html=item_html)

    def _create_activity(self, body):
        self.connection.create_activity(body=body)
class MyspaceAccount(social.network, oauth_util.OAuthAccountBase, jsonrpc.RPCClient):
    '''A MySpace social-network account.

    OAuth-authenticated; renders a newsfeed into the infobox webview and
    services JSON-RPC calls coming back from it (see _rpc_handlers).
    '''

    url_base = 'http://www.myspace.com/index.cfm'

    AuthClass = oauth_util.InternalBrowserAuthenticator # UserBrowserAuthenticator
    #AuthClass = oauth_util.InternalBrowserAuthenticatorOpenID

    events = events.EventMixin.events | set((
        # stuff
    ))

    service = protocol = 'myspace'

    # Alert-count sources; each may be toggled via the 'indicators' filter.
    indicators_keys = ['blogcommenturl',
                       'blogsubscriptionposturl',
                       'picturecommenturl',
                       'eventinvitationurl',
                       'commenturl',
                       'phototagapprovalurl',
                       'friendsrequesturl',
                       'videocommenturl',
                       'groupnotificationurl',
                       'recentlyaddedfriendurl',
                       'birthdayurl',
                       #'countpendingim',
                       ]

    # Newsfeed categories; each may be toggled via the 'feed' filter.
    feed_keys = ['statuses', 'friends', 'posts', 'groups', 'photos',
                 'music', 'videos', 'events', 'applications', ]

    # Menu label -> myspace.com "fuseaction" query value.
    header_fuseactions = dict((
        ('Home', 'user'),
        #('Profile', 'user.viewprofile'), # supposed to take you to myspace.com/yourname but instead just goes to myspace.com (??)
        ('Inbox', 'mail.inboxV2'),
        ('Friends', 'user.viewfriends'),
        ('Blog', 'blog.ListAll'),
        ('Post Bulletin', 'bulletin'),
    ))

    def url_destination(name):
        # Class-body factory for the openurl_* methods below; deleted after
        # use so it never becomes an instance method.
        def get_url(self):
            kwargs = DIGSBY_UTM_ARGS.copy()
            self_userid = getattr(self.connection, 'userid', None)
            if self_userid is not None:
                kwargs.update(friendId = self_userid)
            kwargs.update(fuseaction = self.header_fuseactions[name])
            return self.openurl(net.UrlQuery(self.url_base, **kwargs))
        return get_url

    openurl_Home = url_destination("Home")
    openurl_Profile = url_destination("Profile")
    openurl_Inbox = url_destination("Inbox")
    openurl_Friends = url_destination("Friends")
    openurl_Blog = url_destination("Blog")
    openurl_Post = url_destination("Post Bulletin")
    del url_destination

    def __init__(self, *a, **k):
        common.Protocol.StateMixin.__init__(self)
        oauth_util.OAuthAccountBase.__init__(self, **k)
        self.count = 0
        self.connection = None
        self._dirty = False
        # Per-key boolean maps; missing entries default to enabled.
        filters = k.pop('filters', {})
        self.filters = dict(indicators=dict(zip(self.indicators_keys,
                                                filters.get('indicators', [True]*len(self.indicators_keys)))),
                            feed = dict(zip(self.feed_keys,
                                            filters.get('feed', [True]*len(self.feed_keys)))),
                            )
        self.header_funcs = (
            ('Home', self.openurl_Home),
            # ('Profile', self.openurl_Profile),
            ('Inbox', self.openurl_Inbox),
            ('Friends', self.openurl_Friends),
            ('Blog', self.openurl_Blog),
            ('Post Bulletin', self.openurl_Post),
        )
        if 'password' not in k:
            k['password'] = None
        social.network.__init__(self, *a, **k)
        # OAuth-only setups don't persist a password.
        self._remove_password = not self.protocol_info()['needs_password']
        if self._remove_password:
            self.password = None
        from social.network import SocialFeed
        self.social_feed = SocialFeed('myspace_' + self.username,
                                      'activities',
                                      lambda: iter(self.connection.combined_feed()),
                                      self.htmlize_activities,
                                      self._set_dirty)

    def htmlize_activities(self, activities, context):
        # Render activity items through the infobox tenjin template.
        t = MyspaceIB(self)
        return t.get_html(None, set_dirty=False,
                          file='activities.tenjin',
                          dir=t.get_context()['app'].get_res_dir('base'),
                          context=dict(activities = activities))

    def Connect(self):
        log.info('Connect called for %r', self)
        self._update_now()

    def create_connection(self):
        if self.connection is not None:
            raise Exception('Already have a connection')
        import MyspaceProtocol as MSP
        self.connection = MSP.MyspaceProtocol(self.username,
                                              self.oauth_token,
                                              self._decryptedpw,
                                              self.api_data,
                                              self.filters)
        self.bind_events()

    @property
    def cache_path(self):
        return os.path.join('myspace-social3', self.username, 'api-results.dat')

    def _cache_data(self, api_data):
        # Assigning through the cproperty below persists to disk.
        self.api_data = api_data

    api_data = cacheable.cproperty({}, user = True)

    def on_feed_invalidated(self):
        ids = [p.id for p in self.connection.combined_feed()]
        self.social_feed.new_ids(ids)

    def observe_count(self, callback):
        self.add_gui_observer(callback, 'count')

    def unobserve_count(self, callback):
        self.remove_gui_observer(callback, 'count')

    def _got_indicators(self, inds):
        '''Recompute the alert count from indicator URLs, honoring filters.'''
        log.info('got indicators: %r', inds)
        indicators = {}
        for k in self.indicators_keys:
            if k in inds and self.filters['indicators'].get(k, False):
                indicators[k] = inds[k]
        num_inds = len(indicators)
        # Unread-mail indicator is counted unconditionally.
        if inds.get('mailurl', None) is not None:
            num_inds += 1
        if getattr(self, '_num_inds', -1) != num_inds:
            self._num_inds = num_inds
            self.setnotify('count', num_inds)

    def get_authenticator(self, url_generator):
        AuthClass = self._get_auth_class(prefkey = 'myspace.authenticator')
        return AuthClass(self.username,
                         url_generator,
                         '/myspace/{username}/oauth'.format(username = self.username),
                         'MySpace Login - %s' % self.username,
                         "http://www.digsby.com/myspace/",
                         'serviceicons.myspace')

    def bind_events(self):
        oauth_util.OAuthAccountBase.bind_events(self)
        self.connection.bind('on_indicators', self._got_indicators)

    def unbind_events(self):
        conn = oauth_util.OAuthAccountBase.unbind_events(self)
        if conn is None:
            return
        conn.unbind('on_indicators', self._got_indicators)

    def _on_protocol_connect(self):
        pass

    def _connect(self):
        #assert self.connection is not None
        self.connection.connect()
        log.info('Calling connect for connection')

    def _update_now(self):
        if self.enabled:
            self.update_now()
        else:
            self.set_offline(self.Reasons.NONE)

    @common.action(lambda self: common.pref('can_has_social_update', None) or None)
    def update_now(self):
        log.info('updating... %r', self)
        self.start_timer()
        if self.state == self.Statuses.OFFLINE:
            self.change_state(self.Statuses.CONNECTING)
        try:
            self.create_connection()
            self._connect()
        except Exception:
            import traceback; traceback.print_exc()
            self.Disconnect(self.Reasons.CONN_FAIL)
            return
        self._update()

    def _update_pre(self):
        if self._has_updated or self._forcing_login:
            st = self.Statuses.CHECKING
        else:
            st = self.Statuses.CONNECTING
        self.change_state(st)

    def _reset_connection(self):
        self._has_updated = False
        self._on_auth_done()
        #self.connection.disconnect()

    def Disconnect(self, reason = None):
        log.info('Disconnect called')
        self.pause_timer()
        self._reset_connection()
        self.unbind_events()
        self.connection = None
        reason = reason or self.Reasons.NONE
        if self.state != self.Statuses.OFFLINE:
            self.set_offline(reason)
        common.UpdateMixin.disconnect(self)

    disconnect = Disconnect

    def _update_error(self, e):
        log.debug("%r got update error: %r", self, e)
        if hasattr(e, 'read'):
            log.debug_s('\tbody: %r', e.read())
        if isinstance(e, oauth.OAuthError):
            return self._handle_oauth_error(getattr(e, 'oauth_data', None))
        if self.state == self.Statuses.OFFLINE:
            return
        if self._has_updated:
            rsn = self.Reasons.CONN_LOST
        else:
            rsn = self.Reasons.CONN_FAIL
        self.Disconnect(rsn)

    def _handle_oauth_error(self, details):
        # NOTE(review): indentation of the final Disconnect reconstructed from
        # a collapsed source line; read here as unconditional — confirm
        # against upstream.
        log.error('oauth error occurred: %r', details)
        problem = details.get('oauth_problem', None)
        self.clear_oauth_token()
        if problem == 'timestamp_refused':
            self.error_txt = _("Please set your computer clock to the correct time / timezone.")
        self.Disconnect(self.Reasons.BAD_PASSWORD)

    def update_info(self, **info):
        filters = info.pop('filters', None)
        if filters is not None:
            self.filters.update(
                dict(indicators=dict(zip(self.indicators_keys,
                                         filters.get('indicators', [True]*len(self.indicators_keys)))),
                     feed = dict(zip(self.feed_keys,
                                     filters.get('feed', [True]*len(self.feed_keys)))),
                     ))
            self._set_dirty()
        if info.get('password') is not None and self._remove_password:
            info['password'] = None
        return social.network.update_info(self, **info)

    def get_options(self):
        opts = super(MyspaceAccount, self).get_options()
        opts.update({'informed_ach': True, 'post_ach_all': False})
        opts.update(filters = dict(feed = [bool(self.filters['feed'].get(x, True)) for x in self.feed_keys],
                                   indicators = [bool(self.filters['indicators'].get(x, True)) for x in self.indicators_keys]))
        if opts.get('password') is not None and self._remove_password:
            opts['password'] = None
        if 'oauth_token' not in opts:
            opts['oauth_token'] = self.oauth_token
        return opts

    @common.action()
    def edit_status(self):
        if common.pref('social.use_global_status', default = False, type = bool):
            import wx
            wx.GetApp().SetStatusPrompt([self])
        else:
            from myspacegui.editstatus import get_new_status
            get_new_status(success = self.set_web_status)

    DefaultAction = OpenHomeURL = edit_status

    @callbacks.callsback
    def SetStatusMessage(self, message, callback = None, **opts):
        if len(message) == 0:
            return callback.success()
        self.connection.set_status_message(message, callback = callback)

    def _dirty_get(self):
        # Delegate dirtiness to the live connection; dirty when no connection.
        return getattr(getattr(self, 'connection', None), '_dirty', True)

    def _dirty_set(self, val):
        if self.connection is not None:
            self.connection._dirty = val

    _dirty = property(_dirty_get, _dirty_set)

    @common.action(lambda self: common.pref('can_has_social_update', None) or None)
    def _set_dirty(self):
        self._dirty = True

    ## TODO: Move all this rpc nonsense to a superclass, with bind/unbind/callbacks and some sweet-ass decorator magic.
    # JS-side event name -> method name dispatched by the RPC layer.
    _rpc_handlers = {
        'near_bottom' : 'more_content',
        'post_comment' : 'post_comment',
        'hook' : 'rpc_hook',
        'load_comments' : 'load_comments',
        'initialize_feed' : 'initialize_feed',
        'next_item' : 'next_item',
        'do_permissions': 'initiate_login',
        'do_like' : 'newsfeed_do_like',
        'do_dislike' : 'newsfeed_do_dislike',
    }

    def initiate_login(self, *a, **k):
        self.connection.userinfo = None
        oauth_util.OAuthAccountBase.initiate_login(self, *a, **k)

    def user_dislikes(self, userid, item):
        return item.user_dislikes(userid)

    def user_likes(self, userid, item):
        return item.user_likes(userid)

    def newsfeed_do_dislike(self, rpc, webview, id, post_id):
        '''Handle a 'dislike' click arriving from the newsfeed webview.'''
        item = self.connection.get_post_by_id(post_id)
        if item is None:
            log.error("%r: no post for post_id %r", self, post_id)
            return
        if self.user_dislikes(self.connection.userid, item):
            log.info("user already dislikes this post")
            return
        import myspace.objects as MSO
        self.post_comment(rpc, webview, id, MSO.MyspaceComment.DISLIKE, post_id,
                          append = False,
                          success = lambda *a, **k: self.dislike_added(webview, id, post_id))

    def newsfeed_do_like(self, rpc, webview, id, post_id):
        '''Handle a 'like' click arriving from the newsfeed webview.'''
        item = self.connection.get_post_by_id(post_id)
        if item is None:
            log.error("%r: no post for post_id %r", self, post_id)
            return
        if self.user_likes(self.connection.userid, item):
            log.info("user already likes this post")
            return
        import myspace.objects as MSO
        self.post_comment(rpc, webview, id, MSO.MyspaceComment.LIKE, post_id,
                          append = False,
                          success = lambda *a, **k: self.like_added(webview, id, post_id))

    def dislike_added(self, webview, id, post_id):
        self.refresh_likes(webview, id, post_id, True)
        hooks.notify('digsby.myspace.dislike_added', post_id)

    def like_added(self, webview, id, post_id):
        self.refresh_likes(webview, id, post_id, False)
        hooks.notify('digsby.myspace.like_added', post_id)

    def refresh_likes(self, webview, id, post_id, dis = False):
        # Re-render the single post and hand the HTML to the webview callback.
        # `dis` is currently unused in this body.
        log.info("refreshing item: %r", post_id)
        #regen likes block, regen likes link block, send to callback
        #regen cached post html
        item = self.connection.get_post_by_id(post_id)
        item_html = self.generate_newsfeed_html([item])
        self.Dsuccess(webview, id, item_html = item_html)

    @callbacks.callsback
    def post_comment(self, rpc, webview, id, comment, post_id, append = True, callback = None):
        '''Post a comment (or a like/dislike pseudo-comment when append is False).'''
        post = self.connection.get_post_by_id(post_id)
        if append:
            post._numComments += 1
            callback.success += lambda *a: self.append_comments(webview, id, post_id)
        else:
            # Optimistically add a local comment object so the UI updates
            # before the server round-trip completes.
            import myspace.objects as MSO
            post.comments.append(MSO.MyspaceComment.from_json(
                dict(
                    userid = 'myspace.com.person.%s' % self.connection.userid,
                    text = comment,
                    commentId = str(random.randint(0, 0x7fffffff)),
                    postedDate_parsed = time.time(),
                )))
        callback.error += lambda *a: self.Dexcept(webview, id, *a)
        self._post_comment(comment, post_id, callback = callback)

    @callbacks.callsback
    def _post_comment(self, comment, post_id, callback = None):
        self.connection.post_comment(post_id, comment, callback = callback)

    def generate_newsfeed_html(self, activities, _context_id = None, do_comments = True):
        t = MyspaceIB(self)
        activities_html = t.get_html(None, set_dirty=False,
                                     file='activities.tenjin',
                                     dir=t.get_context()['app'].get_res_dir('base'),
                                     context=dict(activities = activities))
        return activities_html

    def append_comments(self, webview, id, post_id):
        # Render the comment section + bottom row for one post and push both
        # to the webview; also refresh the cached friend_status payload.
        t = MyspaceIB(self)
        context = {}
        context['item'] = self.connection.get_post_by_id(post_id)
        comments_html = t.get_html(None, set_dirty=False,
                                   file='comment_section.tenjin',
                                   dir=t.get_context()['app'].get_res_dir('base'),
                                   context=context)
        bottom_row_html = t.get_html(None, set_dirty=False,
                                     file='bottom_row.tenjin',
                                     dir=t.get_context()['app'].get_res_dir('base'),
                                     context=context)
        self.Dsuccess(webview, id, comments_html = comments_html, bottom_row_html = bottom_row_html)
        self.api_data['friend_status'] = self.connection.friend_status
        self._cache_data(self.api_data)

    def more_content(self, rpc, webview, id, **params):
        # Infinite-scroll handler: serve one more activity past current_posts.
        current_posts, _last_post_id = params.get('current_posts', 0), params.get('last_post_id', None)
        if current_posts < len(self.connection.combined_feed()):
            activities = self.connection.combined_feed()[current_posts:current_posts+1]
            activities_html = self.generate_newsfeed_html(activities)
            self.Dsuccess(webview, id, contents = activities_html)
        else:
            self.Derror(webview, id)

    def initialize_feed(self, rpc, webview, id, *extra, **params):
        self.social_feed.jscall_initialize_feed(webview, id)

    def next_item(self, rpc, webview, id, *extra, **params):
        self.social_feed.jscall_next_item(webview, id)

    def load_comments(self, rpc, webview, id, post_id):
        self.connection.get_comments_for(post_id,
                                         #success = lambda: self.refresh_likes(webview, id, post_id),
                                         success = lambda: self.append_comments(webview, id, post_id),
                                         error = lambda error_obj = None, **k: self.Dexcept(webview, id, error_obj = error_obj, **k))
        #self.more_content(args['current_posts'], args.pop('last_post_id', None), rpc.pop('id'))

    def user_from_activity(self, act):
        return self.connection.user_from_activity(act)

    def get_imageurl_for_user(self, user):
        if user is None:
            return None
        else:
            return user.get('image', user.get('thumbnailUrl', None))

    def user_from_id(self, id):
        return self.connection.user_from_id(id)

    def _name_for_user(self, user):
        '''Best-effort display name: "First Last", a single part, or a fallback.'''
        name = getattr(user, 'name', None)
        if name is None:
            return getattr(user, 'displayName', 'Private User')
        firstname, lastname = name.get('givenName', None), name.get('familyName', None)
        if (firstname, lastname) == (None, None):
            return name
        if firstname and lastname:
            return u"%s %s" % (firstname, lastname)
        return firstname or lastname
class TwitterProtocol(object):
    '''Model/controller for one Twitter account.

    Holds the recent timeline, feeds, trends and unread counts; dispatches
    protocol events through self.events and drives a webkit-based UI.
    '''

    # Names turned into Delegate instances on self.events in __init__.
    event_names = '''state_changed following reply trends on_unread_counts
        on_feeds on_edit_feed on_view on_change_view status_update_clicked
        recent_timeline self_tweet on_corrupted_database update_social_ids
        received_whole_update'''.split()

    def __init__(self, username, password):
        self.username = username
        self.password = password
        self.recent_timeline = []
        self.self_tweet = None
        self.trends = {}
        self.feeds = []
        self.feeds_by_name = {}
        self.unread_counts = []
        e = self.events = Storage(
            (name, Delegate()) for name in self.event_names)
        e.following += self.on_following
        e.trends += self.on_trends
        e.on_unread_counts += self.on_unread_counts
        e.recent_timeline += self.on_recent_timeline
        e.self_tweet += self.on_self_tweet
        e.on_feeds += self.on_feeds
        e.on_change_view += self.on_change_view
        e.on_view += self.on_view_changed

        def render_tweets(tweets, render_context):
            return htmlize_tweets(self, tweets)

        self.social_feed = SocialFeed('twitter_' + self.username,
                                      'twitter_' + self.username,
                                      self.get_tweet_feed,
                                      render_tweets,
                                      lambda: self.account.set_infobox_dirty)

    def _get_database_path(self):
        webview = self.webkitcontroller.webview
        return webview.GetDatabasePath('digsbysocial_' + self.username)

    def set_options(self, options):
        # Must run on the GUI thread: it calls into the webview.
        guithread(lambda: self.webkitcontroller.JSCall('setAccountOptions', **options))

    def on_change_view(self, feed_name):
        '''Switch an existing feed window to feed_name, or open a new one.'''
        log.info('on_change_view %r', feed_name)
        window = self.webkitcontroller.FeedWindow
        if window is not None:
            log.info(' found a window, calling switch_to_view')
            window.switch_to_view(feed_name)
            tlw = window.Top
            if tlw.IsIconized():
                tlw.Iconize(False)
            window.Top.Raise()
        else:
            log.info(' no window found, calling open_timeline_window')
            self.webkitcontroller.open_timeline_window(feed_name)

    def on_view_changed(self, feed_name):
        # Count a statistics hit when a saved search view is opened.
        feed = self.feeds_by_name.get(feed_name, None)
        if feed is not None and feed.get('query', None) is not None and feed.get('save', False):
            hooks.notify('digsby.statistics.twitter.viewed_search')

    def on_feeds(self, feeds):
        '''Receive the full feed list; rebuild indexes and persist it.'''
        self.feeds = feeds
        self.feeds_by_name = dict((f['name'], f) for f in feeds)
        self.feeds_by_name.update(favorites=dict(name='favorites', label=_('Favorites')),
                                  history=dict(name='history', label=_('History')))
        import twitter_notifications as tnots
        tnots._update_notifications(self, feeds)
        self._save_feeds(feeds)

    def _save_feeds(self, feeds):
        # don't include non-saved searches
        def should_save(f):
            return f['type'] not in ('search', 'user') or f.get('save', False)
        # NOTE(review): Python 2 — filter() returns a list here.
        feeds_pref = filter(should_save, deepcopy(feeds))
        # don't serialize certain attributes out to prefs
        for feed in feeds_pref:
            for attr in ('count', 'label'):
                # NOTE(review): pop() without a default raises KeyError if a
                # feed lacks 'count' or 'label' — presumably always present;
                # confirm upstream.
                feed.pop(attr)
        self.set_account_pref('feeds', feeds_pref)

    @property
    def account_prefix(self):
        return 'twitter.' + self.username

    def account_pref_key(self, name):
        return _account_pref_key(self.username, name)

    def set_account_pref(self, name, value):
        # Prefs are stored JSON-serialized.
        from common import setpref
        value = simplejson.dumps(value)
        setpref(self.account_pref_key(name), value)

    def get_account_pref(self, name, default):
        return _get_account_pref(self.username, name, default)

    def on_unread_counts(self, opts):
        self.unread_counts = opts.get('feeds')
        self.unread_total = opts.get('total')

    def on_recent_timeline(self, tweets):
        # Stored newest-last after the reverse.
        self.recent_timeline = [to_storage(t) for t in tweets]
        self.recent_timeline.reverse()
        self.events.update_social_ids()

    def update_social_ids(self):
        '''Debounced (1s one-shot timer) push of timeline ids to the SocialFeed.'''
        try:
            t = self._socialtimer
        except AttributeError:
            def later():
                ids = [p['id'] for p in self.recent_timeline]
                self.social_feed.new_ids(ids)
            t = self._socialtimer = wx.PyTimer(later)
        if not t.IsRunning():
            t.StartOneShot(1000)

    def on_self_tweet(self, tweet):
        self.self_tweet = to_storage(tweet)

    def on_following(self, ids):
        '''Restart the streaming connection with the new follow list, if enabled.'''
        # TODO: stop should actually do something
        if hasattr(self, 'stream'):
            self.stream.stop()
        if common.pref('twitter.streaming', default=False):
            from twitterstream import TwitterStream
            self.stream = TwitterStream(self.username, self.password, ids)
            self.stream.on_tweet += self.on_stream_tweet
            self.stream.start()

    def on_trends(self, trends):
        # TODO: store trends over time?
        #self.trends.update(trends['trends'])
        # NOTE(review): Python 2 — keys() returns an indexable list; keeps
        # only the first (arbitrary) timestamp bucket.
        trends = trends['trends']
        self.trends = trends[trends.keys()[0]]

    def on_stream_tweet(self, tweet):
        # Called from the stream thread; marshal to the GUI thread.
        if self.webkitcontroller is not None:
            wx.CallAfter(self.webkitcontroller.realtime_tweet, tweet)

    def connect(self, accountopts):
        @guithread
        def later():
            self.webkitcontroller = TwitterWebKitController(self)
            self.webkitcontroller.initialize(self.username,
                                             self.password,
                                             self.get_user_feeds(),
                                             accountopts)
            self.init_webkit_methods()

    def _verify_databases(self):
        # webkit doesn't release file object locks for corrupted databases,
        # so check the integrity of the databases we care about here first.
        # upon any errors, they are deleted.
        import sqlite3

        def try_query_remove_on_error(dbpath, query):
            '''try a query on database dbpath. dbpath is deleted on any exception.'''
            dbpath = path(dbpath)
            log.info('verifying db %r', dbpath)
            if not dbpath.isfile():
                log.info('not a file')
                return
            try:
                conn = sqlite3.connect(dbpath)
                with conn:
                    conn.execute(query)
                conn.close()
            except Exception:
                traceback.print_exc()
                with traceguard:
                    log.warning('exception encountered, removing %r', dbpath)
                    dbpath.remove()
                    log.warning('remove completed')

        # check the integrity of the "index" database that webkit uses to track
        # each site's database
        try_query_remove_on_error(
            path(self.webkitcontroller.webview.GetDatabaseDirectory()) / 'Databases.db',
            'select * from Databases limit 1')

        # calling window.openDatabase is necessary once for the below
        # _get_database_path() call to work.
        self.webkitcontroller.webview.RunScript(
            '''var test_db = window.openDatabase('_test_db_', "1.0", "test db", 1024);''')

        # ensure the twitter database is okay.
        try_query_remove_on_error(
            self._get_database_path(),
            'create table if not exists _test (foo int)')

    def get_user_feeds(self):
        '''Return the user's feed list from prefs, reverting to defaults when invalid.'''
        def deffeed(n):
            return dict(name=n, type=n)
        default_feeds = [deffeed(n) for n in ('timeline', 'mentions', 'directs')]
        userfeeds = self.get_account_pref('feeds', default_feeds)

        def revert():
            log.warning('REVERTING user feeds, was %r:', userfeeds)
            self.set_account_pref('feeds', default_feeds)
            return default_feeds

        from pprint import pprint
        pprint(userfeeds)
        if not isinstance(userfeeds, list):
            return revert()
        try:
            # Every default feed type must still be present in the user list.
            if userfeeds is not default_feeds:
                for feed in default_feeds:
                    for ufeed in userfeeds:
                        if feed['type'] == ufeed['type']:
                            break
                    else:
                        return revert()
        except Exception:
            traceback.print_exc()
            return revert()
        return userfeeds

    def init_webkit_methods(self):
        # forward some methods to webkitcontroller
        for method_name in '''open_timeline_window clear_cache update
            on_status on_status_with_error_popup add_feed edit_feed
            delete_feed set_feeds add_group get_users get_prefs'''.split():
            setattr(self, method_name, getattr(self.webkitcontroller, method_name))

    def json(self, *a, **k):
        self.webkitcontroller.json(*a, **k)

    def disconnect(self):
        self.webkitcontroller.disconnect()

    def mark_all_as_read(self):
        self.webkitcontroller.evaljs('markAllAsRead();')

    def on_reply(self, id, screen_name, text):
        from .twitter_gui import TwitterFrame
        TwitterFrame.Reply(id, screen_name, text)

    def on_retweet(self, id, screen_name, text):
        from .twitter_gui import TwitterFrame
        TwitterFrame.Retweet(id, screen_name, text)

    def on_direct(self, screen_name):
        from .twitter_gui import TwitterFrame
        TwitterFrame.Direct(screen_name)

    def mark_feed_as_read(self, feed_name):
        self.webkitcontroller.JSCall('markFeedAsRead', feedName=feed_name)

    def toggle_addstocount(self, feed_name):
        self.webkitcontroller.JSCall('toggleAddsToCount', feedName=feed_name)

    def get_ids_and_context(self, _feed_context):
        #_feed_context ?= tab
        return list(t['id'] for t in self.get_tweet_feed()), self.recent_timeline

    def get_tweet_feed(self):
        '''Yield recent tweets, skipping the user's own self_tweet.'''
        self_id = self.self_tweet['id'] if self.self_tweet is not None else None
        for tweet in self.recent_timeline:
            if self_id is None or self_id != tweet['id']:
                yield tweet
class MyspaceAccount(social.network, oauth_util.OAuthAccountBase, jsonrpc.RPCClient): url_base = 'http://www.myspace.com/index.cfm' AuthClass = oauth_util.InternalBrowserAuthenticator # UserBrowserAuthenticator #AuthClass = oauth_util.InternalBrowserAuthenticatorOpenID events = events.EventMixin.events | set(( # stuff )) service = protocol = 'myspace' indicators_keys = [ 'blogcommenturl', 'blogsubscriptionposturl', 'picturecommenturl', 'eventinvitationurl', 'commenturl', 'phototagapprovalurl', 'friendsrequesturl', 'videocommenturl', 'groupnotificationurl', 'recentlyaddedfriendurl', 'birthdayurl', #'countpendingim', ] feed_keys = [ 'statuses', 'friends', 'posts', 'groups', 'photos', 'music', 'videos', 'events', 'applications', ] header_fuseactions = dict(( ('Home', 'user'), #('Profile', 'user.viewprofile'), # supposed to take you to myspace.com/yourname but instead just goes to myspace.com (??) ('Inbox', 'mail.inboxV2'), ('Friends', 'user.viewfriends'), ('Blog', 'blog.ListAll'), ('Post Bulletin', 'bulletin'), )) def url_destination(name): def get_url(self): kwargs = DIGSBY_UTM_ARGS.copy() self_userid = getattr(self.connection, 'userid', None) if self_userid is not None: kwargs.update(friendId=self_userid) kwargs.update(fuseaction=self.header_fuseactions[name]) return self.openurl(net.UrlQuery(self.url_base, **kwargs)) return get_url openurl_Home = url_destination("Home") openurl_Profile = url_destination("Profile") openurl_Inbox = url_destination("Inbox") openurl_Friends = url_destination("Friends") openurl_Blog = url_destination("Blog") openurl_Post = url_destination("Post Bulletin") del url_destination def __init__(self, *a, **k): common.Protocol.StateMixin.__init__(self) oauth_util.OAuthAccountBase.__init__(self, **k) self.count = 0 self.connection = None self._dirty = False filters = k.pop('filters', {}) self.filters = dict( indicators=dict( zip( self.indicators_keys, filters.get('indicators', [True] * len(self.indicators_keys)))), feed=dict( zip(self.feed_keys, 
filters.get('feed', [True] * len(self.feed_keys)))), ) self.header_funcs = ( ('Home', self.openurl_Home), # ('Profile', self.openurl_Profile), ('Inbox', self.openurl_Inbox), ('Friends', self.openurl_Friends), ('Blog', self.openurl_Blog), ('Post Bulletin', self.openurl_Post), ) if 'password' not in k: k['password'] = None social.network.__init__(self, *a, **k) self._remove_password = not self.protocol_info()['needs_password'] if self._remove_password: self.password = None from social.network import SocialFeed self.social_feed = SocialFeed( 'myspace_' + self.username, 'activities', lambda: iter(self.connection.combined_feed()), self.htmlize_activities, self._set_dirty) def htmlize_activities(self, activities, context): t = MyspaceIB(self) return t.get_html(None, set_dirty=False, file='activities.tenjin', dir=t.get_context()['app'].get_res_dir('base'), context=dict(activities=activities)) def Connect(self): log.info('Connect called for %r', self) self._update_now() def create_connection(self): if self.connection is not None: raise Exception('Already have a connection') import MyspaceProtocol as MSP self.connection = MSP.MyspaceProtocol(self.username, self.oauth_token, self._decryptedpw, self.api_data, self.filters) self.bind_events() @property def cache_path(self): return os.path.join('myspace-social3', self.username, 'api-results.dat') def _cache_data(self, api_data): self.api_data = api_data api_data = cacheable.cproperty({}, user=True) def on_feed_invalidated(self): ids = [p.id for p in self.connection.combined_feed()] self.social_feed.new_ids(ids) def observe_count(self, callback): self.add_gui_observer(callback, 'count') def unobserve_count(self, callback): self.remove_gui_observer(callback, 'count') def _got_indicators(self, inds): log.info('got indicators: %r', inds) indicators = {} for k in self.indicators_keys: if k in inds and self.filters['indicators'].get(k, False): indicators[k] = inds[k] num_inds = len(indicators) if inds.get('mailurl', None) is not 
None: num_inds += 1 if getattr(self, '_num_inds', -1) != num_inds: self._num_inds = num_inds self.setnotify('count', num_inds) def get_authenticator(self, url_generator): AuthClass = self._get_auth_class(prefkey='myspace.authenticator') return AuthClass( self.username, url_generator, '/myspace/{username}/oauth'.format(username=self.username), 'MySpace Login - %s' % self.username, "http://www.digsby.com/myspace/", 'serviceicons.myspace') def bind_events(self): oauth_util.OAuthAccountBase.bind_events(self) self.connection.bind('on_indicators', self._got_indicators) def unbind_events(self): conn = oauth_util.OAuthAccountBase.unbind_events(self) if conn is None: return conn.unbind('on_indicators', self._got_indicators) def _on_protocol_connect(self): pass def _connect(self): #assert self.connection is not None self.connection.connect() log.info('Calling connect for connection') def _update_now(self): if self.enabled: self.update_now() else: self.set_offline(self.Reasons.NONE) @common.action( lambda self: common.pref('can_has_social_update', None) or None) def update_now(self): log.info('updating... 
%r', self) self.start_timer() if self.state == self.Statuses.OFFLINE: self.change_state(self.Statuses.CONNECTING) try: self.create_connection() self._connect() except Exception: import traceback traceback.print_exc() self.Disconnect(self.Reasons.CONN_FAIL) return self._update() def _update_pre(self): if self._has_updated or self._forcing_login: st = self.Statuses.CHECKING else: st = self.Statuses.CONNECTING self.change_state(st) def _reset_connection(self): self._has_updated = False self._on_auth_done() #self.connection.disconnect() def Disconnect(self, reason=None): log.info('Disconnect called') self.pause_timer() self._reset_connection() self.unbind_events() self.connection = None reason = reason or self.Reasons.NONE if self.state != self.Statuses.OFFLINE: self.set_offline(reason) common.UpdateMixin.disconnect(self) disconnect = Disconnect def _update_error(self, e): log.debug("%r got update error: %r", self, e) if hasattr(e, 'read'): log.debug_s('\tbody: %r', e.read()) if isinstance(e, oauth.OAuthError): return self._handle_oauth_error(getattr(e, 'oauth_data', None)) if self.state == self.Statuses.OFFLINE: return if self._has_updated: rsn = self.Reasons.CONN_LOST else: rsn = self.Reasons.CONN_FAIL self.Disconnect(rsn) def _handle_oauth_error(self, details): log.error('oauth error occurred: %r', details) problem = details.get('oauth_problem', None) self.clear_oauth_token() if problem == 'timestamp_refused': self.error_txt = _( "Please set your computer clock to the correct time / timezone." 
) self.Disconnect(self.Reasons.BAD_PASSWORD) def update_info(self, **info): filters = info.pop('filters', None) if filters is not None: self.filters.update( dict( indicators=dict( zip( self.indicators_keys, filters.get('indicators', [True] * len(self.indicators_keys)))), feed=dict( zip(self.feed_keys, filters.get('feed', [True] * len(self.feed_keys)))), )) self._set_dirty() if info.get('password') is not None and self._remove_password: info['password'] = None return social.network.update_info(self, **info) def get_options(self): opts = super(MyspaceAccount, self).get_options() opts.update({'informed_ach': True, 'post_ach_all': False}) opts.update( filters=dict(feed=[ bool(self.filters['feed'].get(x, True)) for x in self.feed_keys ], indicators=[ bool(self.filters['indicators'].get(x, True)) for x in self.indicators_keys ])) if opts.get('password') is not None and self._remove_password: opts['password'] = None if 'oauth_token' not in opts: opts['oauth_token'] = self.oauth_token return opts @common.action() def edit_status(self): if common.pref('social.use_global_status', default=False, type=bool): import wx wx.GetApp().SetStatusPrompt([self]) else: from myspacegui.editstatus import get_new_status get_new_status(success=self.set_web_status) DefaultAction = OpenHomeURL = edit_status @callbacks.callsback def SetStatusMessage(self, message, callback=None, **opts): if len(message) == 0: return callback.success() self.connection.set_status_message(message, callback=callback) def _dirty_get(self): return getattr(getattr(self, 'connection', None), '_dirty', True) def _dirty_set(self, val): if self.connection is not None: self.connection._dirty = val _dirty = property(_dirty_get, _dirty_set) @common.action( lambda self: common.pref('can_has_social_update', None) or None) def _set_dirty(self): self._dirty = True ## TODO: Move all this rpc nonsense to a superclass, with bind/unbind/callbacks and some sweet-ass decorator magic. 
    # Maps RPC verbs arriving from the infobox webview to method names on
    # this object.
    _rpc_handlers = {
        'near_bottom': 'more_content',
        'post_comment': 'post_comment',
        'hook': 'rpc_hook',
        'load_comments': 'load_comments',
        'initialize_feed': 'initialize_feed',
        'next_item': 'next_item',
        'do_permissions': 'initiate_login',
        'do_like': 'newsfeed_do_like',
        'do_dislike': 'newsfeed_do_dislike',
    }

    def initiate_login(self, *a, **k):
        """Restart the OAuth login flow, discarding cached user info first."""
        self.connection.userinfo = None
        oauth_util.OAuthAccountBase.initiate_login(self, *a, **k)

    def user_dislikes(self, userid, item):
        # Thin delegations so subclasses/tests can override the predicate.
        return item.user_dislikes(userid)

    def user_likes(self, userid, item):
        return item.user_likes(userid)

    def newsfeed_do_dislike(self, rpc, webview, id, post_id):
        """RPC: record a 'dislike' on a newsfeed post.

        No-ops (with a log message) when the post cannot be found or the
        current user already dislikes it; otherwise posts the special
        DISLIKE comment and refreshes the item's HTML on success.
        """
        item = self.connection.get_post_by_id(post_id)
        if item is None:
            log.error("%r: no post for post_id %r", self, post_id)
            return
        if self.user_dislikes(self.connection.userid, item):
            log.info("user already dislikes this post")
            return
        import myspace.objects as MSO
        self.post_comment(
            rpc,
            webview,
            id,
            MSO.MyspaceComment.DISLIKE,
            post_id,
            # append=False: likes/dislikes don't bump the comment count
            append=False,
            success=lambda *a, **k: self.dislike_added(webview, id, post_id))

    def newsfeed_do_like(self, rpc, webview, id, post_id):
        """RPC: record a 'like' on a newsfeed post (mirror of the above)."""
        item = self.connection.get_post_by_id(post_id)
        if item is None:
            log.error("%r: no post for post_id %r", self, post_id)
            return
        if self.user_likes(self.connection.userid, item):
            log.info("user already likes this post")
            return
        import myspace.objects as MSO
        self.post_comment(
            rpc,
            webview,
            id,
            MSO.MyspaceComment.LIKE,
            post_id,
            append=False,
            success=lambda *a, **k: self.like_added(webview, id, post_id))

    def dislike_added(self, webview, id, post_id):
        # Refresh the rendered item, then broadcast the event app-wide.
        self.refresh_likes(webview, id, post_id, True)
        hooks.notify('digsby.myspace.dislike_added', post_id)

    def like_added(self, webview, id, post_id):
        self.refresh_likes(webview, id, post_id, False)
        hooks.notify('digsby.myspace.like_added', post_id)

    def refresh_likes(self, webview, id, post_id, dis=False):
        """Re-render a single post's HTML and push it back to the webview.

        NOTE(review): the ``dis`` flag is currently unused here — both the
        like and dislike paths regenerate the full item HTML.
        """
        log.info("refreshing item: %r", post_id)
        #regen likes block, regen likes link block, send to callback
        #regen cached post html
        item = self.connection.get_post_by_id(post_id)
        item_html = self.generate_newsfeed_html([item])
        self.Dsuccess(webview, id, item_html=item_html)

    @callbacks.callsback
    def post_comment(self,
                     rpc,
                     webview,
                     id,
                     comment,
                     post_id,
                     append=True,
                     callback=None):
        """RPC: post a comment (or a like/dislike pseudo-comment).

        With ``append=True`` the post's comment count is bumped
        optimistically and the comment section is re-rendered on success;
        with ``append=False`` (likes/dislikes) a local MyspaceComment is
        appended immediately with a random id and the current time.
        Errors are routed back to the webview via ``Dexcept``.
        """
        post = self.connection.get_post_by_id(post_id)
        if append:
            post._numComments += 1
            callback.success += lambda *a: self.append_comments(
                webview, id, post_id)
        else:
            import myspace.objects as MSO
            post.comments.append(
                MSO.MyspaceComment.from_json(
                    dict(
                        userid='myspace.com.person.%s' % self.connection.userid,
                        text=comment,
                        # local placeholder id until the server assigns one
                        commentId=str(random.randint(0, 0x7fffffff)),
                        postedDate_parsed=time.time(),
                    )))
        callback.error += lambda *a: self.Dexcept(webview, id, *a)
        self._post_comment(comment, post_id, callback=callback)

    @callbacks.callsback
    def _post_comment(self, comment, post_id, callback=None):
        # Network hop only; all webview bookkeeping happens in post_comment.
        self.connection.post_comment(post_id, comment, callback=callback)

    def generate_newsfeed_html(self,
                               activities,
                               _context_id=None,
                               do_comments=True):
        """Render a list of activities to HTML via the activities template."""
        t = MyspaceIB(self)
        activities_html = t.get_html(
            None,
            set_dirty=False,
            file='activities.tenjin',
            dir=t.get_context()['app'].get_res_dir('base'),
            context=dict(activities=activities))
        return activities_html

    def append_comments(self, webview, id, post_id):
        """Re-render a post's comment section and bottom row, send both to
        the webview, then refresh the cached friend-status API data."""
        t = MyspaceIB(self)
        context = {}
        context['item'] = self.connection.get_post_by_id(post_id)
        comments_html = t.get_html(
            None,
            set_dirty=False,
            file='comment_section.tenjin',
            dir=t.get_context()['app'].get_res_dir('base'),
            context=context)
        bottom_row_html = t.get_html(
            None,
            set_dirty=False,
            file='bottom_row.tenjin',
            dir=t.get_context()['app'].get_res_dir('base'),
            context=context)
        self.Dsuccess(
            webview, id, comments_html=comments_html,
            bottom_row_html=bottom_row_html)
        self.api_data['friend_status'] = self.connection.friend_status
        self._cache_data(self.api_data)

    def more_content(self, rpc, webview, id, **params):
        """RPC: infinite-scroll handler — serve the next feed item.

        Renders the single activity after ``current_posts`` when more exist;
        otherwise signals the webview that the feed is exhausted.
        """
        current_posts, _last_post_id = params.get('current_posts',
                                                  0), params.get(
                                                      'last_post_id', None)
        if current_posts < len(self.connection.combined_feed()):
            # Serve exactly one item per request.
            activities = self.connection.combined_feed(
            )[current_posts:current_posts + 1]
            activities_html = self.generate_newsfeed_html(activities)
            self.Dsuccess(webview, id, contents=activities_html)
        else:
            self.Derror(webview, id)

    def initialize_feed(self, rpc, webview, id, *extra, **params):
        # Delegate feed bootstrap to the shared SocialFeed JS bridge.
        self.social_feed.jscall_initialize_feed(webview, id)

    def next_item(self, rpc, webview, id, *extra, **params):
        self.social_feed.jscall_next_item(webview, id)

    def load_comments(self, rpc, webview, id, post_id):
        """RPC: fetch a post's comments, then re-render its comment section;
        fetch errors are reported back to the webview via Dexcept."""
        self.connection.get_comments_for(
            post_id,
            #success = lambda: self.refresh_likes(webview, id, post_id),
            success=lambda: self.append_comments(webview, id, post_id),
            error=lambda error_obj=None, **k: self.Dexcept(
                webview, id, error_obj=error_obj, **k))
        #self.more_content(args['current_posts'], args.pop('last_post_id', None), rpc.pop('id'))

    def user_from_activity(self, act):
        # Delegation to the connection's activity->user resolution.
        return self.connection.user_from_activity(act)

    def get_imageurl_for_user(self, user):
        """Best-effort avatar URL: 'image', then 'thumbnailUrl', else None."""
        if user is None:
            return None
        else:
            return user.get('image', user.get('thumbnailUrl', None))

    def user_from_id(self, id):
        return self.connection.user_from_id(id)

    def _name_for_user(self, user):
        """Build a display name for a user object.

        Falls back to ``displayName`` (or 'Private User') when the user has
        no ``name``; otherwise combines given/family name parts, returning
        the raw name value when neither part is present.
        """
        name = getattr(user, 'name', None)
        if name is None:
            return getattr(user, 'displayName', 'Private User')
        # NOTE(review): assumes `name` is dict-like here — confirm callers
        # never pass a plain-string name.
        firstname, lastname = name.get('givenName',
                                       None), name.get('familyName', None)
        if (firstname, lastname) == (None, None):
            return name
        if firstname and lastname:
            return u"%s %s" % (firstname, lastname)
        return firstname or lastname
def test_feed_item_deleted(self): first_ad_spot = 2 with fakeads(first_ad_index=first_ad_spot): ids = None def get_content(): return [S(id=c) for c in ids] ids_1 = [1, 2, 3, 4, 5] def getitem(t, ctx): return t sf = SocialFeed('foo_test', 'feed', get_content, getitem) # ensure ads are inserted sf.new_ids(ids_1) ids = ids_1 feed = list(sf.get_iterator()) def show(): #print '$$$$ feed: from pprint import pprint pprint(feed) show() assert feed_trends.isad(feed[2]), feed[2] # ensure the ad moves as new items come in ids_2 = [6, 7, 8, 1, 2, 4, 5] sf.new_ids(ids_2) ids = ids_2 feed = list(sf.get_iterator()) show() assert feed_trends.isad(feed[5]) # ensure that new ads appear ids_3 = [11, 12, 13, 14, 15, 16, 6, 7, 8, 1, 2, 4, 5] sf.new_ids(ids_3) ids = ids_3 feed = list(sf.get_iterator()) show() assert feed_trends.isad(feed[1]), feed[1] assert feed_trends.isad(feed[-3]) # no more than 2 ads will ever appear ids_4 = [101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111 ] + ids_3 sf.new_ids(ids_4) ids = ids_4 feed = list(sf.get_iterator()) self.assertEquals( 2, sum(1 if feed_trends.isad(a) else 0 for a in feed))