def on_nav(e=None, b=None, url=None, *a, **k):
    """Browser-navigation callback for the Facebook login window.

    NOTE(review): this is a closure fragment -- it relies on free
    variables (window, self, callback, register, on_success, on_fail,
    invisible, do_conn_fail) bound in an enclosing scope.  It watches
    each navigation for a redirect URL whose query string carries a
    'session' value, stores that session on the API object, and
    finishes account initialization exactly once.
    """
    if not window.ie:
        e.Skip()
    # careful with name collision: the `url` parameter is immediately
    # overwritten by the event's URL.
    url = e.URL
    try:
        parsed = UrlQuery.parse(url)
    except Exception:
        traceback.print_exc()
    else:
        log.info('url: %r', url)
        log.info('in: %r', 'session' in parsed['query'])
        if 'session' in parsed['query']:
            #not sure how to clean this up right now.
            session = parsed['query'].get('session')
            log.info('parsed: %r', parsed)
            parsed_base = dict(parsed)
            # NOTE(review): parsed_base is built but never used afterwards.
            parsed_base.pop('query')
            self.api.set_session(session)
            self.api.logged_in = True
            # the 'dead' flag guards against finishing init more than once
            if not getattr(self, 'dead', False):
                self.dead = True
                self.finish_init(True, callback, register, on_success,
                                 on_fail, invisible, do_conn_fail)
    return
def success(res): print print res print file, key = res.split(':') wx.LaunchDefaultBrowser(UrlQuery(self.modify_url, id=file, tkn=key))
def build_oauth_dialog_url(perms=None):
    """Return the Facebook OAuth dialog URL for this app.

    Requests a token response in popup display mode; when *perms* is
    given, the comma-joined permission names are sent as the scope.
    """
    query = {
        'client_id': DIGSBY_APP_ID,
        'redirect_uri': LOGIN_SUCCESS_PAGE,
        'response_type': 'token',
        'display': 'popup',
    }
    if perms is not None:
        query['scope'] = ','.join(perms)
    return UrlQuery('https://www.facebook.com/dialog/oauth/?', **query)
def compose(self, to='', subject='', body='', cc='', bcc=''):
    """Return an external compose URL prefilled with the given fields.

    The web compose view takes 'su' as the parameter name for the
    subject; empty fields are omitted from the query string.
    """
    # extra = dict() if not self.hosted else dict(fs='1', view='cm')
    extra = dict(fs='1', tf='1', view='cm')  # , ui='1')# if not self.hosted else dict()
    # Explicit (param-name, value) pairs instead of the fragile
    # vars()[name] lookup on function locals.
    for name, value in (('to', to), ('su', subject), ('body', body),
                        ('cc', cc), ('bcc', bcc)):
        if value:
            extra[name] = value
    return self._external_url(UrlQuery(self.browserBaseMailUrl, **extra))
def _external_url(self, url): if self.web_login: self.new_token(internal=False) if self.web_login and self.external_token: return UrlQuery( self.tokenAuthUrl, **{ 'auth': self.external_token, 'service': 'mail', 'continue': url, 'source': 'googletalk' }) else: return url
def _external_url(self, url): if self.web_login: self.new_token(internal=False) if self.web_login and self.external_token: return UrlQuery( 'https://www.google.com/accounts/TokenAuth?', **{ 'auth': self.external_token, 'service': 'mail', 'continue': url, 'source': 'googletalk' }) else: return url
def perm_fail(self, register, on_success, on_fail, invisible, do_conn_fail, callback, *a, **k):
    """Handle a failed permissions check by showing the FB login window.

    The extra positional args (*a) carry the failure detail: absent
    means "not logged in", a dict or a not_logged_in() result means
    missing permissions / not logged in; anything else is treated as an
    inconclusive state and init is finished immediately.
    """
    if not a:
        pass #we already weren't logged in
    elif (isinstance(a[0], dict) or not_logged_in(a[0])):
        pass #we don't have permissions or we're not logged in
    else:
        return self.finish_init(False, callback)  #we're not sure of anything, go for it!
    next = LOGIN_SUCCESS_PAGE
    # permissions request URL; skipcookie forces a fresh login
    url = UrlQuery(DIGSBY_LOGIN_PERMS, next=next, skipcookie='true',
                   req_perms=','.join(['xmpp_login', 'offline_access']))
    window = FBLoginWindow(self.email_in, acct=self)

    def on_nav(e=None, b=None, url=None, *a, **k):
        # Watch navigations for the redirect carrying a 'session' query
        # value; store it and finish init exactly once.
        if not window.ie:
            e.Skip()
        # careful with name collision: the `url` parameter is
        # immediately overwritten by the event's URL.
        url = e.URL
        try:
            parsed = UrlQuery.parse(url)
        except Exception:
            traceback.print_exc()
        else:
            log.info('url: %r', url)
            log.info('in: %r', 'session' in parsed['query'])
            if 'session' in parsed['query']:
                #not sure how to clean this up right now.
                session = parsed['query'].get('session')
                log.info('parsed: %r', parsed)
                parsed_base = dict(parsed)
                # NOTE(review): parsed_base is built but never used.
                parsed_base.pop('query')
                self.api.set_session(session)
                self.api.logged_in = True
                # 'dead' guards against finishing init more than once
                if not getattr(self, 'dead', False):
                    self.dead = True
                    self.finish_init(True, callback, register, on_success,
                                     on_fail, invisible, do_conn_fail)
        return

    def on_close(*a, **k):
        # window closed without a successful login -> report failure once
        if not getattr(self, 'dead', False):
            on_fail()

    window.set_callbacks(on_nav, None, on_close)
    window.LoadURL(url)
    window._browser_frame.Show()
def refresh_campaign(self):
    """Recompute the ad URL for the current campaign; reload if it changed."""
    campaign = get_ad_campagin()
    self.ad_url_base = CAMPAIGNS[campaign]
    previous_url = getattr(self, 'ad_url', None)
    tracking = dict(
        utm_source='digsby_client',
        utm_medium='im_window',
        utm_content=self.rotater.scenario_identifier,
        utm_campaign=campaign,
    )
    self.ad_url = UrlQuery(self.ad_url_base, **tracking)
    # only reload when an ad was already showing and the URL changed
    if previous_url is not None and previous_url != self.ad_url:
        self._reload_ad()
def _do_call(self, endpoint, method, callback=None, **k):
    """Invoke Graph API *method* on *endpoint*, dispatching on self.mode.

    'async' uses the async HTTP machinery, 'console' blocks, 'threaded'
    runs urllib2 on a worker thread; anything else adapts the blocking
    call to the callback interface.
    """
    k = prepare_args(k)
    if self.access_token:
        k['access_token'] = self.access_token.encode('utf8')
    url = UrlQuery(self.graph_http + endpoint + '/' + method, **k)
    log.info("calling method: %s", method)
    log.info_s("calling: %s with %r", url, k)
    mode = self.mode
    if mode == 'async':
        return self.call_asynchttp(url, None, callback=callback)
    if mode == 'console':
        return call_urllib2(url, None)
    if mode == 'threaded':
        from util.threads import threaded
        return threaded(call_urllib2)(url, None, callback=callback)
    return callback_adapter(call_urllib2)(url, None, callback=callback)
def _get_ad_url(location, method, keyword):
    """Build the CityGrid custom-ads request URL for a feed location.

    Adds the publisher from prefs unless it is unset or 'none', merges
    in the method's own URL arguments, and UTF-8 encodes every value.
    """
    url_kwargs = dict(placement=method.__name__, what=keyword)
    publisher = common.pref('social.feed_ads_publisher', 'digsby')
    if publisher and publisher.lower() != 'none':
        url_kwargs['publisher'] = publisher
    url_kwargs.update(method.urlargs(location))
    # TODO: make UrlQuery accept an encoding argument to encode
    # values automatically.
    encoded = {}
    for key, value in url_kwargs.iteritems():
        encoded[key] = to_utf8(value)
    return UrlQuery('http://api.citygridmedia.com/ads/custom/v2/where', **encoded)
def batch(self, *a, **k):
    """Send several Graph API calls as one batched POST.

    Positional args become the 'batch' payload; dispatch on self.mode
    mirrors _do_call ('async' / 'console' / 'threaded' / adapter).
    """
    callback = k.pop('callback')
    k['batch'] = list(a)
    k = prepare_args(k)
    if self.access_token:
        k['access_token'] = self.access_token.encode('utf-8')
    url = UrlQuery(self.graph_http + self.graph_endpoint)
    data = self.prepare_values(k)
    mode = self.mode
    if mode == 'async':
        return self.call_asynchttp(url, data, callback=callback)
    if mode == 'console':
        return call_urllib2(url, data)
    if mode == 'threaded':
        from util.threads import threaded
        return threaded(call_urllib2)(url, data, callback=callback)
    return callback_adapter(call_urllib2)(url, data, callback=callback)
def OnComposeEmail(self, to='', subject='', body='', cc='', bcc='', callback=None):
    """Open a compose window via the user's configured mail client.

    Dispatches on self.mailclient: a 'file:' path launches that program,
    'sysdefault' builds and launches a mailto: URL, '__urls__' opens the
    custom compose URL, and anything else opens the webmail compose URL.
    Always reports success to *callback* at the end.
    """
    import hooks
    hooks.notify('digsby.statistics.email.compose')
    for name in ('to', 'subject', 'body', 'cc', 'bcc'):
        # vars() reads the function locals by name (Python 2 idiom)
        assert isinstance(vars()[name], basestring), (name, type(vars()[name]), vars()[name])
    if self.mailclient and try_this(lambda: self.mailclient.startswith('file:'), False):
        # strip the 'file:' prefix and launch the client executable
        os.startfile(self.mailclient[5:])
    elif self.mailclient == 'sysdefault':
        kw = {}
        for name in ('subject', 'body', 'cc', 'bcc'):
            if vars()[name]:
                kw[name] = vars()[name]
        query = UrlQuery('mailto:' + quote(to), **kw)
        log.info('OnComposeEmail is launching query: %s' % query)
        try:
            os.startfile(query)
        except WindowsError:
            # WindowsError: [Error 1155] No application is associated with the specified file for this operation: 'mailto:'
            mailclient_error()
            raise
    elif self.mailclient == '__urls__':
        url = self.custom_compose_url
        if url is not None:
            launch_browser(url)
    else:
        # fall back to the webmail compose URL
        url = self.compose(to, subject, body, cc, bcc)
        if url:
            launch_browser(url)
    callback.success()
def __missing__(self, song):
    """
    Returns None immediately for an unknown *song* and kicks off an
    asynchronous lookup to fill in the real value.

    A None placeholder is stored first; the finally clause (which runs
    after the return value is already fixed) builds an affiliate-tagged
    Amazon digital-music search URL for the song, shortens it on a
    worker thread via get_short_url, and on success stores the result
    (with its hostname rewritten to music.digsby.com) back under *song*.
    On error the placeholder entry is removed.
    """
    self[song] = None
    try:
        return self[song]
    finally:
        url = UrlQuery('http://www.amazon.com/gp/search',
                       LazySortedDict(dict(ie='UTF8',
                                           keywords=song.encode('utf-8'),
                                           tag='dmp3-20',
                                           index='digital-music',
                                           linkCode='ur2',
                                           camp='1789',
                                           creative='9325')))
        f = threaded(get_short_url)
        f.verbose = True
        f(url,
          success=lambda val: self.__setitem__(song, change_hostname(val, 'music.digsby.com')),
          error=lambda _e: self.pop(song))
def _actionUrl(self, action, message_id):
    """Return (url, params) for performing *action* on a Gmail message.

    *action* may be either a friendly name ('archive', 'delete', 'read',
    'spam', 'star') or the raw Gmail action code itself.
    """
    action_names = dict(archive='rc_^i', delete='tr', read='rd', spam='sp', star='st')
    # translate friendly names; raw action codes pass through unchanged
    if action not in action_names.values():
        action = action_names[action]
    gmail_at = self.gmail_at
    url = UrlQuery(self.internalBaseMailUrl, ik='', search='all', view='tl', start='0')
    params = dict(act=action, at=gmail_at, vp='', msq='', ba='false',
                  t=message_id, fs='1')
    return url, params
def authenticate(self, task=None):
    """Authenticate the internal mail session via Google's TokenAuth.

    Obtains a fresh internal token, exchanges it at tokenAuthUrl
    (presumably establishing session cookies for the internal mail UI --
    TODO confirm), probes gmail_at best-effort, and returns True on
    success or the falsy webrequest result on failure.
    """
    self.internal_token = token = self.new_token(internal=True)
    if not token:
        return False
    webreq_result = self.webrequest(
        UrlQuery(self.tokenAuthUrl,
                 **{'auth': token,
                    'service': 'mail',
                    'continue': self.internalBaseMailUrl,
                    'source': 'googletalk'}),
        internal=True)
    try:
        # best-effort probe; failure is only logged
        self.gmail_at
    except Exception:
        log.debug('gmail_at failed in authenticate')
    if webreq_result:
        # also refresh the non-internal token on success
        self.new_token(False)
        return True
    else:
        return webreq_result
def send_email(self, to='', subject='', body='', cc='', bcc=''):
    """Send a mail through the Gmail basic-HTML web interface.

    Lazily discovers the send path by opening the HTML UI once, then
    POSTs the message fields (UTF-8 encoded) with the session's 'at'
    token. Returns True; asserts that Gmail's confirmation string
    appears in the response.
    """
    log.info('sending a mail')
    data = dict(nvp_bu_send='Send')
    for name in 'to subject body cc bcc'.split():
        # vars() reads the function locals by name (Python 2 idiom)
        if vars()[name]:
            data[name] = vars()[name].encode('utf-8')
    if not hasattr(self, 'sendpath'):
        # discover (and cache) the form-post path from the HTML UI redirect
        response = self.internal_http_opener.open(
            self.internalBaseMailUrl + '?ui=html')
        from urllib2 import urlparse
        respurl = urlparse.urlparse(response.geturl())
        try:
            response.close()
        except:
            pass
        del response
        self.sendpath = respurl.path
    url = 'https://mail.google.com' + self.sendpath
    try:
        at = self.gmail_at
    except KeyError:
        at = ''
    params = dict(at=at, v='b', pv='tl', s='s', fv='b', cpt='c', cs='c')
    # hosted (Google Apps) accounts use slightly different form values
    if not self.hosted:
        params.update(fv='b', cpt='c', cs='c')
    else:
        params.update(cs='b', s='s')
    url = UrlQuery(url, params)
    response = self.webrequest(url, follow_js_redirects=True, **data)
    log.info('sent a mail')
    assert response and ('Your message has been sent.' in response)
    log.info('send mail success: %r',
             bool('Your message has been sent.' in response))
    return True
def on_nav(e = None, b = None, url=None, *a, **k):
    """Browser-navigation callback for the Facebook login window.

    NOTE(review): this is a closure fragment -- it relies on free
    variables (window, self, callback, register, on_success, on_fail,
    invisible, do_conn_fail) bound in an enclosing scope.  It watches
    each navigation for a redirect URL whose query string carries a
    'session' value, stores that session on the API object, and
    finishes account initialization exactly once.
    """
    if not window.ie:
        e.Skip()
    # careful with name collision: the `url` parameter is immediately
    # overwritten by the event's URL.
    url = e.URL
    try:
        parsed = UrlQuery.parse(url)
    except Exception:
        traceback.print_exc()
    else:
        log.info('url: %r', url)
        log.info('in: %r', 'session' in parsed['query'])
        if 'session' in parsed['query']:
            #not sure how to clean this up right now.
            session = parsed['query'].get('session')
            log.info('parsed: %r', parsed)
            parsed_base = dict(parsed)
            # NOTE(review): parsed_base is built but never used.
            parsed_base.pop('query')
            self.api.set_session(session)
            self.api.logged_in = True
            # 'dead' guards against finishing init more than once
            if not getattr(self, 'dead', False):
                self.dead = True
                self.finish_init(True, callback, register, on_success,
                                 on_fail, invisible, do_conn_fail)
    return
def on_call(self, json_obj, webview=None):
    """Dispatch a JSON call coming from the embedded twitter webview.

    First tries a dedicated handler named 'on_call_<method>', passing as
    many arguments as its arity allows; otherwise falls through a chain
    of known method names mapped to protocol events, and finally parses
    digsby:// callback URLs for reply/retweet/direct actions.
    """
    params = json_obj.get('params')
    method = json_obj.get('method')
    id_ = json_obj.get('id')
    events = self.protocol.events
    try:
        # EAFP: look up a specific handler and confirm it is callable
        call = getattr(self, 'on_call_' + method)
        call.__call__
    except AttributeError:
        pass
    else:
        # choose the argument list from the handler's arity
        # (func_code is the Python 2 spelling of __code__)
        if call.func_code.co_argcount < 3:
            return call(params[0])
        elif call.func_code.co_argcount < 4:
            return call(params[0], id_)
        else:
            return call(params[0], id_, webview)
    if method == 'viewChanged':
        feedName = params[0].get('feedName')
        events.on_view(feedName)
    elif method == 'following':
        following = params[0].get('following')
        events.following(following)
    elif method == 'state':
        state = params[0].get('state')
        if state is not None:
            events.state_changed(state)
    elif method == 'received_whole_update':
        events.received_whole_update()
    elif method == 'trends':
        trends = params[0].get('trends', None)
        if trends is not None:
            events.trends(trends)
    elif method == 'recentTimeline':
        tweets = params[0].get('tweets')
        events.recent_timeline(tweets)
    elif method == 'selfTweet':
        tweet = params[0].get('tweet')
        events.self_tweet(tweet)
    elif params:
        param = params[0]
        if param is not None and isinstance(param, dict):
            url = param.get('url')
            if url and url.startswith('digsby:'):
                # swap the scheme so the parser accepts it
                url = UrlQuery.parse('http' + url[6:], utf8=True)  # UrlQuery doesn't like digsby://
                q = url['query'].get
                netloc = url['netloc']
                if netloc == 'reply':
                    id, screen_name, text = q('id'), q('screen_name'), q('text')
                    if id and screen_name:
                        self.protocol.on_reply(id, screen_name, text)
                elif netloc == 'retweet':
                    id, screen_name, text = q('id'), q('screen_name'), q('text')
                    if id and screen_name:
                        self.protocol.on_retweet(id, screen_name, text)
                elif netloc == 'direct':
                    screen_name = q('screen_name')
                    if screen_name:
                        self.protocol.on_direct(screen_name)
def initialize(self, username, password, userfeeds=None, accountopts=None):
    """Load the twitter webkit bridge and authenticate the account.

    Obtains an OAuth token in order of preference: skip OAuth entirely
    when a custom api_server is configured, reuse the token stored on
    the account, or fetch a fresh one from the network (with one retry
    that corrects for local clock skew on a 401).  Finally whitelists
    twitter.com origins and loads the bridge page.
    """
    self.username = username
    self.password = password
    userfeeds = [] if userfeeds is None else userfeeds

    def when_load():
        # runs once the bridge page is loaded
        self.protocol._verify_databases()
        self.evaljs('window.resdir = %s' % simplejson.dumps(
            (path(__file__).parent / 'res').url()))

    def success(token):
        # hand credentials (and OAuth material, when present) to the JS side
        opts = dict(username=self.username, password=self.password,
                    feeds=userfeeds, accountopts=accountopts or {})
        if token is not None:
            assert hasattr(token, 'key'), repr(token)
            opts.update(oauthTokenKey=token.key,
                        oauthTokenSecret=token.secret,
                        oauthConsumerKey=twitter_auth.CONSUMER_KEY,
                        oauthConsumerSecret=twitter_auth.CONSUMER_SECRET)
            # compensate for local clock skew when signing requests
            time_correction = twitter_auth.get_time_correction()
            if time_correction is not None:
                opts['accountopts'].update(timeCorrectionSecs=-time_correction)
        self.JSCall('initialize', **opts)

    api_server = getattr(self.protocol.account, 'api_server', None)
    if api_server is not None:
        # custom API server: no OAuth handshake needed
        return success(None)
    if self.oauth_token is not None:
        try:
            token = OAuthToken.from_string(self.oauth_token)
        except Exception:
            traceback.print_exc()
        else:
            log.info('using token stored in account')
            return success(token)

    def on_token(token):
        # persist the freshly fetched token, then continue init
        token_string = token.to_string()
        log.info('on_token received token from network: %r', token_string[:5])
        self.protocol.account.update_info(oauth_token=token_string)
        success(token)

    def on_token_error(e):
        errcode = getattr(e, 'code', None)
        # if obtaining an token fails, it may be because our time is set incorrectly.
        # we can use the Date: header returned by Twitter's servers to adjust for
        # this.
        if errcode == 401:
            server_date = getattr(e, 'hdrs', {}).get('Date', None)
            retries_after_401 = getattr(self.protocol, 'retries_after_401', 0)
            if server_date and retries_after_401 < 1:
                # retry exactly once with the server-supplied timestamp
                self.protocol.retries_after_401 = retries_after_401 + 1
                log.warning('on_token_error: server date is %r', server_date)
                server_date = parse_http_date(server_date)
                log.warning('on_token_Error: RETRYING WITH NEW SERVER DATE %r', server_date)
                twitter_auth.set_server_timestamp(server_date)
                return twitter_auth.get_oauth_token(self.username, self.password,
                                                    success=on_token,
                                                    error=on_token_error)
        state = 'autherror' if errcode == 401 else 'connfail'
        log.error('on_token_error: e.code is %r', errcode)
        log.error(' changing state to %r', state)
        self.protocol.events.state_changed(state)

    log.info('getting new oauth token from network')
    twitter_auth.get_oauth_token(self.username, self.password,
                                 success=on_token, error=on_token_error)
    self.when_load = when_load
    url = APP_PATH.url()
    from gui.browser import webkit
    # allow the local bridge page to talk to twitter.com
    webkit.update_origin_whitelist(url, 'https', 'twitter.com', True)
    webkit.update_origin_whitelist(url, 'http', 'twitter.com', True)
    api_server = getattr(self.protocol.account, 'api_server', None)
    if api_server is not None:
        api = UrlQuery.parse(api_server)
        webkit.update_origin_whitelist(url, api['scheme'], api['netloc'], True)
    self.bridge.LoadURL(url)
def oneriot_ad_url(appId):
    """Return the OneRiot ad-search URL for *appId* (XML, 20 results)."""
    query = dict(appId=appId, version='1.1', limit='20', format='XML')
    return UrlQuery('http://api.ads.oneriot.com/search', **query)
def urls_have_same_domain(a, b):
    """True when *a* and *b* share a network location.

    Falls back to plain string equality if either URL fails to parse.
    """
    try:
        same = UrlQuery.parse(a).netloc == UrlQuery.parse(b).netloc
    except Exception:
        traceback.print_exc_once()
    else:
        return same
    return a == b
def inbox_url(self):
    """Return the external (browser) URL for the account's inbox."""
    base = UrlQuery(self.browserBaseMailUrl)
    return self._external_url(base)
def urlForEmail(self, email):
    """Return the external URL that opens *email* in the webmail UI."""
    anchored = self.browserBaseMailUrl + "#all/" + str(email.id)
    return self._external_url(UrlQuery(anchored))
def on_call(self, json_obj, webview=None):
    """Dispatch a JSON call coming from the embedded twitter webview.

    First tries a dedicated handler named 'on_call_<method>', passing as
    many arguments as its arity allows; otherwise falls through a chain
    of known method names mapped to protocol events, and finally parses
    digsby:// callback URLs for reply/retweet/direct actions.
    """
    params = json_obj.get('params')
    method = json_obj.get('method')
    id_ = json_obj.get('id')
    events = self.protocol.events
    try:
        # EAFP: look up a specific handler and confirm it is callable
        call = getattr(self, 'on_call_' + method)
        call.__call__
    except AttributeError:
        pass
    else:
        # choose the argument list from the handler's arity
        # (func_code is the Python 2 spelling of __code__)
        if call.func_code.co_argcount < 3:
            return call(params[0])
        elif call.func_code.co_argcount < 4:
            return call(params[0], id_)
        else:
            return call(params[0], id_, webview)
    if method == 'viewChanged':
        feedName = params[0].get('feedName')
        events.on_view(feedName)
    elif method == 'following':
        following = params[0].get('following')
        events.following(following)
    elif method == 'state':
        state = params[0].get('state')
        if state is not None:
            events.state_changed(state)
    elif method == 'received_whole_update':
        events.received_whole_update()
    elif method == 'trends':
        trends = params[0].get('trends', None)
        if trends is not None:
            events.trends(trends)
    elif method == 'recentTimeline':
        tweets = params[0].get('tweets')
        events.recent_timeline(tweets)
    elif method == 'selfTweet':
        tweet = params[0].get('tweet')
        events.self_tweet(tweet)
    elif params:
        param = params[0]
        if param is not None and isinstance(param, dict):
            url = param.get('url')
            if url and url.startswith('digsby:'):
                # swap the scheme so the parser accepts it
                url = UrlQuery.parse(
                    'http' + url[6:], utf8=True)  # UrlQuery doesn't like digsby://
                q = url['query'].get
                netloc = url['netloc']
                if netloc == 'reply':
                    id, screen_name, text = q('id'), q('screen_name'), q('text')
                    if id and screen_name:
                        self.protocol.on_reply(id, screen_name, text)
                elif netloc == 'retweet':
                    id, screen_name, text = q('id'), q('screen_name'), q('text')
                    if id and screen_name:
                        self.protocol.on_retweet(id, screen_name, text)
                elif netloc == 'direct':
                    screen_name = q('screen_name')
                    if screen_name:
                        self.protocol.on_direct(screen_name)
def initialize(self, username, password, userfeeds=None, accountopts=None):
    """Load the twitter webkit bridge and authenticate the account.

    Obtains an OAuth token in order of preference: skip OAuth entirely
    when a custom api_server is configured, reuse the token stored on
    the account, or fetch a fresh one from the network (with one retry
    that corrects for local clock skew on a 401).  Finally whitelists
    twitter.com origins and loads the bridge page.
    """
    self.username = username
    self.password = password
    userfeeds = [] if userfeeds is None else userfeeds

    def when_load():
        # runs once the bridge page is loaded
        self.protocol._verify_databases()
        self.evaljs('window.resdir = %s' % simplejson.dumps(
            (path(__file__).parent / 'res').url()))

    def success(token):
        # hand credentials (and OAuth material, when present) to the JS side
        opts = dict(username=self.username, password=self.password,
                    feeds=userfeeds, accountopts=accountopts or {})
        if token is not None:
            assert hasattr(token, 'key'), repr(token)
            opts.update(oauthTokenKey=token.key,
                        oauthTokenSecret=token.secret,
                        oauthConsumerKey=twitter_auth.CONSUMER_KEY,
                        oauthConsumerSecret=twitter_auth.CONSUMER_SECRET)
            # compensate for local clock skew when signing requests
            time_correction = twitter_auth.get_time_correction()
            if time_correction is not None:
                opts['accountopts'].update(timeCorrectionSecs=-time_correction)
        self.JSCall('initialize', **opts)

    api_server = getattr(self.protocol.account, 'api_server', None)
    if api_server is not None:
        # custom API server: no OAuth handshake needed
        return success(None)
    if self.oauth_token is not None:
        try:
            token = OAuthToken.from_string(self.oauth_token)
        except Exception:
            traceback.print_exc()
        else:
            log.info('using token stored in account')
            return success(token)

    def on_token(token):
        # persist the freshly fetched token, then continue init
        token_string = token.to_string()
        log.info('on_token received token from network: %r', token_string[:5])
        self.protocol.account.update_info(oauth_token=token_string)
        success(token)

    def on_token_error(e):
        errcode = getattr(e, 'code', None)
        # if obtaining an token fails, it may be because our time is set incorrectly.
        # we can use the Date: header returned by Twitter's servers to adjust for
        # this.
        if errcode == 401:
            server_date = getattr(e, 'hdrs', {}).get('Date', None)
            retries_after_401 = getattr(self.protocol, 'retries_after_401', 0)
            if server_date and retries_after_401 < 1:
                # retry exactly once with the server-supplied timestamp
                self.protocol.retries_after_401 = retries_after_401 + 1
                log.warning('on_token_error: server date is %r', server_date)
                server_date = parse_http_date(server_date)
                log.warning('on_token_Error: RETRYING WITH NEW SERVER DATE %r',
                            server_date)
                twitter_auth.set_server_timestamp(server_date)
                return twitter_auth.get_oauth_token(self.username, self.password,
                                                    success=on_token,
                                                    error=on_token_error)
        state = 'autherror' if errcode == 401 else 'connfail'
        log.error('on_token_error: e.code is %r', errcode)
        log.error(' changing state to %r', state)
        self.protocol.events.state_changed(state)

    log.info('getting new oauth token from network')
    twitter_auth.get_oauth_token(self.username, self.password,
                                 success=on_token, error=on_token_error)
    self.when_load = when_load
    url = APP_PATH.url()
    from gui.browser import webkit
    # allow the local bridge page to talk to twitter.com
    webkit.update_origin_whitelist(url, 'https', 'twitter.com', True)
    webkit.update_origin_whitelist(url, 'http', 'twitter.com', True)
    api_server = getattr(self.protocol.account, 'api_server', None)
    if api_server is not None:
        api = UrlQuery.parse(api_server)
        webkit.update_origin_whitelist(url, api['scheme'], api['netloc'], True)
    self.bridge.LoadURL(url)
def urls_have_same_domain(a, b):
    """True when *a* and *b* share a network location.

    Falls back to plain string equality if either URL fails to parse.
    """
    try:
        same = UrlQuery.parse(a).netloc == UrlQuery.parse(b).netloc
    except Exception:
        traceback.print_exc_once()
    else:
        return same
    return a == b
def _get_notifier_data(self):
    """POST to the internal mail 'pb' (notifier) endpoint; return the response."""
    notifier_url = UrlQuery(self.internalBaseMailUrl, ui='pb')
    return self.webrequest(url=notifier_url, data='')