def process(self):
    '''Flush queued messages (and poll requests) to the HTTP gateway.

    Does nothing if the queue is empty or a request is already in flight.
    Serializes all queued message objects into a single request body and
    issues one asynchronous HTTP request; transport errors notify every
    pending poll callback and reset the gateway IP.
    '''
    if not self._q or self._waiting:
        return

    self._waiting = True
    data = []
    sending = []

    # Take ownership of the current queue contents in one swap.
    queue, self._q[:] = self._q[:], []

    while queue:
        msgobj, trid, callback, kw = queue.pop(0)
        if msgobj is not None:
            self.set_trid(msgobj, trid)
            self.set_callbacks(msgobj, callback)
            data.append(str(msgobj))
        else:
            # A None message is a poll request; remember its callback so it
            # can be notified if the transport fails.
            self._poll_in_queue = False
            sending.append(callback)

    if self._session_id is None:
        # No session yet: open a new gateway session.
        url_kws = dict(Action = 'open', Server = self.type, IP = self.host)
    elif len(data) == 0:
        # Nothing to send: just poll the existing session.
        url_kws = dict(Action = 'poll', SessionID = self._session_id)
    else:
        url_kws = dict(SessionID = self._session_id)

    data = ''.join(data)
    req = self.make_request(url_kws, data = data)
    #log.debug(req.get_selector())

    def _transport_error(_req = None, _resp = None):
        log.error('Transport error in MsnHttpSocket: req = %r, resp = %r', _req, _resp)

        # Prefer the request-side exception if there is one; otherwise
        # report whatever the response side gave us.
        if isinstance(_req, Exception):
            e = _req
        else:
            e = _resp

        for cb in sending:
            # BUGFIX: previously looked up 'error' on the loop-leaked
            # 'callback' variable from the queue loop above, not on each
            # pending callback 'cb'.
            cb_error = getattr(cb, 'error', None)
            if cb_error is not None:
                cb_error(self, e)

        try:
            del self.gateway_ip # reset to class default- maybe host is bad?
        except AttributeError:
            pass

        self._on_send_error(e)

    asynchttp.httpopen(req, success = self._on_response, error = _transport_error)
def request_ads(self, success, error=None):
    '''Requests a set of ads from oneriot.

    On success, success(ads) is invoked with the parsed NewsItemList.
    On a retrieval or parsing failure, error(e) is invoked when an error
    callback was supplied; the failure is always logged.
    '''
    def on_success(req, resp):
        try:
            data = resp.read()
            ads = NewsItemList.from_xml(data, filter_html=True)
            log.info('got %d ads', len(ads) if ads else 0)
        except Exception as e:
            traceback.print_exc()
            if error is not None:
                error(e)
            log.error('error retrieving ads')
        else:
            # Only report success when parsing completed without error.
            success(ads)

    # BUGFIX (idiom): use lazy %-style log arguments instead of eager
    # string interpolation.
    log.info('requesting feed ads from %r', self.url)
    self.update_count += 1
    asynchttp.httpopen(self.url, success=on_success, error=error)
def shorten_async(self, url, success, error=None): def async_success(req, resp): try: ret = self.process_response(resp) except Exception as e: if error is not None: error(e) else: cache_shortened_url(url, ret) success(ret) def async_error(req=None, resp=None): print req print resp if error is not None: error(None) # TODO: async interface for errors? import common.asynchttp as asynchttp asynchttp.httpopen(self.build_request_url(url), success=async_success, error=async_error)
def get_buddy_icon(self, buddy_name, callback=None):
    '''Asynchronously fetch and cache the icon for buddy_name.

    Invokes callback.success(...) once the icon is cached (or
    callback.success(None) when the buddy has no icon), and
    callback.error(exc) on any failure.
    '''
    b = self.get_buddy(buddy_name)
    if b is None:
        # BUGFIX: Exception never interpolates printf-style args, so the
        # buddy name was silently dropped from the message.
        return callback.error(Exception('no buddy named %r' % buddy_name))

    if not b.icon_hash:
        # No icon to fetch.
        return callback.success(None)

    url = b.icon_hash

    def success(req, resp):
        b.cache_icon(resp.read(), url)
        b.notify('icon')
        # BUGFIX: this is the success path; it was logged at error level.
        log.info('got buddy icon for %r: %r', b, resp)
        callback.success()

    def error(req, exc):
        log.error('error requesting buddy icon for %r: %r', b, exc)
        callback.error(exc)

    asynchttp.httpopen(url,
                       headers={'User-Agent': 'AsyncDownloadMgr'},
                       success=success,
                       error=error)
def get_manifest_path(self):
    '''
    Locate the update manifest.

    Since this may make an HTTP request, control flow continues
    asynchronously into got_updateyaml(file_obj).
    '''
    local_info_file = util.program_dir() / 'update.yaml'

    if local_info_file.isfile():
        try:
            fobj = open(local_info_file, 'rb')
        except Exception:
            pass
        else:
            self.got_updateyaml(fobj = fobj)
            return

    log.info("Manifest path not found in %r. checking web for update.yaml", local_info_file)
    asynchttp.httpopen("http://update.digsby.com/update.yaml?%s" % int(time.time()),
                       success = self.got_updateyaml,
                       error = self.manifest_path_error)
def get_login_data(login, password, api="icq.com", callback=None):
    '''Authenticate against the screenname clientLogin API.

    When no callback is supplied, performs a blocking request and returns
    the decoded JSON response; otherwise issues an asynchronous request
    through asynchttp and returns its handle.
    '''
    login_url = "https://api.screenname.%s/auth/clientLogin?f=json" % api
    params = dict(k=ICQ_API_KEY, s=login, pwd=password)

    if callback is None:
        # Synchronous path: fetch and decode immediately.
        return simplejson.loads(wget(login_url, params))

    # Asynchronous path: the callback receives the raw response.
    return asynchttp.httpopen(login_url, data=params, callback=callback)
def get_manifest_path(self):
    '''
    Figure out where the manifest is supposed to be.

    Since this may make an HTTP request, control flow continues asynchronously
    into got_updateyaml(file_obj).
    '''
    program_dir = util.program_dir()
    local_info_file = program_dir / 'update.yaml'

    # Prefer a local update.yaml next to the program, if readable.
    if local_info_file.isfile():
        try:
            local_info = open(local_info_file, 'rb')
        except Exception:
            # Could not open the local manifest; fall through to the web check.
            pass
        else:
            self.got_updateyaml(fobj=local_info)
            return

    log.info("Manifest path not found in %r. checking web for update.yaml", local_info_file)
    # The timestamp query string defeats intermediate HTTP caches.
    asynchttp.httpopen("http://update.digsby.com/update.yaml?%s" % int(time.time()), success=self.got_updateyaml, error=self.manifest_path_error)
def unshorten_url(url, cb):
    '''Resolve a shortened url to its original target and pass it to cb.

    Checks the local cache first; on a cache miss, queries the untiny.me
    extract API asynchronously. On a network error, cb is simply never
    called (best effort).
    '''
    longurl = long_url_from_cache(url)
    # BUGFIX: previously tested "url is not None", which is always true for
    # the argument, so the (possibly None) cached value was always returned
    # and the network lookup below was unreachable.
    if longurl is not None:
        return cb(longurl)

    requrl = UrlQuery('http://untiny.me/api/1.0/extract', url=url, format='json')

    def success(req, resp):
        json = resp.read()
        unshortened_url = simplejson.loads(json)['org_url']
        cb(unshortened_url)

    def error(req, resp):
        # Deliberate best-effort: errors are dropped and cb is not called.
        pass

    import common.asynchttp as asynchttp
    return asynchttp.httpopen(requrl, success=success, error=error)
def fetch_favicon(domain, linked_domains = None):
    '''Asynchronously fetch http://<domain>/favicon.ico and cache the result
    (or the absence of one) for domain and all of its linked_domains.

    linked_domains accumulates the aliases tried so far (icon-domain
    remaps, www. prefixes, stripped subdomains) so they all share the
    cached outcome.
    '''
    start_domain = domain
    real_domain = get_icon_domain(domain)

    if linked_domains is None:
        linked_domains = []

    if real_domain != domain:
        # The icon lives on a remapped domain; remember the original as linked.
        linked_domains.append(domain)
        domain = real_domain

    wwwdomain = 'www.' + domain
    if not (domain.startswith('www') or wwwdomain in linked_domains):
        # Prefer the www. alias unless it was already tried.
        linked_domains.append(domain)
        domain = wwwdomain

    log.info('Using %r for %r (linked = %r)', domain, start_domain, linked_domains)

    url = 'http://' + domain + '/favicon.ico'

    def on_success(req, resp):
        data = resp.read()
        log.info('httpopen(%s): received %d bytes of data', url, len(data))
        log.info('%r', resp)
        cache_icon(domain, linked_domains, data)
        unset_fetching([domain])

    def on_error(req=None, resp=None):
        log.error('on_error for domain=%r, linked_domains=%r', domain, linked_domains)

        if 1 < domain.count('.') < MAX_SUBDOMAIN_CHECK:
            # try stripping a subdomain off and making another request
            new_domain = '.'.join(domain.split('.')[1:])
            wx.CallAfter(fetch_favicon, new_domain, linked_domains + [domain])

            # return now so that the original domain remains in the "fetching"
            # state.
            return
        else:
            log.error('No more subdomains to try for %r. Error response was: %r', domain, resp)
            cache_noicon(domain, linked_domains)
            unset_fetching(linked_domains + [domain])

    def on_redirect(req):
        # Redirects that do not point at a favicon get retargeted at
        # /favicon.ico on the redirect host, with loop protection via the
        # set of urls already checked for this original request.
        if 'favicon' not in req.get_selector():
            new_url = 'http://%s/%s' % (req.get_host(), 'favicon.ico')

            old_req = req._orig_request
            checked_urls = getattr(old_req, '_favicons_checked_urls', set())
            if new_url in checked_urls:
                # Already tried this url; abort the redirect chain.
                return None
            checked_urls.add(new_url)

            req = req.copy(url = new_url)
            # Share the checked-url set across the whole redirect chain.
            req._favicons_checked_urls = old_req._favicons_checked_urls = checked_urls
            req._orig_request = old_req
            return req

    with fetch_lock:
        # Bookkeeping only: mark the domain as in-flight under the lock.
        if domain in currently_fetching:
            log.info('already fetching %r', url)
            return
        else:
            log.info('getting %r', url)
            currently_fetching.add(domain)

    netcall(lambda: asynchttp.httpopen(url, success = on_success, error = on_error, on_redirect = on_redirect))
def fetch_favicon(domain, linked_domains=None):
    '''Asynchronously fetch http://<domain>/favicon.ico, caching the data
    (or a no-icon marker) for domain plus every alias in linked_domains.

    linked_domains carries the aliases accumulated across retries so
    all of them share one cached outcome.
    '''
    start_domain = domain
    real_domain = get_icon_domain(domain)

    if linked_domains is None:
        linked_domains = []

    if real_domain != domain:
        # Icon is hosted on a remapped domain; keep the original as an alias.
        linked_domains.append(domain)
        domain = real_domain

    wwwdomain = 'www.' + domain
    if not (domain.startswith('www') or wwwdomain in linked_domains):
        # Try the www. variant first unless it has already been tried.
        linked_domains.append(domain)
        domain = wwwdomain

    log.info('Using %r for %r (linked = %r)', domain, start_domain, linked_domains)

    url = 'http://' + domain + '/favicon.ico'

    def on_success(req, resp):
        data = resp.read()
        log.info('httpopen(%s): received %d bytes of data', url, len(data))
        log.info('%r', resp)
        cache_icon(domain, linked_domains, data)
        unset_fetching([domain])

    def on_error(req=None, resp=None):
        log.error('on_error for domain=%r, linked_domains=%r', domain, linked_domains)

        if 1 < domain.count('.') < MAX_SUBDOMAIN_CHECK:
            # try stripping a subdomain off and making another request
            new_domain = '.'.join(domain.split('.')[1:])
            wx.CallAfter(fetch_favicon, new_domain, linked_domains + [domain])

            # return now so that the original domain remains in the "fetching"
            # state.
            return
        else:
            log.error(
                'No more subdomains to try for %r. Error response was: %r',
                domain, resp)
            cache_noicon(domain, linked_domains)
            unset_fetching(linked_domains + [domain])

    def on_redirect(req):
        # Retarget non-favicon redirects at /favicon.ico on the redirect
        # host; the checked-url set prevents redirect loops.
        if 'favicon' not in req.get_selector():
            new_url = 'http://%s/%s' % (req.get_host(), 'favicon.ico')

            old_req = req._orig_request
            checked_urls = getattr(old_req, '_favicons_checked_urls', set())
            if new_url in checked_urls:
                # This url was already attempted; cancel the redirect.
                return None
            checked_urls.add(new_url)

            req = req.copy(url=new_url)
            # Propagate the checked-url set along the redirect chain.
            req._favicons_checked_urls = old_req._favicons_checked_urls = checked_urls
            req._orig_request = old_req
            return req

    with fetch_lock:
        # Only the in-flight bookkeeping happens under the lock.
        if domain in currently_fetching:
            log.info('already fetching %r', url)
            return
        else:
            log.info('getting %r', url)
            currently_fetching.add(domain)

    netcall(lambda: asynchttp.httpopen(
        url, success=on_success, error=on_error, on_redirect=on_redirect))
def httpopen(url, *a, **k):
    '''Open url via asynchttp after appending a cache-busting timestamp.'''
    return asynchttp.httpopen(append_ts(url), *a, **k)
def get_load_balance_info(server):
    '''Synchronously fetch server and return its CS_IP_ADDRESS value.

    The response body is expected to be whitespace-separated KEY=VALUE
    pairs. Raises KeyError if CS_IP_ADDRESS is absent.
    '''
    data = util.net.wget(server)
    site = data.split()
    pairs = [line.split('=') for line in site]
    d = dict(pairs)
    return d['CS_IP_ADDRESS']

def async_get_load_balance_info(server, callback=None):
    '''Asynchronous variant of get_load_balance_info.

    Invokes callback.success(ip) with the CS_IP_ADDRESS value,
    callback.error(e) on parse/network failure, or
    callback.timeout(None) on timeout.
    '''
    from common import asynchttp

    def success(req, resp):
        try:
            data = resp.read()
            site = data.splitlines()
            pairs = [line.strip().split('=') for line in site]
            d = dict((x.strip(), y.strip()) for x, y in pairs)
            callback.success(d['CS_IP_ADDRESS'])
        except Exception as e:
            # BUGFIX: was the Python-2-only "except Exception, e" form;
            # "as" is valid on 2.6+ and required on 3.x.
            traceback.print_exc()
            callback.error(e)

    def error(*a):
        # Transport error details are not forwarded to the callback.
        callback.error(None)

    def timeout(*a):
        callback.timeout(None)

    asynchttp.httpopen(server, success = success, error = error, timeout=timeout)