def _request(self, request, callback):
    global pnconn_pool

    ## Build URL
    '''
    url = self.origin + '/' + "/".join([
        "".join([ ' ~`!@#$%^&*()+=[]\\{}|;\':",./<>?'.find(ch) > -1 and
            hex(ord(ch)).replace('0x', '%').upper() or
            ch for ch in list(bit)
        ]) for bit in request])
    '''
    url = self.getUrl(request)

    agent = ContentDecoderAgent(
        RedirectAgent(
            Agent(reactor,
                  contextFactory=WebClientContextFactory(),
                  # 'self.ssl and None or pnconn_pool' always evaluated to
                  # pnconn_pool (the and/or idiom breaks when the middle
                  # value is falsy); a conditional expression does what was
                  # intended: no shared pool for SSL connections.
                  pool=None if self.ssl else pnconn_pool)),
        [('gzip', GzipDecoder)])

    # use a separate name so the 'request' argument isn't shadowed
    d = agent.request('GET', url, Headers(self.headers), None)

    def received(response):
        finished = Deferred()
        response.deliverBody(PubNubResponse(finished))
        return finished

    def complete(data):
        # parse the JSON response instead of eval()ing untrusted data
        callback(json.loads(data))

    d.addCallback(received)
    d.addBoth(complete)
def request_gzipped_url(url, callback, errback=None, timeout=None, **kwargs):
    '''Get URL with gzip-decoder support.'''
    agent = ContentDecoderAgent(Agent(reactor), [('gzip', GzipDecoder)])
    d = agent.request('GET', url, Headers({'User-Agent': ['gzip']}))

    def handleResponse(response, **kwargs):
        receiverDeferred = Deferred()
        receiverDeferred.addCallback(callback, **kwargs)
        receiver = StringReceiver(receiverDeferred)
        response.deliverBody(receiver)

    if timeout:
        timeoutCall = reactor.callLater(timeout, d.cancel)

        def completed(passthrough):
            if timeoutCall.active():
                timeoutCall.cancel()
            return passthrough

        d.addBoth(completed)

    d.addCallback(handleResponse, **kwargs)
    if errback:
        d.addErrback(errback, **kwargs)
    return d
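# A minimal usage sketch for request_gzipped_url above; the URL is
# illustrative, and StringReceiver (referenced by the function but defined
# elsewhere in that project) is assumed to deliver the accumulated body to
# its Deferred, which then reaches print_body.
def print_body(body):
    print(body)

d = request_gzipped_url('http://example.com/', print_body, timeout=30)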
def __init__(self, reactor, email, password):
    self.reactor = reactor
    self.email = email
    self.password = password

    # Set up an agent for sending HTTP requests.  Uses cookies
    # (part of the authentication), persistent HTTP connection
    # pool, automatic content decoding (gzip)

    # container to keep track of cookies
    self.cookiejar = cookielib.CookieJar()

    # HTTP persistent connection pool
    self.pool = HTTPConnectionPool(self.reactor, persistent=True)
    # for some reason, using >1 connection per host fails
    self.pool.maxPersistentPerHost = 1

    self.agent = ContentDecoderAgent(
        CookieAgent(Agent(self.reactor, pool=self.pool), self.cookiejar),
        [('gzip', GzipDecoder)])

    # this is the token that is used to authenticate API requests
    self.xsrf_token = None
    self.auth_token = None

    # who we are
    self.player_nickname = None
    self.player_guid = None
    self.team = None
    self.ap = None
    self.level = None
    self.start_date = None
    self.new_version = False
    self.inventory_done = False
    self.profile_done = False

    # for keeping track of item inventory
    self.inventory = b07.inventory.Inventory()

    # for keeping track of API requests that are delayed until
    # authentication has completed
    self._deferred_api_requests = []

    # for keeping track of periodic inventory refreshes
    self._periodic_inventory_refresh_delayedcall = None

    # list of functions to call every time inventory is refreshed
    self._on_inventory_refreshed = []

    # do an immediate inventory refresh
    self._first_inventory_ready = self._defer_until_authenticated(
        self._inventory0, (), {})

    # do an immediate profile refresh
    self._first_profile_ready = self._defer_until_authenticated(
        self._profile0, (), {})

    # start the authentication process
    self.reactor.callLater(0, self._authenticate0)
def main():
    agent = ContentDecoderAgent(Agent(reactor), [('gzip', GzipDecoder)])

    d = agent.request('GET', 'http://www.yahoo.com/')
    d.addCallback(printBody)
    d.addErrback(log.err)
    d.addCallback(lambda ignored: reactor.stop())
    reactor.run()
def main():
    agent = ContentDecoderAgent(Agent(reactor), [(b'gzip', GzipDecoder)])

    d = agent.request(b'GET', b'http://httpbin.org/gzip')
    d.addCallback(printBody)
    d.addErrback(log.err)
    d.addCallback(lambda ignored: reactor.stop())
    reactor.run()
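# printBody is referenced by the two main() snippets above but never defined
# in these excerpts; a minimal sketch of what it might look like, assuming
# the decoded body should simply be read in full and printed (readBody is
# real twisted.web API, the function name is just the one the snippets
# expect):
from twisted.web.client import readBody

def printBody(response):
    d = readBody(response)
    d.addCallback(lambda body: print(body))
    return d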
def __init__(self, *argz, **kwz):
    super(txOneDriveAPI, self).__init__(*argz, **kwz)

    pool = self.request_pool = QuietHTTPConnectionPool(
        reactor,
        debug_requests=self.debug_requests,
        **self.request_pool_options)

    self.request_agent = ContentDecoderAgent(
        RedirectAgent(
            Agent(reactor, TLSContextFactory(self.ca_certs_files), pool=pool)),
        [('gzip', GzipDecoder)])
def assert_title(self, cassette_name, expected):
    cassette_agent = CassetteAgent(
        Agent(reactor), cassette_path(cassette_name),
        preserve_exact_body_bytes=True)
    agent = ContentDecoderAgent(
        RedirectAgent(cassette_agent), [('gzip', GzipDecoder)])
    finished = agent.request(
        'GET', 'http://127.0.0.1:5000/{}'.format(cassette_name))
    finished.addCallback(self.extractor.extract)
    finished.addCallback(self.assertEqual, expected)
    finished.addBoth(cassette_agent.save)
    return finished
class HttpClient(object):

    def __init__(self):
        self.cookies = CookieJar()
        self.agent = Agent(reactor)
        self.agent = CookieAgent(self.agent, self.cookies)
        self.agent = ContentDecoderAgent(self.agent, [('gzip', GzipDecoder)])

    def post(self, url, body_producer, headers=None):
        return self.agent.request('POST', url, headers, body_producer)

    def get(self, url, headers=None):
        return self.agent.request('GET', url, headers)
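# A minimal usage sketch for the HttpClient above: issue a GET and read the
# decoded body with readBody (real twisted.web API); the URL is illustrative.
from twisted.web.client import readBody

def fetch_example():
    client = HttpClient()
    d = client.get(b'http://example.com/')
    d.addCallback(readBody)
    d.addCallback(lambda body: print('%d bytes' % len(body)))
    return d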
def __init__(self, scraper, pool=None):
    self.scraper = scraper
    self._pool = pool
    self._agents = {}  # map proxy -> an agent

    redirectLimit = scraper.config.get('max_redirects')
    if redirectLimit is None:
        redirectLimit = 3

    # create an agent for direct requests
    self._direct_agent = Agent(
        reactor, pool=self._pool,
        connectTimeout=scraper.config.get('timeout') or 30)
    if redirectLimit > 0:
        self._direct_agent = BrowserLikeRedirectAgent(
            self._direct_agent, redirectLimit=redirectLimit)

    self._direct_agent = ContentDecoderAgent(
        self._direct_agent, [('gzip', GzipDecoder)])

    self.cj = self.scraper.client.opener.cj
    if self.cj is not None:
        self._direct_agent = CookieAgent(self._direct_agent, self.cj)

    # create an agent for http-proxy requests
    # no endpoint yet, use __ instead of _ to backup the instance
    self.__http_proxy_agent = ProxyAgent(None, pool=self._pool)
    if redirectLimit > 0:
        self._http_proxy_agent = BrowserLikeRedirectAgent(
            self.__http_proxy_agent, redirectLimit=redirectLimit)
        self._http_proxy_agent = ContentDecoderAgent(
            self._http_proxy_agent, [('gzip', GzipDecoder)])
    else:
        self._http_proxy_agent = ContentDecoderAgent(
            self.__http_proxy_agent, [('gzip', GzipDecoder)])

    if self.cj is not None:
        self._http_proxy_agent = CookieAgent(self._http_proxy_agent, self.cj)

    # create an agent for https-proxy requests
    # no endpoint yet, use __ instead of _ to backup the instance
    self.__https_proxy_agent = TunnelingAgent(
        reactor=reactor, proxy=None,
        contextFactory=ScrapexClientContextFactory(),
        connectTimeout=30, pool=self._pool)  # no proxy yet
    if redirectLimit > 0:
        self._https_proxy_agent = BrowserLikeRedirectAgent(
            self.__https_proxy_agent, redirectLimit=redirectLimit)
        self._https_proxy_agent = ContentDecoderAgent(
            self._https_proxy_agent, [('gzip', GzipDecoder)])
    else:
        self._https_proxy_agent = ContentDecoderAgent(
            self.__https_proxy_agent, [('gzip', GzipDecoder)])

    if self.cj is not None:
        self._https_proxy_agent = CookieAgent(self._https_proxy_agent, self.cj)
def _setUp(self):
    super(HTTPTest, self)._setUp()

    try:
        import OpenSSL
    except:
        log.err("Warning! pyOpenSSL is not installed. https websites will "
                "not work")

    self.control_agent = TrueHeadersSOCKS5Agent(
        reactor,
        proxyEndpoint=TCP4ClientEndpoint(reactor, '127.0.0.1',
                                         config.tor.socks_port))

    self.report['socksproxy'] = None
    if self.localOptions['socksproxy']:
        try:
            sockshost, socksport = self.localOptions['socksproxy'].split(':')
            self.report['socksproxy'] = self.localOptions['socksproxy']
        except ValueError:
            raise InvalidSocksProxyOption
        socksport = int(socksport)
        self.agent = TrueHeadersSOCKS5Agent(
            reactor,
            proxyEndpoint=TCP4ClientEndpoint(reactor, sockshost, socksport))
    else:
        self.agent = TrueHeadersAgent(reactor)

    self.report['agent'] = 'agent'

    if self.followRedirects:
        try:
            self.control_agent = FixedRedirectAgent(self.control_agent)
            self.agent = FixedRedirectAgent(
                self.agent,
                ignorePrivateRedirects=self.ignorePrivateRedirects)
            self.report['agent'] = 'redirect'
        except:
            log.err("Warning! You are running an old version of twisted "
                    "(<= 10.1). I will not be able to follow redirects. "
                    "This may make the testing less precise.")

    if len(self.contentDecoders) > 0:
        self.control_agent = ContentDecoderAgent(
            self.control_agent, self.contentDecoders)
        self.agent = ContentDecoderAgent(self.agent, self.contentDecoders)

    self.processInputs()
    log.debug("Finished test setup")
def call(self, action, params=None, callback=None):
    if params is None:
        params = StalkerRequest.getDefaults(action)
    headers = StalkerRequest.getHeaders(self._identity, action,
                                        referer=self.baseurl)
    headers["X-User-Agent"] = ["Model: MAG250; Link: WiFi"]
    url = "%s%s?%s" % (self.baseurl, DEFAULT_ENDPOINT, urlencode(params))
    Log.w(url)
    agent = ContentDecoderAgent(Agent(reactor), [('gzip', GzipDecoder)])

    def bodyCB(body):
        if isinstance(body, Failure):
            if isinstance(body.value, PartialDownloadError):
                body = body.value.response
            else:
                Log.w(body)
                callback(None)
                return
        try:
            result = json.loads(unicode(body))
            Log.d(result)
            callback(result)
        except Exception as e:
            Log.w(body)
            callback(None)

    def bodyErrorCB(error=None):
        Log.w(error)

    def responseCB(response):
        d = readBody(response)
        d.addBoth(bodyCB)

    def errorCB(error=None):
        if isinstance(error, PartialDownloadError):
            responseCB(error.response)
            return
        Log.w(error)

    d = agent.request('GET', url, Headers(headers))
    d.addCallback(responseCB)
    d.addErrback(errorCB)
def build_agent(req):
    uri = URI.fromBytes(req.url)
    proxy = req.get('proxy')
    if req.get('use_proxy') is False:
        proxy = None

    if proxy:
        if uri.scheme == 'https':
            agent = TunnelingAgent(
                reactor=reactor, proxy=proxy,
                contextFactory=ScrapexClientContextFactory(),
                connectTimeout=req.get('timeout'))
        else:
            endpoint = TCP4ClientEndpoint(
                reactor, host=proxy.host, port=proxy.port,
                timeout=req.get('timeout'))
            agent = ProxyAgent(endpoint)
            if proxy.auth_header:
                req.get('headers')['Proxy-Authorization'] = proxy.auth_header
    else:
        agent = Agent(reactor)

    agent = RedirectAgent(agent, redirectLimit=3)
    agent = ContentDecoderAgent(agent, [('gzip', GzipDecoder)])
    return agent
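# A hedged usage sketch for build_agent above: FakeReq is a hypothetical
# stand-in for the request object whose url attribute and get() accessor
# build_agent relies on; the URL and values are illustrative, not the
# project's actual request type.
class FakeReq(object):
    url = b'http://example.com/'

    def get(self, key):
        return {'proxy': None, 'use_proxy': False,
                'timeout': 30, 'headers': {}}.get(key)

def example():
    agent = build_agent(FakeReq())
    return agent.request(b'GET', FakeReq.url)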
def run(self) -> Type[Tuple]:
    agent = Agent(reactor, connectTimeout=self._timeout)

    # Add the gzip decoder
    if self._compressed:
        agent = ContentDecoderAgent(agent, [(b"gzip", GzipDecoder)])

    binaryPayloadRequestProducer = _BinaryPayloadRequestProducer(
        self._payload,
        self._meta,
        self._isPayloadGzipped,
    )

    # Make the web request
    response = yield agent.request(
        self._httpMethod,
        self._url,
        Headers(self._headers),
        binaryPayloadRequestProducer,
    )

    self._meta = binaryPayloadRequestProducer.meta
    self._meta.code = response.code
    self._meta.version = response.version
    self._meta.headers = {k.decode(): v[0].decode()
                          for k, v in response.headers.getAllRawHeaders()}

    # Get the responseTuple data
    responseProducer = self._cbResponse(response, self._meta)
    self._meta = responseProducer.meta

    return self._meta
def __init__(self, hs):
    SimpleHttpClient.__init__(self, hs)
    # clobber the base class's agent and UA:
    self.agent = ContentDecoderAgent(
        BrowserLikeRedirectAgent(
            Agent.usingEndpointFactory(reactor, SpiderEndpointFactory(hs))),
        [(b'gzip', GzipDecoder)])
def get(url, data=None, on_response=None, on_error=None):
    errback = on_error or make_errback(frames_back=2)
    try:
        def handle_response(response):
            if response.code == 200:
                response.deliverBody(JsonReceiver.create(on_response, errback))
            else:
                errback("returned %s" % response.code)

        agent = ContentDecoderAgent(Agent(reactor, pool=pool),
                                    [("gzip", GzipDecoder)])
        headers = Headers(get_auth_headers())
        headers.addRawHeader("User-Agent", "gzip")
        d = agent.request("GET", url, headers=headers,
                          bodyProducer=JsonProducer(data) if data else None)
        d.addCallbacks(handle_response, errback)
    except Exception as ex:
        errback("error %s" % ex)
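# JsonReceiver is referenced above but not defined in these excerpts; a
# minimal sketch of the expected shape, assuming it buffers the body and
# decodes it as JSON when the connection closes (the class and method names
# are the ones the snippet expects, the implementation is illustrative):
import json
from twisted.internet.protocol import Protocol

class JsonReceiver(Protocol):
    def __init__(self, on_response, errback):
        self._on_response = on_response
        self._errback = errback
        self._chunks = []

    @classmethod
    def create(cls, on_response, errback):
        return cls(on_response, errback)

    def dataReceived(self, data):
        self._chunks.append(data)

    def connectionLost(self, reason):
        try:
            self._on_response(json.loads(b''.join(self._chunks)))
        except Exception as ex:
            self._errback("error %s" % ex)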
def _create_agent(self, req):
    """create right agent for specific request"""
    agent = None
    uri = URI.fromBytes(req.url)
    proxy = req.get('proxy')
    if req.get('use_proxy') is False:
        proxy = None

    if proxy:
        if uri.scheme == 'https':
            agent_key = 'httpsproxy-%s-%s' % (proxy.host, proxy.port)
            agent = self._agents.get(agent_key)
            if not agent:
                agent = TunnelingAgent(
                    reactor=reactor, proxy=proxy,
                    contextFactory=ScrapexClientContextFactory(),
                    connectTimeout=30, pool=self._pool)
                self._agents[agent_key] = agent
        else:
            # http
            agent_key = 'httpproxy-%s-%s' % (proxy.host, proxy.port)
            agent = self._agents.get(agent_key)
            if not agent:
                endpoint = TCP4ClientEndpoint(
                    reactor, host=proxy.host, port=proxy.port,
                    timeout=req.get('timeout'))
                agent = ProxyAgent(endpoint, pool=self._pool)
                self._agents[agent_key] = agent

        if proxy.auth_header:
            req.get('headers')['Proxy-Authorization'] = proxy.auth_header
    else:
        agent = self._direct_agent  # use single agent when no proxies used

    redirectLimit = self.scraper.config.get('max_redirects')
    if redirectLimit is None:
        redirectLimit = 3
    if redirectLimit > 0:
        agent = BrowserLikeRedirectAgent(agent, redirectLimit=redirectLimit)

    agent = ContentDecoderAgent(agent, [('gzip', GzipDecoder)])

    if self.cj is not None:
        agent = CookieAgent(agent, self.cj)

    return agent
def __init__(self, **config):
    for k, v in config.viewitems():
        try:
            x = getattr(self, k)
        except AttributeError:
            raise AttributeError(
                'Unrecognized configuration key: {}'.format(k))
        if isinstance(x, Mapping) and isinstance(v, Mapping):
            v = AttrDict(v)
            v.rebase(AttrDict(x))
        setattr(self, k, v)

    pool = QuietHTTPConnectionPool(reactor, persistent=True)
    for k, v in self.request_pool_options.viewitems():
        getattr(pool, k)  # to somewhat protect against typos
        setattr(pool, k, v)

    self.request_agent = ContentDecoderAgent(
        RedirectAgent(
            Agent(reactor, TLSContextFactory(self.ca_certs_files), pool=pool)),
        [('gzip', GzipDecoder)])
def main(): """Main command line entry point.""" parser = argparse.ArgumentParser( description='Make requests to one or more HTTP or HTTPS URIs, ' 'and record the interactions in a cassette.', epilog='If no URIs are passed on the command line, they are ' 'read from standard input, one per line.') parser.add_argument( 'uris', metavar='URI', nargs='*', help='URI to fetch') parser.add_argument( 'cassette_path', metavar='CASSETTE', help='path to output cassette') args = parser.parse_args() uris = args.uris or imap(lambda x: x.strip(), sys.stdin) cassette_agent = CassetteAgent(Agent(reactor), args.cassette_path) agent = ContentDecoderAgent( RedirectAgent(cassette_agent), [('gzip', GzipDecoder)]) finished = DeferredList([agent.request('GET', uri) for uri in uris]) finished.addCallback(save_and_exit, cassette_agent) finished.addErrback(fail_and_exit) reactor.run()
def fetch(method, url, headers={}, data=''):
    """Fetches a page and returns a deferred with the response"""
    headers = Headers(headers)
    d = Deferred()
    response_data = {'code': 0, 'content': None, 'headers': Headers({})}

    class StringPrinter(Protocol):
        # used to record the result from a url fetch
        def __init__(self):
            self.buffer = ''

        def dataReceived(self, bytes):
            self.buffer = self.buffer + bytes

        def connectionLost(self, reason):
            response_data['content'] = self.buffer
            d.callback(response_data)

    class StringProducer(object):
        implements(IBodyProducer)

        def __init__(self, data):
            self.data = data
            self.length = len(data)

        def startProducing(self, consumer):
            consumer.write(self.data)
            return succeed(None)

        def pauseProducing(self):
            pass

        def stopProducing(self):
            pass

    def onResponse(response):
        # proxied response started coming back, headers are loaded
        response_data['code'] = response.code
        response_data['headers'] = response.headers
        response.deliverBody(StringPrinter())

    # start the connection
    agent = ContentDecoderAgent(Agent(reactor), [('gzip', GzipDecoder)])
    body = StringProducer(data)
    fetcher = agent.request(method, url, headers, body)
    fetcher.addCallback(onResponse)
    fetcher.addErrback(d.errback)
    return d
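# A minimal usage sketch for fetch() above; the URL is illustrative, and
# twisted.python.log is assumed to be imported as log, as in other snippets:
def fetch_example():
    d = fetch('GET', 'http://example.com/')

    def show(resp):
        log.msg('%s: %d bytes' % (resp['code'], len(resp['content'])))

    d.addCallback(show)
    d.addErrback(log.err)
    return d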
def proxy_response(request, url):
    """Send a new request to the given url, return a deferred with content"""
    d = Deferred()
    response_data = {'code': 0, 'content': None, 'headers': Headers({})}

    class StringPrinter(Protocol):
        # used to record the result from a url fetch
        def __init__(self):
            self.buffer = ''

        def dataReceived(self, bytes):
            self.buffer = self.buffer + bytes

        def connectionLost(self, reason):
            response_data['content'] = self.buffer
            d.callback(response_data)

    def onResponse(response):
        # proxied response started coming back, headers are loaded
        response_data['code'] = response.code
        response_data['headers'] = response.headers
        response.deliverBody(StringPrinter())

    class RequestWritingPrinter(Protocol):
        # used to send from the proxied response to the original request
        def __init__(self, request):
            self.request = request

        def dataReceived(self, bytes):
            self.request.write(bytes)

        def connectionLost(self, reason):
            self.request.finish()

    # start the connection
    agent = ContentDecoderAgent(Agent(reactor), [('gzip', GzipDecoder)])
    body = FileBodyProducer(request.content)
    headers = request.requestHeaders
    headers.setRawHeaders('Host', [urlparse(url)[1]])
    headers.removeHeader('Content-Length')
    fetcher = agent.request(request.method, url, headers, body)
    fetcher.addCallback(onResponse)
    fetcher.addErrback(d.errback)
    return d
def run(self):
    # convert tuple to json
    body = self._postTuple.tupleToRestfulJsonDict()
    body = json.dumps(body).encode("utf-8")

    # add http headers
    headers = Headers({
        "User-Agent": ["synerty/1.0"],
        "Content-Type": ["application/json"]
    })

    # Add the gzip decoder
    agent = ContentDecoderAgent(Agent(reactor), [(b"gzip", GzipDecoder)])

    # Make the web request
    response = yield agent.request(self._httpMethod, self._url, headers,
                                   _BytesProducer(body))

    # Get the response data
    responseData = yield self._cbResponse(response)

    # Convert the bytes into a tuple and return
    return self._parseTuple(responseData)
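# _BytesProducer is referenced above but not defined in these excerpts; a
# minimal IBodyProducer sketch that writes a fixed bytes payload in one shot
# (the class name is the one the snippet expects, the body follows the
# standard Twisted body-producer pattern):
from twisted.internet.defer import succeed
from twisted.web.iweb import IBodyProducer
from zope.interface import implementer

@implementer(IBodyProducer)
class _BytesProducer(object):
    def __init__(self, body):
        self.body = body
        self.length = len(body)

    def startProducing(self, consumer):
        consumer.write(self.body)
        return succeed(None)

    def pauseProducing(self):
        pass

    def stopProducing(self):
        pass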
class HTTPClient:
    user_agent = b'Mozilla/5.0 (Windows NT 6.2; WOW64; rv:28.0) Gecko/20100101 Firefox/28.0'

    def __init__(self):
        self.agent = ContentDecoderAgent(
            RedirectAgent(Agent(reactor)), [(b'gzip', GzipDecoder)])

    @defer.inlineCallbacks
    def fetch(self, url, receiver=None):
        resp = yield self.agent.request(
            b'GET', url.encode('utf-8'),
            Headers({b'User-Agent': [self.user_agent]}))
        resp.error = None

        resp.headers = dict(resp.headers.getAllRawHeaders())
        for k, v in resp.headers.copy().items():
            resp.headers[k.decode('utf-8')] = parse_header(
                v[0].decode('utf-8'))
            del resp.headers[k]

        if 'Content-Type' not in resp.headers.keys():
            resp.headers['Content-Type'] = [
                'application/octet-stream', {'charset': 'utf-8'}]
        elif 'charset' not in resp.headers['Content-Type'][1].keys():
            resp.headers['Content-Type'][1]['charset'] = 'utf-8'

        if receiver is None:
            mime_type = resp.headers['Content-Type'][0]
            if mime_type.startswith('image'):
                receiver = ImageReceiver
            elif mime_type == 'application/json':
                receiver = JSONReceiver
            elif mime_type == 'text/html':
                receiver = HTMLReceiver
            else:
                receiver = Receiver

        d = defer.Deferred()
        resp.receiver = receiver
        resp.deliverBody(resp.receiver(resp, d))
        resp.body = yield d

        if resp.length is UNKNOWN_LENGTH:
            resp.length = None  # A null value serves as a good unknown

        defer.returnValue(resp)
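# A hedged usage sketch for HTTPClient.fetch above; the URL is illustrative,
# and the receiver classes it dispatches to (ImageReceiver, JSONReceiver,
# HTMLReceiver, Receiver) are defined elsewhere in that project:
@defer.inlineCallbacks
def demo():
    client = HTTPClient()
    resp = yield client.fetch(u'http://example.com/')
    print(resp.headers['Content-Type'][0], resp.length)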
def __init__(self):
    self.pool = HTTPConnectionPool(reactor, persistent=True)
    self.pool.maxPersistentPerHost = 5  # default is 2 connections per host
    self.pool.cachedConnectionTimeout = 50  # default is 240 seconds

    contextFactory = WebClientContextFactory()
    raw_agent = Agent(reactor, contextFactory, pool=self.pool)
    agent = RedirectAgent(
        ContentDecoderAgent(raw_agent, [('gzip', GzipDecoder)]))
    self.cookieJar = CookieJar()
    self.agent = CookieAgent(agent, self.cookieJar)

    self.headers = {
        'User-agent': ['Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.1) '
                       'Gecko/2008071615 Fedora/3.0.1-1.fc9 Firefox/3.0.1'],
        'Accept-Language': ['zh-Hans-CN,zh-Hans;q=0.5'],
        'Accept': ['text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8'],
        'Accept-Encoding': ['gb2313,utf-8;q=0.7,*;q=0.7'],
        'Cache-Control': ['max-age=0']}
def __init__(self, url, callID=0, maxPersistentPerHost=2,
             useCompression=False, connectTimeout=None):
    self.url = url
    self.connectTimeout = connectTimeout
    self.encoder = self.get_encoder()
    assert IEncoder.providedBy(self.encoder), \
        'no encoder available or encoder does not provide IEncoder'
    assert isinstance(callID, (int, long)), \
        "callID must be <type 'int'> or <type 'long'>"
    self.__callID = callID
    self.__callsCounter = 0

    if maxPersistentPerHost > 0:
        self.pool = HTTPConnectionPool(reactor, persistent=True)
        self.pool.maxPersistentPerHost = maxPersistentPerHost
    else:
        self.pool = None

    agent = Agent(reactor, connectTimeout=self.connectTimeout, pool=self.pool)
    if useCompression:
        self.agent = ContentDecoderAgent(agent, [('gzip', GzipDecoder)])
    else:
        self.agent = agent
class TitleFetcher(object):
    """Does exactly what it says on the tin."""

    def __init__(self):
        #: The Twisted Web `Agent` used to make HTTP requests.
        self.agent = ContentDecoderAgent(
            RedirectAgent(BlacklistingAgent(Agent(reactor))),
            [('gzip', GzipDecoder)])
        #: A dictionary of extractors enabled for this fetcher.
        self.extractors = default_extractors
        #: The maximum number of "soft" redirects to follow per request.
        self.max_soft_redirects = 2

    @inlineCallbacks
    def fetch_title(self, url, hostname_tag=False, friendly_errors=False):
        """Fetch the document at *url* and return a `Deferred` yielding the
        document title or summary as a Unicode string.

        *url* may be a Unicode string IRI, a byte string URI, or a Twisted
        `URL`.

        If *hostname_tag* is true, prefix the extracted title with the
        hostname of the initially requested URI or IRI in the form that was
        originally provided, as well as the hostname of the final ASCII-only
        URI if it differs due to redirects or normalization.

        If *friendly_errors* is true, catch common connection errors and
        return a description of the error as the extracted title instead of
        reraising.  Otherwise, all errors bubble to the caller.
        """
        title = None
        if isinstance(url, unicode):
            url = URL.fromText(url)
        elif isinstance(url, str):
            url = URL.fromText(url.decode('ascii'))
        current = url
        response = None
        for _ in xrange(self.max_soft_redirects):
            last_response = response
            # This encoding should be safe, since asURI() only returns
            # URIs with ASCII code points.
            request = self.agent.request(
                'GET', current.asURI().asText().encode('ascii'))
            if friendly_errors:
                request.addErrback(describe_error)
            response = yield request
            if isinstance(response, basestring):
                # We got an error message from describe_error.  Bail.
                title = response
                break
            response.setPreviousResponse(last_response)
            content_type = cgi.parse_header(
                response.headers.getRawHeaders('Content-Type', [''])[0])[0]
            if content_type in self.extractors:
                extractor = self.extractors[content_type]
                extracted = yield extractor.extract(response)
                if isinstance(extracted, Redirect):
                    current = URL.fromText(
                        response.request.absoluteURI.decode('ascii')).click(
                            extracted.location)
                    continue
                title = extracted
            # The only case where we'd want to loop again is when the
            # response returned is a soft redirect.
            break
        else:
            if friendly_errors:
                title = u'Encountered too many redirects.'
            else:
                raise ResponseFailed([Failure(InfiniteRedirection(
                    599, 'Too many soft redirects',
                    location=current.asURI().asText().encode('ascii')))])

        if title is None:
            title = u'{} document'.format(content_type or u'Unknown')
            if response.length is not UNKNOWN_LENGTH:
                title += u' ({})'.format(filesize(response.length))

        if hostname_tag:
            tag = url.host
            if isinstance(response, Response):
                initial = url.host
                final = URL.fromText(
                    response.request.absoluteURI.decode('ascii')).host
                if initial != final:
                    tag = u'{} \u2192 {}'.format(initial, final)
            title = u'[{}] {}'.format(tag, title)

        returnValue(title)
def _setContentDecoder(self, proxy):
    proxy.agent = ContentDecoderAgent(proxy.agent, [('gzip', GzipDecoder)])
def __init__(self, hosts, clacks, pool, reactor, extraHeaders, anonymous):
    self._clacks = clacks
    self._hosts = hosts
    self._agent = ContentDecoderAgent(
        Agent(reactor, pool=pool), [('gzip', GzipDecoder)])
    self._extraHeaders = extraHeaders
    self._anonymous = anonymous
class Command(BaseCommand):
    help = 'Mass geocode using Google Geocode API'

    def __init__(self, *args, **kwargs):
        super(Command, self).__init__(*args, **kwargs)
        self.agent = ContentDecoderAgent(Agent(reactor),
                                         [(b'gzip', GzipDecoder)])
        self.deferred_results = []

    def add_arguments(self, parser):
        parser.add_argument(
            'infile', nargs='?', type=argparse.FileType('r'),
            help='Input JSON file, should be a list of objects with '
                 '"address" property',
            default=sys.stdin)
        parser.add_argument(
            'outfile', nargs='?', type=argparse.FileType('w'),
            help=('Output JSON file, the properties from input file will be '
                  'preserved. Two fields will be added to each object: '
                  '"lat" and "lng"'),
            default=sys.stdout)
        parser.add_argument(
            '--coordinators-poolsize', type=int, default=20,
            help='Coordinators pool size, defaults to 20, larger means '
                 'faster download')

    def geocode_api_url(self, address):
        # f-strings imply Python 3, where quote_plus lives in urllib.parse;
        # the original urllib.quote_plus would raise AttributeError there
        return (
            f'https://maps.googleapis.com/maps/api/geocode/json'
            f'?address={urllib.parse.quote_plus(address)}'
            f'&components=country:US&key={settings.GOOGLE_GEOCODE_APIKEY}'
        ).encode('ascii', 'ignore')

    def handle_readbody_err(self, failure):
        failure.trap(PartialDownloadError)
        return failure.value.response

    def response_received(self, response):
        return readBody(response)

    def updateLatLng(self, result, index, pbar):
        obj = self.store[index]
        try:
            obj['geocode_result'] = json.loads(
                result)['results'][0]['geometry']['location']
        except (IndexError, KeyError):
            obj['geocode_result'] = 'error'
        pbar.update(1)

    def outputResult(self, result, outfile, pbar):
        pbar.close()
        outfile.write(json.dumps(self.store))

    def printError(self, error):
        self.stderr.write(error)

    def generate_requests(self, pbar):
        for ind, obj in enumerate(self.store):
            result = self.agent.request(
                'GET', self.geocode_api_url(obj['address']))\
                .addCallback(self.response_received)\
                .addErrback(self.handle_readbody_err)\
                .addCallback(self.updateLatLng, ind, pbar)\
                .addErrback(self.printError)
            self.deferred_results.append(result)
            yield result

    def kickstart(self, infile, outfile, coordinators_poolsize):
        self.store = json.loads(infile.read())
        pbar = tqdm(total=len(self.store))
        generator = self.generate_requests(pbar)
        DeferredList([
            cooperate(generator).whenDone()
            for _ in range(coordinators_poolsize)
        ])\
            .addCallback(lambda _: gatherResults(self.deferred_results))\
            .addCallback(self.outputResult, outfile, pbar)\
            .addErrback(self.printError)\
            .addBoth(lambda ign: reactor.callWhenRunning(reactor.stop))

    def handle(self, infile, outfile, coordinators_poolsize, *args, **options):
        reactor.callLater(0.1, self.kickstart, infile, outfile,
                          coordinators_poolsize)
        reactor.run()
class HTTPClient(object):

    use_cache_headers = True
    request_pool_options = dict(
        maxPersistentPerHost=10,
        cachedConnectionTimeout=600,
        retryAutomatically=True)
    ca_certs_files = b'/etc/ssl/certs/ca-certificates.crt'
    user_agent = b'bordercamp-irc-bot/{} twisted/{}'\
        .format(bordercamp.__version__, twisted.__version__)
    hide_connection_errors = False
    sync_fallback_timeout = 180  # timeout for synchronous fallback requests in a thread

    def __init__(self, **kwz):
        for k, v in kwz.viewitems():
            getattr(self, k)  # to somewhat protect against typos
            if v is not None:
                setattr(self, k, v)
        pool = QuietHTTPConnectionPool(reactor, persistent=True)
        for k, v in self.request_pool_options.viewitems():
            getattr(pool, k)  # to somewhat protect against typos
            setattr(pool, k, v)
        self.request_agent = ContentDecoderAgent(RedirectAgent(Agent(
            reactor, TLSContextFactory(self.ca_certs_files), pool=pool)),
            [('gzip', GzipDecoder)])
        self.fetch_cache = dict()  # {url: {header_name: processed_value, ...}, ...}

    @defer.inlineCallbacks
    def sync_wrap(self, func, *argz, **kwz):
        timeout = defer.Deferred()
        reactor.callLater(
            self.sync_fallback_timeout,
            lambda: not timeout.called and timeout.callback(SyncTimeout))

        def thread_wrapper():
            # Do not let exceptions be raised in a thread - twisted logs
            # these as unhandled
            try:
                return func(*argz, **kwz)
            except Exception as err:
                return SyncError(err)

        res = yield first_result(timeout, threads.deferToThread(thread_wrapper))
        if res is SyncTimeout:
            raise res()
        if isinstance(res, SyncError):
            raise res.args[0]
        defer.returnValue(res)

    @defer.inlineCallbacks
    def request(self, url, method='get', decode=None, encode=None, data=None):
        method, url = force_bytes(method).upper(), force_bytes(url)
        headers = {'User-Agent': self.user_agent}

        if method == 'GET' and self.use_cache_headers:
            # Avoid doing extra work
            cache = self.fetch_cache.get(url, dict())
            if 'cache-control' in cache and cache['cache-control'] >= time.time():
                defer.returnValue(None)  # no need to re-process same thing
            if 'last-modified' in cache:
                headers['If-Modified-Since'] = rfc822date(cache['last-modified'])
            if 'etag' in cache:
                headers['If-None-Match'] = '"{}"'.format(cache['etag'])

        log.noise(
            'HTTP request: {} {} (h: {}, enc: {}, dec: {}, data: {!r})'\
                .format(method, url[:100], headers, encode, decode, type(data)))

        if data is not None:
            if encode is None:
                if isinstance(data, types.StringTypes):
                    data = io.BytesIO(data)
            elif encode == 'form':
                headers.setdefault('Content-Type',
                                   'application/x-www-form-urlencoded')
                data = io.BytesIO(urlencode(data))
            elif encode == 'json':
                headers.setdefault('Content-Type', 'application/json')
                data = io.BytesIO(json.dumps(data))
            else:
                raise ValueError('Unknown request encoding: {}'.format(encode))
            data_raw, data = data, FileBodyProducer(data)
        else:
            data_raw = None

        if decode not in ['json', None]:
            raise ValueError('Unknown response decoding method: {}'.format(decode))

        requests = None  # indicates fallback to requests module (for e.g. ipv6-only site)
        err = None
        try:
            res = yield self.request_agent.request(
                method, url,
                Headers(dict((k, [v]) for k, v in (headers or dict()).viewitems())),
                data)
        except error.DNSLookupError:
            import requests, socket
            try:
                res = yield self.sync_wrap(
                    getattr(requests, method.lower()),
                    url, headers=headers, data=data_raw)
            except (socket.error, SyncTimeout,
                    requests.exceptions.RequestException) as err:
                pass
        except (RequestTransmissionFailed,
                RequestNotSent, ResponseFailed) as err:
            pass

        if err:
            if not self.hide_connection_errors:
                raise HTTPClientError(
                    None, 'Lookup/connection error: {}'.format(err))
            else:
                log.debug('Lookup/connection error (suppressed): {}'.format(err))
                defer.returnValue(None)  # should also suppress fast refetching

        code, phrase, version = (res.code, res.phrase, res.version)\
            if not requests else (
                res.status_code, http.RESPONSES[res.status_code], ('HTTP', 1, 1))
        log.noise(
            'HTTP request done ({} {}): {} {} {}'\
                .format(method, url[:100], code, phrase, version))

        if code in [http.NO_CONTENT, http.NOT_MODIFIED]:
            defer.returnValue(None)
        if code not in [http.OK, http.CREATED]:
            raise HTTPClientError(code, phrase)

        if not requests:
            data = defer.Deferred()
            res.deliverBody(DataReceiver(data))
            data = yield data
            headers = dict((k, v[-1]) for k, v in res.headers.getAllRawHeaders())
        else:
            try:
                data = yield self.sync_wrap(getattr, res, 'text')
                headers = yield self.sync_wrap(getattr, res, 'headers')
            except (requests.exceptions.RequestException, SyncTimeout) as err:
                raise HTTPClientError(
                    None, 'Sync connection error: {}'.format(err))

        if method == 'GET' and self.use_cache_headers:
            cache = dict((k.lower(), v) for k, v in headers.items())
            cache = dict(
                (k, cache[k])
                for k in ['last-modified', 'cache-control', 'etag']
                if k in cache)
            # Update headers' cache
            if 'last-modified' in cache:
                ts = rfc822.parsedate_tz(cache['last-modified'])
                cache['last-modified'] = time.mktime(ts[:9]) + (ts[9] or 0)
            if 'cache-control' in cache:
                match = re.search(r'\bmax-age=(\d+)\b', cache.pop('cache-control'))
                if match:
                    cache['cache-control'] = time.time() + int(match.group(1))
            if cache:
                self.fetch_cache[url] = cache

        defer.returnValue(
            (json.loads(data) if decode is not None else data, headers))
class HTTPClient(object):

    #: Options to twisted.web.client.HTTPConnectionPool
    request_pool_options = dict(
        maxPersistentPerHost=10,
        cachedConnectionTimeout=600,
        retryAutomatically=True)

    #: Settings for a simple retry mechanism on http error codes
    retry = None

    #: Path string or list of strings
    ca_certs_files = b'/etc/ssl/certs/ca-certificates.crt'

    #: Dump HTTP request data in debug log (insecure!)
    debug_requests = False

    def __init__(self, **config):
        for k, v in config.viewitems():
            try:
                x = getattr(self, k)
            except AttributeError:
                raise AttributeError(
                    'Unrecognized configuration key: {}'.format(k))
            if isinstance(x, Mapping) and isinstance(v, Mapping):
                v = AttrDict(v)
                v.rebase(AttrDict(x))
            setattr(self, k, v)

        pool = QuietHTTPConnectionPool(reactor, persistent=True)
        for k, v in self.request_pool_options.viewitems():
            getattr(pool, k)  # to somewhat protect against typos
            setattr(pool, k, v)

        self.request_agent = ContentDecoderAgent(RedirectAgent(Agent(
            reactor, TLSContextFactory(self.ca_certs_files), pool=pool)),
            [('gzip', GzipDecoder)])

    @defer.inlineCallbacks
    def request_with_retries(self, url, method, **request_kwz):
        '''Gets repeated if any of "raise_for" errors or HTTPClientError is
        raised. When number of attempts runs out, raises last error.'''
        assert 'url' not in request_kwz and 'method' not in request_kwz
        request_kwz['url'], request_kwz['method'] = url, method
        log = request_kwz.get('log')
        if not log:
            log = logging.getLogger(__name__)
            request_kwz['log'] = log

        # Shortcut for simple cases
        if not self.retry or self.retry.attempts <= 1:
            defer.returnValue((yield self.request(**request_kwz)))

        retry_on = [HTTPClientError]
        for cls in request_kwz.get('raise_for', dict()).viewvalues():
            assert issubclass(cls, Exception), cls
            retry_on.append(cls)

        if isinstance(self.retry.delay, (tuple, list)):
            delays = list(self.retry.delay)
            for i in xrange(self.retry.attempts - len(delays), 1):
                delays.append(delays[-1])
        else:
            delays = [float(self.retry.delay)] * (self.retry.attempts - 1)
        delays = list(reversed(delays))

        while True:
            try:
                defer.returnValue((yield self.request(**request_kwz)))
            except tuple(retry_on) as err:
                if not delays:
                    raise  # no attempts left
                delay, d = delays.pop(), defer.Deferred()
                reactor.callLater(delay, d.callback, None)
                if self.debug_requests:
                    log.debug(
                        'Introducing delay after failed'
                        ' ({}: {}) request: {}s'.format(type(err), err, delay))
                yield d

    @defer.inlineCallbacks
    def request(self, url, method='get', decode=None, encode=None, data=None,
                chunks=True, headers=dict(), raise_for=dict(),
                queue_lines=None, log=None):
        '''Make HTTP(S) request.
        decode (response body) = None | json
        encode (data) = None | json | form | files'''
        if not log:
            log = logging.getLogger(__name__)
        if self.debug_requests:
            log.debug(
                'HTTP request: {} {} (h: {}, enc: {}, dec: {}, data: {!r})'\
                    .format(method, url[:100], headers, encode, decode, data))

        headers = dict() if not headers else headers.copy()
        headers.setdefault('User-Agent', 'lafs-backup-tool')

        if data is not None:
            if encode == 'files':
                boundary = os.urandom(16).encode('hex')
                headers.setdefault(
                    'Content-Type',
                    'multipart/form-data; boundary={}'.format(boundary))
                data = MultipartDataSender(data, boundary)
                yield data.calculate_length()
            else:
                if encode is None:
                    if isinstance(data, types.StringTypes):
                        data = io.BytesIO(data)
                elif encode == 'form':
                    headers.setdefault('Content-Type',
                                       'application/x-www-form-urlencoded')
                    data = io.BytesIO(urlencode(data))
                elif encode == 'json':
                    headers.setdefault('Content-Type', 'application/json')
                    data = io.BytesIO(json.dumps(data))
                else:
                    raise ValueError(
                        'Unknown request encoding: {}'.format(encode))
                data = (ChunkingFileBodyProducer
                        if chunks else FileBodyProducer)(data)

        if isinstance(url, unicode):
            url = url.encode('utf-8')
        if isinstance(method, unicode):
            method = method.lower().encode('ascii')
        if decode not in ['json', None]:
            raise ValueError('Unknown response decoding method: {}'.format(decode))

        code = None
        try:
            res = yield self.request_agent.request(
                method.upper(), url,
                Headers(dict((k, [v]) for k, v in (headers or dict()).viewitems())),
                data)
            code = res.code
            if self.debug_requests:
                log.debug(
                    'HTTP request done ({} {}): {} {} {}'\
                        .format(method, url[:100], code, res.phrase, res.version))

            if code in raise_for:
                raise HTTPClientError(code, res.phrase)
            if code == http.NO_CONTENT:
                defer.returnValue(None)
            if code not in [http.OK, http.CREATED]:
                raise HTTPClientError(code, res.phrase)

            data = defer.Deferred()
            if queue_lines is None:
                res.deliverBody(DataReceiver(data))
            else:
                res.deliverBody(LineQueue(data, queue_lines))
            data = yield data

            defer.returnValue(json.loads(data) if decode is not None else data)
        except HTTPClientError as err:
            raise raise_for.get(code, HTTPClientError)(code, err.message)
class API(object):

    class URLS:
        CLIENT_LOGIN = '******'
        SERVICE_LOGIN = '******'
        APPENGINE = 'https://appengine.google.com'
        GAME_API = 'https://betaspike.appspot.com'
        INGRESS = 'http://www.ingress.com'

    class PATHS:
        LOGIN = '******'
        CONFLOGIN = '******'

        class API:
            HANDSHAKE = '/handshake'
            DROP_ITEM = '/rpc/gameplay/dropItem'
            RECYCLE = '/rpc/gameplay/recycleItem'
            SAY = '/rpc/player/say'
            INVENTORY = '/rpc/playerUndecorated/getInventory'
            PLEXTS = '/rpc/playerUndecorated/getPaginatedPlexts'

        class INTEL:
            BASE = '/intel'
            PLEXTS = '/rpc/dashboard.getPaginatedPlextsV2'

    HANDSHAKE_PARAMS = {
        'nemesisSoftwareVersion': '2013-07-12T15:48:09Z d6f04b1fab4f opt',
        'deviceSoftwareVersion': '4.1.1'}

    def __init__(self, reactor, email, password):
        self.reactor = reactor
        self.email = email
        self.password = password

        # Set up an agent for sending HTTP requests.  Uses cookies
        # (part of the authentication), persistent HTTP connection
        # pool, automatic content decoding (gzip)

        # container to keep track of cookies
        self.cookiejar = cookielib.CookieJar()

        # HTTP persistent connection pool
        self.pool = HTTPConnectionPool(self.reactor, persistent=True)
        # for some reason, using >1 connection per host fails
        self.pool.maxPersistentPerHost = 1

        self.agent = ContentDecoderAgent(
            CookieAgent(Agent(self.reactor, pool=self.pool), self.cookiejar),
            [('gzip', GzipDecoder)])

        # this is the token that is used to authenticate API requests
        self.xsrf_token = None
        self.auth_token = None

        # who we are
        self.player_nickname = None
        self.player_guid = None
        self.team = None
        self.ap = None
        self.level = None

        # for keeping track of item inventory
        self.inventory = b07.inventory.Inventory()

        # for keeping track of API requests that are delayed until
        # authentication has completed
        self._deferred_api_requests = []

        # for keeping track of periodic inventory refreshes
        self._periodic_inventory_refresh_delayedcall = None

        # list of functions to call every time inventory is refreshed
        self._on_inventory_refreshed = []

        # do an immediate inventory refresh
        self._first_inventory_ready = self._defer_until_authenticated(
            self._inventory0, (), {})

        # refresh inventory periodically
        self._first_inventory_ready.addCallback(
            self._setup_periodic_inventory_refresh)

        # start the authentication process
        self.reactor.callLater(0, self._authenticate0)

    def refreshInventory(self):
        """Manually start an inventory refresh."""
        return self._defer_until_authenticated(self._inventory0, (), {})

    def onInventoryRefreshed(self, callback, *args, **kw):
        self._on_inventory_refreshed.append((callback, args, kw))

    def err(self, failure):
        log_failure(failure)

    def getInventory(self):
        return self.inventory

    def _defer_until_authenticated(self, func, args, kw):
        kw['finished'] = defer.Deferred()
        if self.xsrf_token is None:
            self._deferred_api_requests.append((func, args, kw))
        else:
            self.reactor.callLater(0, func, *args, **kw)
        return kw['finished']

    def _process_deferred_api_requests(self):
        while self._deferred_api_requests:
            func, args, kw = self._deferred_api_requests.pop(0)
            self.reactor.callLater(0, func, *args, **kw)

    def _authenticate0(self):
        auth_params = {'Email': self.email,
                       'Passwd': self.password,
                       'service': 'ah',
                       'source': 'IngressBot',
                       'accountType': 'HOSTED_OR_GOOGLE'}
        body = b07.utils.StringProducer(urllib.urlencode(auth_params))
        d = self.agent.request(
            'POST', self.URLS.CLIENT_LOGIN,
            Headers({'User-Agent': ['Nemesis (gzip)'],
                     'Content-Type': ['application/x-www-form-urlencoded'],
                     'Accept-Charset': ['utf-8']}),
            body)
        d.addCallback(self._authenticate1)
        d.addErrback(self.err)

    def _authenticate1(self, response):
        finished = defer.Deferred()
        finished.addCallback(self._authenticate2, response.code)
        lp = b07.utils.LoginProtocol(finished)
        response.deliverBody(lp)

    def _authenticate2(self, result, code):
        if code == 200:
            try:
                self.auth_token = result['Auth']
            except KeyError:
                critical('Authentication failed: Bad Response')
        elif code == 403:
            error = result['Error']
            if error == 'BadAuthentication':
                critical('Authentication failed: Username or password wrong')
            elif error == 'NotVerified':
                critical('Authentication failed: Account email address has not been verified')
            elif error == 'TermsNotAgreed':
                critical("Authentication failed: User has not agreed to Google's terms of service")
            elif error == 'CaptchaRequired':
                critical('Authentication failed: CAPTCHA required')
            elif error == 'AccountDeleted':
                critical('Authentication failed: User account has been deleted')
            elif error == 'AccountDisabled':
                critical('Authentication failed: User account has been disabled')
            elif error == 'ServiceDisabled':
                critical('Authentication failed: Service disabled')
            elif error == 'ServiceUnavailable':
                critical('Authentication failed: Service unavailable')
            else:
                critical('Authentication failed: Unknown reason')
        else:
            critical('Authentication failed: Bad response')

        d = self.agent.request(
            'GET',
            self.URLS.GAME_API + self.PATHS.LOGIN + '?'
                + urllib.urlencode({'auth': self.auth_token}),
            Headers({'User-Agent': ['Nemesis (gzip)'],
                     'Accept-Charset': ['utf-8']}),
            None)
        d.addCallback(self._authenticate3)
        d.addErrback(self.err)

    def _authenticate3(self, response):
        trace('{}'.format(response.code))
        for cookie in self.cookiejar:
            trace('{}'.format(cookie))
        urlParams = {'json': json.dumps(self.HANDSHAKE_PARAMS)}
        d = self.agent.request(
            'GET',
            self.URLS.GAME_API + self.PATHS.API.HANDSHAKE + '?'
                + urllib.urlencode({'json': json.dumps(self.HANDSHAKE_PARAMS)}),
            Headers({'User-Agent': ['Nemesis (gzip)'],
                     'Accept-Charset': ['utf-8'],
                     'Cache-Control': ['max-age=0']}),
            None)
        d.addCallback(self._authenticate4)
        d.addErrback(self.err)

    def _authenticate4(self, response):
        trace('{}'.format(response.code))
        if response.code == 200:
            finished = defer.Deferred()
            finished.addCallback(self._authenticate5)
            finished.addErrback(self.err)
            jp = b07.utils.JsonProtocol(finished)
            response.deliverBody(jp)
        else:
            critical('Got response code {} after attempting handshake!'.format(response.code))

    def _authenticate5(self, result):
        result = result['result']
        if result['versionMatch'] != 'CURRENT':
            critical('Software version not up-to-date')
        if 'xsrfToken' not in result:
            critical('Authentication with Ingress servers failed for unknown reason')
        self.xsrf_token = str(result['xsrfToken'])
        self.player_nickname = result['nickname']
        self.player_guid = result['playerEntity'][0]
        self.team = result['playerEntity'][2]['controllingTeam']['team']
        self.ap = result['playerEntity'][2]['playerPersonal']['ap']
        self.level = result['playerEntity'][2]['playerPersonal']['clientLevel']
        debug('XSRF Token: {}'.format(self.xsrf_token))
        debug('Player GUID: {}'.format(self.player_guid))
        info('Player nickname: {}'.format(self.player_nickname))
        info('Faction: {}'.format(self.team))
        info('AP: {}'.format(self.ap))
        info('Level: {}'.format(self.level))
        debug('Player info: {}'.format(result))
        self._process_deferred_api_requests()

    def _setup_periodic_inventory_refresh(self, result):
        self._periodic_inventory_refresh_delayedcall = \
            self.reactor.callLater(300, self._periodic_inventory_refresh0)

    def _periodic_inventory_refresh0(self):
        finished = self._defer_until_authenticated(self._inventory0, (), {})
        finished.addCallback(self._periodic_inventory_refresh1)

    def _periodic_inventory_refresh1(self, result):
        self._periodic_inventory_refresh_delayedcall = \
            self.reactor.callLater(300, self._periodic_inventory_refresh0)

    def _inventory0(self, finished):
        debug('Requesting inventory from server...')
        body = b07.utils.StringProducer(json.dumps(
            {'params': {'lastQueryTimestamp': self.inventory.last_query_timestamp}}))
        d = self.agent.request(
            'POST', self.URLS.GAME_API + self.PATHS.API.INVENTORY,
            Headers({'User-Agent': ['Nemesis (gzip)'],
                     'Content-Type': ['application/json;charset=UTF-8'],
                     'X-XsrfToken': [self.xsrf_token]}),
            body)
        d.addCallback(self._inventory1, finished)
        d.addErrback(self.err)

    def _inventory1(self, response, finished):
        if response.code == 500:
            error('Got a 500 SERVER ERROR trying to get the inventory!')
        elif response.code == 200:
            debug('Got 200 OK response to inventory request')
            d = defer.Deferred()
            d.addCallback(self._inventory2, finished)
            jp = b07.utils.JsonProtocol(d)
            response.deliverBody(jp)
        else:
            error("Don't know what to do with {} code in response to inventory request!".format(response.code))

    def _inventory2(self, result, finished):
        self.inventory.process_result(result)
        finished.callback(self.inventory)
        for callback, args, kw in self._on_inventory_refreshed:
            self.reactor.callLater(0, callback, self.inventory, *args, **kw)
def http_request(self, url, include_http_responses=False):
    cached_value = yield self.lookup('http_request', url)
    if cached_value is not None:
        if include_http_responses is not True:
            cached_value.pop('responses', None)
        defer.returnValue(cached_value)

    page_info = {
        'body_length': -1,
        'status_code': -1,
        'headers': {},
        'failure': None
    }

    agent = ContentDecoderAgent(
        FixedRedirectAgent(TrueHeadersAgent(reactor)),
        [('gzip', GzipDecoder)]
    )
    try:
        retries = 0
        while True:
            try:
                response = yield agent.request('GET', url,
                                               TrueHeaders(REQUEST_HEADERS))
                headers = {}
                for name, value in response.headers.getAllRawHeaders():
                    headers[name] = unicode(value[0], errors='ignore')
                body_length = -1
                body = None
                try:
                    body = yield readBody(response)
                    body_length = len(body)
                except PartialDownloadError as pde:
                    if pde.response:
                        body_length = len(pde.response)
                        body = pde.response
                page_info['body_length'] = body_length
                page_info['status_code'] = response.code
                page_info['headers'] = headers
                page_info['title'] = extractTitle(body)
                response.body = body
                page_info['responses'] = encodeResponses(response)
                break
            except:
                if retries > self.http_retries:
                    raise
                retries += 1

    except DNSLookupError:
        page_info['failure'] = 'dns_lookup_error'
    except TimeoutError:
        page_info['failure'] = 'generic_timeout_error'
    except ConnectionRefusedError:
        page_info['failure'] = 'connection_refused_error'
    except ConnectError:
        page_info['failure'] = 'connect_error'
    except:
        # XXX map more failures
        page_info['failure'] = 'unknown_error'

    yield self.cache_value('http_request', url, page_info)

    if include_http_responses is not True:
        page_info.pop('responses', None)

    defer.returnValue(page_info)
def request(self, method, url, **kwargs):
    """
    See :func:`treq.request()`.
    """
    method = method.encode('ascii').upper()

    if isinstance(url, unicode):
        parsed_url = URL.from_text(url)
    else:
        parsed_url = URL.from_text(url.decode('ascii'))

    # Join parameters provided in the URL
    # and the ones passed as argument.
    params = kwargs.get('params')
    if params:
        parsed_url = parsed_url.replace(
            query=parsed_url.query + tuple(_coerced_query_params(params)))

    url = parsed_url.to_uri().to_text().encode('ascii')

    # Convert headers dictionary to
    # twisted raw headers format.
    headers = kwargs.get('headers')
    if headers:
        if isinstance(headers, dict):
            h = Headers({})
            for k, v in headers.items():
                if isinstance(v, (bytes, unicode)):
                    h.addRawHeader(k, v)
                elif isinstance(v, list):
                    h.setRawHeaders(k, v)
            headers = h
    else:
        headers = Headers({})

    # Here we choose a right producer
    # based on the parameters passed in.
    bodyProducer = None
    data = kwargs.get('data')
    files = kwargs.get('files')
    # since json=None needs to be serialized as 'null', we need to
    # explicitly check kwargs for this key
    has_json = 'json' in kwargs

    if files:
        # If the files keyword is present we will issue a
        # multipart/form-data request as it suits better for cases
        # with files and/or large objects.
        files = list(_convert_files(files))
        boundary = str(uuid.uuid4()).encode('ascii')
        headers.setRawHeaders(
            b'content-type',
            [b'multipart/form-data; boundary=' + boundary])
        if data:
            data = _convert_params(data)
        else:
            data = []

        bodyProducer = multipart.MultiPartProducer(
            data + files, boundary=boundary)
    elif data:
        # Otherwise stick to x-www-form-urlencoded format
        # as it's generally faster for smaller requests.
        if isinstance(data, (dict, list, tuple)):
            headers.setRawHeaders(
                b'content-type', [b'application/x-www-form-urlencoded'])
            data = urlencode(data, doseq=True)
        bodyProducer = self._data_to_body_producer(data)
    elif has_json:
        # If data is sent as json, set Content-Type as 'application/json'
        headers.setRawHeaders(
            b'content-type', [b'application/json; charset=UTF-8'])
        content = kwargs['json']
        json = json_dumps(content, separators=(u',', u':')).encode('utf-8')
        bodyProducer = self._data_to_body_producer(json)

    cookies = kwargs.get('cookies', {})
    if not isinstance(cookies, CookieJar):
        cookies = cookiejar_from_dict(cookies)

    cookies = merge_cookies(self._cookiejar, cookies)
    wrapped_agent = CookieAgent(self._agent, cookies)

    if kwargs.get('allow_redirects', True):
        if kwargs.get('browser_like_redirects', False):
            wrapped_agent = BrowserLikeRedirectAgent(wrapped_agent)
        else:
            wrapped_agent = RedirectAgent(wrapped_agent)

    wrapped_agent = ContentDecoderAgent(wrapped_agent,
                                        [(b'gzip', GzipDecoder)])

    auth = kwargs.get('auth')
    if auth:
        wrapped_agent = add_auth(wrapped_agent, auth)

    d = wrapped_agent.request(
        method, url, headers=headers, bodyProducer=bodyProducer)

    timeout = kwargs.get('timeout')
    if timeout:
        delayedCall = default_reactor(kwargs.get('reactor')).callLater(
            timeout, d.cancel)

        def gotResult(result):
            if delayedCall.active():
                delayedCall.cancel()
            return result

        d.addBoth(gotResult)

    if not kwargs.get('unbuffered', False):
        d.addCallback(_BufferedResponse)

    return d.addCallback(_Response, cookies)
def request(self, method, url, **kwargs):
    method = method.upper()

    # Join parameters provided in the URL
    # and the ones passed as argument.
    params = kwargs.get('params')
    if params:
        url = _combine_query_params(url, params)

    # Convert headers dictionary to
    # twisted raw headers format.
    headers = kwargs.get('headers')
    if headers:
        if isinstance(headers, dict):
            h = Headers({})
            for k, v in headers.iteritems():
                if isinstance(v, str):
                    h.addRawHeader(k, v)
                else:
                    h.setRawHeaders(k, v)
            headers = h
    else:
        headers = Headers({})

    # Here we choose a right producer
    # based on the parameters passed in.
    bodyProducer = None
    data = kwargs.get('data')
    files = kwargs.get('files')
    if files:
        # If the files keyword is present we will issue a
        # multipart/form-data request as it suits better for cases
        # with files and/or large objects.
        files = list(_convert_files(files))
        boundary = uuid.uuid4()
        headers.setRawHeaders(
            'content-type',
            ['multipart/form-data; boundary=%s' % (boundary,)])
        if data:
            data = _convert_params(data)
        else:
            data = []

        bodyProducer = multipart.MultiPartProducer(
            data + files, boundary=boundary)
    elif data:
        # Otherwise stick to x-www-form-urlencoded format
        # as it's generally faster for smaller requests.
        if isinstance(data, (dict, list, tuple)):
            headers.setRawHeaders(
                'content-type', ['application/x-www-form-urlencoded'])
            data = urlencode(data, doseq=True)
        bodyProducer = IBodyProducer(data)

    cookies = kwargs.get('cookies', {})
    if not isinstance(cookies, CookieJar):
        cookies = cookiejar_from_dict(cookies)

    cookies = merge_cookies(self._cookiejar, cookies)
    wrapped_agent = CookieAgent(self._agent, cookies)

    if kwargs.get('allow_redirects', True):
        wrapped_agent = RedirectAgent(wrapped_agent)

    wrapped_agent = ContentDecoderAgent(wrapped_agent,
                                        [('gzip', GzipDecoder)])

    auth = kwargs.get('auth')
    if auth:
        wrapped_agent = add_auth(wrapped_agent, auth)

    d = wrapped_agent.request(
        method, url, headers=headers, bodyProducer=bodyProducer)

    timeout = kwargs.get('timeout')
    if timeout:
        delayedCall = default_reactor(kwargs.get('reactor')).callLater(
            timeout, d.cancel)

        def gotResult(result):
            if delayedCall.active():
                delayedCall.cancel()
            return result

        d.addBoth(gotResult)

    if not kwargs.get('unbuffered', False):
        d.addCallback(_BufferedResponse)

    return d.addCallback(_Response, cookies)
def test_EncodingJSONRPCServer(self): DATA = {'foo': 'bar'} REQUEST = '{"jsonrpc": "2.0", "method": "test", "params": [], "id": 1}' RESPONSE = '{"jsonrpc": "2.0", "id": 1, "result": ' + json.dumps(DATA) + '}' class RPCServer(JSONRPCServer): def jsonrpc_test(self): return defer.succeed(DATA) class ReceiverProtocol(Protocol): def __init__(self, finished): self.finished = finished self.body = [] def dataReceived(self, bytes): self.body.append(bytes) def connectionLost(self, reason): self.finished.callback(''.join(self.body)) class StringProducer(object): implements(IBodyProducer) def __init__(self, body): self.body = body self.length = len(body) def startProducing(self, consumer): consumer.write(self.body) return defer.succeed(None) def pauseProducing(self): pass def stopProducing(self): pass server = RPCServer() resource = EncodingJSONRPCServer(server) site = Site(resource) port = reactor.listenTCP(8888, site, interface='127.0.0.1') agent = ContentDecoderAgent(Agent(reactor), [('gzip', GzipDecoder)]) response = yield agent.request('POST', 'http://127.0.0.1:8888', Headers({'Accept-Encoding': ['gzip']}), StringProducer(REQUEST)) self.assertTrue(isinstance(response, GzipDecoder)) finished = defer.Deferred() response.deliverBody(ReceiverProtocol(finished)) data = yield finished self.assertEqual(data, RESPONSE) port.stopListening()
def __init__(self, *args, **kwargs): super(Command, self).__init__(*args, **kwargs) self.agent = ContentDecoderAgent(Agent(reactor), [(b'gzip', GzipDecoder)]) self.deferred_results = []
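# Minimal sketch of the ContentDecoderAgent pattern used throughout these
# snippets, in isolation: ContentDecoderAgent advertises the supported
# encodings and transparently decompresses the response body, and readBody()
# collects it. The url argument must be a bytes URI (placeholder here).
from twisted.internet import defer, reactor
from twisted.web.client import Agent, ContentDecoderAgent, GzipDecoder, readBody

@defer.inlineCallbacks
def fetch(url):
    agent = ContentDecoderAgent(Agent(reactor), [(b'gzip', GzipDecoder)])
    response = yield agent.request(b'GET', url, None, None)
    body = yield readBody(response)  # already-decompressed bytes
    defer.returnValue(body)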
def request(self, method, url, **kwargs): """ See :func:`treq.request()`. """ method = method.encode('ascii').upper() stacklevel = kwargs.pop('_stacklevel', 2) if isinstance(url, DecodedURL): parsed_url = url elif isinstance(url, EncodedURL): parsed_url = DecodedURL(url) elif isinstance(url, six.text_type): parsed_url = DecodedURL.from_text(url) else: parsed_url = DecodedURL.from_text(url.decode('ascii')) # Join parameters provided in the URL # and the ones passed as argument. params = kwargs.pop('params', None) if params: parsed_url = parsed_url.replace( query=parsed_url.query + tuple(_coerced_query_params(params))) url = parsed_url.to_uri().to_text().encode('ascii') # Convert headers dictionary to # twisted raw headers format. headers = kwargs.pop('headers', None) if headers: if isinstance(headers, dict): h = Headers({}) for k, v in headers.items(): if isinstance(v, (bytes, six.text_type)): h.addRawHeader(k, v) elif isinstance(v, list): h.setRawHeaders(k, v) headers = h else: headers = Headers({}) bodyProducer, contentType = self._request_body( data=kwargs.pop('data', None), files=kwargs.pop('files', None), json=kwargs.pop('json', _NOTHING), stacklevel=stacklevel, ) if contentType is not None: headers.setRawHeaders(b'Content-Type', [contentType]) cookies = kwargs.pop('cookies', {}) if not isinstance(cookies, CookieJar): cookies = cookiejar_from_dict(cookies) cookies = merge_cookies(self._cookiejar, cookies) wrapped_agent = CookieAgent(self._agent, cookies) browser_like_redirects = kwargs.pop('browser_like_redirects', False) if kwargs.pop('allow_redirects', True): if browser_like_redirects: wrapped_agent = BrowserLikeRedirectAgent(wrapped_agent) else: wrapped_agent = RedirectAgent(wrapped_agent) wrapped_agent = ContentDecoderAgent(wrapped_agent, [(b'gzip', GzipDecoder)]) auth = kwargs.pop('auth', None) if auth: wrapped_agent = add_auth(wrapped_agent, auth) d = wrapped_agent.request(method, url, headers=headers, bodyProducer=bodyProducer) reactor = kwargs.pop('reactor', None) if reactor is None: from twisted.internet import reactor timeout = kwargs.pop('timeout', None) if timeout: delayedCall = reactor.callLater(timeout, d.cancel) def gotResult(result): if delayedCall.active(): delayedCall.cancel() return result d.addBoth(gotResult) if not kwargs.pop('unbuffered', False): d.addCallback(_BufferedResponse) if kwargs: warnings.warn( ("Got unexpected keyword argument: {}." " treq will ignore this argument," " but will raise TypeError in the next treq release.").format( ", ".join(repr(k) for k in kwargs)), DeprecationWarning, stacklevel=stacklevel, ) return d.addCallback(_Response, cookies)
class RProxyResource(Resource):
    isLeaf = True

    def __init__(self, hosts, clacks, pool, reactor, extraHeaders, anonymous):
        self._clacks = clacks
        self._hosts = hosts
        self._agent = ContentDecoderAgent(
            Agent(reactor, pool=pool), [('gzip', GzipDecoder)])
        self._extraHeaders = extraHeaders
        self._anonymous = anonymous

    def render(self, request):
        host = self._hosts.get(request.getRequestHostname().lower())

        if not host and request.getRequestHostname().lower().startswith("www."):
            host = self._hosts.get(request.getRequestHostname().lower()[4:])
            # The non-www host doesn't want to match to www.
            # (Guard against a second lookup miss before reading the flag.)
            if host and not host["wwwtoo"]:
                host = None

        if not host:
            request.code = 404
            request.responseHeaders.setRawHeaders(
                "Server", [__version__.package + " " + __version__.base()])
            return b"I can't seem to find a domain by that name. Look behind the couch?"

        url = "{}://{}:{}/{}".format(
            "https" if host["proxysecure"] else "http",
            host["host"], host["port"], request.path[1:])

        urlFragments = urlparse(request.uri)
        if urlFragments.query:
            url += "?" + urlFragments.query

        # Strip hop-by-hop headers before forwarding the request.
        for x in [b'content-length', b'connection', b'keep-alive', b'te',
                  b'trailers', b'transfer-encoding', b'upgrade',
                  b'proxy-connection']:
            request.requestHeaders.removeHeader(x)

        prod = StringProducer(request.content)
        d = self._agent.request(request.method, url,
                                request.requestHeaders, prod)

        def write(res):
            request.code = res.code
            old_headers = request.responseHeaders
            request.responseHeaders = res.headers
            request.responseHeaders.setRawHeaders(
                'content-encoding',
                old_headers.getRawHeaders('content-encoding', []))
            if not self._anonymous:
                request.responseHeaders.addRawHeader(
                    "X-Proxied-By",
                    __version__.package + " " + __version__.base())
            if request.isSecure() and host["sendhsts"]:
                request.responseHeaders.setRawHeaders(
                    "Strict-Transport-Security", ["max-age=31536000"])
            if self._clacks:
                request.responseHeaders.addRawHeader(
                    "X-Clacks-Overhead", "GNU Terry Pratchett")
            for name, values in self._extraHeaders:
                request.responseHeaders.setRawHeaders(name, values)
            f = Deferred()
            res.deliverBody(Downloader(f, request.write))
            f.addCallback(lambda _: request.finish())
            return f

        def failed(res):
            request.setResponseCode(http.INTERNAL_SERVER_ERROR)
            for name, values in self._extraHeaders:
                request.responseHeaders.setRawHeaders(name, values)
            request.write(str(res))
            request.finish()
            return res

        d.addCallback(write)
        d.addErrback(failed)
        return server.NOT_DONE_YET
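# Illustrative hosts mapping for RProxyResource, inferred from the keys that
# render() reads ("host", "port", "proxysecure", "wwwtoo", "sendhsts"); the
# values here are made up.
hosts = {
    "example.com": {
        "host": "127.0.0.1",   # backend address
        "port": 8080,          # backend port
        "proxysecure": False,  # proxy to the backend over https?
        "wwwtoo": True,        # also answer for www.example.com
        "sendhsts": False,     # add Strict-Transport-Security on https
    },
}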
class HTTPClient(object):

    #: Options to twisted.web.client.HTTPConnectionPool
    request_pool_options = dict(
        maxPersistentPerHost=10,
        cachedConnectionTimeout=600,
        retryAutomatically=True)

    #: Settings for a simple retry mechanism on http error codes
    retry = None

    #: Path string or list of strings
    ca_certs_files = b'/etc/ssl/certs/ca-certificates.crt'

    #: Dump HTTP request data in debug log (insecure!)
    debug_requests = False

    def __init__(self, **config):
        for k, v in config.viewitems():
            try:
                x = getattr(self, k)
            except AttributeError:
                raise AttributeError('Unrecognized configuration key: {}'.format(k))
            if isinstance(x, Mapping) and isinstance(v, Mapping):
                v = AttrDict(v)
                v.rebase(AttrDict(x))
            setattr(self, k, v)

        pool = QuietHTTPConnectionPool(reactor, persistent=True)
        for k, v in self.request_pool_options.viewitems():
            getattr(pool, k)  # to somewhat protect against typos
            setattr(pool, k, v)

        self.request_agent = ContentDecoderAgent(RedirectAgent(Agent(
            reactor, TLSContextFactory(self.ca_certs_files),
            pool=pool)), [('gzip', GzipDecoder)])

    @defer.inlineCallbacks
    def request_with_retries(self, url, method, **request_kwz):
        '''Request gets repeated if any of the "raise_for" errors
            or HTTPClientError is raised.
            When the number of attempts runs out, the last error is raised.'''
        assert 'url' not in request_kwz and 'method' not in request_kwz
        request_kwz['url'], request_kwz['method'] = url, method

        log = request_kwz.get('log')
        if not log:
            log = logging.getLogger(__name__)
            request_kwz['log'] = log

        # Shortcut for simple cases
        if not self.retry or self.retry.attempts <= 1:
            defer.returnValue((yield self.request(**request_kwz)))

        retry_on = [HTTPClientError]
        for cls in request_kwz.get('raise_for', dict()).viewvalues():
            assert issubclass(cls, Exception), cls
            retry_on.append(cls)

        if isinstance(self.retry.delay, (tuple, list)):
            delays = list(self.retry.delay)
            # Pad the list with its last delay so that there is one
            # delay for each of the (attempts - 1) possible retries.
            for i in xrange(self.retry.attempts - 1 - len(delays)):
                delays.append(delays[-1])
        else:
            delays = [float(self.retry.delay)] * (self.retry.attempts - 1)
        delays = list(reversed(delays))

        while True:
            try:
                defer.returnValue((yield self.request(**request_kwz)))
            except tuple(retry_on) as err:
                if not delays:
                    raise  # no attempts left
                delay, d = delays.pop(), defer.Deferred()
                reactor.callLater(delay, d.callback, None)
                if self.debug_requests:
                    log.debug('Introducing delay after failed'
                        ' ({}: {}) request: {}s'.format(type(err), err, delay))
                yield d

    @defer.inlineCallbacks
    def request(self, url, method='get', decode=None, encode=None, data=None,
            chunks=True, headers=dict(), raise_for=dict(),
            queue_lines=None, log=None):
        '''Make HTTP(S) request.
            decode (response body) = None | json
            encode (data) = None | json | form | files'''
        if not log:
            log = logging.getLogger(__name__)
        if self.debug_requests:
            log.debug('HTTP request: {} {} (h: {}, enc: {}, dec: {}, data: {!r})'
                .format(method, url[:100], headers, encode, decode, data))

        headers = dict() if not headers else headers.copy()
        headers.setdefault('User-Agent', 'lafs-backup-tool')

        if data is not None:
            if encode == 'files':
                boundary = os.urandom(16).encode('hex')
                headers.setdefault('Content-Type',
                    'multipart/form-data; boundary={}'.format(boundary))
                data = MultipartDataSender(data, boundary)
                yield data.calculate_length()
            else:
                if encode is None:
                    if isinstance(data, types.StringTypes):
                        data = io.BytesIO(data)
                elif encode == 'form':
                    headers.setdefault('Content-Type',
                        'application/x-www-form-urlencoded')
                    data = io.BytesIO(urlencode(data))
                elif encode == 'json':
                    headers.setdefault('Content-Type', 'application/json')
                    data = io.BytesIO(json.dumps(data))
                else:
                    raise ValueError('Unknown request encoding: {}'.format(encode))
                data = (ChunkingFileBodyProducer if chunks else FileBodyProducer)(data)

        if isinstance(url, unicode):
            url = url.encode('utf-8')
        if isinstance(method, unicode):
            method = method.lower().encode('ascii')

        if decode not in ['json', None]:
            raise ValueError('Unknown response decoding method: {}'.format(decode))

        code = None
        try:
            res = yield self.request_agent.request(
                method.upper(), url,
                Headers(dict((k, [v]) for k, v in (headers or dict()).viewitems())),
                data)
            code = res.code
            if self.debug_requests:
                log.debug('HTTP request done ({} {}): {} {} {}'
                    .format(method, url[:100], code, res.phrase, res.version))
            if code in raise_for:
                raise HTTPClientError(code, res.phrase)
            if code == http.NO_CONTENT:
                defer.returnValue(None)
            if code not in [http.OK, http.CREATED]:
                raise HTTPClientError(code, res.phrase)

            data = defer.Deferred()
            if queue_lines is None:
                res.deliverBody(DataReceiver(data))
            else:
                res.deliverBody(LineQueue(data, queue_lines))
            data = yield data
            defer.returnValue(json.loads(data) if decode is not None else data)

        except HTTPClientError as err:
            raise raise_for.get(code, HTTPClientError)(code, err.message)
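# Usage sketch for request_with_retries() above. The retry config is read as
# attributes (attempts, delay), so an AttrDict (already imported by this
# module) is assumed; URL and values are illustrative.
client = HTTPClient(retry=AttrDict(attempts=3, delay=[1, 5]))  # 1s, then 5s

@defer.inlineCallbacks
def get_json():
    # decode='json' parses the response body; exception classes passed via
    # raise_for also make the retry loop fire.
    res = yield client.request_with_retries(
        'https://example.invalid/uri', 'get', decode='json')
    defer.returnValue(res)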
def http_request(self, url, http_request_headers, include_http_responses=False): key = url + json.dumps(http_request_headers) cached_value = yield self.lookup('http_request', key) if cached_value is not None: if include_http_responses is not True: cached_value.pop('responses', None) defer.returnValue(cached_value) page_info = { 'body_length': -1, 'status_code': -1, 'headers': {}, 'failure': None } agent = ContentDecoderAgent( FixedRedirectAgent(TrueHeadersAgent(reactor), ignorePrivateRedirects=True), [('gzip', GzipDecoder)] ) try: retries = 0 while True: try: response = yield agent.request('GET', url, TrueHeaders(http_request_headers)) headers = {} for name, value in response.headers.getAllRawHeaders(): headers[name] = unicode(value[0], errors='ignore') body_length = -1 body = None try: body = yield readBody(response) body_length = len(body) except PartialDownloadError as pde: if pde.response: body_length = len(pde.response) body = pde.response page_info['body_length'] = body_length page_info['status_code'] = response.code page_info['headers'] = headers page_info['title'] = extractTitle(body) response.body = body page_info['responses'] = encodeResponses(response) break except: if retries > self.http_retries: raise retries += 1 except DNSLookupError: page_info['failure'] = 'dns_lookup_error' except TimeoutError: page_info['failure'] = 'generic_timeout_error' except ConnectionRefusedError: page_info['failure'] = 'connection_refused_error' except ConnectError: page_info['failure'] = 'connect_error' except Exception as exc: # XXX map more failures page_info['failure'] = 'unknown_error' log.err("Unknown error occurred") log.exception(exc) yield self.cache_value('http_request', key, page_info) if include_http_responses is not True: page_info.pop('responses', None) defer.returnValue(page_info)
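# The truncated-body handling above, in isolation: readBody() fails with
# PartialDownloadError when the connection drops mid-transfer, but whatever
# arrived is still available on the failure's `response` attribute. A minimal
# sketch:
from twisted.internet import defer
from twisted.web.client import readBody, PartialDownloadError

@defer.inlineCallbacks
def read_body_lenient(response):
    try:
        body = yield readBody(response)
    except PartialDownloadError as pde:
        body = pde.response  # the partial payload received before the drop
    defer.returnValue(body)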
class API(object):
    class URLS:
        CLIENT_LOGIN = '******'
        SERVICE_LOGIN = '******'
        APPENGINE = 'https://appengine.google.com'
        GAME_API = 'https://betaspike.appspot.com'
        INGRESS = 'http://www.ingress.com'

    class PATHS:
        LOGIN = '******'
        CONFLOGIN = '******'

        class API:
            HANDSHAKE = '/handshake'
            DROP_ITEM = '/rpc/gameplay/dropItem'
            RECYCLE = '/rpc/gameplay/recycleItem'
            SAY = '/rpc/player/say'
            INVENTORY = '/rpc/playerUndecorated/getInventory'
            PROFILE = '/rpc/playerUndecorated/getPlayerProfile'
            PLEXTS = '/rpc/playerUndecorated/getPaginatedPlexts'

        class INTEL:
            BASE = '/intel'
            PLEXTS = '/rpc/dashboard.getPaginatedPlextsV2'

    HANDSHAKE_PARAMS = {
        'nemesisSoftwareVersion': '2015-02-0T23:44:16Z ac501401a6f3 opt',
        'deviceSoftwareVersion': '4.1.1'}

    def __init__(self, reactor, email, password):
        self.reactor = reactor
        self.email = email
        self.password = password

        # Set up an agent for sending HTTP requests.  Uses cookies
        # (part of the authentication), persistent HTTP connection
        # pool, automatic content decoding (gzip)

        # container to keep track of cookies
        self.cookiejar = cookielib.CookieJar()

        # HTTP persistent connection pool
        self.pool = HTTPConnectionPool(self.reactor, persistent=True)
        # for some reason, using >1 connection per host fails
        self.pool.maxPersistentPerHost = 1

        self.agent = ContentDecoderAgent(
            CookieAgent(Agent(self.reactor, pool=self.pool), self.cookiejar),
            [('gzip', GzipDecoder)])

        # this is the token that is used to authenticate API requests
        self.xsrf_token = None
        self.auth_token = None

        # who we are
        self.player_nickname = None
        self.player_guid = None
        self.team = None
        self.ap = None
        self.level = None
        self.start_date = None
        self.new_version = False
        self.inventory_done = False
        self.profile_done = False

        # for keeping track of item inventory
        self.inventory = b07.inventory.Inventory()

        # for keeping track of API requests that are delayed until
        # authentication has completed
        self._deferred_api_requests = []

        # for keeping track of periodic inventory refreshes
        self._periodic_inventory_refresh_delayedcall = None

        # list of functions to call every time inventory is refreshed
        self._on_inventory_refreshed = []

        # do an immediate inventory refresh
        self._first_inventory_ready = self._defer_until_authenticated(
            self._inventory0, (), {})

        # do an immediate profile refresh
        self._first_profile_ready = self._defer_until_authenticated(
            self._profile0, (), {})

        # start the authentication process
        self.reactor.callLater(0, self._authenticate0)

    def refreshInventory(self):
        """Manually start an inventory refresh."""
        return self._defer_until_authenticated(self._inventory0, (), {})

    def refreshProfile(self):
        """Manually start a profile refresh."""
        return self._defer_until_authenticated(self._profile0, (), {})

    def onInventoryRefreshed(self, callback, *args, **kw):
        self._on_inventory_refreshed.append((callback, args, kw))

    def err(self, failure):
        log_failure(failure)

    def getInventory(self):
        return self.inventory

    def _defer_until_authenticated(self, func, args, kw):
        kw['finished'] = defer.Deferred()
        if self.xsrf_token is None:
            self._deferred_api_requests.append((func, args, kw))
        else:
            self.reactor.callLater(0, func, *args, **kw)
        return kw['finished']

    def _process_deferred_api_requests(self):
        while self._deferred_api_requests:
            func, args, kw = self._deferred_api_requests.pop(0)
            self.reactor.callLater(0, func, *args, **kw)

    def _authenticate0(self):
        auth_params = {'Email': self.email,
                       'Passwd': self.password,
                       'service': 'ah',
                       'source': 'IngressBot',
                       'accountType': 'HOSTED_OR_GOOGLE'}
        body = b07.utils.StringProducer(urllib.urlencode(auth_params))
        trace('_authenticate0 {}'.format(urllib.urlencode(auth_params)))
        d = self.agent.request(
            'POST', self.URLS.CLIENT_LOGIN,
            Headers({'User-Agent': ['Nemesis (gzip)'],
                     'Content-Type': ['application/x-www-form-urlencoded'],
                     'Accept-Charset': ['utf-8']}),
            body)
        d.addCallback(self._authenticate1)
        d.addErrback(self.err)

    def _authenticate1(self, response):
        finished = defer.Deferred()
        finished.addCallback(self._authenticate2, response.code)
        lp = b07.utils.LoginProtocol(finished)
        response.deliverBody(lp)

    def _authenticate2(self, result, code):
        if code == 200:
            try:
                self.auth_token = result['Auth']
            except KeyError:
                critical('Authentication failed: Bad response')
        elif code == 403:
            error = result['Error']
            if error == 'BadAuthentication':
                critical('Authentication failed: Username or password wrong')
            elif error == 'NotVerified':
                critical('Authentication failed: Account email address'
                         ' has not been verified')
            elif error == 'TermsNotAgreed':
                critical('Authentication failed: User has not agreed to'
                         " Google's terms of service")
            elif error == 'CaptchaRequired':
                critical('Authentication failed: CAPTCHA required')
            elif error == 'AccountDeleted':
                critical('Authentication failed: User account has been deleted')
            elif error == 'AccountDisabled':
                critical('Authentication failed: User account has been disabled')
            elif error == 'ServiceDisabled':
                critical('Authentication failed: Service disabled')
            elif error == 'ServiceUnavailable':
                critical('Authentication failed: Service unavailable')
            else:
                critical('Authentication failed: Unknown reason')
        else:
            critical('Authentication failed: Bad response')
        trace('_authenticate2 {}'.format(self.auth_token))
        d = self.agent.request(
            'GET',
            self.URLS.GAME_API + self.PATHS.LOGIN + '?'
            + urllib.urlencode({'auth': self.auth_token}),
            Headers({'User-Agent': ['Nemesis (gzip)'],
                     'Accept-Charset': ['utf-8']}),
            None)
        d.addCallback(self._authenticate3)
        d.addErrback(self.err)

    def _authenticate3(self, response):
        trace('_authenticate3 {}'.format(response.code))
        for cookie in self.cookiejar:
            trace('_authenticate3 {}'.format(cookie))
        urlParams = {'json': json.dumps(self.HANDSHAKE_PARAMS)}
        d = self.agent.request(
            'GET',
            self.URLS.GAME_API + self.PATHS.API.HANDSHAKE + '?'
            + urllib.urlencode(urlParams),
            Headers({'User-Agent': ['Nemesis (gzip)'],
                     'Accept-Charset': ['utf-8'],
                     'Cache-Control': ['max-age=0']}),
            None)
        d.addCallback(self._authenticate4)
        d.addErrback(self.err)

    def _authenticate4(self, response):
        trace('_authenticate4 {}'.format(response.code))
        if response.code == 200:
            finished = defer.Deferred()
            finished.addCallback(self._authenticate5)
            finished.addErrback(self.err)
            jp = b07.utils.JsonProtocol(finished)
            response.deliverBody(jp)
        else:
            critical('Got response code {} after attempting handshake!'.format(
                response.code))

    def _authenticate5(self, result):
        trace('_authenticate5 {}'.format(result))
        result = result['result']
        if 'xsrfToken' not in result:
            critical('Authentication with Ingress servers failed'
                     ' for unknown reason')
        self.xsrf_token = str(result['xsrfToken'])
        self.player_nickname = result['nickname']
        self.player_guid = result['playerEntity'][0]
        self.team = result['playerEntity'][2]['controllingTeam']['team']
        self.ap = result['playerEntity'][2]['playerPersonal']['ap']
        self.level = result['playerEntity'][2]['playerPersonal']['clientLevel']
        start_date = result['storage']['mission_complete_0']
        self.start_date = datetime.datetime.fromtimestamp(
            int(start_date.split(':delim:')[1]) / 1000)
        debug('XSRF Token: {}'.format(self.xsrf_token))
        debug('Player GUID: {}'.format(self.player_guid))
        info('Player nickname: {}'.format(self.player_nickname))
        info('Faction: {}'.format(self.team))
        info('AP: {}'.format(self.ap))
        info('Level: {}'.format(self.level))
        info('Start Date: {}'.format(self.start_date))
        debug('Player info: {}'.format(result))
        with open(os.path.expanduser("~/{}_config.cfg".format(
                self.player_nickname)), "w") as file:
            json.dump(result, file, indent=1)
        self.new_version = versionCheck(result['serverVersion'],
                                        self.player_nickname)
        self._process_deferred_api_requests()

    def _setup_periodic_inventory_refresh(self, result):
        self._periodic_inventory_refresh_delayedcall = self.reactor.callLater(
            300, self._periodic_inventory_refresh0)

    def _periodic_inventory_refresh0(self):
        finished = self._defer_until_authenticated(self._inventory0, (), {})
        finished.addCallback(self._periodic_inventory_refresh1)

    def _periodic_inventory_refresh1(self, result):
        self._periodic_inventory_refresh_delayedcall = self.reactor.callLater(
            300, self._periodic_inventory_refresh0)

    def _inventory0(self, finished):
        debug('Requesting inventory from server...')
        body = b07.utils.StringProducer(json.dumps({
            'params': {
                'lastQueryTimestamp': self.inventory.last_query_timestamp}}))
        d = self.agent.request(
            'POST', self.URLS.GAME_API + self.PATHS.API.INVENTORY,
            Headers({'User-Agent': ['Nemesis (gzip)'],
                     'Content-Type': ['application/json;charset=UTF-8'],
                     'X-XsrfToken': [self.xsrf_token]}),
            body)
        d.addCallback(self._inventory1, finished)
        d.addErrback(self.err)

    def _inventory1(self, response, finished):
        if response.code == 500:
            error('Got a 500 SERVER ERROR trying to get the inventory!')
        elif response.code == 200:
            debug('Got 200 OK response to inventory request')
            d = defer.Deferred()
            d.addCallback(self._inventory2, finished)
            jp = b07.utils.JsonProtocol(d)
            response.deliverBody(jp)
        else:
            error('Don\'t know what to do with {} code in response'
                  ' to inventory request!'.format(response.code))

    def _inventory2(self, result, finished):
        self.inventory.process_result(result)
        finished.callback(self.inventory)
        for callback, args, kw in self._on_inventory_refreshed:
            self.reactor.callLater(0, callback, self.inventory, *args, **kw)
        self.inventory_done = True

    def _profile0(self, finished):
        debug('Requesting profile from server...')
        body = b07.utils.StringProducer(
            json.dumps({'params': [self.player_nickname]}))
        d = self.agent.request(
            'POST', self.URLS.GAME_API + self.PATHS.API.PROFILE,
            Headers({'User-Agent': ['Nemesis (gzip)'],
                     'Content-Type': ['application/json;charset=UTF-8'],
                     'X-XsrfToken': [self.xsrf_token]}),
            body)
        d.addCallback(self._profile1, finished)
        d.addErrback(self.err)

    def _profile1(self, response, finished):
        if response.code == 500:
            error('Got a 500 SERVER ERROR trying to get the profile!')
        elif response.code == 200:
            debug('Got 200 OK response to profile request')
            d = defer.Deferred()
            d.addCallback(self._profile2, finished)
            jp = b07.utils.JsonProtocol(d)
            response.deliverBody(jp)
        else:
            error('Don\'t know what to do with {} code in response'
                  ' to profile request!'.format(response.code))

    def _profile2(self, result, finished):
        with open(os.path.expanduser("~/{}_profile.json".format(
                self.player_nickname)), "w") as file:
            json.dump(result, file, indent=1)
        self.profile_done = True
        # Fire the deferred returned by refreshProfile().
        finished.callback(result)
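# Usage sketch for the API class above (the surrounding b07 package is
# assumed; credentials are placeholders).
from twisted.internet import reactor

def report(inventory):
    print(inventory)

api = API(reactor, 'user@example.com', 'secret')
# Callbacks registered here run after every inventory refresh.
api.onInventoryRefreshed(report)
# refreshInventory() returns a Deferred that fires with the inventory once
# authentication has completed and the data has been fetched.
api.refreshInventory().addErrback(api.err)
reactor.run()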
class txBoxAPI(BoxAPIWrapper):
    'Box API client.'

    #: Options to twisted.web.client.HTTPConnectionPool
    request_pool_options = dict(
        persistent=True,
        maxPersistentPerHost=10,
        cachedConnectionTimeout=600,
        retryAutomatically=True)

    #: These are timeouts between individual read/write ops
    #: Missing keys will have default values (from HTTPTimeout.state_timeouts)
    request_io_timeouts = dict(
        req_headers=60, req_body=20,
        res_headers=20, res_body=20, res_end=10)

    #: Path string or list of strings
    ca_certs_files = b'/etc/ssl/certs/ca-certificates.crt'

    #: Dump HTTP request data in debug log (might contain all sorts of auth tokens!)
    debug_requests = False

    def __init__(self, *argz, **kwz):
        super(txBoxAPI, self).__init__(*argz, **kwz)
        pool = self.request_pool = QuietHTTPConnectionPool(
            reactor, debug_requests=self.debug_requests,
            **self.request_pool_options)
        self.request_agent = ContentDecoderAgent(RedirectAgent(Agent(
            reactor, TLSContextFactory(self.ca_certs_files),
            pool=pool)), [('gzip', GzipDecoder)])

    @defer.inlineCallbacks
    def request(self, url, method='get', data=None,
            encode=None, files=None, raw=False,
            headers=dict(), raise_for=dict()):
        if self.debug_requests:
            url_debug = _dump_trunc(url)
            log.debug('HTTP request: {} {} (h: {}, data: {}, files: {}), raw: {}'.format(
                method, url_debug, headers, _dump_trunc(data), _dump_trunc(files), raw))

        timeout = HTTPTimeout(**self.request_io_timeouts)

        method, body = method.lower(), None
        headers = dict() if not headers else headers.copy()
        headers.setdefault('User-Agent', 'txBox')

        if data is not None:
            if encode is None:
                encode = 'json' if method != 'post' else 'form'
            if encode == 'form':
                headers.setdefault('Content-Type',
                    'application/x-www-form-urlencoded')
                body = FileBodyProducer(io.BytesIO(urlencode(data)), timer=timeout)
            elif encode == 'json':
                headers.setdefault('Content-Type', 'application/json')
                body = FileBodyProducer(io.BytesIO(json.dumps(data)), timer=timeout)
            else:
                raise ValueError('Unknown encoding type: {}'.format(encode))

        if files is not None:
            assert not (data or encode),\
                '"files" imply multipart encoding and no other data'
            boundary = os.urandom(16).encode('hex')
            headers.setdefault('Content-Type',
                'multipart/form-data; boundary={}'.format(boundary))
            body = MultipartDataSender(files, boundary, timer=timeout)
            yield body.calculate_length()

        if isinstance(url, unicode):
            url = url.encode('utf-8')
        if isinstance(method, unicode):
            method = method.encode('ascii')

        res_deferred = first_result(timeout, self.request_agent.request(
            method.upper(), url,
            Headers(dict((k, [v]) for k, v in (headers or dict()).viewitems())),
            body))

        code = res_body = None
        try:
            res = yield res_deferred
            code = res.code
            if code == http.NO_CONTENT:
                defer.returnValue(None)

            res_body = defer.Deferred()
            res.deliverBody(DataReceiver(res_body, timer=timeout))
            res_body = yield first_result(timeout, res_body)

            if code not in [http.OK, http.CREATED]:
                if self.debug_requests:
                    log.debug('HTTP error response body: {!r}'.format(res_body))
                raise ProtocolError(code, res.phrase, res_body)

            if self.debug_requests:
                log.debug('HTTP request done ({} {}): {} {} {}, body_len: {}'
                    .format(method, url_debug, code, res.phrase,
                            res.version, len(res_body)))

            defer.returnValue(json.loads(res_body) if not raw else res_body)

        except (timeout.ActivityTimeout, TimeoutError,
                ResponseFailed, RequestNotSent, RequestTransmissionFailed) as err:
            if isinstance(err, timeout.ActivityTimeout):
                if not res_deferred.called:
                    res_deferred.cancel()
                if res_body and not res_body.called:
                    res_body.cancel()
            if self.debug_requests:
                log.debug('HTTP transport (underlying protocol) error ({} {}): {}'
                    .format(method, url_debug, err.message or repr(err.args)))
            raise UnderlyingProtocolError(err)

        except ProtocolError as err:
            if self.debug_requests:
                log.debug('HTTP request handling error ({} {}, code: {}): {}'
                    .format(method, url_debug, code, err.message))
            if code not in raise_for:
                raise
            raise raise_for[code](code, err.message)

        except RequestGenerationFailed as err:
            err[0][0].raiseException()

        finally:
            timeout.state_finished()

    @defer.inlineCallbacks
    def __call__(self, url, query=dict(), query_filter=True, auth_header=True,
            auto_refresh_token=True, upload=False, **request_kwz):
        '''Make an arbitrary call to the Box API.
            Shouldn't be used directly under most circumstances.'''
        if query_filter:
            query = dict((k, v) for k, v in query.viewitems() if v is not None)
        if auth_header:
            request_kwz.setdefault('headers', dict())['Authorization'] = \
                'Bearer {}'.format(self.auth_access_token)
        kwz = request_kwz.copy()
        kwz.setdefault('raise_for', dict())[401] = AuthenticationError
        api_url = ft.partial(self._api_url,
            url, query, pass_access_token=not auth_header, upload=upload)
        try:
            res = yield self.request(api_url(), **kwz)
        except AuthenticationError:
            if not auto_refresh_token:
                raise
            yield self.auth_get_token()
            if auth_header:  # update auth header with a new token
                request_kwz['headers']['Authorization'] = \
                    'Bearer {}'.format(self.auth_access_token)
            # Existing connections seem to hang occasionally after token updates,
            # though it looks like a twisted issue, since they hang forever
            # (no timeouts in place)
            self.request_pool.closeCachedConnections()
            res = yield self.request(api_url(), **request_kwz)
        defer.returnValue(res)

    @defer.inlineCallbacks
    def auth_get_token(self, check_state=True):
        'Refresh or acquire access_token.'
        res = self.auth_access_data_raw = yield self._auth_token_request()
        defer.returnValue(self._auth_token_process(res, check_state=check_state))
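# Usage sketch for the __call__ wrapper above; 'folders/0' (the root folder
# in Box's v2 API) is illustrative, and token setup via BoxAPIWrapper is
# assumed to have happened already.
@defer.inlineCallbacks
def show_root_folder(api):
    # On a 401 the wrapper refreshes the token once and retries, as above.
    info = yield api('folders/0')
    log.debug('Root folder: {}'.format(_dump_trunc(info)))
    defer.returnValue(info)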
def request( self, method, url, *, params=None, headers=None, data=None, files=None, json=_NOTHING, auth=None, cookies=None, allow_redirects=True, browser_like_redirects=False, unbuffered=False, reactor=None, timeout=None, _stacklevel=2, ): """ See :func:`treq.request()`. """ method = method.encode('ascii').upper() if isinstance(url, DecodedURL): parsed_url = url.encoded_url elif isinstance(url, EncodedURL): parsed_url = url elif isinstance(url, str): # We use hyperlink in lazy mode so that users can pass arbitrary # bytes in the path and querystring. parsed_url = EncodedURL.from_text(url) else: parsed_url = EncodedURL.from_text(url.decode('ascii')) # Join parameters provided in the URL # and the ones passed as argument. if params: parsed_url = parsed_url.replace( query=parsed_url.query + tuple(_coerced_query_params(params))) url = parsed_url.to_uri().to_text().encode('ascii') headers = self._request_headers(headers, _stacklevel + 1) bodyProducer, contentType = self._request_body(data, files, json, stacklevel=_stacklevel + 1) if contentType is not None: headers.setRawHeaders(b'Content-Type', [contentType]) if not isinstance(cookies, CookieJar): cookies = cookiejar_from_dict(cookies) cookies = merge_cookies(self._cookiejar, cookies) wrapped_agent = CookieAgent(self._agent, cookies) if allow_redirects: if browser_like_redirects: wrapped_agent = BrowserLikeRedirectAgent(wrapped_agent) else: wrapped_agent = RedirectAgent(wrapped_agent) wrapped_agent = ContentDecoderAgent(wrapped_agent, [(b'gzip', GzipDecoder)]) if auth: wrapped_agent = add_auth(wrapped_agent, auth) d = wrapped_agent.request(method, url, headers=headers, bodyProducer=bodyProducer) if reactor is None: from twisted.internet import reactor if timeout: delayedCall = reactor.callLater(timeout, d.cancel) def gotResult(result): if delayedCall.active(): delayedCall.cancel() return result d.addBoth(gotResult) if not unbuffered: d.addCallback(_BufferedResponse) return d.addCallback(_Response, cookies)
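# Note on the url argument handled above: str, bytes, and hyperlink URL
# objects are all accepted. A sketch with hyperlink's DecodedURL (the
# `client` instance exposing this request() method is assumed):
from hyperlink import DecodedURL

url = DecodedURL.from_text(u'https://example.invalid/search')
d = client.request('GET', url, params={'page': '2'})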
def test_EncodingJSONRPCServer(self): DATA = {'foo': 'bar'} REQUEST = '{"jsonrpc": "2.0", "method": "test", "params": [], "id": 1}' RESPONSE = '{"jsonrpc": "2.0", "id": 1, "result": ' + json.dumps(DATA) + '}' class RPCServer(JSONRPCServer): def jsonrpc_test(self): return defer.succeed(DATA) class ReceiverProtocol(Protocol): def __init__(self, finished): self.finished = finished self.body = [] def dataReceived(self, bytes): self.body.append(bytes) def connectionLost(self, reason): self.finished.callback(b''.join(self.body)) @implementer(IBodyProducer) class StringProducer(object): def __init__(self, body): self.body = body self.length = len(body) def startProducing(self, consumer): consumer.write(self.body.encode()) return defer.succeed(None) def pauseProducing(self): pass def stopProducing(self): pass server = RPCServer() resource = EncodingJSONRPCServer(server) site = Site(resource) port = reactor.listenTCP(8888, site, interface='127.0.0.1') agent = ContentDecoderAgent(Agent(reactor), [(b'gzip', GzipDecoder)]) response = yield agent.request(b'POST', b'http://127.0.0.1:8888', Headers({'Accept-Encoding': ['gzip']}), StringProducer(REQUEST)) self.assertTrue(isinstance(response, GzipDecoder)) finished = defer.Deferred() response.deliverBody(ReceiverProtocol(finished)) data = yield finished self.assert_json(data, RESPONSE) port.stopListening()