def __init__(self, scraper, pool=None):
    """Pre-build the three wrapped Twisted agents used by this downloader.

    Builds one agent for direct requests, one for HTTP-proxy requests and
    one for HTTPS-proxy (CONNECT tunnel) requests, each wrapped — in this
    order — with redirect handling, gzip decoding and cookie handling.
    The wrapping order matters: cookies must be the outermost layer so
    they are applied to redirected requests as well.

    :param scraper: owning scraper; supplies config and the cookie jar
    :param pool: optional twisted HTTPConnectionPool shared by all agents
    """
    self.scraper = scraper
    self._pool = pool
    self._agents = {}  # map proxy->an agent (filled lazily by _create_agent)

    redirectLimit = scraper.config.get('max_redirects')
    if redirectLimit is None:
        redirectLimit = 3  # default when the config does not specify one

    # create an agent for direct requests
    self._direct_agent = Agent(
        reactor, pool=self._pool,
        connectTimeout=scraper.config.get('timeout') or 30)
    if redirectLimit > 0:
        self._direct_agent = BrowserLikeRedirectAgent(
            self._direct_agent, redirectLimit=redirectLimit)
    self._direct_agent = ContentDecoderAgent(
        self._direct_agent, [('gzip', GzipDecoder)])
    # cookie jar is shared with the scraper's regular (blocking) client
    self.cj = self.scraper.client.opener.cj
    if self.cj is not None:
        self._direct_agent = CookieAgent(self._direct_agent, self.cj)

    # create an agent for http-proxy requests
    # no endpoint yet, use __ instead of _ to backup the instance
    self.__http_proxy_agent = ProxyAgent(None, pool=self._pool)
    if redirectLimit > 0:
        self._http_proxy_agent = BrowserLikeRedirectAgent(
            self.__http_proxy_agent, redirectLimit=redirectLimit)
        self._http_proxy_agent = ContentDecoderAgent(
            self._http_proxy_agent, [('gzip', GzipDecoder)])
    else:
        self._http_proxy_agent = ContentDecoderAgent(
            self.__http_proxy_agent, [('gzip', GzipDecoder)])
    if self.cj is not None:
        self._http_proxy_agent = CookieAgent(self._http_proxy_agent, self.cj)

    # create an agent for https-proxy requests
    # no endpoint yet, use __ instead of _ to backup the instance
    self.__https_proxy_agent = TunnelingAgent(
        reactor=reactor, proxy=None,
        contextFactory=ScrapexClientContextFactory(),
        connectTimeout=30, pool=self._pool)  # no proxy yet
    if redirectLimit > 0:
        self._https_proxy_agent = BrowserLikeRedirectAgent(
            self.__https_proxy_agent, redirectLimit=redirectLimit)
        self._https_proxy_agent = ContentDecoderAgent(
            self._https_proxy_agent, [('gzip', GzipDecoder)])
    else:
        self._https_proxy_agent = ContentDecoderAgent(
            self.__https_proxy_agent, [('gzip', GzipDecoder)])
    if self.cj is not None:
        self._https_proxy_agent = CookieAgent(self._https_proxy_agent, self.cj)
def updateShared_cb(iv):
    # Callback run once the file has been re-encrypted: POST the new
    # ciphertext (plus auth ticket and IV) to the updateshared endpoint.
    # Closes over `s` (parsed command line) and `self` from the caller.
    print "Updating file..."
    args = ("updateshared", str(self.ccid), os.path.basename(s[3]), s[2])
    salt = self.processCookie("/shares")
    dataq = []
    dataq.append(self.client_id.genHashArgs(args, salt))
    dataq.append(iv)
    # print "debugging:ticket, iv updatefile"
    # print dataq[0]
    # print dataq[1]
    # print len(dataq[1])
    print "Uploading file..."
    agent = CookieAgent(Agent(reactor), self.cookie_jar)
    # the producer streams the encrypted file after the dataq items
    enc_file = open("enc_fileout", 'r')
    body = _FileProducer(enc_file, dataq)
    headers = http_headers.Headers()
    d = agent.request(
        'POST',
        'http://localhost:8000/shares/?method=updateshared&ccid=' +
        self.ccid + "&name=" + os.path.basename(s[3]) + "&fileid=" + s[2],
        headers, body)
    d.addCallback(self.printPutReply_cb)
    return NOT_DONE_YET
def handlePutFile(self, line): print "Encrypting file..." s = line.split() file = open(s[2], 'r') enc_file = open("enc_fileout", 'w') crd = self.client_id.encryptFileSym(file, enc_file) args = ("putfile", str(self.ccid), os.path.basename(s[2])) salt = self.processCookie("/files") dataq = [] dataq.append(self.client_id.genHashArgs(args, salt)) dataq.append(self.client_id.encryptData(crd[0], self.client_id.pub_key)) dataq.append(self.client_id.encryptData(crd[1])) agent = CookieAgent(Agent(reactor), self.cookie_jar) #print crd[1] # print "debugging:key, iv putfile" # print dataq[1] # print len(dataq[1]) # print dataq[2] # print len(dataq[2]) print "Uploading file..." enc_file = open("enc_fileout", 'r') body = _FileProducer(enc_file, dataq) headers = http_headers.Headers() d = agent.request( 'PUT', 'http://localhost:8000/files/?method=putfile&ccid=' + self.ccid + "&name=" + os.path.basename(s[2]), headers, body) d.addCallback(self.printPutReply_cb) return NOT_DONE_YET
def __init__(self, reactor, email, password):
    """Set up client state and start authenticating against the API.

    :param reactor: twisted reactor used for all scheduling and I/O
    :param email: account e-mail used to log in
    :param password: account password used to log in
    """
    self.reactor = reactor
    self.email = email
    self.password = password

    # Set up an agent for sending HTTP requests. Uses cookies
    # (part of the authentication), persistent HTTP connection
    # pool, automatic content decoding (gzip)

    # container to keep track of cookies
    self.cookiejar = cookielib.CookieJar()

    # HTTP persistent connection pool
    self.pool = HTTPConnectionPool(self.reactor, persistent=True)
    # for some reason, using >1 connection per host fails
    self.pool.maxPersistentPerHost = 1

    self.agent = ContentDecoderAgent(
        CookieAgent(Agent(self.reactor, pool=self.pool), self.cookiejar),
        [('gzip', GzipDecoder)])

    # this is the token that is used to authenticate API requests
    self.xsrf_token = None
    self.auth_token = None

    # who we are (filled in after the profile request completes)
    self.player_nickname = None
    self.player_guid = None
    self.team = None
    self.ap = None
    self.level = None
    self.start_date = None
    self.new_version = False
    self.inventory_done = False
    self.profile_done = False

    # for keeping track of item inventory
    self.inventory = b07.inventory.Inventory()

    # for keeping track of API requests that are delayed until
    # authentication has completed
    self._deferred_api_requests = []

    # for keeping track of periodic inventory refreshes
    self._periodic_inventory_refresh_delayedcall = None

    # list of functions to call every time inventory is refreshed
    self._on_inventory_refreshed = []

    # do an immediate inventory refresh
    self._first_inventory_ready = self._defer_until_authenticated(
        self._inventory0, (), {})

    # do an immediate profile refresh
    self._first_profile_ready = self._defer_until_authenticated(
        self._profile0, (), {})

    # start the authentication process
    self.reactor.callLater(0, self._authenticate0)
def __init__(self, master):
    """Initialise module state and kick off startup via ``start()``."""
    self.master = master
    self.config = master.modules["config"].interface("crunchy")
    # Wrap the shared master agent so this module keeps its own cookies.
    jar = cookielib.CookieJar()
    self.agent = CookieAgent(master.agent, jar)
    # Runtime state, populated once logged in and caching begins.
    self.shows = {}
    self.cache_loop = None
    self.logged_in = False
    self.start()
def prepareNetwork():
    """Build a cookie-aware Twisted agent backed by a persistent pool.

    Cookies are persisted in the ``photo.cookie`` file so sessions
    survive restarts.

    :returns: a ``CookieAgent`` ready for requests
    """
    cookieJar = LWPCookieJar('photo.cookie')
    try:
        cookieJar.load()
    except IOError:
        # BUG FIX: on the very first run the cookie file does not exist
        # yet and load() raises; start with an empty jar instead of
        # crashing.
        pass
    pool = HTTPConnectionPool(reactor, persistent=True)
    pool.maxPersistentPerHost = 15
    agent = CookieAgent(Agent(reactor, pool=pool), cookieJar)
    return agent
def main():
    """Fetch a cookie-setting URL and dump the resulting jar."""
    jar = compat.cookielib.CookieJar()
    agent = CookieAgent(Agent(reactor), jar)

    def shutdown(ignored):
        reactor.stop()

    d = agent.request(b"GET", b"http://httpbin.org/cookies/set?some=data")
    d.addCallback(displayCookies, jar)
    d.addErrback(log.err)
    d.addCallback(shutdown)

    reactor.run()
def main():
    """Request google.com and print the cookies it handed back."""
    jar = CookieJar()
    agent = CookieAgent(Agent(reactor), jar)

    def shutdown(ignored):
        reactor.stop()

    d = agent.request('GET', 'http://www.google.com/')
    d.addCallback(displayCookies, jar)
    d.addErrback(log.err)
    d.addCallback(shutdown)

    reactor.run()
def _create_agent(self, req):
    """ create right agent for specific request

    Picks (and caches, per proxy host:port) a tunneling agent for HTTPS
    proxies or a ProxyAgent for plain HTTP proxies; falls back to the
    pre-built direct agent when no proxy applies. The chosen agent is
    then wrapped with redirect, gzip and cookie handling.

    NOTE(review): on the no-proxy path ``self._direct_agent`` was already
    wrapped with redirect/gzip/cookie layers in ``__init__``, so this
    method appears to wrap it a second time — confirm whether the double
    wrapping (notably a doubled redirect budget) is intended.
    """
    agent = None
    uri = URI.fromBytes(req.url)
    proxy = req.get('proxy')
    if req.get('use_proxy') is False:
        proxy = None  # explicit opt-out overrides any configured proxy
    if proxy:
        if uri.scheme == 'https':
            # HTTPS must go through a CONNECT tunnel
            agent_key = 'httpsproxy-%s-%s' % (proxy.host, proxy.port)
            agent = self._agents.get(agent_key)
            if not agent:
                agent = TunnelingAgent(
                    reactor=reactor, proxy=proxy,
                    contextFactory=ScrapexClientContextFactory(),
                    connectTimeout=30, pool=self._pool)
                self._agents[agent_key] = agent
        else:
            # http
            agent_key = 'httpproxy-%s-%s' % (proxy.host, proxy.port)
            agent = self._agents.get(agent_key)
            if not agent:
                endpoint = TCP4ClientEndpoint(
                    reactor, host=proxy.host, port=proxy.port,
                    timeout=req.get('timeout'))
                agent = ProxyAgent(endpoint, pool=self._pool)
                self._agents[agent_key] = agent
        if proxy.auth_header:
            # proxy credentials travel as a request header
            req.get('headers')['Proxy-Authorization'] = proxy.auth_header
    else:
        agent = self._direct_agent  # use single agent when no proxies used

    redirectLimit = self.scraper.config.get('max_redirects')
    if redirectLimit is None:
        redirectLimit = 3
    if redirectLimit > 0:
        agent = BrowserLikeRedirectAgent(agent, redirectLimit=redirectLimit)

    agent = ContentDecoderAgent(agent, [('gzip', GzipDecoder)])

    if self.cj is not None:
        agent = CookieAgent(agent, self.cj)

    return agent
def __init__(self, client):
    """Set up the JSON polling adapter and register it as the web world.

    :param client: owning client; supplies config (host/port/game,
        verbose/debug flags) and logging.
    """
    NamedAdapter.__init__(self, client)
    # %%s survives the first interpolation as a %s placeholder that the
    # poller fills in with the per-request path later.
    self.url = "http://%(host)s:%(port)s/play/%(game)s/json%%s" % vars(
        self.config)
    self.log("Fetching from: %s" % self.url)
    # periodic poll of the server state
    self._loop = LoopingCall(self.update)
    self.interval = 0.11  # seconds between polls
    self.agent = CookieAgent(Agent(reactor, pool=http_pool), CookieJar())
    self.verbose = client.config.verbose
    self.debug = client.config.debug
    registerGlobal(self, IWebWorld)
def deleteFile_cb():
    # Issue the authenticated DELETE for file s[2]; closes over `s`
    # and `self` from the enclosing handler.
    hash_args = ("delete", str(self.ccid), s[2])
    salt = self.processCookie("/files")
    producer = FileBodyProducer(
        StringIO(self.client_id.genHashArgs(hash_args, salt)))
    agent = CookieAgent(Agent(reactor), self.cookie_jar)
    url = ('http://localhost:8000/files/?method=delete&ccid=' +
           self.ccid + "&fileid=" + s[2])
    request = agent.request('DELETE', url, http_headers.Headers(), producer)
    request.addCallback(printDeleteReply_cb)
def handleListShares(self):
    """Ask the server for the list of shares belonging to this ccid."""
    hash_args = ("list", str(self.ccid))
    salt = self.processCookie("/shares")
    producer = FileBodyProducer(
        StringIO(self.client_id.genHashArgs(hash_args, salt)))
    agent = CookieAgent(Agent(reactor), self.cookie_jar)
    url = 'http://localhost:8000/shares/?method=list&ccid=' + self.ccid
    request = agent.request('GET', url, http_headers.Headers(), producer)
    request.addCallback(self.handleList_cb)
    return NOT_DONE_YET
def handleStartSession(self, method):
    """Start an authenticated session with the server.

    Two flows:
    * with a smartcard PIN: fetch a nonce, have it signed, then PUT the
      encrypted password plus signed nonce;
    * without a PIN: PUT the encrypted password directly (nonceid=-1).

    :param method: callback invoked with the decoded boolean response
    """
    def procResponse_cb(response):
        # Deliver the response body to the final user callback.
        defer = Deferred()
        defer.addCallback(method)
        response.deliverBody(DataPrinter(defer, "bool"))
        return NOT_DONE_YET

    def startSession_cb((signedNonce, nonceid)):
        # Nonce was signed by the card: complete the session handshake.
        agent = CookieAgent(Agent(reactor), self.cookie_jar)
        dataq = []
        dataq.append(signedNonce)
        body = _FileProducer(
            StringIO(self.client_id.encryptData(self.client_id.password)),
            dataq)
        headers = http_headers.Headers()
        d = agent.request(
            'PUT',
            'http://localhost:8000/session/?method=startsession&ccid=' +
            self.ccid + '&nonceid=' + str(nonceid),
            headers, body)
        d.addCallback(procResponse_cb)
        return NOT_DONE_YET

    def getNonce_cb(response):
        # Server returned a nonce: hand it to the card for signing.
        defer = Deferred()
        defer.addCallback(startSession_cb)
        response.deliverBody(getNonce(defer, self.client_id, self.pin))
        return NOT_DONE_YET

    if self.pin != None:
        # PIN flow: request a nonce first, sending our public key.
        agent = Agent(reactor)
        body = FileBodyProducer(
            StringIO(self.client_id.pub_key.exportKey('PEM')))
        headers = http_headers.Headers()
        d = agent.request(
            'GET', 'http://localhost:8000/session/?method=getnonce',
            headers, body)
        d.addCallback(getNonce_cb)
        return NOT_DONE_YET

    # No PIN: start the session with the encrypted password only.
    agent = CookieAgent(Agent(reactor), self.cookie_jar)
    body = FileBodyProducer(
        StringIO(self.client_id.encryptData(self.client_id.password)))
    headers = http_headers.Headers()
    d = agent.request(
        'PUT',
        'http://localhost:8000/session/?method=startsession&ccid=' +
        self.ccid + '&nonceid=' + str(-1),
        headers, body)
    d.addCallback(procResponse_cb)
    return NOT_DONE_YET
def startSession_cb((signedNonce, nonceid)):
    # Callback run once the nonce has been signed: PUT the encrypted
    # password plus the signed nonce to the startsession endpoint.
    # Closes over `self` and `procResponse_cb` from the enclosing scope.
    agent = CookieAgent(Agent(reactor), self.cookie_jar)
    dataq = []
    dataq.append(signedNonce)
    body = _FileProducer(
        StringIO(self.client_id.encryptData(self.client_id.password)),
        dataq)
    headers = http_headers.Headers()
    d = agent.request(
        'PUT',
        'http://localhost:8000/session/?method=startsession&ccid=' +
        self.ccid + '&nonceid=' + str(nonceid),
        headers, body)
    d.addCallback(procResponse_cb)
    return NOT_DONE_YET
def _sendVortexMsgLater(self, vortexMsgs: VortexMsgList):
    """POST a batch of vortex messages to the server, retrying on failure.

    Generator-based deferred body (the leading ``yield None`` makes this
    an inlineCallbacks-style coroutine). On a non-200 response or a
    transport error the whole batch is re-sent after ``RETRY_DELAY``.

    :param vortexMsgs: the messages to deliver
    :returns: a Deferred firing True once the response body is streaming
    """
    yield None
    assert self._server
    assert vortexMsgs

    def ebSendAgain(failure):
        # Any error: mark as retrying and schedule another attempt.
        self._retrying = True
        logger.debug("Retrying send of %s messages : %s",
                     len(vortexMsgs), failure.value)
        return task.deferLater(
            reactor, self.RETRY_DELAY, self._sendVortexMsgLater, vortexMsgs)

    def cbRequest(response):
        if response.code != 200:
            msg = "Connection to vortex %s:%s failed" % (
                self._server, self._port)
            logger.error(msg)
            # Returning a Failure routes us into ebSendAgain.
            return Failure(Exception(msg))
        elif self._retrying:
            logger.info("VortexServer client %s:%s reconnected",
                        self._server, self._port)
            self._retrying = False

        # Stream the response payload through the protocol handler.
        self.__protocol = VortexPayloadHttpClientProtocol(
            logger, vortexClient=self)
        response.deliverBody(self.__protocol)
        return True

    bodyProducer = _VortexClientPayloadProducer(vortexMsgs)

    agent = CookieAgent(Agent(reactor), self._cookieJar)

    args = {'vortexUuid': self._vortexUuid,
            'vortexName': self._vortexName}

    uri = ("http://%s:%s/vortex?%s" % (
        self._server, self._port, urlencode(args))).encode("UTF-8")

    d = agent.request(
        b'POST', uri,
        Headers({
            b'User-Agent': [b'Synerty VortexServer Client'],
            b'Content-Type': [b'text/plain']
        }),
        bodyProducer)

    d.addCallback(cbRequest)
    d.addErrback(ebSendAgain)  # Must be after cbRequest

    return d
def __init__(self, server, receiver, oauth_header=None):
    """Initialize the message sender.

    Args:
        server: The bayeux server to send messages to
        receiver: The message receiver to pass the responses to
        oauth_header: optional OAuth Authorization header value
    """
    # Cookie-aware agent over a fresh connection pool.
    self.cookie_jar = CookieJar()
    pool = HTTPConnectionPool(reactor)
    self.agent = CookieAgent(Agent(reactor, pool=pool), self.cookie_jar)
    # Will be set upon receipt of the handshake response.
    self.client_id = -1
    self.msg_id = 0
    self.server = server
    self.receiver = receiver
    self.oauth_header = oauth_header
def __init__(self):
    """Build a persistent, cookie- and gzip-aware HTTPS-capable agent."""
    self.pool = HTTPConnectionPool(reactor, persistent=True)
    # Max persistent connections kept per host (Twisted's default is 2).
    self.pool.maxPersistentPerHost = 5
    # Idle-connection timeout in seconds (Twisted's default is 240).
    self.pool.cachedConnectionTimeout = 50
    contextFactory = WebClientContextFactory()
    raw_agent = Agent(reactor, contextFactory, pool=self.pool)
    # Follow redirects and transparently decode gzip bodies.
    agent = RedirectAgent(
        ContentDecoderAgent(raw_agent, [('gzip', GzipDecoder)]))
    self.cookieJar = CookieJar()
    self.agent = CookieAgent(agent, self.cookieJar)
    # Default browser-like request headers sent with every request.
    self.headers = {'User-agent': ['Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.1) Gecko/2008071615 Fedora/3.0.1-1.fc9 Firefox/3.0.1'],
                    'Accept-Language': ['zh-Hans-CN,zh-Hans;q=0.5'],
                    'Accept': ['text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8'],
                    'Accept-Encoding': ['gb2313,utf-8;q=0.7,*;q=0.7'],
                    'Cache-Control': ['max-age=0']}
def __init__(self, userAgent):
    """Prepare URLs, credentials and the agent for metric fetches.

    :param userAgent: string appended to the Zenoss User-Agent header
    """
    self._aggMapping = AGGREGATION_MAPPING
    urlstart = getGlobalConfiguration().get('metric-url',
                                            'http://localhost:8080')
    self._metric_url = '%s/%s' % (urlstart, METRIC_URL_PATH)
    self._metric_url_v2 = '%s/%s' % (urlstart, WILDCARD_URL_PATH)
    creds = IAuthorizationTool(None).extractGlobalConfCredentials()
    auth = base64.b64encode('{login}:{password}'.format(**creds))
    # NOTE(review): self.cookieJar is not assigned in this __init__ —
    # presumably a class-level attribute; confirm it is defined there.
    self.agent = CookieAgent(
        Agent(reactor, pool=getPool(), connectTimeout=30), self.cookieJar)
    self._headers = Headers({
        'Authorization': ['basic %s' % auth],
        'content-type': ['application/json'],
        'User-Agent': ['Zenoss: %s' % userAgent]
    })
    # Optional hook invoked after metrics are retrieved.
    self.onMetricsFetched = None
def handleUpdateSharePerm(self, s):
    """Change the write permission of a share.

    ``s`` holds the parsed command: s[2]=fileid, s[3]=recipient ccid,
    s[4]=writeable flag.
    """
    hash_args = ("updateshareperm", str(self.ccid), s[3], s[2], s[4])
    salt = self.processCookie("/shares")
    producer = FileBodyProducer(
        StringIO(self.client_id.genHashArgs(hash_args, salt)))
    agent = CookieAgent(Agent(reactor), self.cookie_jar)
    url = ('http://localhost:8000/shares/?method=updateshareperm&ccid=' +
           self.ccid + "&rccid=" + s[3] + "&fileid=" + s[2] +
           "&writeable=" + s[4])
    request = agent.request('POST', url, http_headers.Headers(), producer)
    request.addCallback(self.printPutReply_cb)
    return NOT_DONE_YET
def register_cb((signedNonce, nonceid)): agent = CookieAgent(Agent(reactor), self.cookie_jar) dataq = [] dataq.append(signedNonce) dataq.append(self.client_id.encryptData(self.client_id.password)) # Sending the Certificate and the Sub CA to the server if self.pin is None: print "ERROR! Check the pin!" reactor.stop() cert = cc.get_certificate(cc.CERT_LABEL, self.pin) #print type(cert.as_pem()) #print cert.as_pem() if cert is None: print "ERROR! Check the pin" reactor.stop() subca = cc.get_certificate(cc.SUBCA_LABEL, self.pin) #print type(subca.as_pem()) #print subca.as_pem() if subca is None: print "ERROR! Check the pin" reactor.stop() enc_cert = b64encode(cert.as_pem()) #print "cert len: ", len(enc_cert) dataq.append(enc_cert) enc_subca = b64encode(subca.as_pem()) #print "sub ca len: ", len(enc_subca) dataq.append(enc_subca) dataq.append(self.client_id.pub_key.exportKey('PEM')) ext_key = self.client_id.pub_key.exportKey('PEM') if self.pin is None: print "ERROR! Check the pin or the CC" reactor.stop() signed_ext_key = cc.sign(ext_key, cc.KEY_LABEL, self.pin) enc_sek = b64encode(signed_ext_key) #print "encoded ext key: ", enc_sek #print "len encoded: ", len(enc_sek) dataq.append(enc_sek) body = FileProducer2(dataq) headers = http_headers.Headers() #print "Password:"******"LEN:", len(self.client_id.encryptData(self.client_id.password)) d = agent.request( 'PUT', 'http://localhost:8000/pboxes/?method=register' + '&nonceid=' + str(nonceid), headers, body) d.addCallback(procResponse_cb, checkClientReg_cb)
def call_api(self, verb, url, expected_status_code=http_status.HTTP_200_OK,
             headers=None, data=None, pre_read_body_cb=None,
             credentials=None):
    """Issue an HTTP request against the API under test.

    Asserts the response status (when ``expected_status_code`` is set),
    optionally adds basic-auth credentials, and decodes the JSON body
    unless the response is 204 No Content.

    :param verb: HTTP method, e.g. 'GET'
    :param url: path appended to ``self.api_base_url``
    :param expected_status_code: status asserted on the response, or falsy
        to skip the check
    :param headers: extra headers merged over the defaults
    :param data: JSON-serialisable request body
    :param pre_read_body_cb: callback inserted before the body is read
    :param credentials: (user, password) tuple for basic auth
    :returns: Deferred firing with the decoded JSON body (or raw response
        for 204)
    """
    def cbResponse(response):
        # Assert the status code before anything touches the body.
        if expected_status_code:
            self.assertEqual(
                expected_status_code, response.code,
                "Bad result code for request '%s %s'" % (verb, url))
        return response

    def cb_load_body(response):
        d = readBody(response)
        # 204 carries no body, so skip JSON decoding.
        if response.code != http_status.HTTP_204_NO_CONTENT:
            d.addCallback(RestApiTestMixin.cb_decode_json)
        return d

    _headers = {'User-Agent': ['Twisted Web Client Example']}

    if credentials is not None:
        _headers['authorization'] = [
            'basic %s' % base64.encodestring('%s:%s' % credentials)
        ]

    if headers:
        _headers.update(headers)

    # Lazily create one shared cookie-aware agent per test instance.
    if self.agent is None:
        self.agent = CookieAgent(Agent(reactor), CookieJar())

    body = None
    if data is not None:
        body = JsonProducer(data)

    d = self.agent.request(verb, self.api_base_url + url,
                           Headers(_headers), body)
    d.addCallback(cbResponse)
    if pre_read_body_cb:
        d.addCallback(pre_read_body_cb)
    d.addCallback(cb_load_body)
    return d
def changeProfileInfo(profileText, profileImgUrl):
    """Log in, then update the profile text and image on the site."""
    jar = CookieJar()
    ctx = WebClientContextFactory()
    agent = CookieAgent(Agent(reactor, ctx), jar)

    # Each step is (callback, extra args); an error handler follows every
    # step so a failure anywhere short-circuits the chain.
    steps = [
        (cbGetPage, (cbFindCsrf,)),
        (postLoginPage, (DOMAIN, agent)),
        (getPage, (DOMAIN, 'account/profile', agent)),
        (cbGetPage, (cbFindCsrf,)),
        (postProfileInfo, (DOMAIN, agent,
                           profileText.encode('utf8'), profileImgUrl)),
    ]

    d = getPage(None, DOMAIN, 'login', agent)
    for callback, extra in steps:
        d.addCallback(callback, *extra)
        d.addErrback(errorHandler)
    d.addCallback(cbGetPage, cbFindCsrf)
    return d
def __init__(self, username, password,
             url='https://localhost:8443/api/metrics/store',
             buflen=defaultMetricBufferSize,
             pubfreq=defaultPublishFrequency):
    """Publisher that POSTs buffered metrics to an HTTP endpoint.

    :param username: login for the endpoint; auth is only attempted when
        non-empty
    :param password: password for the endpoint
    :param url: metric store URL
    :param buflen: maximum metrics buffered before publishing
    :param pubfreq: publish interval
    """
    super(HttpPostPublisher, self).__init__(buflen, pubfreq)
    self._username = username
    self._password = password
    self._needsAuth = False
    self._authenticated = False
    if self._username:
        self._needsAuth = True
    self._cookieJar = CookieJar()
    self._agent = CookieAgent(Agent(reactor), self._cookieJar)
    self._url = url
    # BUG FIX: the original used sys.argv[0].rstrip(".py"), but rstrip
    # strips any trailing '.', 'p' or 'y' characters rather than the
    # ".py" suffix (e.g. "happy.py" -> "ha"). Strip the suffix properly.
    if sys.argv[0]:
        progname = os.path.basename(sys.argv[0])
        if progname.endswith(".py"):
            progname = progname[:-3]
        self._agent_suffix = progname
    else:
        self._agent_suffix = "python"
    reactor.addSystemEventTrigger('before', 'shutdown', self._shutdown)
def create(cls, drupal_url, user, password, reactor=None, cookie_jar=None,
           user_agent="TxDrupalRestWsClient"):
    """Build a client instance, supplying default reactor and cookie jar.

    Any argument left as None is replaced with a fresh default before the
    class is constructed.
    """
    if cookie_jar is None:
        cookie_jar = compat.cookielib.CookieJar()

    if reactor is None:
        # Import lazily so callers may install their own reactor first.
        import twisted.internet
        reactor = twisted.internet.reactor

    connection_pool = HTTPConnectionPool(reactor)
    agent = CookieAgent(Agent(reactor, pool=connection_pool), cookie_jar)

    return cls(drupal_url, user, password, reactor, agent, cookie_jar,
               user_agent)
def shareFile_cb():
    # Callback: upload the re-encrypted symmetric key to share file s[2]
    # with user s[3]. Closes over `s`, `self` and `enc_sym_key`.
    # NOTE(review): the hash args use the method name "delete" while the
    # URL requests method=sharefile — looks like a copy-paste slip, but
    # the server may validate against exactly this string; confirm before
    # changing it.
    args = ("delete", str(self.ccid), s[3], s[2])
    salt = self.processCookie("/shares")
    dataq = []
    dataq.append(self.client_id.genHashArgs(args, salt))
    dataq.append(enc_sym_key)
    print "Uploading symkey..."
    agent = CookieAgent(Agent(reactor), self.cookie_jar)
    body = _FileProducer(StringIO(""), dataq)
    headers = http_headers.Headers()
    d = agent.request(
        'PUT',
        'http://localhost:8000/shares/?method=sharefile&ccid=' +
        self.ccid + "&rccid=" + s[3] + "&fileid=" + s[2],
        headers, body)
    d.addCallback(self.printPutReply_cb)
    return d
def upload(self, link):
    """Log in to TokyoTosho and submit a new torrent link.

    Generator-based deferred body (inlineCallbacks style). Raises the
    module's command exception when credentials are missing, login fails
    or the submission is rejected.

    :param link: torrent URL to submit
    """
    exception = self.master.modules["commands"].exception
    user = yield self.config.get("user")
    passwd = yield self.config.get("pass")
    # Fresh cookie jar per upload so stale sessions never leak in.
    ttagent = CookieAgent(self.master.agent, cookielib.CookieJar())
    if user is None or passwd is None:
        raise exception(u"No TT username or password in config")
    response = yield ttagent.request(
        "POST",
        "http://tokyotosho.info/login.php",
        Headers({'Content-Type': ['application/x-www-form-urlencoded']}),
        FileBodyProducer(
            StringIO(
                urllib.urlencode({
                    "username": user,
                    "password": passwd,
                    "submit": "Submit"
                }))))
    body = yield self.master.modules["utils"].returnBody(response)
    # The site signals success in the page text, not the status code.
    if "Logged in." not in body:
        raise exception(u"Couldn't login to TT.")
    response = yield ttagent.request(
        "POST",
        "http://tokyotosho.info/new.php",
        Headers({'Content-Type': ['application/x-www-form-urlencoded']}),
        FileBodyProducer(
            StringIO(
                urllib.urlencode({
                    "type": "1",
                    "url": link,
                    "comment": "#[email protected]",
                    "website": "http://www.commiesubs.com/",
                    "send": "Submit New Torrent"
                }))))
    body = yield self.master.modules["utils"].returnBody(response)
    if "Torrent Submitted" not in body:
        raise exception(u"Couldn't upload torrent to TT.")
def handleGetShareMData(self, data):
    """Fetch the metadata of a shared file.

    ``data`` is a (callback, fileid) pair; the callback receives the
    decoded metadata.
    """
    method, file_id = data[0], data[1]

    def gotMetadata(response):
        # Stream the body into the metadata printer, then invoke the
        # caller-supplied callback.
        defer = Deferred()
        defer.addCallback(method)
        response.deliverBody(DataPrinter(defer, "getmdata"))
        return NOT_DONE_YET

    hash_args = ("get_mdata", str(self.ccid), file_id)
    salt = self.processCookie("/shares")
    producer = FileBodyProducer(
        StringIO(self.client_id.genHashArgs(hash_args, salt)))
    agent = CookieAgent(Agent(reactor), self.cookie_jar)
    url = ('http://localhost:8000/shares/?method=get_mdata&ccid=' +
           self.ccid + "&fileid=" + file_id)
    request = agent.request('GET', url, http_headers.Headers(), producer)
    request.addCallback(gotMetadata)
    return NOT_DONE_YET
def perform_request(self, url, request_type='GET', raw_data='', headers=None):
    """Perform an HTTP request and return a Deferred with the body.

    2xx success codes (OK/CREATED/PARTIAL_CONTENT) resolve with the body;
    anything else raises RequestError carrying the response and body.

    :param url: target URL
    :param request_type: HTTP method; GET requests send no body
    :param raw_data: payload for non-GET requests
    :param headers: optional plain dict of single-valued headers
    """
    if headers:
        # BUG FIX: build a fresh Headers object instead of mutating the
        # caller's dict in place (the original rewrote each value into a
        # list inside the argument, a surprising side effect).
        headers = Headers({key: [value] for key, value in headers.items()})

    def _on_error_response(response, response_str):
        raise RequestError(response, response_str)

    def _on_response(response):
        if response.code == http.OK or response.code == http.CREATED \
                or response.code == http.PARTIAL_CONTENT:
            return readBody(response)
        else:
            # Read the body anyway so the error carries the server text.
            return readBody(response).addCallback(
                lambda response_str: _on_error_response(response, response_str))

    self._logger.debug("Performing %s request to %s", request_type, url)

    agent = CookieAgent(Agent(reactor), self.cookie_jar)
    data_producer = None if request_type == 'GET' else POSTDataProducer(raw_data)
    deferred = agent.request(request_type, url, headers, data_producer)
    deferred.addCallback(_on_response)
    return deferred
def handleGetShared(self, s):
    """Download a file that was shared with us.

    ``s`` is the parsed command; s[2] is the file id, which is also used
    as the local output filename.
    """
    def handleGetShared_cb(response, f):
        # Stream the response body into the open file, then hand off to
        # writeFile_cb for post-processing (decryption etc.).
        finished = Deferred()
        finished.addCallback(self.writeFile_cb, s)
        cons = FileConsumer(f)
        response.deliverBody(FileDownload(finished, cons))
        print "Downloading file..."
        return finished

    fileId = s[2]
    args = ("getshared", str(self.ccid), str(fileId))
    salt = self.processCookie("/shares")
    body = FileBodyProducer(
        StringIO(self.client_id.genHashArgs(args, salt)))
    agent = CookieAgent(Agent(reactor), self.cookie_jar)
    headers = http_headers.Headers()
    d = agent.request(
        'GET',
        'http://localhost:8000/shares/?method=getshared&ccid=' +
        self.ccid + '&fileid=' + fileId,
        headers, body)
    # NOTE(review): the handle is opened before the request resolves and
    # is never closed here on failure — the consumer is assumed to own
    # and close it; confirm FileConsumer does so.
    f = open(fileId, "w")
    d.addCallback(handleGetShared_cb, f)
    return NOT_DONE_YET
def request(
    self,
    method,
    url,
    *,
    params=None,
    headers=None,
    data=None,
    files=None,
    json=_NOTHING,
    auth=None,
    cookies=None,
    allow_redirects=True,
    browser_like_redirects=False,
    unbuffered=False,
    reactor=None,
    timeout=None,
    _stacklevel=2,
):
    """
    See :func:`treq.request()`.
    """
    method = method.encode('ascii').upper()

    # Normalise every accepted URL form to a lazily-parsed EncodedURL.
    if isinstance(url, DecodedURL):
        parsed_url = url.encoded_url
    elif isinstance(url, EncodedURL):
        parsed_url = url
    elif isinstance(url, str):
        # We use hyperlink in lazy mode so that users can pass arbitrary
        # bytes in the path and querystring.
        parsed_url = EncodedURL.from_text(url)
    else:
        parsed_url = EncodedURL.from_text(url.decode('ascii'))

    # Join parameters provided in the URL
    # and the ones passed as argument.
    if params:
        parsed_url = parsed_url.replace(
            query=parsed_url.query + tuple(_coerced_query_params(params))
        )

    url = parsed_url.to_uri().to_text().encode('ascii')

    headers = self._request_headers(headers, _stacklevel + 1)

    bodyProducer, contentType = self._request_body(
        data, files, json, stacklevel=_stacklevel + 1)
    if contentType is not None:
        headers.setRawHeaders(b'Content-Type', [contentType])

    # Per-request cookies are merged over the session jar.
    if not isinstance(cookies, CookieJar):
        cookies = cookiejar_from_dict(cookies)

    cookies = merge_cookies(self._cookiejar, cookies)
    wrapped_agent = CookieAgent(self._agent, cookies)

    if allow_redirects:
        if browser_like_redirects:
            wrapped_agent = BrowserLikeRedirectAgent(wrapped_agent)
        else:
            wrapped_agent = RedirectAgent(wrapped_agent)

    wrapped_agent = ContentDecoderAgent(
        wrapped_agent, [(b'gzip', GzipDecoder)])

    if auth:
        wrapped_agent = add_auth(wrapped_agent, auth)

    d = wrapped_agent.request(
        method, url, headers=headers, bodyProducer=bodyProducer)

    if reactor is None:
        from twisted.internet import reactor
    if timeout:
        # Cancel the request when the timeout fires; disarm the timer as
        # soon as a result (success or failure) arrives.
        delayedCall = reactor.callLater(timeout, d.cancel)

        def gotResult(result):
            if delayedCall.active():
                delayedCall.cancel()
            return result

        d.addBoth(gotResult)

    if not unbuffered:
        d.addCallback(_BufferedResponse)

    return d.addCallback(_Response, cookies)