def redfish_request(self, method, uri, headers=None, bodyProducer=None):
    """Send the Redfish request and return a Deferred firing with
    ``(decoded_json_body, response_headers)``.

    :param method: HTTP method as bytes (e.g. ``b"GET"``).
    :param uri: request URI as bytes.
    :param headers: optional ``twisted.web.http_headers.Headers``.
    :param bodyProducer: optional ``IBodyProducer`` for the request body.
    :raises PowerActionError: on HTTP status >= 400 or invalid JSON.
    """
    agent = RedirectAgent(
        Agent(reactor, contextFactory=WebClientContextFactory()))
    d = agent.request(
        method, uri, headers=headers, bodyProducer=bodyProducer)

    def render_response(response):
        """Render the HTTPS response received."""

        def eb_catch_partial(failure):
            # Twisted is raising PartialDownloadError because the responses
            # do not contain a Content-Length header. Since every response
            # holds the whole body we just take the result.
            failure.trap(PartialDownloadError)
            if int(failure.value.status) == HTTPStatus.OK:
                return failure.value.response
            else:
                return failure

        def cb_json_decode(data):
            data = data.decode("utf-8")
            # Only decode non-empty response bodies.
            if data:
                # Occasionally invalid JSON is returned; provide a clear
                # error in that case.
                try:
                    return json.loads(data)
                except ValueError as error:
                    raise PowerActionError(
                        "Redfish request failed from a JSON parse error:"
                        " %s." % error)

        def cb_attach_headers(data, headers):
            # Pair the decoded body with the response headers so callers
            # can inspect both.
            return data, headers

        # Error out if the response has a status code of 400 or above.
        if response.code >= int(HTTPStatus.BAD_REQUEST):
            # If there was no trailing slash, retry with a trailing slash
            # because of varying requirements of BMC manufacturers.
            if (response.code == HTTPStatus.NOT_FOUND
                    and uri.decode("utf-8")[-1] != "/"):
                # BUG FIX: previously the retry Deferred was assigned to a
                # local and then immediately overwritten by
                # readBody(response) below, so the retried request's result
                # was silently discarded and the 404 body was read instead.
                # Return the rendered retry response. The retried URI ends
                # with "/", so render_response cannot recurse again.
                retry_d = agent.request(
                    method,
                    uri + "/".encode("utf-8"),
                    headers=headers,
                    bodyProducer=bodyProducer,
                )
                retry_d.addCallback(render_response)
                return retry_d
            else:
                raise PowerActionError(
                    "Redfish request failed with response status code:"
                    " %s." % response.code)

        d = readBody(response)
        d.addErrback(eb_catch_partial)
        d.addCallback(cb_json_decode)
        d.addCallback(cb_attach_headers, headers=response.headers)
        return d

    d.addCallback(render_response)
    return d
def request(self, method, url, **kwargs):
    """Issue an HTTP request through the wrapped Twisted agent.

    Accepts requests-style keyword arguments (``params``, ``headers``,
    ``data``, ``files``, ``cookies``, ``auth``, ``allow_redirects``,
    ``timeout``, ``unbuffered``) and returns a Deferred firing with a
    response wrapper.
    NOTE(review): uses ``dict.iteritems`` and str-keyed headers, so this
    fragment is Python-2-only.
    """
    # HTTP method names are case-sensitive on the wire; normalize here.
    method = method.upper()

    # Join parameters provided in the URL
    # and the ones passed as argument.
    params = kwargs.get('params')
    if params:
        url = _combine_query_params(url, params)

    # Convert headers dictionary to
    # twisted raw headers format.
    headers = kwargs.get('headers')
    if headers:
        if isinstance(headers, dict):
            h = Headers({})
            for k, v in headers.iteritems():
                if isinstance(v, str):
                    # Single value: add one raw header line.
                    h.addRawHeader(k, v)
                else:
                    # Assumed to be a sequence of values — TODO confirm.
                    h.setRawHeaders(k, v)
            headers = h
    else:
        headers = Headers({})

    # Here we choose a right producer
    # based on the parameters passed in.
    bodyProducer = None
    data = kwargs.get('data')
    files = kwargs.get('files')
    if files:
        # If the files keyword is present we will issue a
        # multipart/form-data request as it suits better for cases
        # with files and/or large objects.
        files = list(_convert_files(files))
        boundary = uuid.uuid4()
        headers.setRawHeaders(
            'content-type', [
                'multipart/form-data; boundary=%s' % (boundary,)])
        if data:
            data = _convert_params(data)
        else:
            data = []
        bodyProducer = multipart.MultiPartProducer(
            data + files, boundary=boundary)
    elif data:
        # Otherwise stick to x-www-form-urlencoded format
        # as it's generally faster for smaller requests.
        if isinstance(data, (dict, list, tuple)):
            headers.setRawHeaders(
                'content-type', ['application/x-www-form-urlencoded'])
            data = urlencode(data, doseq=True)
        bodyProducer = IBodyProducer(data)

    cookies = kwargs.get('cookies', {})
    if not isinstance(cookies, CookieJar):
        cookies = cookiejar_from_dict(cookies)
    # Merge request cookies into the client's persistent jar.
    cookies = merge_cookies(self._cookiejar, cookies)

    # Wrap the base agent: cookies -> (optional) redirects -> gzip decode
    # -> (optional) auth.  Order matters: each wrapper sees the one below.
    wrapped_agent = CookieAgent(self._agent, cookies)
    if kwargs.get('allow_redirects', True):
        wrapped_agent = RedirectAgent(wrapped_agent)
    wrapped_agent = ContentDecoderAgent(wrapped_agent,
                                        [('gzip', GzipDecoder)])
    auth = kwargs.get('auth')
    if auth:
        wrapped_agent = add_auth(wrapped_agent, auth)

    d = wrapped_agent.request(
        method, url, headers=headers, bodyProducer=bodyProducer)

    timeout = kwargs.get('timeout')
    if timeout:
        # Cancel the request Deferred if it does not fire within `timeout`
        # seconds; the delayed call itself is cancelled on completion.
        delayedCall = default_reactor(kwargs.get('reactor')).callLater(
            timeout, d.cancel)

        def gotResult(result):
            if delayedCall.active():
                delayedCall.cancel()
            return result

        d.addBoth(gotResult)

    if not kwargs.get('unbuffered', False):
        d.addCallback(_BufferedResponse)

    return d.addCallback(_Response, cookies)
# NOTE(review): dataReceived/connectionLost are methods of a protocol class
# (presumably SimpleBodyProtocol) whose header lies outside this chunk.

def dataReceived(self, data):
    # Accumulate body chunks; joined once on connection close.
    self.dataBuffer.append(data)

def connectionLost(self, reason):
    # Fire the waiting Deferred with the full body as a single bytes value.
    self.deferred.callback(b''.join(self.dataBuffer))

def readBody(response):
    """Deliver the body of `response` and return a Deferred firing with
    the complete body bytes."""
    d = defer.Deferred()
    response.deliverBody(SimpleBodyProtocol(response.code, response.phrase, d))
    return d

# Prefer a pooled (persistent-connection) agent when the running Twisted
# version provides HTTPConnectionPool; fall back to a plain agent otherwise.
try:
    from twisted.web.client import HTTPConnectionPool
    pool = HTTPConnectionPool(reactor)
    # Silence per-connection factory logging.
    pool._factory.noisy = False
    agent = RedirectAgent(Agent(reactor, pool=pool))
except ImportError:
    agent = RedirectAgent(Agent(reactor))

# Load module config with defaults; third argument presumably means
# "create file if missing" — TODO confirm against config.YAMLConfig.
eqnotice_config = config.YAMLConfig(
    "cfg/EQ_Notice.config.yml",
    {'enabled': True, 'timer': 60, 'debug': False,
     '1': "http://pso2.acf.me.uk/pso2eq.txt"},
    True)

# Per-source state, one slot per feed URL (10 slots).
# HTTP Headers
ETag_Headers = ['', '', '', '', '', '', '', '', '', '']
Modified_Headers = ['', '', '', '', '', '', '', '', '', '']
# HTTP Modified in time
Modified_time = ['', '', '', '', '', '', '', '', '', '']
# HTTP Data
HTTP_Data = ['', '', '', '', '', '', '', '', '', '']
# was "【1時間前】" in the data?
ishour_eq = [False, False, False, False, False, False, False, False, False, False]
# Hour of EQ
def setUp(self):
    """Prepare a test fixture: fake reactor, redirect-following agent,
    and a TitleFetcher wired to that agent, then establish a connection."""
    self.reactor = self.Reactor()
    redirecting_agent = RedirectAgent(self.makeAgent())
    self.agent = redirecting_agent
    fetcher = TitleFetcher()
    fetcher.agent = redirecting_agent
    self.fetcher = fetcher
    self.connect(None)
def request(
    self,
    method,
    url,
    *,
    params=None,
    headers=None,
    data=None,
    files=None,
    json=_NOTHING,
    auth=None,
    cookies=None,
    allow_redirects=True,
    browser_like_redirects=False,
    unbuffered=False,
    reactor=None,
    timeout=None,
    _stacklevel=2,
):
    """
    See :func:`treq.request()`.
    """
    # HTTP method on the wire is ASCII bytes, upper-cased.
    method = method.encode('ascii').upper()

    # Normalize any accepted URL form to a hyperlink EncodedURL.
    if isinstance(url, DecodedURL):
        parsed_url = url.encoded_url
    elif isinstance(url, EncodedURL):
        parsed_url = url
    elif isinstance(url, str):
        # We use hyperlink in lazy mode so that users can pass arbitrary
        # bytes in the path and querystring.
        parsed_url = EncodedURL.from_text(url)
    else:
        # Assumed to be bytes at this point — TODO confirm.
        parsed_url = EncodedURL.from_text(url.decode('ascii'))

    # Join parameters provided in the URL
    # and the ones passed as argument.
    if params:
        parsed_url = parsed_url.replace(
            query=parsed_url.query + tuple(_coerced_query_params(params)))

    url = parsed_url.to_uri().to_text().encode('ascii')

    headers = self._request_headers(headers, _stacklevel + 1)

    bodyProducer, contentType = self._request_body(
        data, files, json, stacklevel=_stacklevel + 1)
    if contentType is not None:
        headers.setRawHeaders(b'Content-Type', [contentType])

    if not isinstance(cookies, CookieJar):
        cookies = cookiejar_from_dict(cookies)
    # Merge per-request cookies into the client's persistent jar.
    cookies = merge_cookies(self._cookiejar, cookies)

    # Wrap agents: cookies -> (optional) redirects -> gzip -> (optional)
    # auth.  Each wrapper delegates to the one constructed before it.
    wrapped_agent = CookieAgent(self._agent, cookies)
    if allow_redirects:
        if browser_like_redirects:
            wrapped_agent = BrowserLikeRedirectAgent(wrapped_agent)
        else:
            wrapped_agent = RedirectAgent(wrapped_agent)
    wrapped_agent = ContentDecoderAgent(wrapped_agent,
                                        [(b'gzip', GzipDecoder)])
    if auth:
        wrapped_agent = add_auth(wrapped_agent, auth)

    d = wrapped_agent.request(method, url, headers=headers,
                              bodyProducer=bodyProducer)

    if reactor is None:
        # Deferred import keeps module import reactor-free.
        from twisted.internet import reactor
    if timeout:
        # Cancel the request if no result arrives within `timeout` seconds.
        delayedCall = reactor.callLater(timeout, d.cancel)

        def gotResult(result):
            if delayedCall.active():
                delayedCall.cancel()
            return result

        d.addBoth(gotResult)

    if not unbuffered:
        d.addCallback(_BufferedResponse)

    return d.addCallback(_Response, cookies)
import types

from cyclone import escape
from cyclone.web import HTTPError
from twisted.internet import defer
from twisted.internet import reactor
from twisted.internet.protocol import Protocol
from twisted.web.client import Agent, RedirectAgent
from twisted.web.http_headers import Headers
from twisted.web.iweb import IBodyProducer
from zope.interface import implements

# Shared module-level agent that follows HTTP redirects.
# NOTE(review): zope.interface.implements() is Python-2-only API.
agent = RedirectAgent(Agent(reactor))


class StringProducer(object):
    """IBodyProducer that writes a fixed in-memory body in one shot.

    NOTE(review): stopProducing is not visible in this chunk — the class
    may be truncated here.
    """
    implements(IBodyProducer)

    def __init__(self, body):
        # Body is written verbatim; length lets Twisted set Content-Length.
        self.body = body
        self.length = len(body)

    def startProducing(self, consumer):
        # Entire body fits in memory, so produce it synchronously.
        consumer.write(self.body)
        return defer.succeed(None)

    def pauseProducing(self):
        # Nothing to pause: production completes inside startProducing.
        pass
def request(self, method, url, **kwargs):
    """
    See :func:`treq.request()`.

    NOTE(review): references the ``unicode`` builtin, so this fragment
    targets Python 2 (or relies on a compatibility shim).
    """
    method = method.encode('ascii').upper()

    # Normalize url (text or ascii bytes) into a hyperlink URL object.
    if isinstance(url, unicode):
        parsed_url = URL.from_text(url)
    else:
        parsed_url = URL.from_text(url.decode('ascii'))

    # Join parameters provided in the URL
    # and the ones passed as argument.
    params = kwargs.get('params')
    if params:
        parsed_url = parsed_url.replace(
            query=parsed_url.query + tuple(_coerced_query_params(params)))

    url = parsed_url.to_uri().to_text().encode('ascii')

    # Convert headers dictionary to
    # twisted raw headers format.
    headers = kwargs.get('headers')
    if headers:
        if isinstance(headers, dict):
            h = Headers({})
            for k, v in headers.items():
                if isinstance(v, (bytes, unicode)):
                    h.addRawHeader(k, v)
                elif isinstance(v, list):
                    h.setRawHeaders(k, v)
                # Other value types are silently dropped — presumably
                # intentional leniency; verify against treq docs.
            headers = h
    else:
        headers = Headers({})

    # Here we choose a right producer
    # based on the parameters passed in.
    bodyProducer = None
    data = kwargs.get('data')
    files = kwargs.get('files')
    # since json=None needs to be serialized as 'null', we need to
    # explicitly check kwargs for this key
    has_json = 'json' in kwargs

    if files:
        # If the files keyword is present we will issue a
        # multipart/form-data request as it suits better for cases
        # with files and/or large objects.
        files = list(_convert_files(files))
        boundary = str(uuid.uuid4()).encode('ascii')
        headers.setRawHeaders(
            b'content-type',
            [b'multipart/form-data; boundary=' + boundary])
        if data:
            data = _convert_params(data)
        else:
            data = []
        bodyProducer = multipart.MultiPartProducer(data + files,
                                                   boundary=boundary)
    elif data:
        # Otherwise stick to x-www-form-urlencoded format
        # as it's generally faster for smaller requests.
        if isinstance(data, (dict, list, tuple)):
            headers.setRawHeaders(
                b'content-type',
                [b'application/x-www-form-urlencoded'])
            data = urlencode(data, doseq=True)
        bodyProducer = self._data_to_body_producer(data)
    elif has_json:
        # If data is sent as json, set Content-Type as 'application/json'
        headers.setRawHeaders(
            b'content-type',
            [b'application/json; charset=UTF-8'])
        content = kwargs['json']
        # Local name shadows any module-level json import in this scope.
        json = json_dumps(content, separators=(u',', u':')).encode('utf-8')
        bodyProducer = self._data_to_body_producer(json)

    cookies = kwargs.get('cookies', {})
    if not isinstance(cookies, CookieJar):
        cookies = cookiejar_from_dict(cookies)
    cookies = merge_cookies(self._cookiejar, cookies)

    # Agent wrapping order: cookies -> redirects -> gzip -> auth.
    wrapped_agent = CookieAgent(self._agent, cookies)
    if kwargs.get('allow_redirects', True):
        if kwargs.get('browser_like_redirects', False):
            wrapped_agent = BrowserLikeRedirectAgent(wrapped_agent)
        else:
            wrapped_agent = RedirectAgent(wrapped_agent)
    wrapped_agent = ContentDecoderAgent(wrapped_agent,
                                        [(b'gzip', GzipDecoder)])
    auth = kwargs.get('auth')
    if auth:
        wrapped_agent = add_auth(wrapped_agent, auth)

    d = wrapped_agent.request(method, url, headers=headers,
                              bodyProducer=bodyProducer)

    timeout = kwargs.get('timeout')
    if timeout:
        # Cancel the request Deferred after `timeout` seconds.
        delayedCall = default_reactor(kwargs.get('reactor')).callLater(
            timeout, d.cancel)

        def gotResult(result):
            if delayedCall.active():
                delayedCall.cancel()
            return result

        d.addBoth(gotResult)

    if not kwargs.get('unbuffered', False):
        d.addCallback(_BufferedResponse)

    return d.addCallback(_Response, cookies)
def __init__(self):
    """Initialize with a Twisted agent that transparently follows
    HTTP redirects."""
    base_agent = Agent(reactor)
    self._agent = RedirectAgent(base_agent)
def request(self, method, url, **kwargs):
    """
    See :func:`treq.request()`.

    Keyword arguments are consumed via ``kwargs.pop``; any that remain
    afterwards trigger a DeprecationWarning (future TypeError).
    """
    method = method.encode('ascii').upper()
    # Stack level used so warnings point at the caller's call site.
    stacklevel = kwargs.pop('_stacklevel', 2)

    # Normalize every accepted URL form into a DecodedURL.
    if isinstance(url, DecodedURL):
        parsed_url = url
    elif isinstance(url, EncodedURL):
        parsed_url = DecodedURL(url)
    elif isinstance(url, six.text_type):
        parsed_url = DecodedURL.from_text(url)
    else:
        # Assumed to be ascii bytes — TODO confirm.
        parsed_url = DecodedURL.from_text(url.decode('ascii'))

    # Join parameters provided in the URL
    # and the ones passed as argument.
    params = kwargs.pop('params', None)
    if params:
        parsed_url = parsed_url.replace(
            query=parsed_url.query + tuple(_coerced_query_params(params)))

    url = parsed_url.to_uri().to_text().encode('ascii')

    # Convert headers dictionary to
    # twisted raw headers format.
    headers = kwargs.pop('headers', None)
    if headers:
        if isinstance(headers, dict):
            h = Headers({})
            for k, v in headers.items():
                if isinstance(v, (bytes, six.text_type)):
                    h.addRawHeader(k, v)
                elif isinstance(v, list):
                    h.setRawHeaders(k, v)
            headers = h
    else:
        headers = Headers({})

    # Delegate body construction (data/files/json) to a single helper
    # that also reports the Content-Type to set, if any.
    bodyProducer, contentType = self._request_body(
        data=kwargs.pop('data', None),
        files=kwargs.pop('files', None),
        json=kwargs.pop('json', _NOTHING),
        stacklevel=stacklevel,
    )
    if contentType is not None:
        headers.setRawHeaders(b'Content-Type', [contentType])

    cookies = kwargs.pop('cookies', {})
    if not isinstance(cookies, CookieJar):
        cookies = cookiejar_from_dict(cookies)
    cookies = merge_cookies(self._cookiejar, cookies)

    # Agent wrapping order: cookies -> redirects -> gzip -> auth.
    wrapped_agent = CookieAgent(self._agent, cookies)
    browser_like_redirects = kwargs.pop('browser_like_redirects', False)
    if kwargs.pop('allow_redirects', True):
        if browser_like_redirects:
            wrapped_agent = BrowserLikeRedirectAgent(wrapped_agent)
        else:
            wrapped_agent = RedirectAgent(wrapped_agent)
    wrapped_agent = ContentDecoderAgent(wrapped_agent,
                                        [(b'gzip', GzipDecoder)])
    auth = kwargs.pop('auth', None)
    if auth:
        wrapped_agent = add_auth(wrapped_agent, auth)

    d = wrapped_agent.request(method, url, headers=headers,
                              bodyProducer=bodyProducer)

    reactor = kwargs.pop('reactor', None)
    if reactor is None:
        # Deferred import: only pull in the global reactor when needed.
        from twisted.internet import reactor
    timeout = kwargs.pop('timeout', None)
    if timeout:
        # Cancel the request Deferred after `timeout` seconds; the timer
        # itself is cancelled once a result arrives.
        delayedCall = reactor.callLater(timeout, d.cancel)

        def gotResult(result):
            if delayedCall.active():
                delayedCall.cancel()
            return result

        d.addBoth(gotResult)

    if not kwargs.pop('unbuffered', False):
        d.addCallback(_BufferedResponse)

    # Anything left in kwargs was not a recognized argument.
    if kwargs:
        warnings.warn(
            ("Got unexpected keyword argument: {}."
             " treq will ignore this argument,"
             " but will raise TypeError in the next treq release.").format(
                ", ".join(repr(k) for k in kwargs)),
            DeprecationWarning,
            stacklevel=stacklevel,
        )

    return d.addCallback(_Response, cookies)
        # NOTE(review): orphaned tail of a function whose definition lies
        # before this chunk; original indentation unknown.
        return finished

# Build an HTTP Basic credential for the proxy (Proxy-Authorization value).
user_name = base64.b64encode('spider:123456'.encode('utf-8')).strip()
encode_user = b'Basic ' + user_name
header = {'Proxy-Authorization': [encode_user]}
# (host, port, credential) consumed by TunnelingAgent — verify expected
# tuple shape against the TunnelingAgent implementation.
proxy_config = ('47.105.165.81', 5527, encode_user)
url = "http://go2mars.top/solo/articles/2018/12/18/1545131102995.html"
# NOTE(review): time.clock() was removed in Python 3.8; this script
# presumably targets an older interpreter.
print(time.clock())
contextFactory = ScrapyClientContextFactory()
# Fetch through a CONNECT-tunneling proxy, following redirects.
agent = RedirectAgent(
    TunnelingAgent(reactor, proxy_config, contextFactory, 10, None, None))
host = '47.105.165.81'
port = 5527
# endpoint = TCP4ClientEndpoint(reactor, host, port)
# agent_http = ProxyAgent(endpoint)
# d = agent_http.request(b'GET',b'http://go2mars.top/solo/articles/2018/12/18/1545131102995.html',Headers({'Proxy-Authorization': [encode_user]}),None)
d = agent.request(b'GET', b'http://go2mars.top/solo/articles/2018/12/18/1545131102995.html')
d.addCallback(cbRequest)
d.addErrback(lambda _: print(_))
d.addCallback(lambda _: print(time.clock()))
# Stop the reactor whether the request succeeded or failed.
d.addBoth(lambda _: reactor.stop())
reactor.run()
def remoteCall(self, call):
    """
    RPC handler remoting to REST servers. This method is usually
    registered via registerHandlerMethodForRpc on a WAMP protocol.

    NOTE(review): uses dict.has_key and the unicode builtin, so this
    fragment is Python-2-only.
    """
    proto = call.proto
    uri = call.uri
    args = call.args

    ## extract extra information from RPC call handler argument
    (id, method) = call.extra

    ## get the REST remote onto which we will forward the call
    remote = self.remotesById[id]

    # GET/DELETE carry no body (1 arg: the path); PUT/POST serialize the
    # second argument as the JSON body.
    body = None
    if method in ['GET', 'DELETE']:
        if len(args) != 1:
            raise Exception(
                URI_ERROR_REMOTING,
                "Invalid number of arguments (expected 1, was %d)" % len(args))
    elif method in ['PUT', 'POST']:
        if len(args) != 2:
            raise Exception(
                URI_ERROR_REMOTING,
                "Invalid number of arguments (expected 2, was %d)" % len(args))
        body = json_dumps(args[1])
    else:
        ## should not arrive here!
        raise Exception("logic error")

    # Forward the client's cookie for the remote's domain if configured.
    if remote.forwardCookies and \
       proto.cookies and \
       proto.cookies.has_key(remote.restDomain) and \
       proto.cookies[remote.restDomain] != "":
        cookie = str(proto.cookies[remote.restDomain])
    else:
        cookie = None

    if type(args[0]) not in [str, unicode]:
        raise Exception(
            URI_ERROR_REMOTING,
            "Invalid type for argument 1 (expected str, was %s)" % type(args[0]))

    # First argument is a path resolved against the remote's base URL.
    url = urlparse.urljoin(str(remote.restBaseUrl), str(args[0]))

    if not remote.usePersistentConnections:
        ## Do HTTP/POST as individual request
        ##
        headers = {'Content-Type': 'application/json',
                   'User-Agent': RestRemoter.USER_AGENT}
        if cookie:
            headers['Cookie'] = cookie
        d = getPage(url=url,
                    method=method,
                    postdata=body,
                    headers=headers,
                    timeout=remote.requestTimeout,
                    connectionTimeout=remote.connectionTimeout,
                    followRedirect=remote.redirectLimit > 0)
    else:
        ## Do HTTP/POST via HTTP connection pool
        ##
        ## http://twistedmatrix.com/documents/12.1.0/web/howto/client.html
        ##
        ## avoid module level reactor import
        from twisted.web.client import Agent, RedirectAgent

        # Agent API expects list-valued headers, unlike getPage above.
        headers = {'Content-Type': ['application/json'],
                   'User-Agent': [RestRemoter.USER_AGENT]}
        if cookie:
            headers['Cookie'] = [cookie]

        agent = Agent(self.reactor,
                      pool=self.httppools[remote.id],
                      connectTimeout=remote.connectionTimeout)
        if remote.redirectLimit > 0:
            agent = RedirectAgent(agent, redirectLimit=remote.redirectLimit)

        ## FIXME: honor requestTimeout
        if body:
            d = agent.request(method, url, Headers(headers), StringProducer(body))
        else:
            d = agent.request(method, url, Headers(headers))

        def onResponse(response):
            # Only 200 is treated as success; everything else fails the
            # Deferred with "<code> [<phrase>]".
            if response.code == 200:
                finished = Deferred()
                response.deliverBody(StringReceiver(finished))
                return finished
            else:
                return defer.fail("%s [%s]" % (response.code, response.phrase))

        d.addCallback(onResponse)

    ## request information provided as error detail in case of call fails
    remotingRequest = {'provider': 'rest',
                       'rest-base-url': remote.restBaseUrl,
                       'use-persistent-connections': remote.usePersistentConnections,
                       'request-timeout': remote.requestTimeout,
                       'connection-timeout': remote.connectionTimeout,
                       'method': method}

    d.addCallbacks(self._onRemoteCallResult,
                   self._onRemoteCallError,
                   callbackArgs=[remotingRequest],
                   errbackArgs=[remotingRequest])

    ## FIXME!
    d.addCallback(self.onAfterRemoteCallSuccess, id)
    d.addErrback(self.onAfterRemoteCallError, id)

    return d
def redirect_agent_builder(*args, **kw):
    """Construct an Agent (forwarding all arguments) wrapped so that
    HTTP redirects are followed automatically."""
    inner_agent = Agent(*args, **kw)
    return RedirectAgent(inner_agent)
def connectionLost(self, reason): print('Finished receiving body:', self._bytes_received, reason.getErrorMessage()) #for r in reason.value: result = self._bodybuf.getvalue() r = json.loads(result) #callback(data)调用后,能够向defer数据链中传入一个list数据:[True,传入的参数data],可以实现将获取的 #body传输到下一个函数中去 self.finished.callback(r) url = 'https://sh.lianjia.com/ershoufang/pg1' contextFactory = WebClientContextFactory() agent = RedirectAgent(Agent(reactor, contextFactory)) result = list() t1 = time.time() for i in range(1): i = str(i) u = url + i print(u) d = agent.request(b"GET", u.encode("utf-8")) d.addCallback(cbRequest, u) d.addCallback(lambda f: print(f)) result.append(d) dd = defer.DeferredList(result) dd.addBoth(end_crawl, t1) reactor.run()
def __init__(self):
    """Initialize with an agent that follows redirects and transparently
    decodes gzip-compressed response bodies."""
    base_agent = Agent(reactor)
    redirecting = RedirectAgent(base_agent)
    self.agent = ContentDecoderAgent(redirecting, [(b'gzip', GzipDecoder)])
def remoteCall(self, call):
    """
    RPC handler remoting to Ext.Direct servers. This method is usually
    registered via registerHandlerMethodForRpc on a WAMP protocol.

    NOTE(review): uses dict.has_key, so this fragment is Python-2-only.
    """
    proto = call.proto
    uri = call.uri
    args = call.args

    ## extract extra information from RPC call handler argument
    (id, action, method, _) = call.extra

    ## get the Ext.Direct remote onto which we will forward the call
    remote = self.remotesById[id]

    ## construct the POST body
    d = {'action': action,
         'method': method,
         'data': args,
         'type': 'rpc',
         'tid': 1}
    body = json_dumps(d)

    # Forward the client's cookie for the remote's domain if configured.
    if remote.forwardCookies and \
       proto.cookies and \
       proto.cookies.has_key(remote.routerDomain) and \
       proto.cookies[remote.routerDomain] != "":
        cookie = str(proto.cookies[remote.routerDomain])
    else:
        cookie = None

    if not remote.usePersistentConnections:
        ## Do HTTP/POST as individual request
        ##
        headers = {'Content-Type': 'application/json',
                   'User-Agent': ExtDirectRemoter.USER_AGENT}
        if cookie:
            headers['Cookie'] = cookie
        d = getPage(url = remote.routerUrl,
                    method = 'POST',
                    postdata = body,
                    headers = headers,
                    timeout = remote.requestTimeout,
                    connectionTimeout = remote.connectionTimeout,
                    followRedirect = remote.redirectLimit > 0)
    else:
        ## Do HTTP/POST via HTTP connection pool
        ##
        # Agent API expects list-valued headers, unlike getPage above.
        headers = {'Content-Type': ['application/json'],
                   'User-Agent': [ExtDirectRemoter.USER_AGENT]}
        if cookie:
            headers['Cookie'] = [cookie]
        agent = Agent(self.reactor,
                      pool = self.httppools[remote.id],
                      connectTimeout = remote.connectionTimeout)
        if remote.redirectLimit > 0:
            agent = RedirectAgent(agent, redirectLimit = remote.redirectLimit)
        ## FIXME: honor requestTimeout
        d = agent.request('POST',
                          remote.routerUrl,
                          Headers(headers),
                          StringProducer(body))

        def onResponse(response):
            # Only 200 is treated as success; everything else fails the
            # Deferred with "<code> [<phrase>]".
            if response.code == 200:
                finished = Deferred()
                response.deliverBody(StringReceiver(finished))
                return finished
            else:
                return defer.fail("%s [%s]" % (response.code, response.phrase))

        d.addCallback(onResponse)

    ## request information provided as error detail in case of call fails
    remotingRequest = {'provider': 'extdirect',
                       'router-url': remote.routerUrl,
                       'use-persistent-connections': remote.usePersistentConnections,
                       'request-timeout': remote.requestTimeout,
                       'connection-timeout': remote.connectionTimeout,
                       'action': action,
                       'method': method}

    d.addCallbacks(self._onRemoteCallResult,
                   self._onRemoteCallError,
                   callbackArgs = [remotingRequest],
                   errbackArgs = [remotingRequest])

    ## FIXME!
    d.addCallback(self.onAfterRemoteCallSuccess, id)
    d.addErrback(self.onAfterRemoteCallError, id)

    return d