def connect_to_tracker(self):
    """Send an HTTP scrape request to the tracker.

    Returns a Deferred that fires when the whole request/response chain
    has completed (registered as the "result" task).
    """
    # create the HTTP GET message
    # Note: some trackers have strange URLs, e.g.,
    #       http://moviezone.ws/announce.php?passkey=8ae51c4b47d3e7d0774a720fa511cc2a
    # which has some sort of 'key' as parameter, so we need to use the add_url_params
    # utility function to handle such cases.
    url = add_url_params("http://%s:%s%s" % (self._tracker_address[0], self._tracker_address[1],
                                             self._announce_page.replace(u'announce', u'scrape')),
                         {"info_hash": self._infohash_list})

    # no more requests can be appended to this session
    self._is_initiated = True
    self._last_contact = int(time.time())

    # Follow redirects; reuse the session-wide connection pool.
    agent = RedirectAgent(Agent(reactor, connectTimeout=self.timeout, pool=self._connection_pool))
    try:
        # bytes(url) may raise UnicodeEncodeError for non-ASCII URLs,
        # hence the try/except around the whole chain setup.
        self.request = self.register_task("request", agent.request('GET', bytes(url)))
        self.request.addCallback(self.on_response)
        self.request.addErrback(self.on_error)

        self._logger.debug(u"%s HTTP SCRAPE message sent: %s", self, url)

        self.start_timeout()

        # Return deferred that will evaluate when the whole chain is done.
        self.result_deferred = self.register_task("result", Deferred(canceller=self._on_cancel))
    except UnicodeEncodeError as e:
        # Hand the encoding failure to the caller as a failed Deferred.
        self.result_deferred = defer.fail(e)

    return self.result_deferred
def _setUp(self):
    """Build the Tor-SOCKS control agent and the (optionally SOCKS-proxied)
    experiment agent, wrapping both in RedirectAgent when configured.

    Fixes: the two bare ``except:`` clauses swallowed *every* exception
    (including KeyboardInterrupt); they are narrowed to ImportError, which
    is the only failure these blocks are meant to tolerate.  Also restores
    the missing space in the pyOpenSSL warning message.
    """
    log.debug("Setting up HTTPTest")
    try:
        import OpenSSL
    except ImportError:
        log.err("Warning! pyOpenSSL is not installed. https websites will "
                "not work")

    # Control requests always go through the local Tor SOCKS port.
    self.control_agent = Agent(reactor, sockshost="127.0.0.1",
                               socksport=config.advanced.tor_socksport)

    sockshost, socksport = (None, None)
    if self.localOptions['socksproxy']:
        self.report['socksproxy'] = self.localOptions['socksproxy']
        # "host:port" -> (host, int(port))
        sockshost, socksport = self.localOptions['socksproxy'].split(':')
        socksport = int(socksport)

    self.agent = Agent(reactor, sockshost=sockshost, socksport=socksport)

    if self.followRedirects:
        try:
            from twisted.web.client import RedirectAgent
            self.control_agent = RedirectAgent(self.control_agent)
            self.agent = RedirectAgent(self.agent)
        except ImportError:
            log.err("Warning! You are running an old version of twisted "
                    "(<= 10.1). I will not be able to follow redirects. "
                    "This may make the testing less precise.")
            self.report['errors'].append("Could not import RedirectAgent")

    self.processInputs()
    log.debug("Finished test setup")
def pastie(data, prefix="", postfix="", user="******", lang="text", private="true", password=None):
    """POST *data* to paste.thezomg.com and return a Deferred that fires
    once the JSON response body has been handed to DeferredPrinter."""
    form = {
        'paste_user': user,
        'paste_data': data,
        'paste_lang': lang,
        'api_submit': 'true',
        'mode': 'json',
        'paste_private': private,
    }
    if password:
        form['paste_password'] = password

    request_headers = Headers({
        'User-agent': ['Mozilla/5.0'],
        'Content-type': ['application/x-www-form-urlencoded'],
    })

    agent = RedirectAgent(Agent(reactor))
    encoded_form = urllib.urlencode(form)
    d = agent.request('POST', "http://paste.thezomg.com/",
                      headers=request_headers,
                      bodyProducer=StringProducer(encoded_form))

    def on_response(response):
        done = Deferred()
        response.deliverBody(DeferredPrinter(done, prefix, postfix))
        return done

    d.addCallback(on_response)
    return d
def connect_to_tracker(self):
    """Send an HTTP scrape request to the tracker.

    Returns a Deferred (the "result" task) that fires when the whole
    request/response chain has completed.
    """
    # create the HTTP GET message
    # Note: some trackers have strange URLs, e.g.,
    #       http://moviezone.ws/announce.php?passkey=8ae51c4b47d3e7d0774a720fa511cc2a
    # which has some sort of 'key' as parameter, so we need to use the add_url_params
    # utility function to handle such cases.
    url = add_url_params("http://%s:%s/%s" % (self._tracker_address[0], self._tracker_address[1],
                                              self._announce_page.replace(u'announce', u'scrape')),
                         {"info_hash": self._infohash_list})

    # Follow redirects; reuse the session-wide connection pool.
    agent = RedirectAgent(Agent(reactor, connectTimeout=self.timeout, pool=self._connection_pool))
    self.request = self.register_task("request", agent.request('GET', bytes(url)))
    self.request.addCallback(self.on_response)
    self.request.addErrback(self.on_error)

    self._logger.debug(u"%s HTTP SCRAPE message sent: %s", self, url)

    # no more requests can be appended to this session
    self._is_initiated = True
    self._last_contact = int(time.time())

    # Return deferred that will evaluate when the whole chain is done.
    self.result_deferred = self.register_task("result", Deferred(canceller=self._on_cancel))
    return self.result_deferred
def getIssue(baseurl, num, template, conf):
    """Fetch issue *num* as JSON from *baseurl* (trailing slash tolerated)
    and return a Deferred fired after the body reaches DeferredPrinter."""
    base = baseurl[:-1] if baseurl.endswith("/") else baseurl
    session_cookie = conf['cookie']

    request_headers = Headers({
        'User-agent': ['Mozilla/5.0'],
        "Cookie": [session_cookie],
    })

    url = "%s/%s.json" % (base, num)
    agent = RedirectAgent(Agent(reactor))
    d = agent.request('GET', url, headers=request_headers)

    def on_response(response):
        done = Deferred()
        response.deliverBody(DeferredPrinter(done, base, num, template))
        return done

    d.addCallback(on_response)
    return d
def normal_crawl(self, request, reactor):
    """Issue a redirect-following GET for *request* and return the
    response Deferred."""
    target = self.parse_uri(request)
    http_agent = RedirectAgent(Agent(reactor, connectTimeout=10))
    return http_agent.request(b'GET', target.encode('ascii'),
                              Headers(self.headers), None)
def __call__(self, url):
    # Fetch *url* (following redirects) and yield the links parsed from
    # the HTML body.
    # NOTE(review): this generator yields Deferreds, so it presumably runs
    # under @defer.inlineCallbacks -- the decorator is not visible in this
    # chunk; confirm at the definition site.
    self.log.debug('calling: GET -> %s', url)
    agent = RedirectAgent(Agent(reactor))
    on_end_defer = defer.Deferred()
    response_obj = yield agent.request('GET', url)
    # HTMLResponseProtocol consumes the body and fires on_end_defer with
    # the extracted links object.
    response_obj.deliverBody(HTMLResponseProtocol(on_end_defer))
    links_obj = yield on_end_defer
    defer.returnValue(links_obj)
def __init__(self, reactor, agent, well_known_cache=None):
    """Store the reactor/agent pair and the cache of .well-known results,
    defaulting to the module-level shared cache."""
    self._reactor = reactor
    self._clock = Clock(reactor)
    self._well_known_cache = (
        _well_known_cache if well_known_cache is None else well_known_cache
    )
    # .well-known requests may redirect; follow them.
    self._well_known_agent = RedirectAgent(agent)
def __init__(self, redirectLimit=5, connectTimeout=30, proxy=None):
    """Initialise a redirect-following agent, optionally through a proxy.

    :param proxy: optional dict with ``host`` and ``port`` keys.
    :raises TypeError: if *proxy* is not a mapping with those fields.

    Fix: the bare ``except:`` around the endpoint construction swallowed
    every exception (including KeyboardInterrupt) and masked unrelated
    failures; it is narrowed to the two errors a malformed *proxy*
    argument can actually produce (missing key / non-mapping).
    """
    self.lastURI = None
    if proxy:
        try:
            endpoint = TCP4ClientEndpoint(reactor, proxy["host"], proxy["port"],
                                          timeout=connectTimeout)
        except (KeyError, TypeError):
            raise TypeError("ResolverAgent's proxy argument need to be a dict "
                            "with fields host and port")
        agent = ProxyAgent(endpoint)
    else:
        agent = Agent(reactor, connectTimeout=connectTimeout)
    RedirectAgent.__init__(self, agent, redirectLimit=redirectLimit)
def _webhook_request(self, method, uri, headers, verify_ssl=False, bodyProducer=None):
    """Send the webhook request and return a Deferred firing with the
    response body.

    Fixes two bugs in the trailing-slash retry path:
    * ``uri[-1] != b"/"`` compared an int with bytes (always true on
      Python 3); replaced with ``uri.endswith(b"/")``.
    * the retry Deferred was created and then discarded, so the body of
      the original 404 response was read instead; the retry is now
      chained and its response rendered.
    """
    agent = RedirectAgent(
        Agent(
            reactor,
            contextFactory=WebClientContextFactory(verify=verify_ssl),
        ))

    def eb_catch_partial(failure):
        # Twisted is raising PartialDownloadError because the responses
        # do not contain a Content-Length header. Since every response
        # holds the whole body we just take the result.
        failure.trap(PartialDownloadError)
        if int(failure.value.status) == HTTPStatus.OK:
            return failure.value.response
        return failure

    def render_response(response):
        """Render the HTTPS response received."""
        # Error out if the response has a status code of 400 or above.
        if response.code >= int(HTTPStatus.BAD_REQUEST):
            # If there was no trailing slash, retry with a trailing slash
            # because of varying requirements of BMC manufacturers.
            if response.code == HTTPStatus.NOT_FOUND and not uri.endswith(b"/"):
                # NOTE(review): bodyProducer is reused here, as in the
                # original; most producers can only be consumed once --
                # confirm callers only pass replayable producers.
                retry = agent.request(
                    method,
                    uri + b"/",
                    headers=headers,
                    bodyProducer=bodyProducer,
                )
                retry.addCallback(render_response)
                return retry
            raise PowerActionError(
                "Request failed with response status code: "
                "%s." % response.code)

        body = readBody(response)
        body.addErrback(eb_catch_partial)
        return body

    d = agent.request(
        method,
        uri,
        headers=headers,
        bodyProducer=bodyProducer,
    )
    d.addCallback(render_response)
    return d
def request(
    method, url, headers=None, body=None, allow_redirects=True, agent=_agent
):
    """Issue an HTTP request through *agent* (optionally following
    redirects) and return a Deferred firing with a Response."""
    producer = _StringProducer(body) if body else body
    chosen_agent = RedirectAgent(agent) if allow_redirects else agent
    d = chosen_agent.request(method, url, Headers(headers), producer)
    d.addCallback(Response, method)
    return d
def __init__(self, proxy_host="scorpion.premiumize.me", use_proxy=False, p_user='', p_pass=''):
    # Python 2 module (print statement).
    print "Twisted Agent in use", __TW_VER__
    # can not follow rel. url redirects (location header)
    self.headers = Headers(agent_headers)
    # Use the proxy only when requested AND endpoint support was imported
    # (twEndpoints is presumably an import-success flag -- confirm).
    self.useProxy = use_proxy and twEndpoints
    if self.useProxy:
        self.endpoint = TCP4ClientEndpoint(reactor, proxy_host, 80)
        self.agent = RedirectAgent(ProxyAgent(self.endpoint))
        # HTTP Basic credentials for the proxy.
        auth = base64.b64encode("%s:%s" % (p_user, p_pass))
        self.headers.addRawHeader('Proxy-Authorization', 'Basic ' + auth.strip())
    else:
        self.agent = RedirectAgent(Agent(reactor))
def __call__(self, url):
    # Python 2 module (`except Exception, err` syntax).
    # GET *url* (following redirects) and log the response code; any
    # failure (timeouts, DNS errors, ...) is caught and logged instead of
    # propagating.  Yields Deferreds -- presumably runs under
    # @defer.inlineCallbacks (decorator not visible here).
    self.log.debug('calling: GET -> %s', url)
    agent = RedirectAgent(Agent(reactor))
    code = None
    try:
        # State can be either OK
        response_obj = yield agent.request('GET', url)
        code = response_obj.code
        self.log.debug('response code: %s', code)
    except Exception, err:
        # Or state can fail due to any reason: timeouts, dns errors, etc
        # I catch all errors
        self.log.error('call error: -> %s, reson: %s', url, err)
def initialize(self):
    """Create the HTTP agent for this test, wrapping it in RedirectAgent
    when redirects should be followed, and reset the request/response
    records."""
    logging.debug("HTTPTest.initialize")
    from twisted.web.client import Agent
    import yaml

    base_agent = Agent(self.reactor)
    if self.follow_redirects:
        from twisted.web.client import RedirectAgent
        base_agent = RedirectAgent(base_agent)
    self.agent = base_agent

    self.request = {}
    self.response = {}
def __ssl_request__(query_url):
    # Nested helper: `self`, `err`, `msg` and `channel` are captured from
    # the enclosing scope (note there is no `self` parameter).
    headers = self.createOnionooHeaders()
    contextFactory = OnionooClientContextFactory()
    agent = RedirectAgent(Agent(reactor, contextFactory))
    d = agent.request(
        'GET',
        str(query_url),
        Headers(headers),
        None)
    # Parse the Onionoo response, then paste the result to the channel;
    # each step gets its own errback so a failure in either is reported.
    d.addCallback(self.onionooCallback)
    d.addErrback(err)
    d.addCallback(self.onionooPaste, msg, channel)
    d.addErrback(err)
    return d
def build_agent(req):
    """Construct a redirect-following, gzip-decoding agent for *req*,
    honouring its proxy settings (CONNECT tunnelling for https)."""
    uri = URI.fromBytes(req.url)
    proxy = req.get('proxy')
    if req.get('use_proxy') is False:
        proxy = None

    if not proxy:
        base = Agent(reactor)
    elif uri.scheme == 'https':
        # https through a proxy requires CONNECT tunnelling.
        base = TunnelingAgent(
            reactor=reactor,
            proxy=proxy,
            contextFactory=ScrapexClientContextFactory(),
            connectTimeout=req.get('timeout'))
    else:
        endpoint = TCP4ClientEndpoint(reactor, host=proxy.host,
                                      port=proxy.port,
                                      timeout=req.get('timeout'))
        base = ProxyAgent(endpoint)
        if proxy.auth_header:
            req.get('headers')['Proxy-Authorization'] = proxy.auth_header

    base = RedirectAgent(base, redirectLimit=3)
    return ContentDecoderAgent(base, [('gzip', GzipDecoder)])
def __init__(
    self,
    reactor,
    tls_client_options_factory,
    _well_known_tls_policy=None,
    _srv_resolver=None,
    _well_known_cache=well_known_cache,
):
    """Set up the federation agent: SRV resolver, a bounded persistent
    connection pool, and a redirect-following agent for .well-known
    lookups."""
    self._reactor = reactor
    self._clock = Clock(reactor)
    self._tls_client_options_factory = tls_client_options_factory
    if _srv_resolver is None:
        _srv_resolver = SrvResolver()
    self._srv_resolver = _srv_resolver

    self._pool = HTTPConnectionPool(reactor)
    self._pool.retryAutomatically = False
    self._pool.maxPersistentPerHost = 5
    self._pool.cachedConnectionTimeout = 2 * 60

    agent_args = {}
    if _well_known_tls_policy is not None:
        # the param is called 'contextFactory', but actually passing a
        # contextfactory is deprecated, and it expects an IPolicyForHTTPS.
        agent_args['contextFactory'] = _well_known_tls_policy
    _well_known_agent = RedirectAgent(
        Agent(self._reactor, pool=self._pool, **agent_args),
    )
    self._well_known_agent = _well_known_agent

    # our cache of .well-known lookup results, mapping from server name
    # to delegated name. The values can be:
    # `bytes`: a valid server-name
    # `None`: there is no (valid) .well-known here
    self._well_known_cache = _well_known_cache
def _request(self, request, callback): global pnconn_pool ## Build URL ''' url = self.origin + '/' + "/".join([ "".join([ ' ~`!@#$%^&*()+=[]\\{}|;\':",./<>?'.find(ch) > -1 and hex(ord(ch)).replace( '0x', '%' ).upper() or ch for ch in list(bit) ]) for bit in request]) ''' url = self.getUrl(request) agent = ContentDecoderAgent( RedirectAgent( Agent(reactor, contextFactory=WebClientContextFactory(), pool=self.ssl and None or pnconn_pool)), [('gzip', GzipDecoder)]) request = agent.request('GET', url, Headers(self.headers), None) def received(response): finished = Deferred() response.deliverBody(PubNubResponse(finished)) return finished def complete(data): callback(eval(data)) request.addCallback(received) request.addBoth(complete)
def queryApi(self, remoteId):
    """
    Query Ext.Direct API by remote ID.

    Returns None for an unknown remote; otherwise a Deferred firing with
    the API body (or failing with "code [phrase]" on non-200 responses).
    """
    remote = self.remotesById.get(remoteId, None)
    if remote is None:
        return None

    if not remote.usePersistentConnections:
        ## Do HTTP/POST as individual request
        ##
        d = getPage(url=remote.apiUrl,
                    method='GET',
                    headers={'User-Agent': ExtDirectRemoter.USER_AGENT},
                    timeout=remote.requestTimeout,
                    connectionTimeout=remote.connectionTimeout,
                    followRedirect=remote.redirectLimit > 0)
    else:
        ## Do HTTP/POST via HTTP connection pool
        ##
        ## avoid module level reactor import
        from twisted.web.client import Agent, RedirectAgent
        # One pool per remote, keyed by remote.id.
        agent = Agent(self.reactor,
                      pool=self.httppools[remote.id],
                      connectTimeout=remote.connectionTimeout)
        if remote.redirectLimit > 0:
            agent = RedirectAgent(agent, redirectLimit=remote.redirectLimit)
        ## FIXME: honor requestTimeout
        d = agent.request('GET',
                          remote.apiUrl,
                          Headers({'User-Agent': [ExtDirectRemoter.USER_AGENT]}))

        def onResponse(response):
            if response.code == 200:
                finished = Deferred()
                response.deliverBody(StringReceiver(finished))
                return finished
            else:
                return defer.fail("%s [%s]" % (response.code, response.phrase))

        d.addCallback(onResponse)

    # Metadata describing this request, passed to result/error handlers.
    apiRequest = {'provider': 'extdirect',
                  'api-url': remote.apiUrl,
                  'use-persistent-connections': remote.usePersistentConnections,
                  'request-timeout': remote.requestTimeout,
                  'connection-timeout': remote.connectionTimeout}

    d.addCallbacks(self._onQueryApiResult,
                   self._onQueryApiError,
                   callbackArgs=[remote, apiRequest],
                   errbackArgs=[apiRequest])
    return d
def _setUp(self):
    """Set up the control agent (Tor SOCKS) and the experiment agent
    (user-supplied SOCKS proxy or plain), with optional redirect
    following.

    Fixes: the two bare ``except:`` clauses swallowed *every* exception;
    they are narrowed to ImportError, which is the only failure those
    blocks are meant to tolerate.
    """
    super(HTTPTest, self)._setUp()
    try:
        import OpenSSL
    except ImportError:
        log.err("Warning! pyOpenSSL is not installed. https websites will "
                "not work")

    self.control_agent = TrueHeadersSOCKS5Agent(
        reactor,
        proxyEndpoint=TCP4ClientEndpoint(reactor, '127.0.0.1',
                                         config.tor.socks_port))

    self.report['socksproxy'] = None
    sockshost, socksport = (None, None)
    if self.localOptions['socksproxy']:
        try:
            # "host:port" -> (host, port); anything else is invalid.
            sockshost, socksport = self.localOptions['socksproxy'].split(':')
            self.report['socksproxy'] = self.localOptions['socksproxy']
        except ValueError:
            raise InvalidSocksProxyOption
        socksport = int(socksport)
        self.agent = TrueHeadersSOCKS5Agent(
            reactor,
            proxyEndpoint=TCP4ClientEndpoint(reactor, sockshost, socksport))
    else:
        self.agent = TrueHeadersAgent(reactor)

    self.report['agent'] = 'agent'

    if self.followRedirects:
        try:
            from twisted.web.client import RedirectAgent
            self.control_agent = RedirectAgent(self.control_agent)
            self.agent = RedirectAgent(self.agent)
            self.report['agent'] = 'redirect'
        except ImportError:
            log.err("Warning! You are running an old version of twisted "
                    "(<= 10.1). I will not be able to follow redirects. "
                    "This may make the testing less precise.")

    self.processInputs()
    log.debug("Finished test setup")
def download(self, name, url, path):
    # Python 2 module (uses `unicode`).  Download *url* to *path*,
    # updating the GTK progress bar, optionally over Tor, with
    # certificate pinning for the default mirror.
    # keep track of current download
    self.current_download_path = path
    self.current_download_url = url

    mirror_url = url.format(self.common.settings['mirror'])

    # convert mirror_url from unicode to string, if needed (#205)
    if isinstance(mirror_url, unicode):
        mirror_url = unicodedata.normalize('NFKD', mirror_url).encode(
            'ascii', 'ignore')

    # initialize the progress bar
    self.progressbar.set_fraction(0)
    self.progressbar.set_text(_('Downloading {0}').format(name))
    self.progressbar.show()
    self.refresh_gtk()

    if self.common.settings['download_over_tor']:
        from twisted.internet.endpoints import TCP4ClientEndpoint
        from txsocksx.http import SOCKS5Agent

        torEndpoint = TCP4ClientEndpoint(reactor, '127.0.0.1', 9050)

        # default mirror gets certificate pinning, only for requests that use the mirror
        if self.common.settings[
                'mirror'] == self.common.default_mirror and '{0}' in url:
            agent = SOCKS5Agent(reactor,
                                TorProjectPolicyForHTTPS(
                                    self.common.paths['torproject_pem']),
                                proxyEndpoint=torEndpoint)
        else:
            agent = SOCKS5Agent(reactor, proxyEndpoint=torEndpoint)
    else:
        if self.common.settings[
                'mirror'] == self.common.default_mirror and '{0}' in url:
            agent = Agent(
                reactor,
                TorProjectPolicyForHTTPS(
                    self.common.paths['torproject_pem']))
        else:
            agent = Agent(reactor)

    # actually, agent needs to follow redirect
    agent = RedirectAgent(agent)

    # start the request
    d = agent.request('GET', mirror_url,
                      Headers({'User-Agent': ['torbrowser-launcher']}),
                      None)

    self.file_download = open(path, 'w')
    d.addCallback(self.response_received).addErrback(self.download_error)

    if not reactor.running:
        reactor.run()
def _agent(self):
    """Build the (optionally proxied, optionally redirect-following)
    agent and fire the GET request for the configured URL."""
    if self._proxyIp:
        endpoint = TCP4ClientEndpoint(reactor=self._reactor,
                                      host=self._ipAddr,
                                      port=self._port,
                                      timeout=self._timeout)
        agent = ProxyAgent(endpoint)
    else:
        agent = Agent(self._reactor, connectTimeout=self._timeout)

    if self._follow:
        agent = RedirectAgent(agent)

    return agent.request("GET", self._reqURL, self._headers)
def __init__(self, *argz, **kwz):
    """Initialise the OneDrive API client with a quiet connection pool
    and a gzip-decoding, redirect-following, TLS-verifying agent."""
    super(txOneDriveAPI, self).__init__(*argz, **kwz)

    pool = self.request_pool = QuietHTTPConnectionPool(
        reactor,
        debug_requests=self.debug_requests,
        **self.request_pool_options)

    base_agent = Agent(reactor, TLSContextFactory(self.ca_certs_files), pool=pool)
    self.request_agent = ContentDecoderAgent(
        RedirectAgent(base_agent), [('gzip', GzipDecoder)])
def do_http(self, url):
    # Python 2 module (print statements).  GET *url* and dump the response
    # metadata, then stream the body to BeginningPrinter.
    print "Visiting", url
    #url = "http://127.0.0.1:8000/?" + url.encode("base64").replace("\n","") + '/'
    agent = RedirectAgent(Agent(reactor))
    d = agent.request('GET', url, Headers({'User-Agent': ['Clickr']}), None)

    def cbResponse(response):
        # NOTE(review): this callback is defined but no addCallback(cbResponse)
        # is visible in this chunk -- confirm it is attached elsewhere.
        from pprint import pformat
        from twisted.internet.defer import Deferred
        print 'Response version:', response.version
        print 'Response code:', response.code
        print 'Response phrase:', response.phrase
        print 'Response headers:'
        print pformat(list(response.headers.getAllRawHeaders()))
        print 'Response received'
        finished = Deferred()
        response.deliverBody(BeginningPrinter(finished))
        return finished
def __init__(
    self,
    reactor: IReactorTime,
    agent: IAgent,
    user_agent: bytes,
    well_known_cache: Optional[TTLCache] = None,
    had_well_known_cache: Optional[TTLCache] = None,
):
    """Keep references to the reactor/agent pair and the caches of
    .well-known lookup results, defaulting to the shared module-level
    caches."""
    self._reactor = reactor
    self._clock = Clock(reactor)

    self._well_known_cache = (
        _well_known_cache if well_known_cache is None else well_known_cache
    )
    self._had_valid_well_known_cache = (
        _had_valid_well_known_cache
        if had_well_known_cache is None
        else had_well_known_cache
    )

    # .well-known responses may redirect; follow them.
    self._well_known_agent = RedirectAgent(agent)
    self.user_agent = user_agent
def __init__(
    self,
    reactor,
    agent,
    user_agent,
    well_known_cache=None,
    had_well_known_cache=None,
):
    """Store the reactor, a redirect-following wrapper around *agent*,
    and the .well-known result caches (shared defaults when omitted)."""
    self._reactor = reactor
    self._clock = Clock(reactor)

    if well_known_cache is None:
        well_known_cache = _well_known_cache
    if had_well_known_cache is None:
        had_well_known_cache = _had_valid_well_known_cache
    self._well_known_cache = well_known_cache
    self._had_valid_well_known_cache = had_well_known_cache

    self._well_known_agent = RedirectAgent(agent)
    self.user_agent = user_agent
def on_title(self, bot, user, details):
    """TITLE [url] - If provided, prints the title of url. If not, prints
    the title of the last mentioned url."""
    # Python 2 module (`except urllib2.URLError, e` syntax).
    if not len(details['splitmsg']):
        # No argument: fall back to the last URL seen in this channel.
        if not bot in self.bots:
            return "No URL has been said recently."
        if not details["channel"].lower() in self.bots[bot]:
            return "No URL has been said recently in this channel."
        url = self.bots[bot][details['channel'].lower()]
    else:
        url = details['splitmsg'][0]

    # Prepend a scheme if the bare string doesn't match the URL pattern.
    match = titlepattern.match(str(url))
    if not match:
        url = "http://" + url
        match = titlepattern.match(url)
        if not match:
            return "Oops, try a valid url!"
    try:
        agent = RedirectAgent(Agent(reactor))
        d = agent.request('GET', str(url))

        def cbRequest(response):
            finished = Deferred()
            response.deliverBody(DeferredPrinter(finished, url))
            return finished

        d.addCallback(cbRequest)
        return d
    except urllib2.HTTPError as e:
        return "Looks like that page has an error on it! (%s: %i)" % (url, e.code)
    except urllib2.URLError, e:
        return "There was an error retrieving the page's data. (%s: %s)" % (url, e)
def setUp(self):
    """Create the HTTP agent for the test, optionally wrapped in a
    RedirectAgent, and reset request/response state.

    Fixes: the two bare ``except:`` clauses swallowed *every* exception;
    they are narrowed to ImportError, the only failure these blocks are
    meant to tolerate.
    """
    log.debug("Setting up HTTPTest")
    try:
        import OpenSSL
    except ImportError:
        log.err("Warning! pyOpenSSL is not installed. https websites will "
                "not work")

    self.agent = Agent(reactor)

    if self.followRedirects:
        try:
            from twisted.web.client import RedirectAgent
            self.agent = RedirectAgent(self.agent)
        except ImportError:
            log.err("Warning! You are running an old version of twisted "
                    "(<= 10.1). I will not be able to follow redirects. "
                    "This may make the testing less precise.")
            self.report['errors'].append("Could not import RedirectAgent")

    self.request = {}
    self.response = {}
    self.processInputs()
    log.debug("Finished test setup")
def __init__(self):
    """Build a cookie-aware, gzip-decoding, redirect-following HTTP
    client with a persistent connection pool and browser-like default
    headers."""
    self.pool = HTTPConnectionPool(reactor, persistent=True)
    # Max persistent connections per host (Twisted's default is 2).
    self.pool.maxPersistentPerHost = 5
    # Idle cached-connection timeout (Twisted's default is 240 seconds).
    self.pool.cachedConnectionTimeout = 50
    contextFactory = WebClientContextFactory()
    raw_agent = Agent(reactor, contextFactory, pool=self.pool)
    agent = RedirectAgent(
        ContentDecoderAgent(raw_agent, [('gzip', GzipDecoder)]))
    self.cookieJar = CookieJar()
    self.agent = CookieAgent(agent, self.cookieJar)
    self.headers = {'User-agent': ['Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.1) Gecko/2008071615 Fedora/3.0.1-1.fc9 Firefox/3.0.1'],
                    'Accept-Language': ['zh-Hans-CN,zh-Hans;q=0.5'],
                    'Accept': ['text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8'],
                    'Accept-Encoding': ['gb2313,utf-8;q=0.7,*;q=0.7'],
                    'Cache-Control': ['max-age=0']}
def main():
    # Python 2 module (print statements).  Fan out one social-data request
    # per brewery, at most 10 concurrently, then shut down the reactor.
    agent = RedirectAgent(Agent(reactor))
    sem = DeferredSemaphore(10)  # cap concurrent requests at 10
    print "Loading breweries..."
    mongo = MongoClient().entities.ba
    breweries = loadBreweries(mongo)
    print "Done loading breweries."
    jobs = []
    for brewery in breweries:
        jobs.append(sem.run(socialRequest, brewery, agent, mongo))
        # if len(jobs) % 50 == 0:
        #     print "Brewery Jobs started: %d" % len(jobs)
    # Fire cbShutdown whether the batch succeeds or fails.
    d = gatherResults(jobs)
    d.addBoth(cbShutdown)
    print "Let the Reactor BEGIN!"
    reactor.run()
def __init__(self, reactor):
    """Set up the redirect-following agent (single persistent connection
    per host), the request queue and default playlist/download state."""
    self.reactor = reactor
    pool = HTTPConnectionPool(reactor, persistent=True)
    pool.maxPersistentPerHost = 1
    pool.cachedConnectionTimeout = 600
    self.agent = RedirectAgent(Agent(reactor, pool=pool))
    self.reqQ = HttpReqQ(self.agent, self.reactor)
    self.clientPlaylist = HlsPlaylist()
    self.verbose = False
    self.download = False
    self.outDir = ""
    self.encryptionHandled = False

    # required for the dump durations functionality
    self.dur_dump_file = None
    self.dur_avproble_acc = 0
    self.dur_vt_acc = 0
    self.dur_playlist_acc = 0
def download(self, name, url, path):
    # Python 2 module (print statement).  Download *url* to *path* with a
    # GTK progress bar, optionally over Tor, pinning the Tor Project
    # certificate for the default mirror.
    # keep track of current download
    self.current_download_path = path
    self.current_download_url = url

    # initialize the progress bar
    mirror_url = url.format(self.common.settings['mirror'])
    self.progressbar.set_fraction(0)
    self.progressbar.set_text(_('Downloading {0}').format(name))
    self.progressbar.show()
    self.refresh_gtk()

    if self.common.settings['update_over_tor']:
        print _('Updating over Tor')
        from twisted.internet.endpoints import TCP4ClientEndpoint
        from txsocksx.http import SOCKS5Agent

        torEndpoint = TCP4ClientEndpoint(reactor, '127.0.0.1', 9050)

        # default mirror gets certificate pinning, only for requests that use the mirror
        if self.common.settings['mirror'] == self.common.default_mirror and '{0}' in url:
            agent = SOCKS5Agent(reactor,
                                VerifyTorProjectCert(self.common.paths['torproject_pem']),
                                proxyEndpoint=torEndpoint)
        else:
            agent = SOCKS5Agent(reactor, proxyEndpoint=torEndpoint)
    else:
        if self.common.settings['mirror'] == self.common.default_mirror and '{0}' in url:
            agent = Agent(reactor,
                          VerifyTorProjectCert(self.common.paths['torproject_pem']))
        else:
            agent = Agent(reactor)

    # actually, agent needs to follow redirect
    agent = RedirectAgent(agent)

    # start the request
    d = agent.request('GET', mirror_url,
                      Headers({'User-Agent': ['torbrowser-launcher']}),
                      None)

    self.file_download = open(path, 'w')
    d.addCallback(self.response_received).addErrback(self.download_error)

    if not reactor.running:
        reactor.run()
def __init__(self, **config):
    # Python 2 module (dict.viewitems).  Apply keyword configuration over
    # class-level defaults, then build the HTTP agent.
    for k, v in config.viewitems():
        try:
            x = getattr(self, k)
        except AttributeError:
            # Reject unknown keys instead of silently setting them.
            raise AttributeError(
                'Unrecognized configuration key: {}'.format(k))
        if isinstance(x, Mapping) and isinstance(v, Mapping):
            # Merge nested mapping config over the class-level default.
            v = AttrDict(v)
            v.rebase(AttrDict(x))
        setattr(self, k, v)

    pool = QuietHTTPConnectionPool(reactor, persistent=True)
    for k, v in self.request_pool_options.viewitems():
        getattr(pool, k)  # to somewhat protect against typos
        setattr(pool, k, v)

    self.request_agent = ContentDecoderAgent(
        RedirectAgent(
            Agent(reactor, TLSContextFactory(self.ca_certs_files), pool=pool)),
        [('gzip', GzipDecoder)])
def __init__(
    self,
    # This reactor should also be IReactorTCP and IReactorPluggableNameResolver
    # because it eventually makes its way to HostnameEndpoint.__init__.
    # But that's not easy to express with an annotation. We use the
    # `seconds` attribute below, so mark this as IReactorTime for now.
    reactor: IReactorTime,
    tls_client_options_factory: Optional[ClientTLSOptionsFactory],
    _well_known_tls_policy: Optional[IPolicyForHTTPS] = None,
    _srv_resolver: Optional[SrvResolver] = None,
    _well_known_cache: TTLCache[bytes, Optional[bytes]] = well_known_cache,
) -> None:
    """Set up the federation agent: SRV resolver, a bounded persistent
    connection pool, and a redirect-following agent for .well-known
    lookups."""
    self._reactor = reactor
    self._tls_client_options_factory = tls_client_options_factory
    self._srv_resolver = (
        SrvResolver() if _srv_resolver is None else _srv_resolver
    )

    self._pool = HTTPConnectionPool(reactor)
    self._pool.retryAutomatically = False
    self._pool.maxPersistentPerHost = 5
    self._pool.cachedConnectionTimeout = 2 * 60

    agent_kwargs = {}
    if _well_known_tls_policy is not None:
        # the param is called 'contextFactory', but actually passing a
        # contextfactory is deprecated, and it expects an IPolicyForHTTPS.
        agent_kwargs["contextFactory"] = _well_known_tls_policy
    self._well_known_agent = RedirectAgent(
        Agent(self._reactor, pool=self._pool, **agent_kwargs)
    )

    # our cache of .well-known lookup results, mapping from server name
    # to delegated name. The values can be:
    # `bytes`: a valid server-name
    # `None`: there is no (valid) .well-known here
    self._well_known_cache = _well_known_cache
def request(
        self,
        method='GET',
        path='',
        headers=None,
        body=None):
    """Perform a REST request against the configured host, handling 307
    redirects and 404-triggered fallback to the primary host.

    Fix: *headers* previously defaulted to a shared mutable dict which
    this method mutated (``headers.update``) on every JSON-body request,
    leaking 'Content-Type' into all subsequent calls.  It now defaults to
    None and the default is rebuilt per call; caller-supplied dicts are
    copied before mutation.
    """
    if headers is None:
        headers = {
            'User-Agent': ['onDemand/1.0 (Rest_Client)'],
            'Accept': ['application/json'],
        }
    elif isinstance(headers, dict):
        # Don't mutate the caller's dict.
        headers = dict(headers)
    data = None
    if self.loc:
        host = '/'.join((self.loc, path))
    else:
        host = '/'.join((self.host, path))
    if self.token:
        host += '?auth=' + self.token
    if body:
        headers.update({'Content-Type': ['application/json']})
        data = FileBodyProducer(StringIO(json.dumps(body)))
    agent = RedirectAgent(Agent(reactor, pool=self.pool))
    d = agent.request(method, host, Headers(headers), data)

    def cbFail(fail):
        if hasattr(fail.value, 'response'):
            if hasattr(fail.value.response, 'code'):
                if fail.value.response.code == 307:
                    # Temporary redirect: remember the new host (or clear
                    # it when redirected back to the primary) and retry.
                    loc = fail.value.response.headers.getRawHeaders(
                        'location')
                    new = urlparse(loc[0])
                    newhost = '://'.join((new.scheme, new.netloc))
                    if newhost == self.host:
                        self.loc = None
                    else:
                        self.loc = newhost
                    self.log.debug('redirect: %s' % self.loc)
                    data = FileBodyProducer(StringIO(json.dumps(body)))
                    d = agent.request(method, loc[0], Headers(headers), data)
                    d.addCallbacks(cbRequest, cbFail)
                    return d
                elif fail.value.response.code == 404 and self.loc:
                    # Delegated host lost the resource: fall back to the
                    # primary host and retry once.
                    # NOTE(review): this passes the fully-joined URL as
                    # `path` and a Headers object as `headers` -- kept
                    # as-is to preserve existing behavior; confirm intent.
                    self.loc = None
                    host = '/'.join((self.host, path))
                    if self.token:
                        host += '?auth=' + self.token
                    d = self.request(method, host, Headers(headers), body)
                    d.addCallbacks(cbRequest, cbFail)
                    return d
                else:
                    print(dir(fail.value))
                    print(fail.value.message)
                    print(fail.value.args)
        self.log.error('unhandled failure: %s -- %s' % (
            fail.value.message, fail.value))

    def cbRequest(response):
        # Stream the body to the handler protocol; the returned Deferred
        # fires when the connection closes.
        finished = Deferred()
        response.deliverBody(RestHandle(finished, self.event_handler))
        return finished

    d.addCallbacks(cbRequest, cbFail)
    return d
def connectionLost(self, reason): self.deferred.callback(b''.join(self.dataBuffer)) def readBody(response): d = defer.Deferred() response.deliverBody( SimpleBodyProtocol(response.code, response.phrase, d)) return d try: from twisted.web.client import HTTPConnectionPool pool = HTTPConnectionPool(reactor) pool._factory.noisy = False agent = RedirectAgent(Agent(reactor, pool=pool)) except ImportError: agent = RedirectAgent(Agent(reactor)) eqalert_config = config.YAMLConfig( "cfg/EQ_Alert.config.yml", { 'enabled': True, 'timer': 80, 'debug': False, 'api': "http://pso2.acf.me.uk/api/eq.json", '0': True, '1': True, '2': True, '3': True, '4': True, '5': True,
class HTTPTest(NetTestCase):
    """
    A utility class for dealing with HTTP based testing. It provides methods to
    be overriden for dealing with HTTP based testing. The main functions to look
    at are processResponseBody and processResponseHeader that are invoked once
    the headers have been received and once the request body has been received.

    Fixes applied:
    * the two bare ``except:`` clauses in setUp are narrowed to ImportError;
    * ``_cbResponse`` recorded ``response.length`` under the 'version' key
      (copy-paste bug); it now records ``response.version``.
    """

    name = "HTTP Test"
    version = 0.1

    randomizeUA = True
    followRedirects = False

    def setUp(self):
        log.debug("Setting up HTTPTest")
        try:
            import OpenSSL
        except ImportError:
            log.err("Warning! pyOpenSSL is not installed. https websites will "
                    "not work")

        self.agent = Agent(reactor)

        if self.followRedirects:
            try:
                from twisted.web.client import RedirectAgent
                self.agent = RedirectAgent(self.agent)
            except ImportError:
                log.err("Warning! You are running an old version of twisted "
                        "(<= 10.1). I will not be able to follow redirects. "
                        "This may make the testing less precise.")
                self.report['errors'].append("Could not import RedirectAgent")

        self.request = {}
        self.response = {}
        self.processInputs()
        log.debug("Finished test setup")

    def processInputs(self):
        # Hook for subclasses; called once at the end of setUp.
        pass

    def _processResponseBody(self, data, body_processor):
        log.debug("Processing response body")
        self.response['body'] = data
        self.report['response'] = self.response
        if body_processor:
            body_processor(data)
        else:
            self.processResponseBody(data)

    def processResponseBody(self, data):
        """
        This should handle all the response body smushing for getting it ready
        to be passed onto the control.

        @param data: The content of the body returned.
        """
        pass

    def processResponseHeaders(self, headers):
        """
        This should take care of dealing with the returned HTTP headers.

        @param headers: The content of the returned headers.
        """
        pass

    def processRedirect(self, location):
        """
        Handle a redirection via a 3XX HTTP status code.

        @param location: the url that is being redirected to.
        """
        pass

    def doRequest(self, url, method="GET",
                  headers=None, body=None,
                  headers_processor=None, body_processor=None):
        """
        Perform an HTTP request with the specified method.

        url: the full url path of the request

        method: the HTTP Method to be used

        headers: the request headers to be sent as a dict

        body: the request body

        headers_processor: a function to be used for processing the HTTP
            header responses (defaults to self.processResponseHeaders).
            This function takes as argument the HTTP headers as a dict.

        body_processory: a function to be used for processing the HTTP
            response body (defaults to self.processResponseBody). This
            function takes the response body as an argument.
        """
        log.debug("Performing request %s %s %s" % (url, method, headers))
        d = self.build_request(url, method, headers, body)

        def errback(data):
            log.err("Error in test %s" % data)
            self.report["error"] = data

        def finished(data):
            # Swallow the chain's result so the returned Deferred fires
            # with None once everything has completed.
            return

        d.addErrback(errback)
        d.addCallback(self._cbResponse, headers_processor, body_processor)
        d.addCallback(finished)
        return d

    def build_request(self, url, method="GET", headers=None, body=None):
        # Record the request in the report, randomise the UA if wanted,
        # and fire it through the agent.
        self.request['method'] = method
        self.request['url'] = url
        self.request['headers'] = headers if headers else {}
        self.request['body'] = body
        if self.randomizeUA:
            self.randomize_useragent()

        self.report['request'] = self.request
        self.report['url'] = url

        # If we have a request body payload, set the request body to such
        # content
        if body:
            body_producer = StringProducer(self.request['body'])
        else:
            body_producer = None

        headers = Headers(self.request['headers'])
        req = self.agent.request(self.request['method'],
                                 self.request['url'], headers, body_producer)
        return req

    def _cbResponse(self, response, headers_processor, body_processor):
        log.debug("Got response %s" % response)
        if not response:
            self.report['response'] = None
            log.err("We got an empty response")
            return

        self.response['headers'] = list(response.headers.getAllRawHeaders())
        self.response['code'] = response.code
        self.response['length'] = response.length
        # BUGFIX: was `response.length`, which recorded the body length
        # under the 'version' key.
        self.response['version'] = response.version

        if str(self.response['code']).startswith('3'):
            self.processRedirect(response.headers.getRawHeaders('Location')[0])

        if headers_processor:
            headers_processor(self.response['headers'])
        else:
            self.processResponseHeaders(self.response['headers'])

        finished = defer.Deferred()
        response.deliverBody(BodyReceiver(finished))
        finished.addCallback(self._processResponseBody, body_processor)
        return finished

    def randomize_useragent(self):
        # Pick a random UA from the module-level pool for this request.
        user_agent = random.choice(userAgents)
        self.request['headers']['User-Agent'] = [user_agent]
def request(self, method, url, **kwargs):
    """Issue an HTTP request through the wrapped Twisted agent.

    Recognised kwargs (all optional): params (query parameters merged into
    the URL), headers (dict or Headers), data (form payload), files
    (multipart uploads), auth, allow_redirects (default True), unbuffered
    (default False), timeout, reactor.

    Returns a Deferred firing with a _Response wrapper.
    """
    method = method.upper()
    # Join parameters provided in the URL
    # and the ones passed as argument.
    params = kwargs.get('params')
    if params:
        url = _combine_query_params(url, params)
    # Convert headers dictionary to
    # twisted raw headers format.
    headers = kwargs.get('headers')
    if headers:
        if isinstance(headers, dict):
            h = Headers({})
            for k, v in headers.iteritems():
                # A plain string becomes a single raw header; any other
                # value is assumed to already be a list of values.
                if isinstance(v, str):
                    h.addRawHeader(k, v)
                else:
                    h.setRawHeaders(k, v)
            headers = h
    else:
        headers = Headers({})
    # Here we choose a right producer
    # based on the parameters passed in.
    bodyProducer = None
    data = kwargs.get('data')
    files = kwargs.get('files')
    if files:
        # If the files keyword is present we will issue a
        # multipart/form-data request as it suits better for cases
        # with files and/or large objects.
        files = list(_convert_files(files))
        boundary = uuid.uuid4()
        headers.setRawHeaders(
            'content-type', [
                'multipart/form-data; boundary=%s' % (boundary,)])
        if data:
            data = _convert_params(data)
        else:
            data = []
        bodyProducer = multipart.MultiPartProducer(
            data + files, boundary=boundary)
    elif data:
        # Otherwise stick to x-www-form-urlencoded format
        # as it's generally faster for smaller requests.
        if isinstance(data, (dict, list, tuple)):
            headers.setRawHeaders(
                'content-type', ['application/x-www-form-urlencoded'])
            data = urlencode(data, doseq=True)
        bodyProducer = IBodyProducer(data)
    # Wrap the base agent: redirects (unless disabled), then gzip decoding,
    # then optional authentication.
    wrapped_agent = self._agent
    if kwargs.get('allow_redirects', True):
        wrapped_agent = RedirectAgent(wrapped_agent)
    wrapped_agent = ContentDecoderAgent(wrapped_agent,
                                        [('gzip', GzipDecoder)])
    auth = kwargs.get('auth')
    if auth:
        wrapped_agent = add_auth(wrapped_agent, auth)
    d = wrapped_agent.request(
        method, url, headers=headers, bodyProducer=bodyProducer)
    timeout = kwargs.get('timeout')
    if timeout:
        # Cancel the request if it does not complete within `timeout`
        # seconds; the delayed call is cancelled on normal completion.
        delayedCall = default_reactor(kwargs.get('reactor')).callLater(
            timeout, d.cancel)

        def gotResult(result):
            if delayedCall.active():
                delayedCall.cancel()
            return result

        d.addBoth(gotResult)
    if not kwargs.get('unbuffered', False):
        # Buffer the response body so it can be read multiple times.
        d.addCallback(_BufferedResponse)
    return d.addCallback(_Response)
def request(self, method='GET', path='', headers=None, body=None):
    """Issue a JSON REST request against self.host (or the current
    redirect target self.loc), handling 307 redirects and stale-location
    404s.

    method: HTTP method (default 'GET')
    path: path appended to the base URL
    headers: optional dict of raw headers; defaults to a JSON/on-demand set
    body: optional object, JSON-encoded as the request body

    Returns a Deferred firing once the response body has been delivered
    to RestHandle.
    """
    # BUG FIX: the headers dict used to be a mutable default argument and
    # was mutated below (Content-Type insert), leaking state across calls.
    # Build a fresh dict per call instead; copy caller-supplied dicts too.
    if headers is None:
        headers = {'User-Agent': ['onDemand/1.0 (Rest_Client)'],
                   'Accept': ['application/json']}
    else:
        headers = dict(headers)
    data = None
    if self.loc:
        host = '/'.join((self.loc, path))
    else:
        host = '/'.join((self.host, path))
    if self.token:
        host += '?auth=' + self.token
    if body:
        headers.update({'Content-Type': ['application/json']})
        data = FileBodyProducer(StringIO(json.dumps(body)))
    agent = RedirectAgent(Agent(reactor, pool=self.pool))
    d = agent.request(method, host, Headers(headers), data)

    def cbFail(fail):
        if hasattr(fail.value, 'response'):
            if hasattr(fail.value.response, 'code'):
                if fail.value.response.code == 307:
                    # Temporary redirect: remember the new location and
                    # replay the same request against it.
                    loc = fail.value.response.headers.getRawHeaders(
                        'location')
                    new = urlparse(loc[0])
                    newhost = '://'.join((new.scheme, new.netloc))
                    if newhost == self.host:
                        self.loc = None
                    else:
                        self.loc = newhost
                    self.log.debug('redirect: %s' % self.loc)
                    # BUG FIX: only build a body producer when there is a
                    # body; previously json.dumps(None) ("null") was sent
                    # for body-less requests.
                    if body:
                        data = FileBodyProducer(StringIO(json.dumps(body)))
                    else:
                        data = None
                    d = agent.request(
                        method, loc[0], Headers(headers), data)
                    d.addCallbacks(cbRequest, cbFail)
                    return d
                elif fail.value.response.code == 404 and self.loc:
                    # Cached redirect target went stale: fall back to the
                    # canonical host and re-issue the original request.
                    # BUG FIX: previously this recursed with the already
                    # joined URL as `path` and a Headers object as the
                    # headers dict, doubling the host portion and breaking
                    # header handling. request() rebuilds the URL and auth
                    # token itself, so pass the original arguments.
                    self.loc = None
                    return self.request(method, path, headers, body)
                else:
                    print(dir(fail.value))
                    print(fail.value.message)
                    print(fail.value.args)
                    self.log.error('unhandled failure: %s -- %s' % (
                        fail.value.message, fail.value))

    def cbRequest(response):
        # Stream the response body to the RestHandle protocol; `finished`
        # fires when the body is fully delivered.
        finished = Deferred()
        response.deliverBody(RestHandle(finished, self.event_handler))
        return finished

    d.addCallbacks(cbRequest, cbFail)
    return d
class TwAgentHelper:
    """Helper around twisted.web.client.Agent for fetching pages, optionally
    through an HTTP proxy, with manual handling of redirect Location headers
    (RedirectAgent alone cannot follow relative Location URLs)."""

    def __init__(self, proxy_host="scorpion.premiumize.me", use_proxy=False, p_user='', p_pass=''):
        print "Twisted Agent in use", __TW_VER__
        # can not follow rel. url redirects (location header)
        self.headers = Headers(agent_headers)
        # Proxy mode is only usable when endpoint support is available.
        self.useProxy = use_proxy and twEndpoints
        if self.useProxy:
            self.endpoint = TCP4ClientEndpoint(reactor, proxy_host, 80)
            self.agent = RedirectAgent(ProxyAgent(self.endpoint))
            # Basic proxy auth header from the supplied credentials.
            auth = base64.b64encode("%s:%s" % (p_user, p_pass))
            self.headers.addRawHeader('Proxy-Authorization', 'Basic ' + auth.strip())
        else:
            self.agent = RedirectAgent(Agent(reactor))

    def getRedirectedUrl(self, callback, cb_err, url, *args, **kwargs):
        """Issue a HEAD request and pass the (possibly redirected) final URL
        to `callback`; errors go to `cb_err`."""
        print "getRedirectedUrl: ", url
        self._rd_callback = callback
        self.url = url
        self.data = ""
        self.agent.request('HEAD', url, headers=self.headers).addCallback(self.__getResponse, *args, **kwargs).addErrback(cb_err)

    def __getResponse(self, response, *args, **kwargs):
        """Inspect the HEAD response for a Location header and resolve
        relative redirect targets against the original URL."""
        print "__getResponse:"
        print "Status code: ", response.phrase
        for header, value in response.headers.getAllRawHeaders():
            print header, value
        r = response.headers.getRawHeaders("location")
        if r:
            r_url = r[0]
            p = self._parse(r_url)
            if b'http' not in p[0]:
                # Scheme missing -> relative redirect; prepend scheme/host
                # of the original request URL.
                # NOTE(review): this drops the port and the original path,
                # and may produce a double slash if r_url starts with '/'.
                print "Rel. URL correction"
                scheme, host, port, path = self._parse(self.url)
                r_url = b'%s://%s/%s' % (scheme, host, r_url)
        else:
            r_url = self.url
        print "Location: ", r_url
        self._rd_callback(r_url, *args, **kwargs)

    def getWebPage(self, callback, cb_err, url, follow_redir, *args, **kwargs):
        """Fetch `url` (optionally resolving redirects via HEAD first) and
        deliver the page body to `callback`."""
        print "getWebPage: ", url
        self._wp_callback = callback
        self._errback = cb_err
        self.data = ""
        if follow_redir:
            self.getRedirectedUrl(self.__getWebPageDef, cb_err, url, *args, **kwargs)
        else:
            self.__getWebPageDef(url, *args, **kwargs)

    def __getWebPageDef(self, url, *args, **kwargs):
        # GET the final URL and hand the body to the stored callback.
        d = self.agent.request('GET', url, headers=self.headers)
        d.addCallback(self.__getResource)
        d.addCallbacks(self._wp_callback, self._errback, callbackArgs=args, callbackKeywords=kwargs)

    """
    def __getWebPageDef(self, url, *args, **kwargs):
        #getPage(url, followRedirect=True, agent=self.headers, headers={'Content-Type':'application/x-www-form-urlencoded'}).addCallback(self._wp_callback, *args, **kwargs).addErrback(self._errback)
        getPage(url, followRedirect=True, headers={'Content-Type':'application/x-www-form-urlencoded'}).addCallback(self._wp_callback, *args, **kwargs).addErrback(self._errback)
    """

    def __getResource(self, response):
        """Return a Deferred that fires with the full response body, as
        collected by the GetResource protocol."""
        print "__getResource:"
        finished = Deferred()
        response.deliverBody(GetResource(finished))
        return finished

    @staticmethod
    def _parse(url, defaultPort=None):
        """Split `url` into (scheme, host, port, path).

        The port defaults to 443 for https and 80 otherwise; an explicit
        `host:port` in the netloc overrides it. An empty path becomes '/'.
        NOTE(review): the ':' split mis-handles IPv6 literals.
        """
        url = url.strip()
        parsed = http.urlparse(url)
        scheme = parsed[0]
        path = urlunparse(('', '') + parsed[2:])
        if defaultPort is None:
            if scheme == 'https':
                defaultPort = 443
            else:
                defaultPort = 80
        host, port = parsed[1], defaultPort
        if ':' in host:
            host, port = host.split(':')
            try:
                port = int(port)
            except ValueError:
                port = defaultPort
        if path == '':
            path = '/'
        return scheme, host, port, path
def __init__(self, uri, connectTimeout=15, redirectLimit=20):
    """Redirect-following agent starting at `uri`.

    uri: the initial request URI, remembered as lastURI so redirect
        handling can track the most recent location.
    connectTimeout: TCP connect timeout (seconds) for the wrapped Agent.
    redirectLimit: maximum number of redirects RedirectAgent will follow.
    """
    self.lastURI = uri
    RedirectAgent.__init__(self, Agent(reactor, connectTimeout=connectTimeout), redirectLimit=redirectLimit)
def __init__(self, agent, onRedirect, port=80, proxy=""):
    """Redirect-following agent that notifies a callback on redirects.

    agent: the underlying IAgent to wrap (redirect limit fixed at 20).
    onRedirect: callable invoked when a redirect is observed.
    port: default port used when building redirected requests.
    proxy: optional proxy host ("" disables proxying).
    """
    RedirectAgent.__init__(self, agent, 20)
    self._onRedirect = onRedirect
    self._port = port
    self._proxy = proxy
def request(
    self,
    method,
    url,
    *,
    params=None,
    headers=None,
    data=None,
    files=None,
    json=_NOTHING,
    auth=None,
    cookies=None,
    allow_redirects=True,
    browser_like_redirects=False,
    unbuffered=False,
    reactor=None,
    timeout=None,
    _stacklevel=2,
):
    """
    See :func:`treq.request()`.

    Builds the request URL (merging `params`), prepares headers and the
    body producer, wraps the agent with cookie, redirect, gzip-decoding
    and auth layers, and returns a Deferred firing with a _Response.
    """
    method = method.encode('ascii').upper()

    # Normalise the url argument to an EncodedURL, whatever form it came in.
    if isinstance(url, DecodedURL):
        parsed_url = url.encoded_url
    elif isinstance(url, EncodedURL):
        parsed_url = url
    elif isinstance(url, str):
        # We use hyperlink in lazy mode so that users can pass arbitrary
        # bytes in the path and querystring.
        parsed_url = EncodedURL.from_text(url)
    else:
        parsed_url = EncodedURL.from_text(url.decode('ascii'))

    # Join parameters provided in the URL
    # and the ones passed as argument.
    if params:
        parsed_url = parsed_url.replace(
            query=parsed_url.query + tuple(_coerced_query_params(params)))

    url = parsed_url.to_uri().to_text().encode('ascii')

    headers = self._request_headers(headers, _stacklevel + 1)

    bodyProducer, contentType = self._request_body(data, files, json,
                                                   stacklevel=_stacklevel + 1)
    if contentType is not None:
        headers.setRawHeaders(b'Content-Type', [contentType])

    # Merge caller cookies into the client's jar and attach a CookieAgent.
    if not isinstance(cookies, CookieJar):
        cookies = cookiejar_from_dict(cookies)

    cookies = merge_cookies(self._cookiejar, cookies)
    wrapped_agent = CookieAgent(self._agent, cookies)

    if allow_redirects:
        if browser_like_redirects:
            # Follows redirects the way browsers do (e.g. 301/302 on POST
            # become GET), as opposed to strict RFC behaviour.
            wrapped_agent = BrowserLikeRedirectAgent(wrapped_agent)
        else:
            wrapped_agent = RedirectAgent(wrapped_agent)

    wrapped_agent = ContentDecoderAgent(wrapped_agent,
                                        [(b'gzip', GzipDecoder)])

    if auth:
        wrapped_agent = add_auth(wrapped_agent, auth)

    d = wrapped_agent.request(
        method, url, headers=headers, bodyProducer=bodyProducer)

    if reactor is None:
        from twisted.internet import reactor
    if timeout:
        # Cancel the request after `timeout` seconds; the delayed call is
        # itself cancelled once a result (or failure) arrives.
        delayedCall = reactor.callLater(timeout, d.cancel)

        def gotResult(result):
            if delayedCall.active():
                delayedCall.cancel()
            return result

        d.addBoth(gotResult)

    if not unbuffered:
        d.addCallback(_BufferedResponse)

    return d.addCallback(_Response, cookies)
class WellKnownResolver:
    """Handles well-known lookups for matrix servers."""

    def __init__(
        self,
        reactor: IReactorTime,
        agent: IAgent,
        user_agent: bytes,
        well_known_cache: Optional[TTLCache] = None,
        had_well_known_cache: Optional[TTLCache] = None,
    ):
        self._reactor = reactor
        self._clock = Clock(reactor)

        # Fall back to the module-level shared caches when none are injected
        # (injection is mainly for tests).
        if well_known_cache is None:
            well_known_cache = _well_known_cache

        if had_well_known_cache is None:
            had_well_known_cache = _had_valid_well_known_cache

        self._well_known_cache = well_known_cache
        self._had_valid_well_known_cache = had_well_known_cache
        # .well-known requests must follow redirects per the spec.
        self._well_known_agent = RedirectAgent(agent)
        self.user_agent = user_agent

    async def get_well_known(self, server_name: bytes) -> WellKnownLookupResult:
        """Attempt to fetch and parse a .well-known file for the given server

        Args:
            server_name: name of the server, from the requested url

        Returns:
            The result of the lookup
        """
        try:
            prev_result, expiry, ttl = self._well_known_cache.get_with_expiry(
                server_name)

            now = self._clock.time()
            # Serve from cache while still comfortably inside the TTL; near
            # expiry we refetch (and can fall back to prev_result on
            # temporary failure below).
            if now < expiry - WELL_KNOWN_GRACE_PERIOD_FACTOR * ttl:
                return WellKnownLookupResult(delegated_server=prev_result)
        except KeyError:
            prev_result = None

        # TODO: should we linearise so that we don't end up doing two .well-known
        # requests for the same server in parallel?
        try:
            with Measure(self._clock, "get_well_known"):
                result, cache_period = await self._fetch_well_known(
                    server_name)  # type: Optional[bytes], float
        except _FetchWellKnownFailure as e:
            if prev_result and e.temporary:
                # This is a temporary failure and we have a still valid cached
                # result, so lets return that. Hopefully the next time we ask
                # the remote will be back up again.
                return WellKnownLookupResult(delegated_server=prev_result)

            result = None

            if self._had_valid_well_known_cache.get(server_name, False):
                # We have recently seen a valid well-known record for this
                # server, so we cache the lack of well-known for a shorter time.
                cache_period = WELL_KNOWN_DOWN_CACHE_PERIOD
            else:
                cache_period = WELL_KNOWN_INVALID_CACHE_PERIOD

            # add some randomness to the TTL to avoid a stampeding herd
            cache_period *= random.uniform(
                1 - WELL_KNOWN_DEFAULT_CACHE_PERIOD_JITTER,
                1 + WELL_KNOWN_DEFAULT_CACHE_PERIOD_JITTER,
            )

        if cache_period > 0:
            self._well_known_cache.set(server_name, result, cache_period)

        return WellKnownLookupResult(delegated_server=result)

    async def _fetch_well_known(self, server_name: bytes) -> Tuple[bytes, float]:
        """Actually fetch and parse a .well-known, without checking the cache

        Args:
            server_name: name of the server, from the requested url

        Raises:
            _FetchWellKnownFailure if we fail to lookup a result

        Returns:
            The lookup result and cache period.
        """

        had_valid_well_known = self._had_valid_well_known_cache.get(
            server_name, False)

        # We do this in two steps to differentiate between possibly transient
        # errors (e.g. can't connect to host, 503 response) and more permanent
        # errors (such as getting a 404 response).
        response, body = await self._make_well_known_request(
            server_name, retry=had_valid_well_known)

        try:
            if response.code != 200:
                raise Exception("Non-200 response %s" % (response.code, ))

            parsed_body = json_decoder.decode(body.decode("utf-8"))
            logger.info("Response from .well-known: %s", parsed_body)

            # The delegated server name lives under the "m.server" key.
            result = parsed_body["m.server"].encode("ascii")
        except defer.CancelledError:
            # Bail if we've been cancelled
            raise
        except Exception as e:
            logger.info("Error parsing well-known for %s: %s", server_name, e)
            raise _FetchWellKnownFailure(temporary=False)

        cache_period = _cache_period_from_headers(
            response.headers, time_now=self._reactor.seconds)
        if cache_period is None:
            cache_period = WELL_KNOWN_DEFAULT_CACHE_PERIOD
            # add some randomness to the TTL to avoid a stampeding herd every 24 hours
            # after startup
            cache_period *= random.uniform(
                1 - WELL_KNOWN_DEFAULT_CACHE_PERIOD_JITTER,
                1 + WELL_KNOWN_DEFAULT_CACHE_PERIOD_JITTER,
            )
        else:
            # Clamp server-provided cache periods to sane bounds.
            cache_period = min(cache_period, WELL_KNOWN_MAX_CACHE_PERIOD)
            cache_period = max(cache_period, WELL_KNOWN_MIN_CACHE_PERIOD)

        # We got a success, mark as such in the cache
        self._had_valid_well_known_cache.set(
            server_name,
            bool(result),
            cache_period + WELL_KNOWN_REMEMBER_DOMAIN_HAD_VALID,
        )

        return result, cache_period

    async def _make_well_known_request(self, server_name: bytes,
                                       retry: bool) -> Tuple[IResponse, bytes]:
        """Make the well known request.

        This will retry the request if requested and it fails (with unable
        to connect or receives a 5xx error).

        Args:
            server_name: name of the server, from the requested url
            retry: Whether to retry the request if it fails.

        Raises:
            _FetchWellKnownFailure if we fail to lookup a result

        Returns:
            Returns the response object and body. Response may be a non-200 response.
        """
        uri = b"https://%s/.well-known/matrix/server" % (server_name, )
        uri_str = uri.decode("ascii")

        headers = {
            b"User-Agent": [self.user_agent],
        }

        i = 0
        while True:
            i += 1

            logger.info("Fetching %s", uri_str)
            try:
                response = await make_deferred_yieldable(
                    self._well_known_agent.request(b"GET", uri,
                                                   headers=Headers(headers)))
                # Stream the body into memory, aborting if it exceeds the
                # size limit (BodyExceededMaxSize below).
                body_stream = BytesIO()
                await make_deferred_yieldable(
                    read_body_with_max_size(response, body_stream,
                                            WELL_KNOWN_MAX_SIZE))
                body = body_stream.getvalue()

                # 5xx responses are treated as retryable failures.
                # NOTE(review): the message says "Non-200" but this branch
                # only fires for 5xx codes.
                if 500 <= response.code < 600:
                    raise Exception("Non-200 response %s" % (response.code, ))

                return response, body
            except defer.CancelledError:
                # Bail if we've been cancelled
                raise
            except BodyExceededMaxSize:
                # If the well-known file was too large, do not keep attempting
                # to download it, but consider it a temporary error.
                logger.warning(
                    "Requested .well-known file for %s is too large > %r bytes",
                    server_name.decode("ascii"),
                    WELL_KNOWN_MAX_SIZE,
                )
                raise _FetchWellKnownFailure(temporary=True)
            except Exception as e:
                if not retry or i >= WELL_KNOWN_RETRY_ATTEMPTS:
                    logger.info("Error fetching %s: %s", uri_str, e)
                    raise _FetchWellKnownFailure(temporary=True)

                logger.info("Error fetching %s: %s. Retrying", uri_str, e)

            # Sleep briefly in the hopes that they come back up
            await self._clock.sleep(0.5)
def redfish_request(self, method, uri, headers=None, bodyProducer=None):
    """Send the redfish request and return the response.

    method/uri: HTTP method and request URI (bytes).
    headers: optional Headers for the request.
    bodyProducer: optional IBodyProducer for the request body.

    Returns a Deferred firing with (decoded_json_body, response_headers).
    Raises PowerActionError (via the deferred) on HTTP errors or
    undecodable JSON.
    """
    agent = RedirectAgent(
        Agent(reactor, contextFactory=WebClientContextFactory())
    )
    d = agent.request(
        method, uri, headers=headers, bodyProducer=bodyProducer
    )

    def render_response(response, retried=False):
        """Render the HTTPS response received."""

        def eb_catch_partial(failure):
            # Twisted is raising PartialDownloadError because the responses
            # do not contain a Content-Length header. Since every response
            # holds the whole body we just take the result.
            failure.trap(PartialDownloadError)
            if int(failure.value.status) == HTTPStatus.OK:
                return failure.value.response
            else:
                return failure

        def cb_json_decode(data):
            data = data.decode("utf-8")
            # Only decode non-empty response bodies.
            if data:
                # occasionally invalid json is returned. provide a clear
                # error in that case
                try:
                    return json.loads(data)
                except ValueError as error:
                    raise PowerActionError(
                        "Redfish request failed from a JSON parse error:"
                        " %s." % error
                    )

        def cb_attach_headers(data, headers):
            return data, headers

        # Error out if the response has a status code of 400 or above.
        if response.code >= int(HTTPStatus.BAD_REQUEST):
            # if there was no trailing slash, retry with a trailing slash
            # because of varying requirements of BMC manufacturers
            if (
                not retried
                and response.code == HTTPStatus.NOT_FOUND
                and uri.decode("utf-8")[-1] != "/"
            ):
                d = agent.request(
                    method,
                    uri + "/".encode("utf-8"),
                    headers=headers,
                    bodyProducer=bodyProducer,
                )
                # BUG FIX: this retry deferred used to be clobbered by the
                # unconditional `d = readBody(response)` below, so the
                # retried request's result was discarded and the original
                # 404 body was parsed instead. Route the retry through the
                # same rendering pipeline (retried=True prevents an
                # infinite retry loop if the BMC keeps returning 404).
                d.addCallback(render_response, retried=True)
                return d
            else:
                raise PowerActionError(
                    "Redfish request failed with response status code:"
                    " %s." % response.code
                )

        d = readBody(response)
        d.addErrback(eb_catch_partial)
        d.addCallback(cb_json_decode)
        d.addCallback(cb_attach_headers, headers=response.headers)
        return d

    d.addCallback(render_response)
    return d
class HTTPTest(OONITest):
    """
    A utility class for dealing with HTTP based testing. It provides
    methods to be overriden for dealing with HTTP based testing. The main
    functions to look at are processResponseBody and processResponseHeader
    that are invoked once the headers have been received and once the
    request body has been received.
    """
    # Subclass-overridable knobs: randomize the User-Agent per request and
    # whether 3XX responses are followed automatically.
    randomize_ua = True
    follow_redirects = False

    def initialize(self):
        """Create the Twisted agent (wrapped in a RedirectAgent when
        follow_redirects is set) and reset request/response state."""
        log.debug("HTTPTest.initialize")
        from twisted.web.client import Agent
        self.agent = Agent(self.reactor)
        if self.follow_redirects:
            from twisted.web.client import RedirectAgent
            self.agent = RedirectAgent(self.agent)
        self.request = {}
        self.response = {}

    def _processResponseBody(self, data):
        # Record the body, then hand it to the subclass hook.
        log.debug("HTTPTest._processResponseBody")
        self.response['body'] = data
        #self.result['response'] = self.response
        self.processResponseBody(data)

    def processResponseBody(self, data):
        """
        This should handle all the response body smushing for getting it
        ready to be passed onto the control.

        @param data: The content of the body returned.
        """
        log.debug("HTTPTest.processResponseBody")

    def processResponseHeaders(self, headers):
        """
        This should take care of dealing with the returned HTTP headers.

        @param headers: The content of the returned headers.
        """
        log.debug("HTTPTest.processResponseHeaders")

    def processRedirect(self, location):
        """
        Handle a redirection via a 3XX HTTP status code.

        @param location: the url that is being redirected to.
        """
        log.debug("HTTPTest.processRedirect")

    def experiment(self, args):
        """Issue the request for the asset/local-option URL and return a
        Deferred that fires once the response (including body) has been
        processed."""
        log.msg("HTTPTest.experiment")
        url = self.local_options['url'] if 'url' not in args else args['url']
        d = self.build_request(url)

        def finished(data):
            return data

        d.addCallback(self._cbResponse)
        d.addCallback(finished)
        return d

    def _cbResponse(self, response):
        """Record response metadata, dispatch the header/redirect hooks and
        return a Deferred that fires once the body has been processed."""
        log.debug("HTTPTest._cbResponse")
        self.response['headers'] = list(response.headers.getAllRawHeaders())
        self.response['code'] = response.code
        self.response['length'] = response.length
        # BUG FIX: this previously stored response.length under 'version'.
        self.response['version'] = response.version
        if str(self.response['code']).startswith('3'):
            self.processRedirect(response.headers.getRawHeaders('Location')[0])
        self.processResponseHeaders(self.response['headers'])
        #self.result['response'] = self.response
        finished = defer.Deferred()
        response.deliverBody(BodyReceiver(finished))
        finished.addCallback(self._processResponseBody)
        # BUG FIX: return the body deferred so the experiment() chain waits
        # for the body to be delivered and processed instead of completing
        # immediately (matches the sibling _cbResponse implementation
        # elsewhere in this file).
        return finished

    def randomize_useragent(self):
        """Set a random User-Agent header (picked from useragents) on the
        pending request."""
        log.debug("HTTPTest.randomize_useragent")
        user_agent = random.choice(useragents)
        self.request['headers']['User-Agent'] = [user_agent]

    def build_request(self, url, method="GET", headers=None, body=None):
        """Record the request parameters on self.request/self.result and
        issue the request through self.agent; returns its Deferred."""
        log.debug("HTTPTest.build_request")
        self.request['method'] = method
        self.request['url'] = url
        self.request['headers'] = headers if headers else {}
        self.request['body'] = body
        if self.randomize_ua:
            self.randomize_useragent()
        #self.result['request'] = self.request
        self.result['url'] = url
        # NOTE(review): the raw body is passed straight to Agent.request(),
        # which expects an IBodyProducer; non-None bodies presumably need
        # wrapping (e.g. StringProducer) — confirm before sending bodies.
        return self.agent.request(self.request['method'],
                                  self.request['url'],
                                  Headers(self.request['headers']),
                                  self.request['body'])

    def load_assets(self):
        """Return the asset dict for the runner: the URL asset when local
        options were given, otherwise an empty dict."""
        log.debug("HTTPTest.load_assets")
        if self.local_options:
            return {'url': Asset(self.local_options['asset'])}
        else:
            return {}