def _preparedRequest(self, url): self.timer.start() headers = {} if self.url: headers['Referer'] = self.url if self._req_content_type: headers['Content-Type'] = self._req_content_type headers['Connection'] = 'close' headers['Host'] = urlparse.urlparse(url).netloc headers['User-Agent'] = 'Python-urllib/2.4' headers.update(self._req_headers) extra_environ = {} if self.handleErrors: extra_environ['paste.throw_errors'] = None headers['X-zope-handle-errors'] = 'True' else: extra_environ['wsgi.handleErrors'] = False extra_environ['paste.throw_errors'] = True extra_environ['x-wsgiorg.throw_errors'] = True headers.pop('X-zope-handle-errors', None) kwargs = {'headers': sorted(headers.items()), 'extra_environ': extra_environ, 'expect_errors': True} yield kwargs self._changed() self.timer.stop()
def _preparedRequest(self, url): self.timer.start() headers = {} if self._req_referrer is not None: headers['Referer'] = self._req_referrer if self._req_content_type: headers['Content-Type'] = self._req_content_type headers['Connection'] = 'close' headers['Host'] = urlparse.urlparse(url).netloc headers['User-Agent'] = 'Python-urllib/2.4' headers.update(self._req_headers) extra_environ = {} if self.handleErrors: extra_environ['paste.throw_errors'] = None headers['X-zope-handle-errors'] = 'True' else: extra_environ['wsgi.handleErrors'] = False extra_environ['paste.throw_errors'] = True extra_environ['x-wsgiorg.throw_errors'] = True headers.pop('X-zope-handle-errors', None) kwargs = {'headers': sorted(headers.items()), 'extra_environ': extra_environ, 'expect_errors': True} yield kwargs self._req_content_type = None self.timer.stop()
def _verifyPath(self, path, ck): self_path = urlparse.urlparse(self.url)[2] if not self_path.startswith(path): raise ValueError('current url must start with path, if given') if ck is not None and ck.path != path and ck.path.startswith(path): raise ValueError( 'cannot set a cookie that will be hidden by another ' 'cookie for this url (%s)' % (self.url,))
def _submit(self, form, name=None, index=None, coord=None, **args):
    """Submit *form*, optionally inserting click coordinates.

    A reimplementation of ``webtest.forms.Form.submit()`` that also allows
    the ``(x, y)`` pair of an image-button click to be added to the
    submitted fields.
    """
    fields = form.submit_fields(name, index=index)
    if coord is not None:
        x, y = coord
        fields.append(('%s.x' % name, x))
        fields.append(('%s.y' % name, y))
    url = self._absoluteUrl(form.action)
    if form.method.upper() == "GET":
        # For GET the parameters come from ``fields``; drop any query or
        # fragment already present on the action URL.
        url = urlparse.urlunparse(
            urlparse.urlparse(url)._replace(query='', fragment=''))
    else:
        args.setdefault("content_type", form.enctype)
    return form.response.goto(url, method=form.method, params=fields, **args)
def _preparedRequest(self, url, no_visit=False):
    """Monkey patched for openNoVisit().

    Same as the upstream ``_preparedRequest`` generator, but when
    *no_visit* is true the browser state is not updated after the request
    (``self._changed()`` is skipped).
    """
    from zope.testbrowser._compat import urlparse
    self.timer.start()

    request_headers = {}
    # The currently open page, if any, becomes the referrer.
    if self.url:
        request_headers['Referer'] = self.url
    if self._req_content_type:
        request_headers['Content-Type'] = self._req_content_type
    request_headers['Connection'] = 'close'
    request_headers['Host'] = urlparse.urlparse(url).netloc
    request_headers['User-Agent'] = 'Python-urllib/2.4'
    # Explicit request headers override the defaults above.
    request_headers.update(self._req_headers)

    environ = {}
    if self.handleErrors:
        environ['paste.throw_errors'] = None
        request_headers['X-zope-handle-errors'] = 'True'
    else:
        environ['wsgi.handleErrors'] = False
        environ['paste.throw_errors'] = True
        environ['x-wsgiorg.throw_errors'] = True
        request_headers.pop('X-zope-handle-errors', None)

    yield dict(
        headers=sorted(request_headers.items()),
        extra_environ=environ,
        expect_errors=True,
    )

    # openNoVisit() passes no_visit=True so the browser stays on its page.
    if not no_visit:
        self._changed()
    self.timer.stop()
def _assertAllowed(self, url):
    """Raise unless fetching *url* is permitted.

    In restricted mode only hosts from the allow lists may be fetched
    (HostNotAllowed otherwise); in unrestricted mode the target site's
    robots.txt is consulted (RobotExclusionError on refusal).
    """
    parsed = urlparse.urlparse(url)
    if not self.restricted:
        # Unrestricted mode: retrieve robots.txt and check against it
        robots_url = urlparse.urlunsplit(
            (parsed.scheme, parsed.netloc, '/robots.txt', '', ''))
        parser = urllib_robotparser.RobotFileParser()
        parser.set_url(robots_url)
        parser.read()
        if not parser.can_fetch("*", url):
            raise RobotExclusionError(
                url, 403, "request disallowed by robots.txt", [], None)
        return
    # We are in restricted mode, check host part only
    host = parsed.netloc.partition(':')[0]
    if host in _allowed:
        return
    if any(host.endswith('.%s' % dom) for dom in _allowed_2nd_level):
        return
    raise HostNotAllowed(url)
def _assertAllowed(self, url):
    """Verify that *url* may be fetched, raising if it may not.

    Restricted mode checks the host against the allow lists and raises
    HostNotAllowed otherwise; unrestricted mode downloads the remote
    site's robots.txt and raises RobotExclusionError when it forbids
    the request.
    """
    pieces = urlparse.urlparse(url)
    if self.restricted:
        # We are in restricted mode, check host part only
        hostname = pieces.netloc.partition(':')[0]
        permitted = hostname in _allowed or any(
            hostname.endswith('.%s' % domain)
            for domain in _allowed_2nd_level)
        if not permitted:
            raise HostNotAllowed(url)
    else:
        # Unrestricted mode: retrieve robots.txt and check against it
        robots_location = urlparse.urlunsplit(
            (pieces.scheme, pieces.netloc, '/robots.txt', '', ''))
        robots = urllib_robotparser.RobotFileParser()
        robots.set_url(robots_location)
        robots.read()
        if not robots.can_fetch("*", url):
            message = "request disallowed by robots.txt"
            raise RobotExclusionError(url, 403, message, [], None)