def test_summary(mock_request):
    """Summary.extract() should pull the <title> text out of the mocked page."""
    # Given: a fake response whose streamed content is a minimal HTML page.
    url = 'http://www.google.com'
    mock_response = Response()
    mock_response.headers.get = MagicMock(return_value = 'html')
    mock_response.url = url
    mock_response.encoding = 'UTF-8'
    mock_response.consumed = False
    mock_response.raw = MagicMock()
    # iter_content is stubbed so extract() reads this single HTML chunk.
    mock_response.iter_content = lambda s: ['<html><head><title>Test Title</head><body></body></html>']
    mock_request.return_value = mock_response
    # When
    summ = Summary(url)
    # summ._html = '<html><head><title>Test Title</head><body></body></html>'
    summ.extract()
    # Then
    # mock_response.raw.close.assert_called_with()
    assert summ.title == 'Test Title'
def to_requests_response(self): """Returns an instance of `requests.Response` based on this response. Returns: request.Response: the generated response. """ # Make sure that body is at position 0 before returning self.body.seek(0) urllib3_response = URLLib3Rresponse( body=self.body, headers=self.headers, status=self.http_code, request_method=self.request.method, reason=self.reason, preload_content=False ) response = RequestResponse() response.request = self.request response.raw = urllib3_response response.status_code = self.http_code response.reason = self.reason response.headers = CaseInsensitiveDict(response.raw.headers) response.encoding = get_encoding_from_headers(response.headers) extract_cookies_to_jar(response.cookies, self.request, urllib3_response) if isinstance(self.request.url, six.binary_type): response.url = self.request.url.decode("utf-8") else: response.url = self.request.url return response
def responses(code, path=None, redirection=None, data=None, url=None, headers=None):
    """Build a stub ``requests.Response``.

    :param code: HTTP status code for the response.
    :param path: fixture file served as the raw body (ignored when redirecting).
    :param redirection: final URL after a redirect; 'permanent' selects 301.
    :param data: literal body text (UTF-8 encoded) when no file is used.
    :param url: explicit response URL; defaults to the redirection target or
        a unique example.com URL.
    :param headers: response headers; defaults to a text/xml content type.
    :return: the assembled Response.
    """
    # Fresh dict per call: the previous `headers={...}` default was mutated
    # below (headers['location'] = path), leaking state across invocations.
    if headers is None:
        headers = {'Content-Type': 'text/xml'}
    response = Response()
    response.status_code = code
    if path is not None and redirection is None:
        with open(data_file(path), 'rb') as f:
            response.raw = BytesIO(f.read())
    elif data is not None:
        response._content = data.encode('utf-8')
    if redirection is not None:
        # Record the intermediate hop in the history chain.
        temp = Response()
        temp.status_code = 301 if 'permanent' in redirection else 302
        temp.url = path
        response.history.append(temp)
        response.url = redirection
        headers['location'] = path
    if url is None:
        if redirection is not None:
            url = redirection
        else:
            url = 'https://example.com/{}'.format(str(uuid4()))
    response.url = url
    response.headers = headers
    return response
def responses(code, path=None, redirection=None, data=None, url=None, headers=None):
    """Build a stub ``requests.Response``.

    :param code: HTTP status code.
    :param path: fixture file used as the raw body (ignored when redirecting).
    :param redirection: final URL after a redirect; 'permanent' selects 301.
    :param data: literal body text, UTF-8 encoded, when no file is used.
    :param url: explicit response URL; defaults to the redirection target or
        a unique example.com URL.
    :param headers: response headers; defaults to a text/xml content type.
    """
    # None default avoids sharing (and mutating) one dict across calls.
    if headers is None:
        headers = {'Content-Type': 'text/xml'}
    response = Response()
    response.status_code = code
    if path is not None and redirection is None:
        with open(data_file(path), 'rb') as f:
            response.raw = BytesIO(f.read())
    elif data is not None:
        response._content = data.encode('utf-8')
    if redirection is not None:
        # Record the intermediate redirect hop in history.
        temp = Response()
        temp.status_code = 301 if 'permanent' in redirection else 302
        temp.url = path
        response.history.append(temp)
        response.url = redirection
        headers['location'] = path
    if url is None:
        if redirection is not None:
            url = redirection
        else:
            url = 'https://example.com/{}'.format(str(uuid4()))
    response.url = url
    response.headers = headers
    return response
def test_add_resource_with_redirects(self):
    """Index lookups must resolve for the final URL and every redirect hop."""
    ans = Index()
    rdr1 = Response()
    rdr1.url = 'http://localhost:5000/redirect1'
    rdr2 = Response()
    rdr2.url = 'http://localhost:5000/redirect2'
    # Attach two redirect hops to the fixture response's history.
    self.response.history.append(rdr1)
    self.response.history.append(rdr2)
    self.resource.response = self.response
    ans.add_resource(self.resource)
    # Every URL along the chain should map to the same resolved context.
    self.assertEqual(ans.get(self.resource.url), self.context.resolve())
    self.assertEqual(ans.get(self.response.url), self.context.resolve())
    self.assertEqual(ans.get(rdr1.url), self.context.resolve())
    self.assertEqual(ans.get(rdr2.url), self.context.resolve())
def responses(code, path=None, redirection=None, headers=None):
    """Build a stub ``requests.Response``.

    :param code: HTTP status code.
    :param path: fixture file whose bytes become the raw body.
    :param redirection: final URL after a redirect; 'permanent' selects 301.
    :param headers: response headers; defaults to a text/xml content type.
    """
    # Fresh dict per call instead of a shared mutable default argument.
    if headers is None:
        headers = {'Content-Type': 'text/xml'}
    response = Response()
    response.status_code = code
    if path is not None:
        # Binary mode: BytesIO requires bytes; the old text-mode read
        # returned str and raised TypeError here on Python 3.
        with open(test_file(path), 'rb') as f:
            response.raw = BytesIO(f.read())
    if redirection is not None:
        # Record the intermediate redirect hop in history.
        temp = Response()
        temp.status_code = 301 if 'permanent' in redirection else 302
        temp.url = path
        response.history.append(temp)
        response.url = redirection
    response.headers = headers
    return response
def stub(url):
    """Return a canned Response for *url*.

    Closes over ``json``, ``status_code`` and ``json_loads_stub`` from the
    enclosing scope.
    """
    response = Response()
    response._content = json
    response.url = url
    response.status_code = status_code
    # Replace the bound .json() method with a stub returning parsed data.
    response.json = json_loads_stub(json)
    return response
def send(self, request, stream=None, timeout=None, verify=None, cert=None, proxies=None):
    """Serve an S3-channel request via boto3, falling back to boto.

    Returns a 404 Response when neither library is importable.
    """
    resp = Response()
    resp.status_code = 200
    resp.url = request.url
    try:
        # Prefer boto3; delegate the actual fetch to the matching helper.
        import boto3
        return self._send_boto3(boto3, resp, request)
    except ImportError:
        try:
            import boto
            return self._send_boto(boto, resp, request)
        except ImportError:
            stderrlog.info('\nError: boto3 is required for S3 channels. '
                           'Please install with `conda install boto3`\n'
                           'Make sure to run `source deactivate` if you '
                           'are in a conda environment.\n')
            resp.status_code = 404
            return resp
def test_trace_requests_session(monkeypatch):
    """A session GET inside a parent span should produce a nested child span."""
    resp = Response()
    resp.status_code = 200
    resp.url = URL
    # Patch the low-level send so no real HTTP traffic happens.
    monkeypatch.setattr(
        'opentracing_utils.libs._requests.__requests_http_send',
        assert_send_reuqest_mock(resp))
    recorder = Recorder()
    t = BasicTracer(recorder=recorder)
    t.register_required_propagators()
    opentracing.tracer = t
    top_span = opentracing.tracer.start_span(operation_name='top_span')
    with top_span:
        session = requests.Session()
        session.headers.update({CUSTOM_HEADER: CUSTOM_HEADER_VALUE})
        response = session.get(URL)
    # top_span finishes on `with` exit, so both spans are now recorded.
    assert len(recorder.spans) == 2
    assert recorder.spans[0].context.trace_id == top_span.context.trace_id
    assert recorder.spans[0].parent_id == recorder.spans[-1].context.span_id
    assert recorder.spans[-1].operation_name == 'top_span'
    assert response.status_code == resp.status_code
    # The request span carries the standard HTTP/RPC tags.
    assert recorder.spans[0].tags[tags.HTTP_STATUS_CODE] == resp.status_code
    assert recorder.spans[0].tags[tags.HTTP_URL] == URL
    assert recorder.spans[0].tags[tags.HTTP_METHOD] == 'GET'
    assert recorder.spans[0].tags[tags.SPAN_KIND] == tags.SPAN_KIND_RPC_CLIENT
    assert recorder.spans[0].tags[tags.PEER_HOSTNAME] == 'example.com'
def response(self) -> Response:
    """Wrap the rendered page as a synthetic 200 ``requests.Response``."""
    fake = Response()
    fake.status_code = 200
    fake.url = self.page.url
    # TODO this can be more sophisticated
    fake._content = self.content.encode()
    return fake
def article2(self):
    """Fixture: a Response carrying the contents of article-2.html."""
    resp = Response()
    resp.url = 'https://www.youtube.com/watch?v=scbrjaqM3Oc'
    resp.encoding = 'utf8'
    # Binary read so _content holds raw bytes, as requests expects.
    with open('tests/fixtures/article-2.html', 'rb') as fd:
        resp._content = fd.read()
    return resp
def mocked_file_response(path, url):
    # type: (str, str) -> Union[Response, HTTPException]
    """
    Generates a mocked response from the provided file path, and represented as if coming from the
    specified URL.

    :param path: actual file path to be served in the response
    :param url: wanted file URL
    :return: generated response
    """
    if not os.path.isfile(path):
        raise HTTPNotFound("Could not find mock file: [{}]".format(url))
    resp = Response()
    ext = os.path.splitext(path)[-1]
    typ = CONTENT_TYPE_APP_JSON if ext == ".json" else CONTENT_TYPE_TEXT_XML if ext == ".xml" else None
    if not typ:
        return HTTPUnprocessableEntity("Unknown Content-Type for mock file: [{}]".format(url))
    resp.status_code = 200
    resp.headers["Content-Type"] = typ
    setattr(resp, "content_type", typ)
    # `with` closes the handle deterministically; the old bare
    # open(path, "rb").read() leaked the file descriptor.
    with open(path, "rb") as f:
        content = f.read()
    resp._content = content  # noqa: W0212

    class StreamReader(object):
        # Emulates a raw stream: first read() returns the content, next None.
        _data = [None, content]  # should technically be split up more to respect chuck size...

        def read(self, chuck_size=None):  # noqa: E811
            return self._data.pop(-1)

    setattr(resp, "raw", StreamReader())
    resp.url = url
    return resp
def build_response(request, file):
    """Wrap an open file object as a 200 ``requests.Response`` for *request*."""
    resp = Response()
    resp.status_code = 200
    resp.raw = file
    resp.url = request.url
    resp.request = request
    return resp
def test_trace_requests_extract_span_fail(monkeypatch):
    """When span extraction yields nothing, the request still succeeds and a warning is logged."""
    resp = Response()
    resp.status_code = 200
    resp.url = URL
    send_request_mock = MagicMock()
    send_request_mock.return_value = resp
    # Force get_span_from_kwargs to report no span at all.
    extract_span_mock = MagicMock()
    extract_span_mock.return_value = None, None
    monkeypatch.setattr('opentracing_utils.libs._requests.__requests_http_send', send_request_mock)
    monkeypatch.setattr('opentracing_utils.libs._requests.get_span_from_kwargs', extract_span_mock)
    logger = MagicMock()
    monkeypatch.setattr('opentracing_utils.libs._requests.logger', logger)
    recorder = Recorder()
    t = BasicTracer(recorder=recorder)
    t.register_required_propagators()
    opentracing.tracer = t
    session = requests.Session()
    session.headers.update({CUSTOM_HEADER: CUSTOM_HEADER_VALUE})
    response = session.get(URL)
    # The HTTP call itself is unaffected; the failure is only logged.
    assert response.status_code == resp.status_code
    logger.warn.assert_called_once()
def successful_github_response(url, *_args, **_kwargs):
    """Mock a successful GitHub releases-API response (two releases, one prerelease)."""
    r = Response()
    r.url = url
    r.status_code = 200
    r.reason = "OK"
    r.headers = {
        "Content-Type": "application/json; charset=utf-8",
    }
    # Raw JSON payload as the API would stream it.
    r.raw = BytesIO(b"""[
        {
            "html_url": "https://github.com/nautobot/nautobot/releases/tag/v2.7.8",
            "tag_name": "v2.7.8",
            "prerelease": false
        },
        {
            "html_url": "https://github.com/nautobot/nautobot/releases/tag/v2.6-beta1",
            "tag_name": "v2.6-beta1",
            "prerelease": true
        },
        {
            "html_url": "https://github.com/nautobot/nautobot/releases/tag/v2.5.9",
            "tag_name": "v2.5.9",
            "prerelease": false
        }
    ] """)
    return r
def send(self, request, stream=None, timeout=None, verify=None, cert=None, proxies=None):
    """Serve a file:// request straight from the local filesystem."""
    pathname = url_to_path(request.url)
    resp = Response()
    resp.status_code = 200
    resp.url = request.url
    try:
        stats = stat(pathname)
    except OSError as exc:
        # Missing/unreadable file -> 404, with the exception as the "body".
        resp.status_code = 404
        resp.raw = exc
    else:
        modified = formatdate(stats.st_mtime, usegmt=True)
        content_type = guess_type(pathname)[0] or "text/plain"
        resp.headers = CaseInsensitiveDict({
            "Content-Type": content_type,
            "Content-Length": stats.st_size,
            "Last-Modified": modified,
        })
        resp.raw = open(pathname, "rb")
        # So Response.close() releases the underlying file handle.
        resp.close = resp.raw.close
    return resp
def BadResponse(body, request, status_code=None, headers=None):
    """
    Construct a Bad HTTP response (defined in DEFAULT_BAD_RESPONSE_CODE)

    :param body: The body of the response
    :type body: ``str``
    :param request: The HTTP request
    :type request: :class:`requests.Request`
    :param status_code: The return status code, defaults to DEFAULT_GOOD_STATUS_CODE if not specified
    :type status_code: ``int``
    :param headers: Response headers, defaults to DEFAULT_RESPONSE_HEADERS if not specified
    :type headers: ``dict``
    :rtype: :class:`requests.Response`
    :returns: a Response object
    """
    response = Response()
    response.url = request.url
    # NOTE(review): BytesIO requires bytes, but the docstring declares *body*
    # as ``str`` — on Python 3 a str body would raise here; confirm callers
    # pass bytes or that this code targets Python 2.
    response.raw = BytesIO(body)
    if status_code:
        response.status_code = status_code
    else:
        response.status_code = DEFAULT_BAD_STATUS_CODE
    if headers:
        response.headers = headers
    else:
        response.headers = DEFAULT_RESPONSE_HEADERS
    response.request = request
    response._content = body
    return response
def goto(self, url, **kwargs) -> Response:
    """Record and return a fake 200 response whose body is ``kwargs['content']``."""
    fake = Response()
    fake.status_code = 200
    fake.url = url
    fake._content = kwargs["content"].encode()
    self._response = fake
    return fake
def test_extract_nonce(self):
    """_extract_nonce should pull the `nonce` query parameter from the response URL."""
    mock_nonce = "mock-nonce-nse"
    mock_response = Response()
    mock_response.url = "http://example.com/?" + urlencode(
        dict(nonce=mock_nonce))
    self.assertEqual(BBCiPlayer._extract_nonce(mock_response), mock_nonce)
def build_response(request, status_code=200, headers=None, content='(none)'):
    """
    Build a :class:`requests.Response` object on the basis of the
    passed parameters.

    :param request: the request the response answers; supplies url/context.
    :param status_code: HTTP status; its reason phrase is looked up.
    :param headers: response headers (a fresh dict per call).
    :param content: pre-read body content.
    """
    # None default replaces the shared mutable `headers={}` default dict.
    if headers is None:
        headers = {}
    response = Response()
    response.status_code = status_code
    response.reason = responses[status_code]
    response.headers = CaseInsensitiveDict(headers)
    # Pretend that we've already read from the socket
    response._content = content
    response.encoding = get_encoding_from_headers(response.headers)
    response.url = request.url
    response.raw = MockRawResponse()
    # Give the Response some context.
    response.request = request
    response.connection = MockConnection()
    return response
def test_trace_requests_no_parent_span(monkeypatch):
    """Without a parent span, a traced GET creates exactly one root span."""
    resp = Response()
    resp.status_code = 200
    resp.url = URL
    # Patch the low-level send so no real HTTP traffic happens.
    monkeypatch.setattr('opentracing_utils.libs._requests.__requests_http_send',
                        assert_send_request_mock(resp))
    recorder = Recorder()
    t = BasicTracer(recorder=recorder)
    t.register_required_propagators()
    opentracing.tracer = t
    session = requests.Session()
    session.headers.update({CUSTOM_HEADER: CUSTOM_HEADER_VALUE})
    response = session.get(URL)
    assert len(recorder.spans) == 1
    # The lone span carries the standard HTTP/RPC client tags.
    assert recorder.spans[0].tags[tags.HTTP_STATUS_CODE] == resp.status_code
    assert recorder.spans[0].tags[tags.HTTP_URL] == URL
    assert recorder.spans[0].tags[tags.HTTP_METHOD] == 'GET'
    assert recorder.spans[0].tags[tags.SPAN_KIND] == tags.SPAN_KIND_RPC_CLIENT
    assert recorder.spans[0].tags[tags.PEER_HOSTNAME] == 'example.com'
    assert recorder.spans[0].tags['timeout'] is None
    assert recorder.spans[0].tags[tags.COMPONENT] == 'requests'
    assert recorder.spans[0].operation_name == '{}_get'.format(OPERATION_NAME_PREFIX)
    assert response.status_code == resp.status_code
def test_trace_requests_no_error_tag(monkeypatch):
    """With set_error_tag=False, a 400 response must NOT set the span's error tag."""
    resp = Response()
    resp.status_code = 400
    resp.url = URL
    trace_requests(set_error_tag=False)
    # Patch the low-level send so no real HTTP traffic happens.
    monkeypatch.setattr('opentracing_utils.libs._requests.__requests_http_send',
                        assert_send_request_mock(resp))
    recorder = Recorder()
    t = BasicTracer(recorder=recorder)
    t.register_required_propagators()
    opentracing.tracer = t
    top_span = opentracing.tracer.start_span(operation_name='top_span')
    with top_span:
        response = requests.get(URL, headers={CUSTOM_HEADER: CUSTOM_HEADER_VALUE})
    # top_span finishes on `with` exit, so both spans are now recorded.
    assert len(recorder.spans) == 2
    assert recorder.spans[0].context.trace_id == top_span.context.trace_id
    assert recorder.spans[0].parent_id == recorder.spans[-1].context.span_id
    assert response.status_code == resp.status_code
    assert recorder.spans[0].tags[tags.HTTP_STATUS_CODE] == resp.status_code
    assert recorder.spans[0].tags[tags.HTTP_URL] == URL
    assert recorder.spans[0].tags[tags.HTTP_METHOD] == 'GET'
    assert recorder.spans[0].tags[tags.SPAN_KIND] == tags.SPAN_KIND_RPC_CLIENT
    assert recorder.spans[0].tags[tags.PEER_HOSTNAME] == 'example.com'
    assert recorder.spans[0].tags['timeout'] is None
    assert recorder.spans[0].tags[tags.COMPONENT] == 'requests'
    assert recorder.spans[0].operation_name == '{}_get'.format(OPERATION_NAME_PREFIX)
    # The whole point of this test: no error tag despite the 400.
    assert 'error' not in recorder.spans[0].tags
def test_trace_requests_with_ignore_url_pattern_prune_kwargs(monkeypatch):
    """
    In case there is a parent span already, the ignore url pattern must still be
    respected
    """
    resp = Response()
    resp.status_code = 200
    resp.url = URL
    # Ignore any URL containing our test URL.
    trace_requests(ignore_url_patterns=[r".*{}.*".format(URL)])
    monkeypatch.setattr('opentracing_utils.libs._requests.__requests_http_send',
                        assert_send_request_mock_no_traces(resp))

    @trace()
    def f1():
        pass

    recorder = Recorder()
    t = BasicTracer(recorder=recorder)
    t.register_required_propagators()
    opentracing.tracer = t
    top_span = opentracing.tracer.start_span(operation_name='top_span')
    with top_span:
        response = requests.get(URL, headers={CUSTOM_HEADER: CUSTOM_HEADER_VALUE})
        f1()
    # Top span, and @trace for f1() create spans. With the ignore pattern
    # in place, the call to requests.get should not add a span
    assert len(recorder.spans) == 2
    assert recorder.spans[0].context.trace_id == top_span.context.trace_id
    assert recorder.spans[0].operation_name == 'f1'
    assert recorder.spans[1].operation_name == 'top_span'
    assert response.status_code == resp.status_code
def successful_github_response(url, *_args, **_kwargs):
    """Mock a successful GitHub releases-API response (two releases, one prerelease)."""
    r = Response()
    r.url = url
    r.status_code = 200
    r.reason = 'OK'
    r.headers = {
        'Content-Type': 'application/json; charset=utf-8',
    }
    # Raw JSON payload as the API would stream it.
    r.raw = BytesIO(b'''[
        {
            "html_url": "https://github.com/netbox-community/netbox/releases/tag/v2.7.8",
            "tag_name": "v2.7.8",
            "prerelease": false
        },
        {
            "html_url": "https://github.com/netbox-community/netbox/releases/tag/v2.6-beta1",
            "tag_name": "v2.6-beta1",
            "prerelease": true
        },
        {
            "html_url": "https://github.com/netbox-community/netbox/releases/tag/v2.5.9",
            "tag_name": "v2.5.9",
            "prerelease": false
        }
    ] ''')
    return r
def test_process_response_imgs(self):
    """A relative <img> src must be resolved against the host URL."""
    host_url = 'http://www.davidcmoss.co.uk/'
    content = "<img src='test.png' />"
    fake = Response()
    fake.url = host_url
    fake._content = content
    page = process_response(host_url, fake)
    assert host_url + 'test.png' in page.images
def test_process_response_ex_links(self):
    """An absolute anchor pointing off-site must land in ex_links."""
    host_url = 'http://www.davidcmoss.co.uk/'
    content = "<a href='http://google.com' />"
    fake = Response()
    fake.url = host_url
    fake._content = content
    page = process_response(host_url, fake)
    assert 'http://google.com' in page.ex_links
def test_process_response_links(self):
    """A site-relative anchor must be resolved against the host URL."""
    host_url = 'http://www.davidcmoss.co.uk/'
    content = "<a href='/test' />"
    fake = Response()
    fake.url = host_url
    fake._content = content
    page = process_response(host_url, fake)
    assert host_url + 'test' in page.links
def article(self):
    """Fixture: a Response carrying the contents of article.html."""
    resp = Response()
    resp.url = 'http://www.pariszigzag.fr/paris-insolite-secret/'\
        'les-plus-belles-boulangeries-de-paris'
    resp.encoding = 'utf8'
    # Binary read so _content holds raw bytes, as requests expects.
    with open('tests/fixtures/article.html', 'rb') as fd:
        resp._content = fd.read()
    return resp
def responses(code, path=None, redirection=None, headers=None):
    """Build a stub ``requests.Response``.

    :param code: HTTP status code.
    :param path: fixture file whose bytes become the raw body.
    :param redirection: final URL after a redirect; 'permanent' selects 301.
    :param headers: response headers; defaults to a text/xml content type.
    """
    # Fresh dict per call instead of a shared mutable default argument.
    if headers is None:
        headers = {'Content-Type': 'text/xml'}
    response = Response()
    response.status_code = code
    if path is not None:
        # Binary mode: BytesIO requires bytes; the old text-mode read
        # returned str and raised TypeError here on Python 3.
        with open(data_file(path), 'rb') as f:
            response.raw = BytesIO(f.read())
    if redirection is not None:
        # Record the intermediate redirect hop in history.
        temp = Response()
        temp.status_code = 301 if 'permanent' in redirection else 302
        temp.url = path
        response.history.append(temp)
        response.url = redirection
    response.headers = headers
    return response
def get_response(self, scheme='http:'):
    """Fixture: a 200 text/html Response for ``self.response_url`` under *scheme*."""
    resp = Response()
    resp.url = scheme + self.response_url
    resp.status_code = 200
    resp.encoding = 'utf8'
    resp.headers['content-type'] = 'text/html'
    # Text-mode read; _content is assigned via setattr to bypass the property.
    with open('tests/fixtures/article.html') as fd:
        setattr(resp, '_content', fd.read())
    return resp
def test_login_failed_no_bbdata(self, post_mock):
    """Login must fail with code 2 when the response carries no bb-data cookie."""
    response = Response()
    response.url = "http://tapochek.net/not_login"
    # Empty cookie jar: the tracker cannot retrieve its session cookie.
    response.cookies = {}
    post_mock.return_value = response
    with self.assertRaises(TapochekLoginFailedException) as e:
        self.tracker.login(self.helper.real_login, self.helper.real_password)
    self.assertEqual(2, e.exception.code)
    self.assertEqual("Failed to retrieve cookie", e.exception.message)
def test_check_download(self):
    """check_download maps content-type/status/url combinations to Status values."""
    plugin = RutorOrgPlugin()
    plugin.init(self.tracker_settings)
    # A 200 with a torrent content type is a valid download.
    response = Response()
    response.status_code = 200
    response.headers["Content-Type"] = "application/bittorrent"
    self.assertEqual(plugin.check_download(response), Status.Ok)
    # A 200 landing on the download endpoint without torrent data -> NotFound.
    response = Response()
    response.status_code = 200
    response.url = "http://rutor.info/d.php"
    self.assertEqual(plugin.check_download(response), Status.NotFound)
    # A server error -> Error.
    response = Response()
    response.status_code = 500
    response.url = "http://rutor.info/d.php"
    self.assertEqual(plugin.check_download(response), Status.Error)
def response2(self):
    """Fixture: a 200 text/html Response carrying article-2.html."""
    resp = Response()
    resp.status_code = 200
    resp.url = 'https://www.youtube.com/watch?v=scbrjaqM3Oc'
    resp.encoding = 'utf8'
    resp.headers['content-type'] = 'text/html'
    # Text-mode read; _content is assigned via setattr to bypass the property.
    with open('tests/fixtures/article-2.html') as fd:
        setattr(resp, '_content', fd.read())
    return resp
def test_AbsoluteUrl():
    """AbsoluteUrl resolves relative paths (and lists of them) against the response URL."""
    base = "http://httpbin.org/"
    resp = Response()
    resp.url = "http://httpbin.org/"
    resp.status_code = 200
    proc = AbsoluteUrl()
    # Single value and list inputs both resolve; the second tuple element
    # is the (empty) extra-context dict.
    assert proc("foo/bar", response=resp) == (f"{base}foo/bar", {})
    assert proc(["foo/bar", "gaz/har"], response=resp) == ([f"{base}foo/bar", f"{base}gaz/har"], {})
def request_mookup(*args, **kwargs):
    """Serve canned API responses from files under RESPONSE_ROOT.

    The stub file (named after the URL path and HTTP method) is executed and
    must define a ``responses`` dict keyed by the raw query string; missing
    files or keys yield a 404 with an explanatory reason.
    """
    method = kwargs["method"]
    url = kwargs["url"]
    if not url.startswith(SERVER + "/"):
        raise ValueError("URL [{}] does not start with [{}/].".format(
            url, SERVER))
    # Split the server-relative URL into path and query string.
    parts = url[len(SERVER) + 1:].split("?")
    url = parts[0]
    params = parts[1] if len(parts) > 1 else None
    response = Response()
    response.url = kwargs["url"]
    response_file = os.path.join(RESPONSE_ROOT, url, method)
    try:
        with open(response_file) as f:
            # Execute the stub file; it populates data["responses"].
            data = {"responses": {}, "__builtins__": {}}
            exec(f.read(), data)
            data = data["responses"][params]
        if type(data) is dict:
            if "status_code" in data:
                response.status_code = data.pop("status_code")
            else:
                response.status_code = 200
            # Extend the links with the server
            for item in [None, "owner", "project", "workspace"]:
                cur_dict = data if item is None else data.get(item, {})
                if "links" in cur_dict:
                    for link in cur_dict["links"].values():
                        try:
                            link["href"] = "{}/{}".format(
                                SERVER, link["href"])
                        except:
                            # Best effort: leave malformed links untouched.
                            pass
            if "next" in data:
                data["next"] = "{}/{}".format(SERVER, data["next"])
            response.encoding = "utf-8"
            response._content = bytes(json.dumps(data), response.encoding)
        else:
            # Non-dict stubs are served verbatim as the body.
            response.status_code = 200
            response._content = data
    except FileNotFoundError:
        response.encoding = "utf-8"
        response._content = b"{}"
        response.status_code = 404  # Not found
        response.reason = "No stub defined [{}]".format(response_file)
    except KeyError:
        response.encoding = "utf-8"
        response._content = b"{}"
        response.status_code = 404  # Not found
        response.reason = "No stub defined for param [{}]".format(params)
    except Exception as e:
        # Re-raise anything unexpected unchanged.
        raise e
    return response
def send(self, request, stream=None, timeout=None, verify=None, cert=None, proxies=None):
    """Serve an S3-channel request via the legacy boto library.

    Downloads the key to a temp file and streams it back; returns 404 when
    boto is missing, or when the bucket/key is absent or inaccessible.
    """
    resp = Response()
    resp.status_code = 200
    resp.url = request.url
    try:
        import boto
    except ImportError:
        stderrlog.info('\nError: boto is required for S3 channels. '
                       'Please install it with `conda install boto`\n'
                       'Make sure to run `source deactivate` if you '
                       'are in a conda environment.\n')
        resp.status_code = 404
        return resp
    conn = boto.connect_s3()
    bucket_name, key_string = url_to_s3_info(request.url)
    # Get the bucket without validation that it exists and that we have
    # permissions to list its contents.
    bucket = conn.get_bucket(bucket_name, validate=False)
    try:
        key = bucket.get_key(key_string)
    except boto.exception.S3ResponseError as exc:
        # This exception will occur if the bucket does not exist or if the
        # user does not have permission to list its contents.
        resp.status_code = 404
        resp.raw = exc
        return resp
    if key and key.exists:
        modified = key.last_modified
        content_type = key.content_type or "text/plain"
        resp.headers = CaseInsensitiveDict({
            "Content-Type": content_type,
            "Content-Length": key.size,
            "Last-Modified": modified,
        })
        # Stage the object in a temp file and hand its handle to the response.
        _, self._temp_file = mkstemp()
        key.get_contents_to_filename(self._temp_file)
        f = open(self._temp_file, 'rb')
        resp.raw = f
        resp.close = resp.raw.close
    else:
        resp.status_code = 404
    return resp
def responses(code, path=None, redirection=None, data=None, headers=None):
    """Build a stub ``requests.Response``.

    :param code: HTTP status code.
    :param path: fixture file served as the raw body (ignored when redirecting).
    :param redirection: final URL after a redirect; 'permanent' selects 301.
    :param data: literal body text (UTF-8 encoded) when no file is used.
    :param headers: response headers; defaults to a text/xml content type.
    """
    # Fresh dict per call: the previous `headers={...}` default was mutated
    # below (headers['location'] = path), leaking state across invocations.
    if headers is None:
        headers = {'Content-Type': 'text/xml'}
    response = Response()
    response.status_code = code
    if path is not None and redirection is None:
        with open(data_file(path), 'rb') as f:
            response.raw = BytesIO(f.read())
    elif data is not None:
        response._content = data.encode('utf-8')
    if redirection is not None:
        # Record the intermediate redirect hop in history.
        temp = Response()
        temp.status_code = 301 if 'permanent' in redirection else 302
        temp.url = path
        response.history.append(temp)
        response.url = redirection
        headers['location'] = path
    response.headers = headers
    return response
def build_response(self, req, resp):
    """Convert a Django test-client response *resp* into a ``requests.Response``."""
    response = Response()
    response.status_code = resp.status_code
    # Django responses iterate (header, value) pairs via .items().
    response.headers = CaseInsensitiveDict((k, v) for k, v in resp.items())
    response.encoding = get_encoding_from_headers(response.headers)
    response.raw = StringIO(resp.content)
    response.reason = None
    if isinstance(req.url, bytes):
        response.url = req.url.decode('utf-8')
    else:
        response.url = req.url
    # Convert from django's SimpleCookie to request's CookieJar
    cookiejar_from_dict(resp.cookies, response.cookies)
    # context
    response.request = req
    response.connection = self
    response = dispatch_hook('response', req.hooks, response)
    return response
def build_response(request, data, code, encoding):
    """Wrap ftplib output *data* in a ``requests.Response`` for *request*."""
    response = Response()
    response.encoding = encoding
    # Fill in some useful fields.
    raw = StringIO()
    raw.write(data)
    raw.seek(0)
    response.raw = raw
    response.url = request.url
    response.request = request
    # NOTE(review): *code* is assigned as-is; if callers pass the raw ftplib
    # reply string (e.g. "226 Transfer complete"), status_code comparisons
    # against ints will fail — confirm callers pass an int here.
    response.status_code = code
    # Run the response hook.
    response = dispatch_hook('response', request.hooks, response)
    return response
def build_response(request, data, code, encoding):
    '''Builds a response object from the data returned by ftplib, using the
    specified encoding.'''
    response = Response()
    response.encoding = encoding
    # Fill in some useful fields.
    response.raw = data
    response.url = request.url
    response.request = request
    # ftplib replies look like "226 Transfer complete"; keep only the numeric
    # part AND convert to int so `status_code == 200`-style comparisons work.
    # The previous code stored the str, and the sibling builder in this
    # project already converts with int().
    response.status_code = int(code.split()[0])
    # Make sure to seek the file-like raw object back to the start.
    response.raw.seek(0)
    # Run the response hook.
    response = dispatch_hook('response', request.hooks, response)
    return response
def fake_send(self, request):
    """Fake transport: decode url/reason/status/delay from the request URL itself."""
    # `parser` groups: (url, reason, status code, optional wait seconds).
    g = parser.match(request.url).groups()
    response = Response()
    response.url = g[0]
    response.reason = g[1]
    response.status_code = int(g[2])
    if g[3] is not None:
        wait = float(g[3])
    else:
        # Epsilon, since sleep is defined as "will wait at *least* as long as..."
        wait = defaultSendTime - 0.001
    sleep(wait)
    if response.status_code >= 600:
        # Special case for testing exception handling
        raise Exception('[%d] %s' % (
            response.status_code, response.reason
        ))
    return response
def build_response(request, data, code, encoding):
    '''Builds a response object from the data returned by ftplib, using the
    specified encoding.'''
    response = Response()
    response.encoding = encoding
    # Fill in some useful fields.
    response.raw = data
    response.url = request.url
    response.request = request
    # ftplib replies look like "226 Transfer complete"; keep the numeric part.
    response.status_code = int(code.split()[0])
    # Propagate a known body length when the raw object advertises one.
    if hasattr(data, "content_len"):
        response.headers['Content-Length'] = str(data.content_len)
    # Make sure to seek the file-like raw object back to the start.
    response.raw.seek(0)
    # Run the response hook.
    response = dispatch_hook('response', request.hooks, response)
    return response
def build_response(request, data, code, encoding):
    '''Builds a response object from the data returned by ftplib, using the
    specified encoding.'''
    response = Response()
    response.encoding = encoding
    # Fill in some useful fields.
    raw = StringIO()
    raw.write(data)
    raw.seek(0)
    response.raw = raw
    response.url = request.url
    response.request = request
    # NOTE(review): *code* is assigned as-is; confirm callers pass an int
    # rather than the raw ftplib reply string.
    response.status_code = code
    # Run the response hook.
    response = dispatch_hook('response', request.hooks, response)
    return response
def send(self, request, stream=False, timeout=None, **kwargs):
    """Serve an FTP request; streaming mode bridges ftplib's callback API to
    an iterator via a background thread and a bounded queue.

    Only the streaming path is implemented; non-streaming raises
    NotImplementedError.
    """
    parsed_url = urllib.parse.urlparse(request.url)
    file_path = parsed_url.path
    # Strip the leading slash, if present.
    if file_path.startswith('/'):
        file_path = file_path[1:]
    try:
        self.conn = self.get_connection(parsed_url.netloc, timeout)
    except ConnectionRefusedError as exc:
        # Wrap this in a requests exception.
        # in requests 2.2.1, ConnectionError does not take keyword args
        raise requests.exceptions.ConnectionError() from exc
    resp = Response()
    resp.url = request.url
    try:
        size = self.conn.size(file_path)
    except error_perm:
        # SIZE failing is treated as "file not found".
        resp.status_code = 404
        return resp
    if stream:
        # We have to do this in a background thread, since ftplib's and
        # requests' approaches are the opposite: ftplib is callback based,
        # and requests needs to expose an iterable. (Push vs pull)
        # When the queue size is reached, puts will block. This provides
        # some backpressure.
        queue = Queue(maxsize=100)
        done_sentinel = object()

        def handle_transfer():
            # Download all the chunks into a queue, then place a sentinel
            # object into it to signal completion.
            self.conn.retrbinary('RETR ' + file_path, queue.put)
            queue.put(done_sentinel)

        Thread(target=handle_transfer).start()

        # NOTE(review): this local generator shadows the `stream` parameter;
        # harmless here (the flag was already consumed) but confusing.
        def stream(amt=8192, decode_content=False):
            """A generator, yielding chunks from the queue."""
            # We maintain a buffer so the consumer gets exactly the number
            # of bytes requested.
            buffer = bytearray()
            while True:
                data = queue.get()
                if data is not done_sentinel:
                    buffer.extend(data)
                    if len(buffer) >= amt:
                        result = buffer[:amt]
                        buffer = buffer[amt:]
                        yield result
                else:
                    # Flush whatever remains, then finish the generator.
                    if buffer:
                        yield buffer
                    return

        # Minimal stand-in for urllib3's raw object: only .stream is needed.
        Raw = namedtuple('raw', 'stream')
        raw = Raw(stream)
        resp.status_code = 200
        resp.raw = raw
        resp.headers['content-length'] = size
        resp.close = lambda: self.conn.close()
        return resp
    else:
        # Not relevant for Ubuntu Make.
        raise NotImplementedError
def resolve_url(url, *args, **kwargs):
    """Return a minimal 200 ``requests.Response`` pointing at *url*."""
    fake = Response()
    fake.url = url
    fake.status_code = 200
    return fake
def send(self, request, **kwargs):
    """
    Wraps a file, described in request, in a Response object.

    :param request: The PreparedRequest` being "sent".
    :returns: a Response object containing the file
    """
    # Check that the method makes sense. Only support GET
    if request.method not in ("GET", "HEAD"):
        raise ValueError("Invalid request method %s" % request.method)
    # Parse the URL
    url_parts = urlparse(request.url)
    # Make the Windows URLs slightly nicer
    if is_win32 and url_parts.netloc.endswith(":"):
        url_parts = url_parts._replace(path="/" + url_parts.netloc + url_parts.path, netloc='')
    # Reject URLs with a hostname component
    if url_parts.netloc and url_parts.netloc not in ("localhost", ".", "..", "-"):
        raise ValueError("file: URLs with hostname components are not permitted")
    # If the path is relative update it to be absolute
    if url_parts.netloc in (".", ".."):
        pwd = os.path.abspath(url_parts.netloc).replace(os.sep, "/") + "/"
        if is_win32:
            # prefix the path with a / in Windows
            pwd = "/" + pwd
        url_parts = url_parts._replace(path=urljoin(pwd, url_parts.path.lstrip("/")))
    resp = Response()
    resp.url = request.url
    # Open the file, translate certain errors into HTTP responses
    # Use urllib's unquote to translate percent escapes into whatever
    # they actually need to be
    try:
        # If the netloc is - then read from stdin
        if url_parts.netloc == "-":
            if is_py3:
                resp.raw = sys.stdin.buffer
            else:
                resp.raw = sys.stdin
            # make a fake response URL, the current directory
            resp.url = "file://" + os.path.abspath(".").replace(os.sep, "/") + "/"
        else:
            # Split the path on / (the URL directory separator) and decode any
            # % escapes in the parts
            path_parts = [unquote(p) for p in url_parts.path.split('/')]
            # Strip out the leading empty parts created from the leading /'s
            while path_parts and not path_parts[0]:
                path_parts.pop(0)
            # If os.sep is in any of the parts, someone fed us some shenanigans.
            # Treat is like a missing file.
            if any(os.sep in p for p in path_parts):
                raise IOError(errno.ENOENT, os.strerror(errno.ENOENT))
            # Look for a drive component. If one is present, store it separately
            # so that a directory separator can correctly be added to the real
            # path, and remove any empty path parts between the drive and the path.
            # Assume that a part ending with : or | (legacy) is a drive.
            if path_parts and (path_parts[0].endswith('|') or path_parts[0].endswith(':')):
                path_drive = path_parts.pop(0)
                if path_drive.endswith('|'):
                    path_drive = path_drive[:-1] + ':'
                while path_parts and not path_parts[0]:
                    path_parts.pop(0)
            else:
                path_drive = ''
            # Try to put the path back together
            # Join the drive back in, and stick os.sep in front of the path to
            # make it absolute.
            path = path_drive + os.sep + os.path.join(*path_parts)
            # Check if the drive assumptions above were correct. If path_drive
            # is set, and os.path.splitdrive does not return a drive, it wasn't
            # really a drive. Put the path together again treating path_drive
            # as a normal path component.
            # NOTE(review): splitdrive() returns a 2-tuple, which is always
            # truthy, so this condition can never fire — likely missing [0];
            # confirm against upstream.
            if path_drive and not os.path.splitdrive(path):
                path = os.sep + os.path.join(path_drive, *path_parts)
            # Use io.open since we need to add a release_conn method, and
            # methods can't be added to file objects in python 2.
            resp.raw = io.open(path, "rb")
            resp.raw.release_conn = resp.raw.close
    except IOError as e:
        # Map filesystem errors onto HTTP status codes.
        if e.errno == errno.EACCES:
            resp.status_code = codes.forbidden
        elif e.errno == errno.ENOENT:
            resp.status_code = codes.not_found
        else:
            resp.status_code = codes.bad_request
        # Wrap the error message in a file-like object
        # The error message will be localized, try to convert the string
        # representation of the exception into a byte stream
        resp_str = str(e).encode(locale.getpreferredencoding(False))
        resp.raw = BytesIO(resp_str)
        resp.headers['Content-Length'] = len(resp_str)
        # Add release_conn to the BytesIO object
        resp.raw.release_conn = resp.raw.close
    else:
        resp.status_code = codes.ok
        # If it's a regular file, set the Content-Length
        resp_stat = os.fstat(resp.raw.fileno())
        if stat.S_ISREG(resp_stat.st_mode):
            resp.headers['Content-Length'] = resp_stat.st_size
    return resp
def send(
    self, request, stream=False, verify=None, cert=None, proxies=None,
    timeout=None
):
    """Serve a ``file://`` request directly from the local filesystem.

    Implements GET/HEAD for regular files, directories (an HTML index)
    and symlinks (a 302 redirect), plus PUT, POST and DELETE.  The
    ``stream``/``verify``/``cert``/``proxies``/``timeout`` parameters
    exist for ``requests`` adapter-interface compatibility; only
    ``stream`` is honored.

    Args:
        request: the prepared request whose ``url`` starts with ``file://``.

    Returns:
        Response: a synthesized HTTP-style response.

    Raises:
        InvalidURL: if the URL has no file name or no ``/`` separator.
    """
    fname = url_unquote(request.url[len('file://'):])
    if not fname:
        raise InvalidURL('missing file name')
    if '/' not in fname:
        raise InvalidURL(
            'hostname without filename (perhaps missing a /?)'
        )
    host, fname = fname.split('/', 1)
    fname = self.resolve_host(host, fname)
    response = Response()
    response.url = request.url
    response.headers['Date'] = formatdate(timeval=None, localtime=True)
    try:
        if request.method in ('GET', 'HEAD'):
            statdata = stat(fname)
            # BUG FIX: the ETag's opening double-quote was never closed,
            # producing an invalid entity-tag token.
            etag = '"%s/%s/%s"' % (
                statdata.st_dev, statdata.st_ino, statdata.st_mtime
            )
            # NOTE(review): stat() follows symlinks, so S_ISLNK can never
            # be true here; the original likely intended lstat().  Left
            # unchanged to preserve behavior — confirm before fixing.
            if S_ISLNK(statdata.st_mode):
                # handle relative symlinks!
                target_file = abspath(readlink(fname))
                response.status_code = 302
                response.headers['Status'] = '302 Found'
                response.headers['Location'] = \
                    url_quote('file://' + target_file)
            elif S_ISDIR(statdata.st_mode):
                response.status_code = 200
                response.headers['Status'] = '200 Ok'
                body = (
                    """<html><head><title>%s</title></head><body><ul>"""
                    % fname
                )
                for subfname in sorted(listdir(fname)):
                    # BUG FIX: every entry displayed the directory name
                    # (fname) as its link text instead of its own name.
                    # NOTE(review): the href is still a bare filename
                    # without the directory path — confirm intent.
                    body += '<li><a href="file://' + \
                        url_quote(subfname) + '">' + \
                        html_escape(subfname) + '</a></li>'
                # BUG FIX: close the <ul> opened above.
                body += '</ul></body></html>'
                response.headers['ETag'] = 'W/' + etag
                response.raw = StringIO(body)
            elif S_ISREG(statdata.st_mode):
                response.status_code = 200
                response.headers['Content-Length'] = statdata.st_size
                response.headers['Last-Modified'] = formatdate(
                    timeval=statdata.st_mtime, localtime=True
                )
                mt, enc = guess_mime_type(request.url, strict=False)
                if mt is None:
                    mt = 'application/octet-stream'
                if enc is not None:
                    response.headers['Content-Encoding'] = enc
                response.headers['Content-Type'] = mt
                response.headers['ETag'] = etag
                if request.method == 'GET':
                    # BUG FIX: open in binary mode; text mode corrupts
                    # binary payloads and can disagree with the
                    # byte-based Content-Length set above.
                    response.raw = open(fname, 'rb')
            else:
                # Special files (sockets, devices, ...) are not served.
                response.status_code = 500
                response.headers['Status'] = '500 Internal Server Error'
        elif request.method == 'PUT':
            # BUG FIX: use a context manager so the handle is closed
            # deterministically instead of leaking.
            with open(fname, 'w') as f:
                f.write(request.body)  # FIXME: Is this right?
            response.status_code = 200
            response.headers['Status'] = '200 Ok'
        elif request.method == 'POST':
            if exists(fname):  # FIXME: Is this right?
                response.status_code = 409
                response.headers['Status'] = '409 Conflict'
            else:
                with open(fname, 'w') as f:
                    f.write(request.body)
                # BUG FIX: the success path never set a status code,
                # leaving status_code unset (None).
                response.status_code = 200
                response.headers['Status'] = '200 Ok'
        elif request.method == 'DELETE':
            unlink(fname)
            response.status_code = 200
            response.headers['Status'] = '200 Ok'
        else:
            response.status_code = 405
            response.headers['Status'] = '405 Method Not Allowed'
    except OSError as e:
        if e.errno == errno.ENOENT:
            # A DELETE of a missing file is reported as already gone.
            if request.method == 'DELETE':
                response.status_code = 410
                response.headers['Status'] = '410 Gone'
            else:
                response.status_code = 404
                response.headers['Status'] = '404 Not Found'
        elif e.errno == errno.EISDIR:
            response.status_code = 405
            response.headers['Status'] = '405 Method Not Allowed'
            response.raw = StringIO(
                'Cannot %r a directory...' % request.method
            )
        elif e.errno == errno.EACCES:
            response.status_code = 403
            response.headers['Status'] = '403 Forbidden'
        else:
            response.status_code = 500
            response.headers['Status'] = '500 Internal Server Error'
            response.raw = StringIO('OSError: ' + strerror(e.errno))
    except Exception:
        # Any other failure becomes a 500 carrying the traceback.
        response.status_code = 500
        response.headers['Status'] = '500 Internal Server Error'
        response.raw = StringIO(format_exc())
    # context
    response.request = request
    response.connection = self
    # hooks
    response = dispatch_hook('response', request.hooks, response)
    # streaming: force the body to be consumed when not streaming
    if not stream:
        response.content
    return response
def send(self, request, stream=False, verify=None, cert=None, proxies=None,
         timeout=None):
    """Serve a ``data:`` URL request (RFC 2397).

    Parses the media type, optional ``charset=`` parameter and optional
    ``base64`` marker, then answers GET/HEAD with the inline payload.
    The ``stream``/``verify``/``cert``/``proxies``/``timeout`` parameters
    exist for ``requests`` adapter-interface compatibility; only
    ``stream`` is honored.

    Args:
        request: the prepared request whose ``url`` starts with ``data:``.

    Returns:
        Response: a synthesized HTTP-style response.

    Raises:
        InvalidURL: if the URL lacks a comma or has an unrecognized
            media-type parameter.
    """
    data = url_unquote(request.url[len('data:'):])
    if ',' not in data:
        raise InvalidURL('data URL missing comma')
    mime, content = data.split(',', 1)
    content = content.strip()
    base64 = False
    charset = None
    # Peel parameters off the media type right-to-left.
    while ';' in mime:
        mime, encoding_spec = mime.rsplit(';', 1)
        encoding_spec = encoding_spec.strip()
        if encoding_spec == 'base64':
            base64 = True
        elif not encoding_spec.startswith('charset='):
            raise InvalidURL(
                'unrecognized encoding parameter: %r' % encoding_spec
            )
        else:
            charset = encoding_spec[len('charset='):]
    # BUG FIX: the response must exist before entering the try block.
    # Previously a decode failure (e.g. malformed base64) raised before
    # `response` was bound, and the except clause then crashed with a
    # NameError instead of returning a 500.
    response = Response()
    response.url = request.url
    response.headers['Date'] = formatdate(timeval=None, localtime=True)
    try:
        if base64:
            content = a2b_base64(content)
        content_type = mime.strip()
        if charset:
            content_type += "; charset=" + charset
        if request.method in ('GET', 'HEAD'):
            response.status_code = 200
            response.headers['Content-Length'] = len(content)
            response.headers['Last-Modified'] = formatdate()
            response.headers['Content-Type'] = content_type
            if charset:
                response.encoding = charset
            if isinstance(content, bytes):
                # BUG FIX: str(bytes) embeds the b'...' repr, corrupting
                # base64-decoded payloads; keep bytes intact.
                from io import BytesIO
                response.raw = BytesIO(content)
            else:
                response.raw = StringIO(content)
        else:
            response.status_code = 405
            response.headers['Status'] = '405 Method Not Allowed'
    except Exception:
        response.status_code = 500
        response.headers['Status'] = '500 Internal Server Error'
        response.raw = StringIO(format_exc())
    # context
    response.request = request
    response.connection = self
    # hooks
    response = dispatch_hook('response', request.hooks, response)
    # streaming: force the body to be consumed when not streaming
    if not stream:
        response.content
    return response