def start(self, response):
    """
    Initiate and return a stream for `response` body with progress
    callback attached. Can be called only once.

    :param response: Initiated response object with headers already fetched
    :type response: requests.models.Response

    :return: RawStream, output_file

    """
    assert not self.status.time_started

    try:
        total_size = int(response.headers['Content-Length'])
    except (KeyError, ValueError, TypeError):
        total_size = None

    if self._output_file:
        if self._resume and response.status_code == PARTIAL_CONTENT:
            total_size = parse_content_range(
                response.headers.get('Content-Range'),
                self._resumed_from)
        else:
            self._resumed_from = 0
            try:
                self._output_file.seek(0)
                self._output_file.truncate()
            except IOError:
                pass  # stdout
    else:
        # TODO: Should the filename be taken from response.history[0].url?
        # Output file not specified. Pick a name that doesn't exist yet.
        fn = None
        if 'Content-Disposition' in response.headers:
            fn = filename_from_content_disposition(
                response.headers['Content-Disposition'])
        if not fn:
            fn = filename_from_url(
                url=response.url,
                content_type=response.headers.get('Content-Type'),
            )
        self._output_file = open(get_unique_filename(fn), mode='a+b')

    self.status.started(
        resumed_from=self._resumed_from,
        total_size=total_size
    )

    stream = RawStream(
        msg=HTTPResponse(response),
        with_headers=False,
        with_body=True,
        on_body_chunk_downloaded=self.chunk_downloaded,
        chunk_size=1024 * 8
    )

    self._progress_reporter.output.write(
        'Downloading %sto "%s"\n' % (
            (humanize_bytes(total_size) + ' '
             if total_size is not None
             else ''),
            self._output_file.name
        )
    )
    self._progress_reporter.start()

    return stream, self._output_file
def start(
    self,
    initial_url: str,
    final_response: requests.Response
) -> Tuple[RawStream, IO]:
    """
    Initiate and return a stream for `response` body with progress
    callback attached. Can be called only once.

    :param initial_url: The original requested URL
    :param final_response: Initiated response object with headers already fetched

    :return: RawStream, output_file

    """
    assert not self.status.time_started

    # FIXME: some servers still might send Content-Encoding: gzip
    # <https://github.com/httpie/httpie/issues/423>
    try:
        total_size = int(final_response.headers['Content-Length'])
    except (KeyError, ValueError, TypeError):
        total_size = None

    if not self._output_file:
        self._output_file = self._get_output_file_from_response(
            initial_url=initial_url,
            final_response=final_response,
        )
    else:
        # `--output, -o` provided
        if self._resume and final_response.status_code == PARTIAL_CONTENT:
            total_size = parse_content_range(
                final_response.headers.get('Content-Range'),
                self._resumed_from)
        else:
            self._resumed_from = 0
            try:
                self._output_file.seek(0)
                self._output_file.truncate()
            except IOError:
                pass  # stdout

    self.status.started(
        resumed_from=self._resumed_from,
        total_size=total_size
    )

    stream = RawStream(
        msg=HTTPResponse(final_response),
        with_headers=False,
        with_body=True,
        on_body_chunk_downloaded=self.chunk_downloaded,
        chunk_size=1024 * 8
    )

    self._progress_reporter.output.write(
        'Downloading %sto "%s"\n' % (
            (humanize_bytes(total_size) + ' '
             if total_size is not None
             else ''),
            self._output_file.name
        )
    )
    self._progress_reporter.start()

    return stream, self._output_file
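A minimal sketch of how a caller might drive the newer start() signature above, assuming a ready Downloader instance; the names download_to_file and url are illustrative, and the write loop relies on RawStream being an iterable of bytes chunks:

import requests


def download_to_file(downloader, url: str) -> None:
    # Hypothetical driver, not from the source. stream=True defers the
    # body so that RawStream can pull it chunk by chunk.
    final_response = requests.get(url, stream=True)
    stream, output_file = downloader.start(
        initial_url=url,
        final_response=final_response,
    )
    # Each yielded chunk has already passed through chunk_downloaded(),
    # so the progress reporter stays up to date as we write.
    for chunk in stream:
        output_file.write(chunk)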
def build_output_stream(
    args: argparse.Namespace,
    env: Environment,
    request: requests.Request,
    response: requests.Response,
    output_options: str
) -> Iterable[bytes]:
    """Build and return a chain of iterators over the `request`-`response`
    exchange each of which yields `bytes` chunks.

    """
    req_h = OUT_REQ_HEAD in output_options
    req_b = OUT_REQ_BODY in output_options
    resp_h = OUT_RESP_HEAD in output_options
    resp_b = OUT_RESP_BODY in output_options
    req = req_h or req_b
    resp = resp_h or resp_b

    output = []
    stream_class, stream_kwargs = get_stream_type_and_kwargs(env=env, args=args)

    if req:
        output.append(
            stream_class(
                msg=HTTPRequest(request),
                with_headers=req_h,
                with_body=req_b,
                **stream_kwargs,
            ))

    if req_b and resp:
        # Request/Response separator.
        output.append([b'\n\n'])

    if resp:
        output.append(
            stream_class(
                msg=HTTPResponse(response),
                with_headers=resp_h,
                with_body=resp_b,
                **stream_kwargs,
            ))

    if env.stdout_isatty and resp_b:
        # Ensure a blank line after the response body.
        # For terminal output only.
        output.append([b'\n\n'])

    return chain(*output)
def build_output_stream(args, env, request, response):
    """Build and return a chain of iterators over the `request`-`response`
    exchange each of which yields `bytes` chunks.

    """
    req_h = OUT_REQ_HEAD in args.output_options
    req_b = OUT_REQ_BODY in args.output_options
    resp_h = OUT_RESP_HEAD in args.output_options
    resp_b = OUT_RESP_BODY in args.output_options
    req = req_h or req_b
    resp = resp_h or resp_b

    output = []
    Stream = get_stream_type(env, args)

    if req:
        output.append(Stream(
            msg=HTTPRequest(request),
            with_headers=req_h,
            with_body=req_b))

    if req_b and resp:
        # Request/Response separator.
        output.append([b'\n\n'])

    if resp:
        output.append(Stream(
            msg=HTTPResponse(response),
            with_headers=resp_h,
            with_body=resp_b))

    if env.stdout_isatty and resp_b:
        # Ensure a blank line after the response body.
        # For terminal output only.
        output.append([b'\n\n'])

    return chain(*output)
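In both variants the caller consumes the returned chain by writing its chunks to some destination; a sketch under the assumption that the destination is a binary file object (write_stream here is illustrative, not the source's own helper):

from typing import IO, Iterable


def write_stream(stream: Iterable[bytes], outfile: IO[bytes]) -> None:
    # Illustrative consumer: the chain yields request output, an optional
    # b'\n\n' separator, then response output; flushing per chunk keeps
    # the output incremental for streamed bodies.
    for chunk in stream:
        outfile.write(chunk)
        outfile.flush()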
from django.http import HttpResponse
from django.template import loader

from .models import EventsList  # assumed app-local models module


def getEventList(request):
    # The original filtered on the builtin `id`, which is a bug; the faculty
    # id is assumed here to arrive as a query parameter (e.g. ?facultyid=42).
    faculty_id = request.GET.get('facultyid')
    events1 = EventsList.objects.filter(facultyid=faculty_id)
    events2 = EventsList.objects.all()
    template = loader.get_template('EventList.html')
    result = template.render(context={"mylist": events1, "list": events2})
    # Django's response class is HttpResponse (the original used the
    # wrong name, HTTPResponse).
    return HttpResponse(result)
def homepage(request):
    template = loader.get_template('HomePage.html')
    result = template.render()
    # Return the rendered string via Django's HttpResponse; the original
    # returned HTTPResponse(template) -- the wrong name and the wrong object.
    return HttpResponse(result)
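A minimal urls.py sketch wiring up the two Django views above; the module path and route names are assumptions, not from the source:

# urls.py (hypothetical app-level URLconf)
from django.urls import path

from . import views

urlpatterns = [
    path('', views.homepage, name='homepage'),
    path('events/', views.getEventList, name='event-list'),
]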