def frontend_development(request):
    """
    View for the main frontend page in development mode: proxy the request
    to the React development server at ``REACT_HOST:REACT_PORT``.

    HTML responses are rendered through the Django template engine so any
    template tags in the proxied page are expanded; everything else is
    streamed through unchanged in 4 KiB chunks.

    :param request: incoming HttpRequest to forward upstream.
    :return: HttpResponse (rendered HTML) or StreamingHttpResponse.
    """
    start_time = default_timer()
    logger.info("frontend_dev PROXY {}".format(request.path))
    upstream_url = "http://{}:{}{}".format(
        settings.REACT_HOST, settings.REACT_PORT, request.path)
    method = request.META["REQUEST_METHOD"].lower()
    response = getattr(requests, method)(upstream_url, stream=True)
    # BUG FIX: headers.get() returns None when the upstream response has no
    # Content-Type header, which crashed on .startswith(); default to "".
    content_type = response.headers.get("Content-Type") or ""
    logger.info("frontend_dev PROXY_RECV ({:.2f}s): {}".format(
        default_timer() - start_time, request.path))
    if content_type.startswith("text/html"):
        return http.HttpResponse(
            content=engines["django"].from_string(
                response.text).render(request=request),
            status=response.status_code,
            reason=response.reason,
        )
    else:
        return http.StreamingHttpResponse(
            streaming_content=response.iter_content(2 ** 12),
            content_type=content_type,
            status=response.status_code,
            reason=response.reason,
        )
def streaming_response(src):
    """Wrap *src* in a server-sent-events StreamingHttpResponse.

    Content-Encoding is pinned to 'identity' so GzipMiddleware does not
    attempt to gzip the (unbounded) stream.
    """
    resp = http.StreamingHttpResponse(src, content_type='text/event-stream')
    resp['Content-Encoding'] = 'identity'
    return resp
def object_download(request, container_name, object_path):
    """Stream a Swift object back to the browser as a file attachment.

    Falls back to a buffered HttpResponse on Django < 1.5, which lacks
    StreamingHttpResponse.
    """
    try:
        obj = api.swift.swift_get_object(request, container_name, object_path,
                                         resp_chunk_size=swift.CHUNK_SIZE)
    except Exception:
        redirect = reverse("horizon:project:containers:index")
        exceptions.handle(request, _("Unable to retrieve object."),
                          redirect=redirect)

    # Re-attach the original file extension when the stored object name
    # dropped it.
    filename = object_path.rsplit(swift.FOLDER_DELIMITER)[-1]
    if not os.path.splitext(obj.name)[1] and obj.orig_name:
        extension = os.path.splitext(obj.orig_name)[1]
        filename = "%s%s" % (filename, extension)

    # NOTE(tsufiev): StreamingHttpResponse class had been introduced in
    # Django 1.5 specifically for the purpose streaming and/or transferring
    # large files, it's less fragile than standard HttpResponse and should
    # be used when available.
    if django.VERSION >= (1, 5):
        response = http.StreamingHttpResponse(obj.data)
    else:
        response = http.HttpResponse(obj.data)

    safe_name = filename.replace(",", "").encode('utf-8')
    response['Content-Disposition'] = 'attachment; filename="%s"' % safe_name
    response['Content-Type'] = 'application/octet-stream'
    response['Content-Length'] = obj.bytes
    return response
def test_process_response_uses_callback_streaming(self):
    """The JSONP middleware must wrap streaming content in the callback guard."""
    request = self.factory.get("/dummy_url", {"callback": "xyz"})
    streaming = http.StreamingHttpResponse("blah")
    result = self.middleware.process_response(request, streaming)
    body = b''.join(result.streaming_content)
    self.assertEqual(body, b"typeof xyz === 'function' && xyz(blah)")
def export_js_localized_strings(request):
    """
    Export all unlocalized JS strings as a downloadable file.

    Streams a generated ``strings_<timestamp>.js`` attachment; on failure
    renders the 'fail.html' page with the error message.
    """
    response = http.HttpResponseNotModified()
    tmp_file = None
    try:
        # Write the strings to a temporary file that is then streamed back.
        # (Renamed from `file`, which shadowed the builtin.)
        tmp_file = tempfile.TemporaryFile()
        strings = utils.all_unlocalized_js_strings(True)
        for s in strings:
            tmp_file.write('"%s": "",\n' % s)
        tmp_file.seek(0)
        filename = time.strftime("strings_%Y%m%d_%H%M%S.js", time.localtime())
        response = http.StreamingHttpResponse(utils.file_iterator(tmp_file))
        response['Content-Type'] = 'application/octet-stream'
        response['Content-Disposition'] = 'attachment;filename="%s"' % filename
    except Exception as e:  # BUG FIX: "except Exception, e" is Python 2-only syntax
        message = "Can't export game data: %s" % e
        logger.log_tracemsg(message)
        if tmp_file is not None:  # guard: TemporaryFile() itself may have failed
            tmp_file.close()
        return render(request, 'fail.html', {"message": message})
    # Return the streaming response on success.
    return response
def RenderBinaryDownload(request):
    """Basic handler to allow downloads of aff4:/config/executables files."""
    # Gate legacy renderers behind an explicit permission check when the
    # auth manager is configured.
    if (LEGACY_RENDERERS_AUTH_MANAGER and
            not LEGACY_RENDERERS_AUTH_MANAGER.CheckPermissions(
                request.user, "legacy_renderers")):
        return AccessDenied("User is not allowed to use legacy renderers.")

    # Split "<prefix>/<path...>/<filename>" out of the request path.
    path, filename = request.path.split("/", 2)[-1].rsplit("/", 1)
    if not path or not filename:
        return AccessDenied("Error: Invalid path.")
    request.REQ = request.REQUEST

    def Generator():
        # NOTE: aff4_path is bound lazily when the response body is
        # iterated, so the assignment *below* this def is what the closure
        # actually reads — do not reorder.
        with aff4.FACTORY.Open(aff4_path, aff4_type="GRRSignedBlob",
                               token=BuildToken(request, 60)) as fd:
            while True:
                data = fd.Read(1000000)  # stream in ~1 MB chunks
                if not data:
                    break
                yield data

    base_path = rdfvalue.RDFURN("aff4:/config/executables")
    aff4_path = base_path.Add(path).Add(filename)
    if not aff4_path.RelativeName(base_path):
        # Check for path traversals.
        return AccessDenied("Error: Invalid path.")
    filename = aff4_path.Basename()
    response = http.StreamingHttpResponse(streaming_content=Generator(),
                                          content_type="binary/octet-stream")
    response["Content-Disposition"] = ("attachment; filename=%s" % filename)
    return response
def object_download(request, container_name, object_path):
    """Stream a Swift object to the client as a downloadable attachment."""
    try:
        obj = api.swift.swift_get_object(request, container_name, object_path,
                                         resp_chunk_size=swift.CHUNK_SIZE)
    except Exception:
        exceptions.handle(
            request, _("Unable to retrieve object."),
            redirect=reverse("horizon:project:containers:index"))

    # Restore the original extension if the stored object name dropped it.
    filename = object_path.rsplit(swift.FOLDER_DELIMITER)[-1]
    if not os.path.splitext(obj.name)[1] and obj.orig_name:
        filename += os.path.splitext(obj.orig_name)[1]

    response = http.StreamingHttpResponse(obj.data)
    safe_name = filename.replace(",", "")
    if six.PY2:
        # Header values must be byte strings on Python 2.
        safe_name = safe_name.encode('utf-8')
    response['Content-Disposition'] = 'attachment; filename="%s"' % safe_name
    response['Content-Type'] = 'application/octet-stream'
    response['Content-Length'] = obj.bytes
    return response
def download_merged_pdf(obj, files):
    """
    Returns a HTTPResponse that contains all PDF files merged into a single
    PDF file, prefixed with a generated summary page.

    :param obj: the object whose files are exported (provides type,
        reference and revision for the attachment name).
    :param files: iterable of document files, each with a ``.file.path``.
    """
    warnings.simplefilter('ignore', DeprecationWarning)
    filename = u"%s_%s_%s_files.pdf" % (obj.type, obj.reference, obj.revision)
    output = StreamedPdfFileWriter()

    # Generate a summary page from the summary.xhtml template and prepend it.
    ctx = {
        "obj": obj,
        "files": files,
        "state_histories": get_state_histories(obj),
    }
    template = get_template("summary.xhtml")
    html = template.render(Context(ctx))
    result = StringIO.StringIO()
    pisa.pisaDocument(StringIO.StringIO(html.encode("utf-16")), result)
    result.seek(0)
    inp = PdfFileReader(result)
    for page in inp.pages:
        output.addPage(page)

    # Append all pdfs.
    for pdf_file in files:
        # BUG FIX: use open() instead of the Python-2-only builtin file().
        # The handle is intentionally left open: PdfFileReader reads page
        # content lazily while the response streams.
        inp = PdfFileReader(open(pdf_file.file.path, "rb"))
        for page in inp.pages:
            output.addPage(page)

    response = http.StreamingHttpResponse(output, content_type='application/pdf')
    response['Content-Disposition'] = 'attachment; filename="%s"' % filename
    warnings.simplefilter('default', DeprecationWarning)
    return response
def output_json(out, code=200):
    """Serialize *out* as JSON with CORS and long-lived cache headers.

    dicts go through JsonResponse; any other serializable object is
    streamed via the encoder's iterencode() to avoid materializing the
    whole document.

    :param out: the object to serialize.
    :param code: HTTP status code; non-200 codes are echoed into the body.
    """
    if code != 200:
        # NOTE(review): assumes non-200 payloads are always dicts — confirm
        # against callers before passing lists here with an error code.
        out['code'] = code
    indent = None
    if settings.DEBUG:
        if isinstance(out, dict):
            out['debug_db_queries'] = connection.queries
        indent = 4
    json_dumps_params = {'ensure_ascii': False, 'indent': indent}
    # FIX: use isinstance() consistently (was `type(out) is dict`), so dict
    # subclasses take the JsonResponse path exactly like the DEBUG check above.
    if isinstance(out, dict):
        response = http.JsonResponse(
            out, status=code, encoder=GEOS_JSONEncoder,
            json_dumps_params=json_dumps_params)
    else:
        encoder = GEOS_JSONEncoder(**json_dumps_params)
        content = encoder.iterencode(out)
        response = http.StreamingHttpResponse(
            streaming_content=content,
            content_type='application/json',
            status=code)
    response['Cache-Control'] = 'max-age=2419200'  # 4 weeks
    response['Access-Control-Allow-Origin'] = '*'
    return response
def index_dev(request, upstream='http://my_app_frontend:3000'):
    """
    Proxy HTTP requests to the frontend dev server in development.

    The implementation is very basic e.g. it doesn't handle HTTP headers.

    TODO this method does not work with SSL right now, so I'm having to use
    waitress to serve the fully-built react-frontend.
    """
    method = request.META['REQUEST_METHOD'].lower()
    upstream_response = getattr(requests, method)(upstream + request.path,
                                                  stream=True)
    content_type = upstream_response.headers.get('Content-Type')

    # WebSocket upgrades cannot be proxied by this view.
    if request.META.get('HTTP_UPGRADE', '').lower() == 'websocket':
        return http.HttpResponse(
            content="WebSocket connections aren't supported",
            status=501,
            reason='Not Implemented')

    if content_type == 'text/html; charset=UTF-8':
        # Render HTML through the Django template engine.
        rendered = engines['django'].from_string(upstream_response.text).render()
        return http.HttpResponse(
            content=rendered,
            status=upstream_response.status_code,
            reason=upstream_response.reason,
        )

    # Everything else is streamed through unchanged in 4 KiB chunks.
    return http.StreamingHttpResponse(
        streaming_content=upstream_response.iter_content(2**12),
        content_type=content_type,
        status=upstream_response.status_code,
        reason=upstream_response.reason,
    )
def catchall_dev(request, context=None):
    """Proxy HTTP requests to the frontend dev server in development.

    The implementation is very basic e.g. it doesn't handle HTTP headers.
    """
    # URL to the development webpack server, used to redirect front-end requests.
    UPSTREAM = "http://localhost:3000"

    # Hand WebSocket upgrade requests straight to the webpack server.
    if request.META.get("HTTP_UPGRADE", "").lower() == "websocket":
        return http.HttpResponseRedirect(UPSTREAM + request.path)

    method = request.META["REQUEST_METHOD"].lower()
    upstream_response = getattr(requests, method)(UPSTREAM + request.path,
                                                  stream=True)
    content_type = upstream_response.headers.get("Content-Type")

    if content_type == "text/html; charset=UTF-8":
        # Run HTML through the Jinja2 engine so template syntax in the dev
        # server's output is expanded with the request/context.
        body = engines["jinja2"].from_string(upstream_response.text).render(
            request=request, context=context)
        return http.HttpResponse(
            content=body,
            status=upstream_response.status_code,
            reason=upstream_response.reason,
        )

    return http.StreamingHttpResponse(
        streaming_content=upstream_response.iter_content(2**12),
        content_type=content_type,
        status=upstream_response.status_code,
        reason=upstream_response.reason,
    )
def catchall_dev(request, upstream='http://localhost:3000'):
    """
    Proxy HTTP requests to the frontend dev server in development.

    The implementation is very basic e.g. it doesn't handle HTTP headers.
    """
    method = request.META['REQUEST_METHOD'].lower()
    upstream_response = getattr(requests, method)(upstream + request.path,
                                                  stream=True)
    content_type = upstream_response.headers.get('Content-Type')

    # WebSocket upgrades cannot be proxied by this view.
    if request.META.get('HTTP_UPGRADE', '').lower() == 'websocket':
        return http.HttpResponse(
            content="WebSocket connections aren't supported",
            status=501,
            reason="Not Implemented")

    if content_type == 'text/html; charset=UTF-8':
        # Render HTML through the Django template engine.
        rendered = engines['django'].from_string(upstream_response.text).render()
        return http.HttpResponse(
            content=rendered,
            status=upstream_response.status_code,
            reason=upstream_response.reason,
        )

    # Everything else is streamed through unchanged in 4 KiB chunks.
    return http.StreamingHttpResponse(
        streaming_content=upstream_response.iter_content(2**12),
        content_type=content_type,
        status=upstream_response.status_code,
        reason=upstream_response.reason,
    )
def export_file(request):
    """
    Export game world files as a zip attachment.

    On failure, renders 'fail.html' with the error message instead.
    """
    def file_iterator(src, chunk_size=512):
        # Stream the file in chunks; close the temp file (which deletes it)
        # once it is exhausted. (Param renamed from `file`, which shadowed
        # the builtin.)
        while True:
            c = src.read(chunk_size)
            if c:
                yield c
            else:
                src.close()
                break

    response = http.HttpResponseNotModified()
    # get data's zip (renamed from `zipfile`, which shadowed the stdlib module)
    zip_file = None
    try:
        zip_file = tempfile.TemporaryFile()
        exporter.export_zip_all(zip_file)
        zip_file.seek(0)
        filename = time.strftime("worlddata_%Y%m%d_%H%M%S.zip", time.localtime())
        response = http.StreamingHttpResponse(file_iterator(zip_file))
        response['Content-Type'] = 'application/octet-stream'
        response['Content-Disposition'] = 'attachment;filename="%s"' % filename
    except Exception as e:  # BUG FIX: "except Exception, e" is Python 2-only syntax
        if zip_file is not None:  # guard: TemporaryFile() itself may have failed
            zip_file.close()
        message = "Can't export world: %s" % e
        logger.log_tracemsg(message)
        return render(request, 'fail.html', {"message": message})
    # Return the streaming response on success.
    return response
def mjpeg(request, source='vid1'):
    """Serve an MJPEG stream assembled from the files in static/<source>/.

    The folder's settings.txt selects the mode: "static" cycles the image
    files in the folder, "dynamic" follows a live stream.
    """
    path = settings.BASE_DIR + '/static/' + source + '/'
    if not isdir(path):
        raise http.Http404("Stream does not exist.")
    if not isfile(join(path, "settings.txt")):
        raise http.Http404("Not a stream folder")

    with open(settings.BASE_DIR + '/static/' + source + '/settings.txt', "r") as f:
        option = f.readline()

    if option.startswith("static"):
        frames = [name for name in listdir(path)
                  if isfile(join(path, name)) and not name.endswith('.txt')]
        if not frames:
            raise http.Http404("No files in folder")
        stream = creategenerator_mjpegstream(frames, source)
    elif option.startswith("dynamic"):
        stream = creategenerator_mjpeglivestream(path, source)
    else:
        return http.HttpResponseServerError()

    return http.StreamingHttpResponse(
        stream,
        content_type='multipart/x-mixed-replace;boundary=myboundary')
def export_game_data(request):
    """
    Export game world files as a zip attachment.

    The optional ``file_type`` GET parameter is forwarded to the exporter.
    On failure, renders 'fail.html' with the error message instead.
    """
    response = http.HttpResponseNotModified()
    file_type = request.GET.get("file_type", None)
    # get data's zip (renamed from `zipfile`, which shadowed the stdlib module)
    zip_file = None
    try:
        zip_file = tempfile.TemporaryFile()
        exporter.export_zip_all(zip_file, file_type)
        zip_file.seek(0)
        filename = time.strftime("worlddata_%Y%m%d_%H%M%S.zip", time.localtime())
        response = http.StreamingHttpResponse(file_iterator(zip_file))
        response['Content-Type'] = 'application/octet-stream'
        response['Content-Disposition'] = 'attachment;filename="%s"' % filename
    except Exception as e:  # BUG FIX: "except Exception, e" is Python 2-only syntax
        message = "Can't export game data: %s" % e
        logger.log_tracemsg(message)
        if zip_file is not None:  # BUG FIX: close() crashed if TemporaryFile() failed
            zip_file.close()
        return render(request, 'fail.html', {"message": message})
    # Return the streaming response on success.
    return response
def read(self, method, url, data, headers):
    """Proxy a *method* request for *url* upstream and mirror the response.

    HTTP errors are relayed with their upstream status and body; network
    errors become a 404 carrying the failure reason. Successful responses
    are streamed, forwarding any Set-Cookie header.
    """
    proxy_request_url = self.get_absolute_url(url)
    proxy_request = _HttpMethodRequest(method, proxy_request_url,
                                       data=data, headers=headers)
    try:
        response = urllib2.urlopen(proxy_request)
    except urllib2.HTTPError as e:
        return http.HttpResponse(e.read(), status=e.code,
                                 content_type=e.hdrs['content-type'])
    except urllib2.URLError as e:
        # BUG FIX: HttpResponse's second positional argument is
        # content_type, not status — 404 was being set as the content type.
        return http.HttpResponse(e.reason, status=404)
    else:
        status = response.getcode()
        proxy_response = http.StreamingHttpResponse(
            proxy_stream_generator(response),
            status=status,
            content_type=response.headers['content-type'])
        if 'set-cookie' in response.headers:
            proxy_response['set-cookie'] = response.headers['set-cookie']
        return proxy_response
def get_users_by_project(request):
    """Return an HTML ``<option>``/``<optgroup>`` fragment listing the users
    and groups of a project (POST param ``project_id``), for populating a
    ``<select>`` element client-side.
    """
    if request.method == 'POST':
        project_id = request.POST.get('project_id')
        try:
            users_list = common.get_users_list(request, project_id)
            group_list = common.get_groups_list(request, project_id)
            if len(users_list) == 0 and len(group_list) == 0:
                # If the project does not contain users nor groups
                users_response = '<option value="">None</option>'
            else:
                users_response = '<option value="">Select one</option>'
                if len(users_list) > 0:
                    # If the project contains some users.
                    # NOTE(review): values/labels are interpolated without
                    # HTML-escaping — confirm they are trusted upstream.
                    users_response += '<optgroup label="Users">'
                    for value, label in users_list:
                        users_response += '<option value="' + str(value) + '">' + str(label) + '</option>'
                    # BUG FIX: close the optgroup so the Groups group is not
                    # nested inside the Users group.
                    users_response += '</optgroup>'
                if len(group_list) > 0:
                    # If the project contains some groups
                    users_response += '<optgroup label="Groups">'
                    for value, label in group_list:
                        users_response += '<option value="' + str(value) + '">' + str(label) + '</option>'
                    users_response += '</optgroup>'
        except Exception:  # BUG FIX: bare "except:" also swallowed SystemExit etc.
            users_response = '<option value="">None</option>'
        # Generate response
        return http.StreamingHttpResponse(users_response)
    # BUG FIX: non-POST requests previously fell through returning None,
    # which makes Django raise; reject them explicitly.
    return http.HttpResponseNotAllowed(['POST'])
def catchall_dev(request, context=None):
    """Proxy HTTP requests to the frontend dev server in development.

    The implementation is very basic e.g. it doesn't handle HTTP headers.
    """
    # WebSocket upgrades are redirected straight to the webpack server.
    if request.META.get('HTTP_UPGRADE', '').lower() == 'websocket':
        return http.HttpResponseRedirect(UPSTREAM + request.path)

    # Until we change it, this app doesn't live at the root of our website.
    # Since the frontend server is at the root, and won't recognize our URL,
    # we need to remove the base part of the path before proxying.
    stripped_path = request.path.replace(URL_BASE, '')
    method = request.META['REQUEST_METHOD'].lower()
    upstream_response = getattr(requests, method)(UPSTREAM + stripped_path,
                                                  stream=True)
    content_type = upstream_response.headers.get('Content-Type')

    if content_type == 'text/html; charset=UTF-8':
        # Run HTML through the Jinja2 engine with the request/context.
        rendered = engines['jinja2'].from_string(upstream_response.text).render(
            request=request, context=context)
        return http.HttpResponse(
            content=rendered,
            status=upstream_response.status_code,
            reason=upstream_response.reason,
        )

    return http.StreamingHttpResponse(
        streaming_content=upstream_response.iter_content(2**12),
        content_type=content_type,
        status=upstream_response.status_code,
        reason=upstream_response.reason,
    )
def catchall_dev(request, context=None):
    """Proxy HTTP requests to the frontend dev server in development.

    The implementation is very basic e.g. it doesn't handle HTTP headers.
    """
    # WebSocket upgrades are redirected straight to the webpack server.
    if request.META.get('HTTP_UPGRADE', '').lower() == 'websocket':
        return http.HttpResponseRedirect(UPSTREAM + request.path)

    method = request.META['REQUEST_METHOD'].lower()
    upstream_response = getattr(requests, method)(UPSTREAM + request.path,
                                                  stream=True)
    content_type = upstream_response.headers.get('Content-Type')

    if content_type == 'text/html; charset=UTF-8':
        # Run HTML through the Jinja2 engine with the request/context.
        rendered = engines['jinja2'].from_string(upstream_response.text).render(
            request=request, context=context)
        return http.HttpResponse(
            content=rendered,
            status=upstream_response.status_code,
            reason=upstream_response.reason,
        )

    return http.StreamingHttpResponse(
        streaming_content=upstream_response.iter_content(2**12),
        content_type=content_type,
        status=upstream_response.status_code,
        reason=upstream_response.reason,
    )
def render_to_response(self, context, **response_kwargs):
    """Stream this view's rows to the client as a CSV attachment."""
    rows = self.get_rows()
    # Echo is a pass-through pseudo-buffer, so each writerow() call yields
    # one CSV-formatted chunk of the stream.
    csv_writer = csv.writer(Echo())
    streaming = http.StreamingHttpResponse(
        (csv_writer.writerow(row) for row in rows),
        content_type="text/csv")
    attachment_name = getattr(self, 'file_name', 'data.csv')
    streaming['Content-Disposition'] = (
        'attachment; filename="{0}"'.format(attachment_name))
    return streaming
def _data_response(request, query, media, privilaged=False):
    """Serve *query*'s results in the requested *media* format.

    media may be "csv" (streamed download) or "json" for all callers, and —
    for privileged callers only — "query" (query metadata) or "sql" (the
    formatted SQL of the underlying queryset). Anything else is a 404.
    NOTE: "privilaged" is (mis)spelled this way in the existing interface.
    """
    orm_models = get_models(request)
    if query.model_name not in orm_models:
        raise http.Http404(f"{query.model_name} does not exist")
    bound_query = BoundQuery.bind(query, orm_models)
    if media == "csv":
        results = get_results(request, bound_query, orm_models, False)

        def csv_rows():
            # the pivoted column headers
            yield from pad_table(
                len(bound_query.row_fields) - 1,
                flip_table(
                    format_table(
                        bound_query.col_fields,
                        results["cols"],
                        spacing=len(bound_query.data_fields) - 1,
                    )),
            )
            # the row headers and data area
            yield from pad_table(
                1 - len(bound_query.row_fields),
                join_tables(
                    format_table(bound_query.row_fields, results["rows"]),
                    *(format_table(bound_query.data_fields, sub_table)
                      for sub_table in results["body"]),
                ),
            )

        # Echo is presumably a write-through pseudo-buffer (Django's
        # streaming-CSV pattern): writerow() returns the formatted line,
        # which becomes one chunk of the stream — confirm Echo's definition.
        writer = csv.writer(Echo())
        response = http.StreamingHttpResponse(
            (writer.writerow(row) for row in csv_rows()),
            content_type="text/csv")
        response[
            "Content-Disposition"] = f"attachment; filename={query.model_name}-{timezone.now().isoformat()}.csv"
        return response
    elif media == "json":
        results = get_results(request, bound_query, orm_models, True)
        # Privileged callers also get the query metadata merged in.
        resp = _get_query_data(bound_query) if privilaged else {}
        resp.update(results)
        return JsonResponse(resp)
    elif privilaged and media == "query":
        resp = _get_query_data(bound_query)
        return JsonResponse(resp)
    elif privilaged and media == "sql":
        query_set = get_result_queryset(request, bound_query, orm_models)
        return HttpResponse(
            sqlparse.format(str(query_set.query), reindent=True,
                            keyword_case="upper"),
            content_type="text/plain",
        )
    else:
        raise http.Http404(f"Bad file format {media} requested")
def _data_response(request, query, media, privileged=False, strict=False):
    """Serve *query*'s results in the requested *media* format.

    media may be "csv" (streamed download) or "json" for all callers;
    "query", "sql", "explain" and "qs" are available only when *privileged*.
    With *strict*, any invalid filter causes a 400. Unknown formats are 404.
    """
    orm_models = get_models(request)
    if query.model_name not in orm_models:
        raise http.Http404(f"{query.model_name} does not exist")
    bound_query = BoundQuery.bind(query, orm_models)
    if strict and not all(f.is_valid for f in bound_query.filters):
        return http.HttpResponseBadRequest()
    if media == "csv":
        results = get_results(request, bound_query, orm_models, False)
        csv_rows = get_csv_rows(bound_query, results)
        # Echo is presumably a write-through pseudo-buffer (Django's
        # streaming-CSV pattern): writerow() returns the formatted line,
        # which becomes one chunk of the stream — confirm Echo's definition.
        writer = csv.writer(Echo())
        response = http.StreamingHttpResponse(
            (writer.writerow(row) for row in csv_rows),
            content_type="text/csv")
        response[
            "Content-Disposition"] = f"attachment; filename={query.model_name}-{timezone.now().isoformat()}.csv"
        return response
    elif media == "json":
        results = get_results(request, bound_query, orm_models, True)
        # Privileged callers also get the query metadata merged in.
        resp = _get_query_data(bound_query) if privileged else {}
        resp.update(results)
        return JsonResponse(resp)
    elif privileged and media == "query":
        resp = _get_query_data(bound_query)
        return JsonResponse(resp)
    elif privileged and media in ["sql", "explain", "qs"]:
        query_set = get_result_queryset(request, bound_query, media == "qs")
        if isinstance(query_set, list):
            # A list result signals a pure-aggregate query with no single
            # underlying queryset to inspect.
            res = "Not available for pure aggregates"
        else:
            if media == "sql":
                res = "This is an approximation of the main query.\n"
                res += (
                    "Pages with pivoted or calculated data may do additional"
                    " queries.\n\n")
                res += sqlparse.format(str(query_set.query), reindent=True,
                                       keyword_case="upper")
            elif media == "explain":
                res = query_set.explain()
            elif media == "qs":
                res = "This is an approximation of the main queryset.\n"
                res += (
                    "Pages with pivoted or calculated data may do additional"
                    " queries.\n\n")
                res += str(query_set)
            else:
                assert False
        return HttpResponse(res, content_type="text/plain")
    else:
        raise http.Http404(f"Bad file format {media} requested")
def get(self, request, *args, **kwargs):
    """Proxy GET requests to the Next.js server.

    Server-sent-event requests (Accept: text/event-stream) are streamed;
    everything else is buffered. The upstream Content-Type is mirrored.
    """
    url = djnext.options[
        'NEXTJS_DSN'] + request.path + '?' + request.GET.urlencode()
    wants_sse = request.META.get('HTTP_ACCEPT', None) == 'text/event-stream'
    if wants_sse:
        upstream = requests.get(url, stream=True)
        ret = http.StreamingHttpResponse(upstream.iter_content())
    else:
        upstream = requests.get(url)
        ret = http.HttpResponse(content=bytes(upstream.content), )
    ret['Content-Type'] = upstream.headers['Content-Type']
    return ret
def _BuildStreamingResponse(self, binary_stream):
    """Builds HTTPResponse object for streaming."""
    response = http.StreamingHttpResponse(
        streaming_content=binary_stream.GenerateContent(),
        content_type="binary/octet-stream")
    disposition = "attachment; filename=%s" % binary_stream.filename
    response["Content-Disposition"] = disposition
    # Only advertise a length when the stream knows it up front.
    if binary_stream.content_length:
        response["Content-Length"] = binary_stream.content_length
    return response
def get_freeform_annotation(request, cur_course_user, submission_id, assessment_page_number):
    """
    Returns the freeform annotation image corresponding to the specific
    submission page.

    Returns a 404 error if the `FreeformAnnotation` model does not exist.
    """
    page = shortcuts.get_object_or_404(
        models.SubmissionPage,
        submission=submission_id,
        page_number=int(assessment_page_number))
    annotation = shortcuts.get_object_or_404(
        models.FreeformAnnotation, submission_page=page)
    # Iterating a requests.Response yields the body in chunks, so it can be
    # handed to StreamingHttpResponse directly.
    upstream = requests.get(annotation.annotation_image.url, stream=True)
    return http.StreamingHttpResponse(upstream, content_type='image/png')
def download_metric_module(request, metric_module_id):
    """Download a metric module's data, mirroring the upstream
    Content-Disposition/Type/Length headers. Errors redirect to the
    metrics index with a user-facing message.
    """
    try:
        metric_module_response = api.download_metric_module_data(request,
                                                                 metric_module_id)
        # Generate response
        response = http.StreamingHttpResponse(metric_module_response.content)
        response['Content-Disposition'] = metric_module_response.headers['Content-Disposition']
        response['Content-Type'] = metric_module_response.headers['Content-Type']
        response['Content-Length'] = metric_module_response.headers['Content-Length']
        return response
    except Exception as exc:
        redirect = reverse("horizon:crystal:metrics:index")
        # BUG FIX: Exception.message does not exist on Python 3 (and is
        # deprecated since 2.6); str(exc) is portable.
        exceptions.handle(request, _(str(exc)), redirect=redirect)
def proxy_js_dev_server(request, path):  # pragma: no cover
    """
    Proxy HTTP requests to the frontend dev server in development.

    The implementation is very basic e.g. it doesn't handle HTTP headers.
    """
    upstream = _get_from_js_dev_server(request)
    # Stream the upstream body through in 4 KiB chunks, mirroring its
    # content type, status and reason phrase.
    return http.StreamingHttpResponse(
        streaming_content=upstream.iter_content(2 ** 12),
        content_type=upstream.headers.get("Content-Type"),
        status=upstream.status_code,
        reason=upstream.reason,
    )
def proxy(request, path):
    '''
    Proxy requests to the React dev server in development.

    Based on the guide from Aymeric Augustin
    https://fractalideas.com/blog/making-react-and-django-play-well-together-hybrid-app-model/
    '''
    if settings.DEBUG:
        print("PROXY-ING: %s" % (REACT_DEV_SERVER + path))  # todo: move to logger?
    response = requests.get(REACT_DEV_SERVER + path)
    content_type = response.headers.get('Content-Type')

    if request.META.get('HTTP_UPGRADE', '').lower() == 'websocket':
        # Redirect WebSocket upgrade requests straight to the node server;
        # this view cannot proxy them itself.
        return http.HttpResponseRedirect(REACT_DEV_SERVER + path)
    elif content_type == 'text/html; charset=UTF-8':
        # Render HTML through the Django template engine.
        result = http.HttpResponse(
            content=engines['django'].from_string(response.text).render(),
            status=response.status_code,
            reason=response.reason,
        )
    else:
        result = http.StreamingHttpResponse(
            streaming_content=response.iter_content(2**12),
            content_type=content_type,
            status=response.status_code,
            reason=response.reason,
        )

    # BUG FIX: the streaming branch used to return before these headers were
    # applied; set no-cache headers on every proxied response so the dev
    # scripts are always live (HTTP/1.1 + HTTP/1.0).
    result['Cache-Control'] = "no-cache, must-revalidate"
    result['Pragma'] = "no-cache"
    return result
def download_controller(request, controller_id):
    """Download a controller's data, mirroring the upstream
    Content-Disposition/Type/Length headers. Errors redirect to the
    controllers index with a user-facing message.
    """
    try:
        controller_response = api.download_controller(request, controller_id)
        # Generate response
        response = http.StreamingHttpResponse(controller_response.content)
        response['Content-Disposition'] = controller_response.headers[
            'Content-Disposition']
        response['Content-Type'] = controller_response.headers['Content-Type']
        response['Content-Length'] = controller_response.headers[
            'Content-Length']
        return response
    except Exception as exc:
        redirect = reverse("horizon:crystal:controllers:index")
        # BUG FIX: Exception.message does not exist on Python 3 (and is
        # deprecated since 2.6); str(exc) is portable.
        exceptions.handle(request, _(str(exc)), redirect=redirect)
def output_html(request, title, areas, **kwargs):
    """Stream an HTML listing of *areas* using the mapit/data.html shell.

    The shell template is rendered once with a !!!DATA!!! placeholder; the
    per-area items are rendered lazily and chained between the two halves
    so the whole page streams.
    """
    kwargs['json_url'] = re.sub(r'(\.map)?\.html', '', request.get_full_path())
    kwargs['title'] = title
    shell = loader.render_to_string('mapit/data.html', kwargs, request=request)
    wraps = shell.split('!!!DATA!!!')

    indent_areas = kwargs.get('indent_areas', False)
    show_map = kwargs.get('show_map', False)
    item_tpl = loader.get_template('mapit/areas_item.html')
    rendered = map(lambda area: item_tpl.render({
        'area': area,
        'indent_areas': indent_areas,
        'show_map': show_map,
    }), areas)
    # Substitute a placeholder item when no areas matched.
    rendered = defaultiter(rendered, '<li>' + _('No matching areas found.') + '</li>')

    body = itertools.chain(wraps[0:1], rendered, wraps[1:])
    return http.StreamingHttpResponse(body)