def test_real_file_response(self):
    """Streaming a real on-disk file produces the expected reply messages."""
    Channel("test").send({
        "reply_channel": "test",
        "http_version": "1.1",
        "method": "GET",
        "path": b"/test/",
    })
    test_dir = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__)))
    fixture_path = os.path.join(test_dir, "a_file")
    response = FileResponse(open(fixture_path, "rb"))
    handler = FakeAsgiHandler(response)
    message = self.get_next_message("test", require=True)
    reply_messages = list(handler(message))
    self.assertEqual(len(reply_messages), 2)
    self.assertEqual(response.getvalue(), b"")
def test_non_streaming_file_response(self):
    """A FileResponse forced into non-streaming mode raises AttributeError."""
    Channel("test").send({
        "reply_channel": "test",
        "http_version": "1.1",
        "method": "GET",
        "path": b"/test/",
    })
    response = FileResponse(BytesIO(b"sadfdasfsdfsadf"))
    # This is to test the exception handling. This would only happening if
    # the StreamingHttpResponse was incorrectly subclassed.
    response.streaming = False
    handler = FakeAsgiHandler(response)
    message = self.get_next_message("test", require=True)
    with self.assertRaises(AttributeError):
        list(handler(message))
def finalize_response(self, request, response, *args, **kwargs):
    """Swap GDAL-rendered responses for streaming FileResponses.

    GDAL raster formats can be large, so instead of buffering the
    rendered content we wrap it in a FileResponse while preserving
    the headers already computed by the framework.
    """
    response = super(BaseRasterView, self).finalize_response(
        request, response, *args, **kwargs)
    renderer = getattr(response, 'accepted_renderer', None)
    if isinstance(renderer, renderers.gdal.BaseGDALRenderer):
        saved_headers = response._headers
        streaming_response = FileResponse(response.rendered_content)
        streaming_response._headers = saved_headers
        response = streaming_response
    return response
def test_file_from_named_pipe_response(self):
    """
    A named pipe has no stat-able size, so the response must stream its
    content without setting a Content-Length header.
    """
    with tempfile.TemporaryDirectory() as temp_dir:
        pipe_file = os.path.join(temp_dir, 'named_pipe')
        os.mkfifo(pipe_file)
        # Open the read end non-blocking so the test cannot hang waiting
        # for a writer to appear.
        pipe_for_read = os.open(pipe_file, os.O_RDONLY | os.O_NONBLOCK)
        with open(pipe_file, 'wb') as pipe_for_write:
            pipe_for_write.write(b'binary content')
        response = FileResponse(os.fdopen(pipe_for_read, mode='rb'))
        self.assertEqual(list(response), [b'binary content'])
        response.close()
        # Fixed: the header name was garbled as 'Ĉontent-Length', so the
        # assertion trivially passed without checking anything real.
        self.assertFalse(response.has_header('Content-Length'))
def test_unclosable_filelike_object(self):
    """The handler copes with readable objects that lack a close() method."""
    Channel("test").send({
        "reply_channel": "test",
        "http_version": "1.1",
        "method": "GET",
        "path": b"/test/",
    })

    class Unclosable:
        # A readable object that cannot be closed.
        def read(self, n=-1):
            # Nothing to see here
            return b""

    response = FileResponse(Unclosable())
    handler = FakeAsgiHandler(response)
    message = self.get_next_message("test", require=True)
    # Cap at 5 messages so a buggy handler cannot iterate forever.
    reply_messages = list(islice(handler(message), 5))
    self.assertEqual(len(reply_messages), 1)
    response.close()
def test_compressed_response(self):
    """
    If compressed responses are served with the uncompressed Content-Type
    and a compression Content-Encoding, browsers might automatically
    uncompress the file, which is most probably not wanted.
    """
    cases = [
        ('.tar.gz', 'application/gzip'),
        ('.tar.bz2', 'application/x-bzip'),
        ('.tar.xz', 'application/x-xz'),
    ]
    for extension, expected_mimetype in cases:
        with self.subTest(ext=extension):
            with tempfile.NamedTemporaryFile(suffix=extension) as tmp:
                response = FileResponse(tmp)
                self.assertEqual(response['Content-Type'], expected_mimetype)
                self.assertFalse(response.has_header('Content-Encoding'))
def download_output_files(request):
    """Serve a job's output file to its owner as a plain-text attachment.

    The ``filename`` and ``jobid`` query parameters identify the file; the
    file is only served when the user owns the job and the job id appears
    in the filename. Any failure yields a 503 with an HTML error message.
    """
    try:
        output_filename = request.GET.get("filename", None)
        # Fixed: Python 2 `print x` statements converted to print() calls
        # (the rest of this file uses Python 3 features such as f-strings).
        print(output_filename)
        jobid = request.GET.get("jobid", None)
        if check_user_has_job(request.user.id, jobid) and jobid in output_filename:
            # NOTE(review): output_filename comes straight from the query
            # string and is joined into a filesystem path — confirm that
            # upstream validation prevents path traversal.
            output_file_path = os.path.join(OUTPUT_DATA_PATH, output_filename)
            print(output_file_path)
            # Binary mode: we are serving raw bytes, not decoding text.
            response = FileResponse(open(output_file_path, 'rb'),
                                    content_type='text/plain')
            # Fixed: the filename quoting previously lacked its closing quote.
            response['Content-Disposition'] = (
                'attachment; filename="{}"'.format(output_filename))
            response['Content-Length'] = os.path.getsize(output_file_path)
            return response
        else:
            raise Exception("Cannot find this job or result file.")
    except Exception:
        response = HttpResponse(status=503)
        response.content = "<h3>Cannot find this job or result file.</h3>"
        return response
def test_file_from_buffer_response(self):
    """A BytesIO-backed response reports length, a generic type, and the bytes."""
    payload = b'binary content'
    response = FileResponse(io.BytesIO(payload))
    self.assertEqual(response['Content-Length'], str(len(payload)))
    self.assertEqual(response['Content-Type'], 'application/octet-stream')
    self.assertEqual(list(response), [payload])
def test_file_from_disk_response(self):
    """Serving this source file reports its size and a text content type."""
    response = FileResponse(open(__file__, 'rb'))
    expected_length = str(os.path.getsize(__file__))
    self.assertEqual(response['Content-Length'], expected_length)
    self.assertIn(response['Content-Type'], ['text/x-python', 'text/plain'])
    response.close()
def test_repr(self):
    """repr() includes the class name, status code, and content type."""
    response = FileResponse(io.BytesIO(b'binary content'))
    expected = '<FileResponse status_code=200, "application/octet-stream">'
    self.assertEqual(repr(response), expected)
def test_content_type_buffer(self):
    """A nameless buffer falls back to application/octet-stream."""
    response = FileResponse(io.BytesIO(b'binary content'))
    content_type = response.headers['Content-Type']
    self.assertEqual(content_type, 'application/octet-stream')
def test_content_disposition_buffer_attachment(self):
    """as_attachment=True on a nameless buffer yields a bare 'attachment'."""
    response = FileResponse(io.BytesIO(b'binary content'), as_attachment=True)
    disposition = response.headers['Content-Disposition']
    self.assertEqual(disposition, 'attachment')
def test_content_length_file(self):
    """Content-Length matches the on-disk size of the served file."""
    response = FileResponse(open(__file__, 'rb'))
    response.close()
    expected = str(os.path.getsize(__file__))
    self.assertEqual(response.headers['Content-Length'], expected)
def load(request, filename):
    # Serve `filename` as a Word-document download.
    #
    # SECURITY(review): `filename` arrives from the URL and is opened
    # without any sanitization or base-directory restriction — this looks
    # like a path-traversal risk; confirm the URLconf or middleware
    # constrains it before trusting this view.
    return FileResponse(open(filename, "rb"),
                        content_type='application/msword',
                        filename=filename,
                        as_attachment=True)
def test_content_type_buffer_explicit_default(self):
    """An explicit content_type overrides the octet-stream fallback."""
    response = FileResponse(io.BytesIO(b'binary content'),
                            content_type='text/html')
    self.assertEqual(response.headers['Content-Type'], 'text/html')
def stv_count(request):
    """Run an STV (single transferable vote) count and manage its session state.

    Handles four flows in one view:
      * GET ?form=1  — render the manual ballot-entry form (two-step POST).
      * GET ?reset=1 — drop the cached results and redirect back.
      * GET ?download=<fmt> — stream a previously generated result file.
      * POST          — run the count from form data or an uploaded JSON file.
    """
    context = {'menu_active': 'home'}
    # Per-user scratch state lives under the 'stvcount' session key.
    session = request.session.get('stvcount', {})
    results_generated = context['results'] = session.get('results', {})
    el_data = None
    do_count = True
    if request.GET.get('form', None):
        # Manual form entry: counting only happens once ballots validate.
        do_count = False
        from zeus.forms import STVElectionForm, STVBallotForm
        form = STVElectionForm()
        ballots_form = None
        if request.method == "POST":
            form = STVElectionForm(request.POST, disabled=False)
            if form.is_valid():
                candidates = form.get_candidates()

                # Per-request ballot form class carrying this election's
                # candidate list as a class attribute.
                class F(STVBallotForm):
                    pass
                setattr(F, 'candidates', candidates)
                formset_count = int(form.cleaned_data.get('ballots_count'))
                if not request.POST.get('submit_ballots', False):
                    # Step 1: show empty ballot forms.
                    BallotsForm = formset_factory(F, extra=formset_count,
                                                  max_num=formset_count)
                    ballots_form = BallotsForm()
                else:
                    # Step 2: validate submitted ballots and build count input.
                    BallotsForm = formset_factory(F, extra=0,
                                                  max_num=formset_count)
                    ballots_form = BallotsForm(request.POST)
                    if ballots_form.is_valid():
                        el = form.get_data()
                        for i, b in enumerate(ballots_form):
                            choices = b.get_choices(i + 1)
                            # Skip ballots with no votes recorded.
                            if not choices.get('votes'):
                                continue
                            el['ballots'].append(b.get_choices(i + 1))
                        el_data = el
                        do_count = True
                    else:
                        context['error'] = _("Invalid ballot data")
        context['import'] = 1
        context['form'] = form
        context['ballots_form'] = ballots_form
    if request.GET.get('reset', None):
        # NOTE(review): raises KeyError when 'stvcount' was never set —
        # presumably reset is only reachable after a count; confirm.
        del request.session['stvcount']
        return HttpResponseRedirect(reverse('stv_count'))
    if request.GET.get('download', None) and results_generated:
        # Stream one of the previously generated result files (pdf by default).
        filename = results_generated.get(request.GET.get('download', 'pdf'),
                                         '/nofile')
        if not os.path.exists(filename):
            # Stale session entry: wipe results and start over.
            return HttpResponseRedirect(reverse('stv_count') + "?reset=1")
        response = FileResponse(open(filename, 'rb'),
                                content_type='application/pdf')
        response[
            'Content-Disposition'] = 'attachment; filename=%s' % os.path.basename(
                filename)
        response['Content-Length'] = os.path.getsize(filename)
        return response
    if request.method == "POST" and do_count:
        # Either the validated manual ballots or an uploaded JSON document.
        el_data = el_data or json.loads(request.FILES.get('data').read())
        _uuid = str(uuid.uuid4())
        files = stv_count_and_report(_uuid, el_data)
        # Persist the raw election data alongside the generated reports.
        json_file = os.path.join('/tmp', 'json-stv-results-%s' % _uuid)
        with open(json_file, 'w') as f:
            f.write(json.dumps(el_data, ensure_ascii=False))
        files.append(('json', json_file))
        session['results'] = dict(files)
        request.session['stvcount'] = session
        return HttpResponseRedirect(reverse('stv_count'))
    request.session['stvcount'] = session
    return render_template(request, "zeus/stvcount", context)
def get_mask(request):
    """Stream the current user's mask image as a forced download."""
    mask_path = os.path.join(
        settings.BASE_DIR, 'media', request.user.username + '_mask.png')
    return FileResponse(open(mask_path, 'rb'),
                        content_type="application/force-download")
def get(self, request):
    """Export the result of a raster-algebra formula as a zipped GeoTIFF.

    Evaluates ``formula`` (query parameter) over the requested tile range,
    writes the result into a raster, packs it with README and colormap
    files into a zip archive, and returns it as a download.

    Raises RasterAlgebraException when the export is too large or the
    formula fails to evaluate.
    """
    # Initiate algebra parser
    parser = RasterAlgebraParser()
    # Get formula from request
    formula = request.GET.get('formula')
    # Get id list from request
    ids = self.get_ids()
    # Compute tile index range
    zoom, xmin, ymin, xmax, ymax = self.get_tile_range()
    # Check maximum size of target raster in pixels
    max_pixels = getattr(settings, 'RASTER_EXPORT_MAX_PIXELS', EXPORT_MAX_PIXELS)
    if WEB_MERCATOR_TILESIZE * (xmax - xmin) * WEB_MERCATOR_TILESIZE * (
            ymax - ymin) > max_pixels:
        raise RasterAlgebraException('Export raster too large.')
    # Construct an empty raster with the output dimensions
    result_raster = self.construct_raster(zoom, xmin, xmax, ymin, ymax)
    target = result_raster.bands[0]
    # Get raster data as 1D arrays and store in dict that can be used
    # for formula evaluation.
    for xindex, x in enumerate(range(xmin, xmax + 1)):
        for yindex, y in enumerate(range(ymin, ymax + 1)):
            data = {}
            for name, layerid in ids.items():
                tile = get_raster_tile(layerid, zoom, x, y)
                if tile:
                    data[name] = tile
            # Ignore this tile if data is not found for all layers
            if len(data) != len(ids):
                continue
            # Evaluate raster algebra expression, return 400 if not successful
            try:
                tile_result = parser.evaluate_raster_algebra(data, formula)
            except Exception as err:
                # Fixed: was a bare `except:` which also swallowed
                # KeyboardInterrupt/SystemExit; narrow it and keep the cause.
                raise RasterAlgebraException(
                    'Failed to evaluate raster algebra.') from err
            # Update nodata value on target
            target.nodata_value = tile_result.bands[0].nodata_value
            # Update results raster with algebra
            target.data(
                data=tile_result.bands[0].data(),
                size=(WEB_MERCATOR_TILESIZE, WEB_MERCATOR_TILESIZE),
                offset=(xindex * WEB_MERCATOR_TILESIZE,
                        yindex * WEB_MERCATOR_TILESIZE),
            )
    # Create filename base with datetime stamp
    filename_base = 'algebra_export'
    # Add name slug to filename if provided
    if request.GET.get('filename', ''):
        # Sluggify name
        slug = slugify(request.GET.get('filename'))
        # Remove all unwanted characters
        slug = "".join([c for c in slug if re.match(r'\w|\-', c)])
        # Limit length of custom name slug
        slug = slug[:MAX_EXPORT_NAME_LENGTH]
        # Add name slug to filename base
        filename_base += '_' + slug
    filename_base += '_{0}'.format(datetime.now().strftime('%Y_%m_%d_%H_%M'))
    # Compress resulting raster file into a zip archive
    raster_workdir = getattr(settings, 'RASTER_WORKDIR', None)
    dest = NamedTemporaryFile(dir=raster_workdir, suffix='.zip')
    dest_zip = zipfile.ZipFile(dest.name, 'w', allowZip64=True)
    dest_zip.write(
        filename=self.exportfile.name,
        arcname=filename_base + '.tif',
        compress_type=zipfile.ZIP_DEFLATED,
    )
    # Write README.txt and COLORMAP.txt files to zip file
    self.write_readme(dest_zip)
    self.write_colormap(dest_zip)
    # Close zip file before returning
    dest_zip.close()
    # Create file based response containing zip file and return for download
    response = FileResponse(open(dest.name, 'rb'),
                            content_type='application/zip')
    response['Content-Disposition'] = 'attachment; filename="{0}"'.format(
        filename_base + '.zip')
    return response
def download_purchases(request):
    """Return the requesting user's purchase history as a PDF attachment."""
    user = get_object_or_404(UserMethods, pk=request.user.pk)
    purchases_pdf = get_purchases_file(user.get_purchases())
    return FileResponse(purchases_pdf, as_attachment=True,
                        filename='purchases.pdf')
def files(request, num):
    """Serve the generated PDF for `num`, or 404 when it does not exist."""
    try:
        pdf_handle = open(pdf.getPDFName(num), 'rb')
    except FileNotFoundError:
        raise Http404()
    return FileResponse(pdf_handle, content_type='application/pdf')
def test_file_from_disk_as_attachment(self):
    """as_attachment=True adds a Content-Disposition naming the file."""
    response = FileResponse(open(__file__, 'rb'), as_attachment=True)
    self.assertEqual(response['Content-Length'],
                     str(os.path.getsize(__file__)))
    self.assertIn(response['Content-Type'], ['text/x-python', 'text/plain'])
    expected_disposition = 'attachment; filename="test_fileresponse.py"'
    self.assertEqual(response['Content-Disposition'], expected_disposition)
    response.close()
def get_replay(request):
    """Stream the replay file named by the ``rep`` query parameter.

    NOTE(review): the path comes straight from the query string; it is
    gated by is_valid_replay(), but confirm that check also constrains
    the directory the file may live in.
    """
    path = request.GET.get('rep', '')
    valid = path != '' and os.path.isfile(path) and is_valid_replay(path)
    if not valid:
        raise Http404("No such replay: %s" % path)
    return FileResponse(open(path, 'rb'))
def result_page(request, result_id):
    """Render the detail page for one fault-injection result.

    GET with a ``get_*`` parameter downloads one of the stored outputs
    (DUT/debugger/aux output, the campaign output file, or a tar.gz of
    log files). POST actions can regenerate, save edits to, or delete
    the result. Otherwise the full result page is rendered with tables
    for events, injections, and (for simics campaigns) register/memory
    diffs.
    """
    result = models.result.objects.get(id=result_id)
    if request.method == 'GET':
        if 'get_dut_output' in request.GET:
            # Plain-text download of the device-under-test output.
            response = HttpResponse(result.dut_output,
                                    content_type='text/plain')
            response['Content-Disposition'] = \
                'attachment; filename="{}_dut_output.txt"'.format(
                    result_id)
            return response
        elif 'get_debugger_output' in request.GET:
            response = HttpResponse(result.debugger_output,
                                    content_type='text/plain')
            response['Content-Disposition'] = \
                'attachment; filename="{}_debugger_output.txt"'.format(
                    result_id)
            return response
        elif 'get_aux_output' in request.GET:
            response = HttpResponse(result.aux_output,
                                    content_type='text/plain')
            response['Content-Disposition'] = \
                'attachment; filename="{}_aux_output.txt"'.format(
                    result_id)
            return response
        elif 'get_output_file' in request.GET:
            response = get_file(result.campaign.output_file, result_id)
            response['Content-Disposition'] = \
                'attachment; filename={}_{}'.format(
                    result_id, result.campaign.output_file)
            return response
        elif 'get_log_file' in request.GET:
            # Bundle all campaign log files for this result into a tar.gz
            # built in a TemporaryFile, then stream it back.
            temp_file = TemporaryFile()
            with open_tar(fileobj=temp_file, mode='w:gz') as archive:
                for log_file in result.campaign.log_files:
                    archive.add(
                        'campaign-data/{}/results/{}/{}'.format(
                            result.campaign_id, result.id, log_file),
                        '{}_{}'.format(result.id, log_file))
            response = FileResponse(
                temp_file, content_type='application/x-compressed')
            response['Content-Disposition'] = \
                'attachment; filename={}_log_files.tar.gz'.format(result.id)
            # tell() gives the archive size; rewind before streaming.
            response['Content-Length'] = temp_file.tell()
            temp_file.seek(0)
            return response
    # Navigation entries resolved against this result's campaign.
    campaign_items_ = [(
        item[0], '/campaign/{}/{}'.format(result.campaign_id, item[1]),
        item[2], item[3]) for item in campaign_items]
    if result.campaign.output_file:
        output_file = 'campaign-data/{}/results/{}/{}'.format(
            result.campaign_id, result_id, result.campaign.output_file)
        # Reduced to a boolean: does the file exist with a known MIME type?
        output_file = \
            exists(output_file) and guess_type(output_file)[0] is not None
    else:
        output_file = False
    result_table = tables.result(models.result.objects.filter(id=result_id))
    events = result.event_set.all()
    event_table = tables.event(events)
    if request.method == 'POST' and 'launch' in request.POST:
        # Fire-and-forget regeneration of this result via the CLI tool.
        Popen([argv[0], '--campaign_id', str(result.campaign_id),
               'regenerate', result_id])
    if request.method == 'POST' and 'save' in request.POST:
        result.outcome = request.POST['outcome']
        result.outcome_category = request.POST['outcome_category']
        result.save()
    elif request.method == 'POST' and 'delete' in request.POST:
        # Remove both the on-disk result directory and the DB row.
        if exists('campaign-data/{}/results/{}'.format(
                result.campaign_id, result.id)):
            rmtree('campaign-data/{}/results/{}'.format(
                result.campaign_id, result.id))
        result.delete()
        return HttpResponse('Result deleted')
    injections = result.injection_set.all()
    if result.campaign.simics:
        if injections.count():
            injection_table = tables.injection(injections)
        else:
            injection_table = None
        register_diffs = result.simics_register_diff_set.all()
        register_filter = filters.simics_register_diff(
            request.GET, queryset=register_diffs)
        register_diff_count = register_filter.qs.count()
        register_table = tables.simics_register_diff(register_filter.qs)
        RequestConfig(
            request,
            paginate={'per_page': table_length}).configure(register_table)
        memory_diffs = result.simics_memory_diff_set.all()
        memory_diff_count = memory_diffs.count()
        memory_table = tables.simics_memory_diff(memory_diffs)
        RequestConfig(
            request,
            paginate={'per_page': table_length}).configure(memory_table)
    else:
        # Non-simics campaigns have no register/memory diff data.
        register_filter = None
        memory_diff_count = 0
        memory_table = None
        register_diff_count = 0
        register_table = None
        if injections.count():
            injection_table = tables.injection(injections)
        else:
            injection_table = None
    RequestConfig(request, paginate=False).configure(result_table)
    RequestConfig(request, paginate=False).configure(event_table)
    if injection_table:
        RequestConfig(request, paginate=False).configure(injection_table)
    return render(request, 'result.html', {
        'campaign_items': campaign_items_,
        'event_count': '{:,}'.format(events.count()),
        'event_table': event_table,
        'filter': register_filter,
        'injection_table': injection_table,
        'memory_diff_count': '{:,}'.format(memory_diff_count),
        'memory_table': memory_table,
        'navigation_items': navigation_items,
        'output_file': output_file,
        'register_diff_count': '{:,}'.format(register_diff_count),
        'register_table': register_table,
        'result': result,
        'result_table': result_table})
def download(request, report_pk):
    """Download a month report's invoice PDF, generating it on demand."""
    report = get_object_or_404(MonthReport, pk=report_pk)
    invoice = report.invoice
    if not Path(invoice.file_path).exists():
        create_pdf(invoice)
    return FileResponse(open(invoice.file_path, 'rb'), as_attachment=True)
def test_content_disposition_buffer(self):
    """Without as_attachment, no Content-Disposition header is set."""
    response = FileResponse(io.BytesIO(b'binary content'))
    has_disposition = response.has_header('Content-Disposition')
    self.assertFalse(has_disposition)
def preview(request, report_pk):
    """Show a month report's invoice PDF inline, generating it on demand."""
    report = get_object_or_404(MonthReport, pk=report_pk)
    invoice = report.invoice
    if not Path(invoice.file_path).exists():
        create_pdf(invoice)
    return FileResponse(open(invoice.file_path, 'rb'))
def test_response_buffer(self):
    """Iterating the response yields the underlying buffer's bytes."""
    payload = b'binary content'
    response = FileResponse(io.BytesIO(payload))
    self.assertEqual(list(response), [payload])
def open_pdf(request, paper_id):
    """Stream a newspaper's PDF inline; reject non-GET methods with 405.

    Fixed: the original fell through on non-GET requests and implicitly
    returned None, which Django rejects with a ValueError at runtime.
    """
    news = Newspaper.objects.get(id=paper_id)
    n = news.pdf
    if request.method == 'GET':
        return FileResponse(open(f'media/{n}', 'rb'),
                            content_type='application/pdf')
    return HttpResponse(status=405)
def test_content_length_buffer(self):
    """Content-Length reflects the buffer's size in bytes."""
    payload = b'binary content'
    response = FileResponse(io.BytesIO(payload))
    self.assertEqual(response.headers['Content-Length'], str(len(payload)))
def get_archived_file(cls, usr, url_id, mode='html', req=None, return_path=False):
    """Serve (or link to) the archived copy of a saved library entry.

    ``mode`` selects which archived artifact to serve: 'pdf', 'png',
    'html' (derived from the stored media path's stem), or 'archive'
    (the first non-document media file in the entry's directory, served
    via the streaming path). Returns an HttpResponse, FileResponse,
    a streaming URL string (when ``return_path`` is true), or a 404.
    """
    qset = Library.objects.filter(usr=usr, id=url_id)
    streaming_mode = False
    if not os.path.exists(settings.TMP_LOCATION):
        os.makedirs(settings.TMP_LOCATION)
    if qset:
        row = qset[0]
        media_path = row.media_path
        if mode in ['pdf', 'png', 'html'] and media_path:
            # Swap the stored file's extension for the requested format.
            fln, ext = media_path.rsplit('.', 1)
            if mode == 'pdf':
                media_path = fln + '.pdf'
            elif mode == 'png':
                media_path = fln + '.png'
            elif mode == 'html':
                media_path = fln + '.htm'
        elif mode == 'archive' and media_path:
            # Look for a non-document file (not pdf/png/htm/html) in the
            # entry's directory; the first non-empty one is streamed.
            mdir, _ = os.path.split(media_path)
            filelist = os.listdir(mdir)
            mlist = []
            extset = set(['pdf', 'png', 'htm', 'html'])
            for fl in filelist:
                ext = fl.rsplit('.', 1)
                if ext and ext[-1] not in extset:
                    mlist.append(os.path.join(mdir, fl))
            for mfile in mlist:
                if os.path.isfile(mfile) and os.stat(mfile).st_size:
                    media_path = mfile
                    streaming_mode = True
                    break
        if streaming_mode and req:
            # Per-user setting can disable media streaming.
            qlist = UserSettings.objects.filter(usrid=usr)
            if qlist and not qlist[0].media_streaming:
                streaming_mode = False
        if media_path and os.path.exists(media_path):
            mtype = guess_type(media_path)[0]
            if not mtype:
                mtype = 'application/octet-stream'
            ext = media_path.rsplit('.')[-1]
            if ext:
                # Build a download filename from the entry title, avoiding
                # a doubled extension when the title already ends with it.
                filename = row.title + '.' + ext
                if '.' in row.title:
                    file_ext = row.title.rsplit('.', 1)[-1]
                    if ext == file_ext:
                        filename = row.title
            else:
                filename = row.title + '.bin'
            if mtype in ['text/html', 'text/htm']:
                data = cls.format_html(row, media_path)
                return HttpResponse(data)
            elif streaming_mode:
                # Streaming path: register the media file under a fresh
                # public video id kept in a pickled, size-bounded cache.
                if os.path.isfile(cls.CACHE_FILE):
                    with open(cls.CACHE_FILE, 'rb') as fd:
                        cls.VIDEO_ID_DICT = pickle.load(fd)
                uid = str(uuid.uuid4())
                uid = uid.replace('-', '')
                while uid in cls.VIDEO_ID_DICT:
                    logger.debug("no unique ID, Generating again")
                    uid = str(uuid.uuid4())
                    uid = uid.replace('-', '')
                    time.sleep(0.01)
                cls.VIDEO_ID_DICT.update({uid: [media_path, time.time()]})
                # Newest entry first; evict the oldest beyond the limit.
                cls.VIDEO_ID_DICT.move_to_end(uid, last=False)
                if len(cls.VIDEO_ID_DICT) > settings.VIDEO_PUBLIC_LIST:
                    cls.VIDEO_ID_DICT.popitem()
                with open(cls.CACHE_FILE, 'wb') as fd:
                    pickle.dump(cls.VIDEO_ID_DICT, fd)
                if return_path:
                    # Return a URL for the player instead of the bytes.
                    title_slug = slugify(row.title, allow_unicode=True)
                    if settings.ROOT_URL_LOCATION:
                        root_loc = settings.ROOT_URL_LOCATION
                        if root_loc.startswith('/'):
                            root_loc = root_loc[1:]
                        return '{}/{}/getarchivedvideo/{}-{}'.format(
                            root_loc, usr.username, title_slug, uid)
                    else:
                        return '{}/getarchivedvideo/{}-{}'.format(
                            usr.username, title_slug, uid)
                else:
                    return cls.get_archived_video(req, usr.username, uid)
            else:
                # Plain file download.
                response = FileResponse(open(media_path, 'rb'))
                # matroska files are served as webm for browser playback.
                mtype = 'video/webm' if mtype == 'video/x-matroska' else mtype
                response['mimetype'] = mtype
                response['content-type'] = mtype
                response['content-length'] = os.stat(media_path).st_size
                filename = filename.replace(' ', '.')
                logger.info('{} , {}'.format(filename, mtype))
                if not cls.is_human_readable(mtype) and not streaming_mode:
                    response[
                        'Content-Disposition'] = 'attachment; filename="{}"'.format(
                            quote(filename))
                return response
        else:
            # Archived file missing: show a not-found page with a way back.
            back_path = req.path_info.rsplit('/', 1)[0] + '/read'
            return render(req, 'archive_not_found.html', {'path': back_path})
    else:
        return HttpResponse(status=404)
def test_content_type_file(self):
    """The Content-Type of a .py file is guessed as a text type."""
    response = FileResponse(open(__file__, 'rb'))
    response.close()
    content_type = response.headers['Content-Type']
    self.assertIn(content_type, ['text/x-python', 'text/plain'])
def test_file_from_buffer_response(self):
    """Buffer-backed responses expose length/type headers but no disposition."""
    payload = b'binary content'
    response = FileResponse(io.BytesIO(payload))
    self.assertFalse(response.has_header('Content-Disposition'))
    self.assertEqual(response['Content-Type'], 'application/octet-stream')
    self.assertEqual(response['Content-Length'], str(len(payload)))
    self.assertEqual(list(response), [payload])
def get(self, request, *args, **kwargs):
    """Generate and download the PDF report for a finished game.

    When the game has a final score (last received point != 0), this
    aggregates per-question statistics, promotes the player's level when
    the pass threshold is met, builds distribution/heatmap/bar-chart
    inputs, and returns the rendered PDF. Otherwise responds 204.
    """
    answers = []
    availability = []
    business = []
    defence = []
    reports = []
    other = []
    answers_numbers = []
    game_id = kwargs.get('pk')
    game_query = Game.objects.filter(id=game_id)
    game = Game.objects.get(id=game_id)
    level = Role_level.objects.get(id=game.scenario.level.id)
    player = Player.objects.filter(id=game.player_id)
    received_points = split_to_float_array(game.received_points, ';')
    maximum_points = split_to_float_array(game.maximum_points, ';')
    # A non-zero final score marks the game as completed.
    if received_points[-1] != 0:
        questions = [int(x) for x in game.questions.split(';')]
        # For each transition, record the number of the answer that led
        # to the next question.
        for x in range(1, len(questions)):
            answers_numbers.append((Answer.objects.filter(
                scenario_id=game.scenario_id,
                next_question_id=questions[x]).values_list(
                    'number', flat=True)[0]))
        del questions[-1]
        test = []
        # Collect per-question statistics across the five score categories.
        for question in questions:
            question_data = Question.objects.filter(
                scenario_id=game.scenario_id, id=question)
            answer_data = Answer.objects.filter(
                scenario_id=game.scenario_id, question_id=question)
            answers.append(
                Answer.objects.filter(scenario_id=game.scenario_id,
                                      question_id=question).values_list(
                                          'times_chosen', flat=True))
            availability.append(
                question_data.values_list('availability', flat=True)[0])
            business.append(
                question_data.values_list('business', flat=True)[0])
            defence.append(
                question_data.values_list('defence', flat=True)[0])
            reports.append(
                question_data.values_list('reports', flat=True)[0])
            other.append(question_data.values_list('other', flat=True)[0])
        passed = calculateLevelPass(received_points, level)
        if passed:
            game_query.update(level_after=level.id)
        # Score history of all games by players at this level, used for
        # the normal-distribution comparison below.
        players = Player.objects.filter(level=level)
        test = Game.objects.filter(player__in=players,
                                   id__lte=game_id).values_list(
                                       'received_points', flat=True)
        game = Game.objects.get(id=game_id)
        test_ = []
        for x in test:
            test_.append(split_to_float_array(x, ';'))
        summed_hyp = []
        for x in test_:
            summed_hyp.append(calculateSum(x))
        game_query.update(results=calculateResults(
            availability, business, defence, reports, other))
        normal_distribution = generate_normal_distribution(
            summed_hyp, calculateSum(received_points))
        heatmap = best_road(maximum_points, received_points)
        htmap = getHeatmap(answers, answers_numbers)
        # Header info for the report: player, scenario, level progression,
        # and start/finish timestamps (microseconds stripped).
        info = []
        info.append(
            Player.objects.filter(id=game.player_id).values_list(
                'username', flat=True)[0])
        info.append(
            Scenario.objects.filter(id=game.scenario_id).values_list(
                'title', flat=True)[0])
        info.append(game.scenario.level.level)
        if game.level_before is None:
            info.append(game.level_before)
        else:
            info.append(game.level_before.level)
        if game.level_after is None:
            info.append(game.level_after)
        else:
            info.append(game.level_after.level)
            # NOTE(review): indentation inferred — the player-level update
            # appears to apply only when level_after is set; confirm.
            player.update(level=game.level_after)
        info.append(str(game.started_at).split('.')[0])
        info.append(str(game.finished_at).split('.')[0])
        # Player counts per level for the bar chart.
        bar_plot_labels = []
        bar_plot_data = []
        all_levels = list(Role_level.objects.filter())
        for x in all_levels:
            bar_plot_labels.append(x.level)
            bar_plot_data.append(Player.objects.filter(level=x.id).count())
        # Category averages plus the combined result score.
        arr = []
        arr.append(calculateAverage(availability))
        arr.append(calculateAverage(business))
        arr.append(calculateAverage(defence))
        arr.append(calculateAverage(reports))
        arr.append(calculateAverage(other))
        arr.append(
            calculateResults(availability, business, defence, reports,
                             other))
        # Competences achieved in this game, stored as 'name:value' pairs.
        competences_achieved = game.competences.split(';')
        t = []
        for competence in competences_achieved:
            l = competence.split(':')
            t.append(l)
        ttt = Scenario.objects.filter(id=game.scenario_id).values_list(
            'role_id', flat=True)[0]
        competences = CompetenceSerializer(
            Role.objects.get(id=ttt).competences.all(), many=True).data
        player_competences = player.values_list(
            'competences', flat=True)[0].split(',')
        report_g = generate_report(
            info, normal_distribution, getAvailability(availability),
            heatmap, bar_plot(bar_plot_labels, bar_plot_data), htmap, arr,
            competences, t, player_competences)
        content = {'report': report_g}
        return FileResponse(report_g, as_attachment=True,
                            filename='report.pdf')
    else:
        # Game not finished: nothing to report.
        return Response(status=status.HTTP_204_NO_CONTENT)
def get_recipe_file(request, recipe_id):
    """Serve a recipe's stored file; empty response when no storage is set."""
    recipe = get_object_or_404(Recipe, pk=recipe_id, space=request.space)
    if not recipe.storage:
        return FileResponse()
    provider = get_recipe_provider(recipe)
    return FileResponse(provider.get_file(recipe))
def pdf_view(request):
    """Serve the static PDF at /static/pp.pdf, or 404 when it is missing."""
    try:
        pdf_handle = open('/static/pp.pdf', 'rb')
    except FileNotFoundError:
        raise Http404()
    return FileResponse(pdf_handle, content_type='application/pdf')
def resume(request):
    """Stream the resume PDF inline (not as a download)."""
    resume_file = open("./files/jeetendra_kashyap_Resume.pdf", "rb")
    return FileResponse(
        resume_file,
        as_attachment=False,
        filename="Jeetendra_Kashyap_Resume.pdf",
    )
def download_file(self, request, pk=None):
    """Return the current object's stored file as an attachment."""
    stored_file = self.get_object().file
    return FileResponse(stored_file, as_attachment=True)
def manual(request, sykehus, kurveark=None):
    """Build a medication chart ("kurveark") from a manually filled formset.

    POST actions (submit buttons): 'liste' compacts dose display, 'pdf'
    returns the chart as a PDF download, 'autofill' re-renders with data
    pre-filled from an existing kurveark, 'interaksjoner' runs a drug
    interaction analysis. GET renders an empty formset.
    """
    def default_render():
        # Shared render path; relies on `formset`/`kurveark` from the
        # enclosing scope being set before it is called.
        print("running default_render")
        return render(request, 'medisinkurve/manual.html', {
            'kurveark': kurveark,
            'formset': formset,
            'sykehus': sykehus
        })
    extra_forms = 24
    if request.method == 'POST':
        print("running manual(request) with method=POST")
        # Management-form keys required by Django formsets, then overlay
        # the submitted POST data.
        data = {
            'form-TOTAL_FORMS': str(extra_forms),
            'form-INITIAL_FORMS': '0',
            'form-MAX_NUM_FORMS': ''
        }
        for key, value in request.POST.dict().items():
            data[key] = value
        Unready_FormSet = formset_factory(FastMedisinForm, extra=extra_forms)
        formset = Unready_FormSet(data)
        # print("Printing request.POST: ")
        # print(request.POST.dict().items())
        if formset.is_valid():
            interaksjonsanalyse_bool = 'interaksjoner' in request.POST.dict()
            if kurveark == None:
                kurveark = KurveArk(sykehus=sykehus)
            else:
                # Populating kurveark from autofill() function.
                # Forms 1-14 hold regular ("faste") medications.
                for index, fast_medisin in enumerate(kurveark.faste_medisiner):
                    form_no = index + 1
                    data['form-' + str(form_no) +
                         '-legemiddelnavn'] = fast_medisin.legemiddelnavn
                    data['form-' + str(form_no) +
                         '-legemiddelform'] = fast_medisin.legemiddelform
                    data['form-' + str(form_no) + '-enhet'] = fast_medisin.enhet
                    data[
                        'form-' + str(form_no) +
                        '-administrasjonsform'] = fast_medisin.administrasjonsform
                    data['form-' + str(form_no) +
                         '-dose0008'] = fast_medisin.dose0008
                    data['form-' + str(form_no) +
                         '-dose0814'] = fast_medisin.dose0814
                    data['form-' + str(form_no) +
                         '-dose1420'] = fast_medisin.dose1420
                    data['form-' + str(form_no) +
                         '-dose2024'] = fast_medisin.dose2024
                    if index > 13:
                        break  #We don't accept more than 14 faste medikamenter
                # Forms 15-22 hold as-needed ("behovs") medications.
                for index, behovs_medisin in enumerate(
                        kurveark.behovs_medisiner):
                    form_no = index + 15
                    data['form-' + str(form_no) +
                         '-legemiddelnavn'] = behovs_medisin.legemiddelnavn
                    data['form-' + str(form_no) +
                         '-legemiddelform'] = behovs_medisin.legemiddelform
                    data['form-' + str(form_no) + '-enhet'] = behovs_medisin.enhet
                    data[
                        'form-' + str(form_no) +
                        '-administrasjonsform'] = behovs_medisin.administrasjonsform
                    data['form-' + str(form_no) +
                         '-dose_fritekst'] = behovs_medisin.dose_fritekst
                    if index > 23:
                        break  #We don't accept more than 8 behovs medikamenter
            # Form 0 carries diagnosis/allergies, 1-14 regular meds,
            # 15-22 as-needed meds, 23 the free-text note.
            for index, form in enumerate(formset):
                user_input = form.cleaned_data
                if user_input:
                    if index == 0:
                        kurveark.diagnose = user_input['diagnose']
                        kurveark.cave = user_input['cave']
                    elif index < 15:
                        kurveark.legg_til_medikament(
                            faste=True,
                            find_atc_virkestoff=interaksjonsanalyse_bool,
                            **user_input)
                    elif index < 23:
                        kurveark.legg_til_medikament(
                            faste=False,
                            find_atc_virkestoff=interaksjonsanalyse_bool,
                            **user_input)
                    elif index == 23:
                        # Strip carriage returns from the note text.
                        notat = ''
                        for char in user_input['notat']:
                            if char != '\r':
                                notat += char
                        kurveark.notat = notat
            if 'liste' in request.POST.dict():
                print('Liste was clicked')
                kurveark.create_compact_doses()
                return default_render()
            elif 'pdf' in request.POST.dict():
                print('PDF was clicked')
                buf = lage_pdf(kurveark)
                return FileResponse(buf, as_attachment=True,
                                    filename='Medisinkurve.pdf')
            elif 'autofill' in request.POST.dict():
                print('autofill used in manual()-view')
                return default_render()
            elif 'interaksjoner' in request.POST.dict():
                print('interaksjoner was clicked')
                kurveark.init_interaction_analysis()
                for interaction_tuple in kurveark.actual_interactions:
                    interaction = interaction_tuple[2]
                    print(interaction)
                return default_render()
        else:
            print(formset.errors)
            return HttpResponse(
                "<h2>Something went wrong :( (Form is invalid)</h2>")
    else:
        # Plain GET: show an empty formset for a fresh chart.
        Unready_FormSet = formset_factory(FastMedisinForm, extra=extra_forms)
        formset = Unready_FormSet()
        raw_kurveark = KurveArk(sykehus=sykehus)
    return default_render()
def send_file(request, filename):
    """Stream a file from the UEditor upload directory as a binary download.

    NOTE(review): `filename` comes from the URL and is joined into the
    upload path unsanitized — confirm upstream routing prevents traversal.
    """
    full_path = os.path.join(UEDITOR_UPLOAD_PATH, filename)
    response = FileResponse(open(full_path, 'rb'))
    response['Content-Type'] = "application/octet-stream"
    return response
def _result_dir(result):
    """On-disk directory holding one result's files."""
    return 'campaign-data/{}/results/{}'.format(result.campaign_id, result.id)


def _archive_response(results, add_entry, download_name):
    """Build a gzipped tar from *results* and return it as a file download.

    ``add_entry(archive, result)`` must append one result's data to
    *archive*.  The response streams the temporary file, which
    FileResponse closes when the response is consumed.
    """
    temp_file = TemporaryFile()
    start = perf_counter()
    with open_tar(fileobj=temp_file, mode='w:gz') as archive:
        for result in results:
            add_entry(archive, result)
    print('archive created', round(perf_counter()-start, 2), 'seconds')
    response = FileResponse(
        temp_file, content_type='application/x-compressed')
    response['Content-Disposition'] = \
        'attachment; filename={}'.format(download_name)
    # tell() is the final archive size because we have not rewound yet.
    response['Content-Length'] = temp_file.tell()
    temp_file.seek(0)
    return response


def _add_text_member(archive, result, attribute):
    """Add one result's text *attribute* to *archive* as <id>_<attribute>.txt."""
    data = getattr(result, attribute).encode('utf-8')
    info = TarInfo('{}_{}.txt'.format(result.id, attribute))
    # Size must be the encoded byte count; the old ``len(result.xxx)``
    # counted characters and truncated members containing non-ASCII text.
    info.size = len(data)
    with BytesIO(data) as byte_file:
        archive.addfile(info, byte_file)


def _render_outputs(request, campaign, results, output_type):
    """Render the (non-download) output listing page."""
    return render(request, 'output.html', {
        'campaign': campaign,
        'campaign_items': campaign_items if campaign else None,
        'navigation_items': navigation_items,
        'results': results,
        'type': output_type})


def _delete_results(result_queryset):
    """Remove each result's on-disk directory, then delete the DB rows."""
    for result in result_queryset:
        if exists(_result_dir(result)):
            rmtree(_result_dir(result))
    result_queryset.delete()


def results_page(request, campaign_id=None):
    """List, filter, download, re-label or delete results.

    With *campaign_id* the view is scoped to that campaign, otherwise it
    covers all results.  GET parameters select output views and tar.gz
    downloads; POST performs bulk outcome updates or deletions.
    """
    error_title = None
    error_message = None
    result_filter = None
    # Defaults also cover the 'select_box' branch below, which previously
    # left these names unbound and raised NameError on the fall-through
    # render at the bottom.
    campaign_items_ = None
    output_file = True
    if campaign_id is not None:
        campaign = models.campaign.objects.get(id=campaign_id)
    else:
        campaign = None
    if request.method == 'GET' and 'view_output' in request.GET and \
            'view_all' not in request.GET and 'select_box' in request.GET:
        # Explicit checkbox selection: bypass the filter entirely.
        result_ids = map(int, dict(request.GET)['select_box'])
        results = models.result.objects.filter(
            id__in=result_ids).order_by('-id')
    else:
        if campaign_id is not None:
            campaign_items_ = campaign_items
            gold_file = 'campaign-data/{}/gold/{}'.format(
                campaign_id, campaign.output_file)
            # True only when the gold output file exists and has a
            # recognizable MIME type.
            output_file = (exists(gold_file) and
                           guess_type(gold_file)[0] is not None)
            results = campaign.result_set.all()
        else:
            results = models.result.objects.all()
        result_filter = filters.result(request.GET, queryset=results)
        if not result_filter.qs.count() and results.count():
            error_title = 'Filter Error'
            error_message = \
                'Filter did not return any results and was ignored.'
            result_filter = filters.result(None, queryset=results)
        else:
            results = result_filter.qs.order_by('-id')
    if request.method == 'GET' and 'view_output' in request.GET:
        # Text outputs stored on the result rows themselves.
        for get_key, attribute in (
                ('view_dut_output', 'dut_output'),
                ('view_aux_output', 'aux_output'),
                ('view_debugger_output', 'debugger_output')):
            if get_key in request.GET:
                if 'view_download' in request.GET:
                    return _archive_response(
                        results,
                        # Bind ``attribute`` at definition time.
                        lambda archive, result, attribute=attribute:
                            _add_text_member(archive, result, attribute),
                        '{}s.tar.gz'.format(attribute))
                return _render_outputs(request, campaign, results, attribute)
        if 'view_output_file' in request.GET:
            # Keep only results whose output file actually exists on disk.
            result_ids = [
                result.id for result in results
                if exists('{}/{}'.format(
                    _result_dir(result), result.campaign.output_file))]
            results = models.result.objects.filter(
                id__in=result_ids).order_by('-id')
            if 'view_download' in request.GET:
                def add_output_file(archive, result):
                    archive.add(
                        '{}/{}'.format(_result_dir(result),
                                       result.campaign.output_file),
                        '{}_{}'.format(result.id,
                                       result.campaign.output_file))
                return _archive_response(
                    results, add_output_file, 'output_files.tar.gz')
            return _render_outputs(request, campaign, results, 'output_file')
        elif 'view_log_file' in request.GET:
            if 'view_download' in request.GET:
                def add_log_files(archive, result):
                    for log_file in result.campaign.log_files:
                        archive.add(
                            '{}/{}'.format(_result_dir(result), log_file),
                            '{}_{}'.format(result.id, log_file))
                return _archive_response(
                    results, add_log_files, 'log_files.tar.gz')
            return _render_outputs(request, campaign, results, 'log_file')
    elif request.method == 'POST':
        if 'new_outcome_category' in request.POST:
            results.values('outcome_category').update(
                outcome_category=request.POST['new_outcome_category'])
        elif 'new_outcome' in request.POST:
            results.values('outcome').update(
                outcome=request.POST['new_outcome'])
        elif 'delete' in request.POST and 'results[]' in request.POST:
            result_ids = [int(result_id)
                          for result_id in dict(request.POST)['results[]']]
            _delete_results(models.result.objects.filter(id__in=result_ids))
        elif 'delete_all' in request.POST:
            _delete_results(results)
        if campaign_id:
            return redirect('/campaign/{}/results'.format(campaign_id))
        return redirect('/results')
    if campaign_id is None:
        result_table = tables.all_results(results)
    else:
        result_table = tables.results(results)
    RequestConfig(
        request, paginate={'per_page': table_length}).configure(result_table)
    return render(request, 'results.html', {
        'campaign': campaign,
        'campaign_items': campaign_items_,
        'error_message': error_message,
        'error_title': error_title,
        'filter': result_filter,
        'filter_tabs': True,
        'navigation_items': navigation_items,
        'output_file': output_file,
        'result_count': '{:,}'.format(results.count()),
        'result_table': result_table})