def optimization(request):
    """Submit a new FBA (flux balance analysis) optimization task.

    Reads the pathway and session metadata from the request, writes the
    AMPL model, variable map and report header files (named after the
    task uuid), then persists the task for the background worker.
    """
    pathway = get_pathway_from_request(request)
    n = request.session['collection_name']
    email = request.session['provided_email']
    obj_type = request.GET['obj_type']
    # '0' means the user supplied a custom objective; otherwise biomass.
    ot = 'biomass'
    if obj_type == '0':  # customer defined
        ot = 'user'
    # Add an FBA task.
    task = Task(task_type='fba', main_file=n, email=email, status="TODO")
    # Get the uuid from task as file name prefix.
    file_system = FileSystemStorage()
    uuid = str(task.uuid)
    # Context managers close all three files even if output_ampl raises
    # (the original leaked the handles on error).
    with file_system.open(uuid + ".ampl", "w") as ampl_file, \
            file_system.open(uuid + ".map", "w") as variable_mapping, \
            file_system.open(uuid + ".header", "w") as report_header:
        pathway.output_ampl(ampl_file, variable_mapping, report_header,
                            objective_type=ot)
    task.save()
    return HttpResponse(content="New Optimization problem submitted .. ",
                        status=200, content_type="text/html")
def export_pdf(request):
    """Render `html` to a PDF with fpdf's HTMLMixin and return it as an
    attachment named report_<date>.pdf.

    NOTE(review): `html` is not defined in this scope -- it must exist at
    module level or this view raises NameError; confirm.
    """
    class MyFPDF(FPDF, HTMLMixin):
        pass
    pdf = MyFPDF()
    # First page
    pdf.add_page()
    pdf.write_html(html)
    # Storage rooted at this module's own directory.
    fs = FileSystemStorage(
        os.path.abspath(os.path.join(os.path.dirname(__file__))))
    downloaded_pdf = os.path.abspath(
        os.path.join(os.path.dirname(__file__), 'html12345.pdf'))
    pdf.output(downloaded_pdf, 'F')
    project_path = os.path.abspath(os.path.join(os.path.dirname(__file__)))
    with fs.open("html12345.pdf") as pdf_report:
        # WARNING(review): this deletes EVERY *.pdf under the module
        # directory tree, not just the temp file written above --
        # presumably cleanup of previous exports, but destructive; verify.
        for parent, dirnames, filenames in os.walk(project_path):
            for fn in filenames:
                if fn.lower().endswith('.pdf'):
                    os.remove(os.path.join(parent, fn))
        response = HttpResponse(pdf_report, content_type='application/pdf')
        date_data = datetime.datetime.now().date()
        fname = "report_%s.pdf" % (date_data)
        response['Content-Disposition'] = 'attachment; filename=%s' % fname
        return response
class UploadsStorage(SessionStorage): def __init__(self, prefix, request, *args, **kwargs): self.file_storage = FileSystemStorage() super(UploadsStorage, self).__init__(prefix, request, *args, **kwargs) def set_step_data(self, step, cleaned_data): """ This is currently based on ssession, but should be abstracted to work with any storage """ files = cleaned_data.get('files', None) del cleaned_data['files'] if files: #setting the format straight cleaned_data._mutable = True cleaned_data.setlist('files', files) cleaned_data._mutable = False print cleaned_data return super(UploadsStorage, self).set_step_data(step, cleaned_data) def get_files(self): files = MultiValueDict({}) for step, step_data in self.request.session[self.prefix][self.step_data_session_key].items(): if step_data.has_key('files'): for file in step_data.getlist('files'): files.appendlist(step+'-file', self.file_storage.open(file.get('path'))) return files
class FileLikeObjectTestCase(LiveServerBase):
    """Regression tests for file-like objects (#15644)."""

    def setUp(self):
        self.temp_dir = tempfile.mkdtemp()
        self.storage = FileSystemStorage(location=self.temp_dir)

    def tearDown(self):
        shutil.rmtree(self.temp_dir)

    def test_urllib2_urlopen(self):
        """
        Test the File storage API with a file like object coming from
        urllib2.urlopen()
        """
        response = self.urlopen("/example_view/")
        saved_name = self.storage.save("remote_file.html", File(response))
        fresh_copy = self.urlopen("/example_view/")
        with self.storage.open(saved_name) as saved_file:
            self.assertEqual(saved_file.read(), fresh_copy.read())
class FileLikeObjectTestCase(LiveServerTestCase):
    """Regression tests for file-like objects (#15644)."""

    available_apps = []

    def setUp(self):
        self.temp_dir = tempfile.mkdtemp()
        self.storage = FileSystemStorage(location=self.temp_dir)

    def tearDown(self):
        shutil.rmtree(self.temp_dir)

    def test_urllib_request_urlopen(self):
        """
        Test the File storage API with a file-like object coming from
        urllib.request.urlopen().
        """
        root_url = self.live_server_url + '/'
        saved_name = self.storage.save("remote_file.html",
                                       File(urlopen(root_url)))
        fresh_copy = urlopen(root_url)
        with self.storage.open(saved_name) as saved_file:
            self.assertEqual(saved_file.read(), fresh_copy.read())
def get(self, request, *args, **kwargs): print "args: " + str(args) print "kwargs: " + str(kwargs) fl = self.model.objects.get(id=kwargs['pk']) fs = FileSystemStorage() filename = fl.file_name print "filename: " + str(filename) wrapper = FileWrapper(fs.open(filename)) response = HttpResponse(wrapper, content_type='application/pdf') response['Content-Disposition'] = 'attachment; filename=' + fl.file_name + '' response['Content-Length'] = os.path.getsize(fs.location+'/'+filename) return response
def get_placeholder_image(self):
    """Return (and cache on the concrete class) the ImageFile for
    PLACEHOLDER_IMAGE_PATH; returns/caches None when the static file is
    not found.
    """
    # Name mangling: inside this method `__placeholder_image_cache` compiles
    # to `_<DefiningClass>__placeholder_image_cache`, matching the literal
    # '_PlaceholderImageMixin__...' string below -- so the hasattr guard is
    # only correct while this method lives on PlaceholderImageMixin.
    if not hasattr(self.__class__, '_PlaceholderImageMixin__placeholder_image_cache'):
        path = finders.find(self.PLACEHOLDER_IMAGE_PATH)
        if path:
            location, file_name = os.path.split(path)
            fs = FileSystemStorage(location=location)
            # NOTE(review): the handle from fs.open() stays open for the
            # lifetime of the cache entry -- confirm this is intended.
            image = ImageFile(fs.open(file_name))
            image.storage = fs
            self.__class__.__placeholder_image_cache = image
        else:
            self.__class__.__placeholder_image_cache = None
    return self.__class__.__placeholder_image_cache
def setUp(self):
    """Copy every chunk fixture into the temporary upload storage and build
    the ResumableFile instances used by the tests."""
    test_storage = FileSystemStorage(
        location=getattr(settings, 'FILE_UPLOAD_TEMP_DIR'))
    fixtures_storage = FileSystemStorage(location=CHUNKS_ROOT)
    # listdir() returns (directories, files); only the files matter here.
    _, fixture_files = fixtures_storage.listdir('.')
    for filename in fixture_files:
        test_storage.save(filename, fixtures_storage.open(filename))
    self.seagull = ResumableFile(test_storage, seagull)
    self.craw = ResumableFile(test_storage, craw)
    self.storage = test_storage
def dfba_solve(request):
    """Submit a new dynamic FBA (dFBA) optimization task.

    Combines the request's pathway with the user's previously uploaded
    dFBA file, writes the AMPL model/map/header files named after the
    task uuid, removes the temporary upload, and saves the task for the
    background worker.
    """
    pathway = get_pathway_from_request(request)
    name = request.session["collection_name"]
    associated_file_key = request.session["dfba_upload"]
    email = request.session['provided_email']
    task = Task(task_type='dfba', main_file=name, email=email, status="TODO")
    file_system = FileSystemStorage()
    uuid = str(task.uuid)
    user_upload_temp_filename = "dfba/" + associated_file_key
    # '0' = customer-defined objective; anything else = biomass.
    obj_type = request.GET['obj_type']
    ot = 'biomass'
    if obj_type == '0':
        ot = 'user'
    # Context managers close all four handles even if output_ampl raises
    # (the original leaked them on error).
    with file_system.open(user_upload_temp_filename, "r") as user_upload_temp_file, \
            file_system.open(uuid + ".ampl", "w") as ampl_file, \
            file_system.open(uuid + ".map", "w") as variable_mapping, \
            file_system.open(uuid + ".header", "w") as report_header:
        pathway.output_ampl(ampl_file, variable_mapping, report_header,
                            model_type="dfba",
                            additional_file=user_upload_temp_file,
                            objective_type=ot)
    # cleanup the temp file from user upload.
    file_system.delete(user_upload_temp_filename)
    task.save()
    return HttpResponse(content="New DFBA optimization problem submitted .. ",
                        status=200, content_type="text/html")
def sbml(request):
    """Export the current pathway as SBML and email it to the user."""
    n = request.session['collection_name']
    address = request.session['provided_email']
    pathway = get_pathway_from_request(request)
    fs = FileSystemStorage()
    # Context manager closes the file even if output_sbml raises
    # (the original leaked the handle on error).
    with fs.open(n + ".sbml", "w") as f:
        pathway.output_sbml(f, str(n))
    # The first is the disk file to read from, the second is the file name
    # used in the email attachment.
    attachment = (n + ".sbml", n + '.sbml')
    send_mail(address, attachment, title="SBML")
    return HttpResponse(content="SBML file send.", status=200,
                        content_type="text/html")
def generate_report(name, suffix):
    """Assemble '<name><suffix>_report.txt' from the header, AMPL model,
    variable-map and solver-result files.

    Solver result lines of the form 'V<n> <value>' are translated back to
    their original flux names via the .map file.
    """
    fs = FileSystemStorage()
    amplfile = name + ".ampl"
    amplresult = name + suffix + "_result.txt"
    mapresult = name + ".map"
    # Step 0. Write the report file
    finaloutput = fs.open(name + suffix + "_report.txt", "w")
    #### Step 0.1 Read the variable correspondence
    fmap = fs.open(mapresult, "r")
    d = {}
    for l in fmap:
        # Each map line: '<solver-variable> <original-name> ...'
        vname, oldname = l.split()[:2]
        d[vname] = oldname
    fmap.close()
    ### Step 1: transfer _header to _report
    fheader = fs.open(name + "_header.txt", "r")
    for l in fheader:
        finaloutput.write(l)
    fheader.close()
    ### Step 2: transfer ampl to report
    fampl = fs.open(amplfile, "r")
    for l in fampl:
        finaloutput.write(l)
    fampl.close()
    ### Step 3: conversions
    finaloutput.write("\n\n === Name conversions between variables and fluxes === \n")
    fmap = fs.open(mapresult, "r")
    for l in fmap:
        finaloutput.write(l)
    fmap.close()
    finaloutput.write("\n\n======== Results ========= \n")
    fampl_result = fs.open(amplresult, "r")
    fl = fampl_result.xreadlines()  # Python 2 API; plain iteration also works
    for l in fl:
        print "Read l is", l
        temp = l.split()
        print " temp is ", temp
        # Only translate 'V<n> <value>' lines (a solver variable + value).
        if len(temp) == 2 and l[0] == "V":
            # NOTE(review): rebinds the `name` parameter; harmless because
            # the parameter is not used afterwards, but worth renaming.
            name, value = l.split()
            finaloutput.write(d[name])
            finaloutput.write("\t -> \t ")
            finaloutput.write(value)
            finaloutput.write("\n")
        else:
            finaloutput.write(l)
    fampl_result.close()
    finaloutput.close()
def get(self, request, *args, **kwargs):
    """Stream the current WBS master-list spreadsheet as an attachment."""
    print "This GET", args, kwargs
    today = timezone.now().date()
    # NOTE(review): `bs` is never used afterwards; presumably the lookup
    # exists so DoesNotExist is raised when no billing schedule covers
    # today -- confirm before removing.
    bs = BillingSchedule.objects.get(start_date__lte=today, end_date__gte=today)
    wbx = WbsMasterList()
    fs = FileSystemStorage()
    filename = wbx.filename
    print "filename: " + str(filename)
    wrapper = FileWrapper(fs.open(filename))
    response = HttpResponse(wrapper, content_type='application/vnd.ms-excel')
    response['Content-Disposition'] = 'attachment; filename=' + filename + ''
    response['Content-Length'] = os.path.getsize(fs.location + '/' + filename)
    return response
class ThumbnailField(object):
    """Accessor for a generated thumbnail stored on the default
    FileSystemStorage; mirrors a small subset of Django's File API."""

    def __init__(self, name):
        self.name = name
        self.storage = FileSystemStorage()

    def path(self):
        return self.storage.path(self.name)

    def delete(self):
        return self.storage.delete(self.name)

    def open(self):
        # Must be called before chunks()/close().
        self.file = self.storage.open(self.name)

    def chunks(self, chunk_size=None):
        """
        Read the file and yield chucks of ``chunk_size`` bytes (defaults to
        ``UploadedFile.DEFAULT_CHUNK_SIZE``).
        """
        size = chunk_size or 64 * 2 ** 10
        if hasattr(self.file, 'seek'):
            self.file.seek(0)
        # Assume the pointer is at zero...
        remaining = self.file.size
        while remaining > 0:
            yield self.file.read(size)
            remaining -= size

    def close(self):
        self.file.close()

    def url(self):
        return self.storage.url(self.name)

    def size(self):
        return self.storage.size(self.name)
def write_pathway_for_plot(pathway, filename):
    """Dump the pathway as an adjacency list ('<node> <neighbor> ...') to
    '<filename>.adjlist' for plotting."""
    adjacency = {}
    for rname, reaction in pathway.reactions.iteritems():
        # Only reactions with both substrates and products contribute edges.
        if reaction.products and reaction.substrates:
            adjacency.setdefault(reaction.name, [])
            for substrate in reaction.substrates:
                for product in reaction.products:
                    adjacency.setdefault(substrate, [])
                    adjacency[substrate].append(reaction.name)
                    adjacency[reaction.name].append(product)
    storage = FileSystemStorage()
    out = storage.open(filename + ".adjlist", "w")
    for node, neighbors in adjacency.iteritems():
        out.write("%s %s\n" % (node, ' '.join(neighbors)))
    out.close()
def send_mail(address, attachments, title = ""):
    """Email the given attachment files (names in default storage) to
    `address` via local SMTP.

    `attachments` is an iterable of storage file names; each one is
    attached under its own name.
    """
    msg = MIMEMultipart()
    msg['Subject'] = 'Mail from MicrobesFlux --' + title
    msg['From'] = '*****@*****.**'
    msg['To'] = address
    fromaddr = "*****@*****.**"
    toaddrs = [address, ]
    content = MIMEText("Dear MicrobesFlux User: Thank you for using our website. -- MicrobesFlux")
    msg.attach(content)
    fs = FileSystemStorage()
    for fname in attachments:
        # NOTE(review): opened in binary mode but wrapped in MIMEText, so
        # this only works for text-like payloads (fine on Python 2; would
        # need decoding on Python 3).
        fp = fs.open(fname, "rb")
        content = MIMEText(fp.read())
        content.add_header('Content-Disposition', 'attachment; filename="' + fname + '"')
        fp.close()
        msg.attach(content)
    server = smtplib.SMTP('localhost')
    server.sendmail(fromaddr, toaddrs, msg.as_string())
    server.quit()
def Pedido_Print_pdf(request):
    """Render all Pedido rows to a PDF (via WeasyPrint) and return it inline."""
    pedido = Pedido.objects.all()
    context = {
        'pedido': pedido,
    }
    html_string = render_to_string('cliente/pedido_print.html', context)
    html = HTML(string=html_string, base_url=request.build_absolute_uri())
    html.write_pdf(target='/tmp/pedido_print_pdf.pdf')
    fs = FileSystemStorage('/tmp')
    with fs.open('pedido_print_pdf.pdf') as pdf:
        response = HttpResponse(pdf, content_type='application/pdf')
        response['Content-Disposition'] = "inline; filename='print.pdf'"
        return response
    # The original ended with two more `return` statements and a render()
    # call after the `with` block -- all unreachable, so they were removed.
def save_temp_tertiary_image_from_base64String(imageString, dish):
    """Decode a base64 image string into TEMP/<dish.pk>/<temp name> and
    return the file's path, or None on failure.

    Retries once with '=' padding appended when the base64 payload has
    incorrect padding.
    """
    INCORRECT_PADDING_EXCEPTION = "Incorrect padding"
    try:
        if not os.path.exists(settings.TEMP):
            os.mkdir(settings.TEMP)
        if not os.path.exists(settings.TEMP + "/" + str(dish.pk)):
            os.mkdir(settings.TEMP + "/" + str(dish.pk))
        url = os.path.join(settings.TEMP + "/" + str(dish.pk),
                           TEMP_TERTIARY_IMAGE_NAME)
        # location=url + open('') is a trick to write exactly to `url`.
        storage = FileSystemStorage(location=url)
        image = base64.b64decode(imageString)
        with storage.open('', 'wb+') as destination:
            destination.write(image)
            # (removed the redundant close(); the `with` block closes it)
        return url
    except Exception as e:
        print('exception: ' + str(e))
        # Fragile: matches the binascii error by its message text.
        if str(e) == INCORRECT_PADDING_EXCEPTION:
            imageString += '=' * ((4 - len(imageString) % 4) % 4)
            return save_temp_tertiary_image_from_base64String(
                imageString, dish)
        return None
def get_backup(request):
    """Dump a tenant's database (selected by schema name) to
    media/<schema>.json and return it as a JSON attachment.
    """
    if request.method == 'GET':
        import shlex  # local import so this fix is self-contained

        schema_name = request.GET.get('schema_name')
        command = 'tenant_command dumpdata'
        manage = os.path.join(settings.BASE_DIR, 'manage.py')
        path = f'media/{schema_name}.json'
        # Pre-create the output file (also surfaces permission errors early).
        output = open(path, 'w+')
        # SECURITY: schema_name comes straight from the query string; quote
        # everything interpolated into the shell command to block injection.
        os.system(f'{manage} {command} --schema={shlex.quote(schema_name)}'
                  f' --indent=4 > {shlex.quote(path)}')
        output.close()
        fs = FileSystemStorage("")
        with fs.open(path) as json:
            response = HttpResponse(json, content_type='application/json')
            response[
                'Content-Disposition'] = 'attachment; filename="' + schema_name + '.json' '"'
            return response
def generate_and_save_preview(self):
    """Generate a PNG preview of self.image (resized, exif-rotated), save it
    to default storage, and record its path and dimensions on the model."""
    fs = FileSystemStorage()
    # Close the source handle when done (the original leaked it).
    with fs.open(self.image.name) as src:
        # Generate a preview image and dump it into a bytes buffer.
        img = PIL.Image.open(src)
        img.thumbnail((PREVIEW_WIDTH, PREVIEW_HEIGHT))
        # Handle image rotation specified via exif.
        rot, h_flip, v_flip = pil_helper.get_image_rotation(img)
        if rot:
            img = img.rotate(rot)
        buf = io.BytesIO()
        img.save(buf, 'png')
    # Rewind so storage reads the whole buffer, not from the write position.
    buf.seek(0)
    # Save the image buffer to default file storage.
    preview_path = fs.get_available_name(self.image.name)
    fs.save(preview_path, buf)
    self.preview = preview_path
    self.preview_width = img.width
    self.preview_height = img.height
    self.save()
def save_temp_profile_image_from_base64String(imageString, user):
    """Decode a base64 image string into TEMP/<user.pk>/<temp name> and
    return the file's path, or None on failure.

    Retries once with '=' padding appended when the base64 payload has
    incorrect padding.
    """
    INCORRECT_PADDING_EXCEPTION = "Incorrect padding"
    try:
        if not os.path.exists(settings.TEMP):
            os.mkdir(settings.TEMP)
        if not os.path.exists(settings.TEMP + "/" + str(user.pk)):
            os.mkdir(settings.TEMP + "/" + str(user.pk))
        url = os.path.join(settings.TEMP + "/" + str(user.pk),
                           TEMP_PROFILE_IMAGE_NAME)
        # location=url + open('') is a trick to write exactly to `url`.
        storage = FileSystemStorage(location=url)
        image = base64.b64decode(imageString)
        with storage.open('', 'wb+') as destination:
            destination.write(image)
            # (removed the redundant close(); the `with` block closes it)
        return url
    except Exception as e:
        # Fragile: matches the binascii error by its message text.
        if str(e) == INCORRECT_PADDING_EXCEPTION:
            imageString += "=" * ((4 - len(imageString) % 4) % 4)
            return save_temp_profile_image_from_base64String(imageString, user)
        return None
def model_form_upload(request):
    """Accept an uploaded audio document, resample it to 16 kHz with sox,
    send it to the transcription backend, and render the results."""
    if request.method == 'POST':
        form = DocumentForm(request.POST, request.FILES)
        if form.is_valid():
            form.save()
            filename = request.FILES['document'].name
            filename2 = 'new' + filename
            filepath = 'media/media/' + request.FILES['document'].name
            filepath2 = 'media/media/' + 'new' + request.FILES['document'].name
            # Each option must be its own argv element; the original passed
            # '-v 0.75' as a single argument, which sox does not parse.
            subprocess.run(["sox", "-v", "0.75", filepath, filepath2,
                            "rate", "16000"])
            fs = FileSystemStorage(location='media/media/')
            if fs.exists(filename):
                print(filename)
                try:
                    readaudio = fs.open(filename2)
                except OSError:
                    # Narrowed from a bare except: only the file-open failure
                    # should produce this message.
                    return HttpResponse("<h2>Opening the file after using sox didn't work.</h2>")
                json_out = post_upload(readaudio, filename2)
                if json_out == -1:
                    return HttpResponse("<h2>The file sample rate does not adhere to Watson requirements.</h2>")
                return render(request, 'upload/home.html', {
                    'json_out': json_out,
                    'json_outone': json_out[0],
                    'json_outtwo': json_out[1],
                    'json_outthree': json_out[2],
                    'filename': filename,
                })
    else:
        form = DocumentForm()
    return render(request, 'about/home.html', {'form': form})
def upload(request):
    """Bulk-register users from an uploaded CSV ('first,last,email' per
    line), emailing each new user a generated password."""
    try:
        if request.method == 'POST' and request.FILES['myfile']:
            myfile = request.FILES['myfile']
            fs = FileSystemStorage()
            filename = fs.save(myfile.name, myfile)
            # Read all lines, then close the handle BEFORE deleting the file
            # (the original left it open, which breaks the delete on Windows).
            with fs.open(filename, 'r') as uploaded:
                file_str = [line.rstrip('\n') for line in uploaded]
            os.remove(os.path.join(settings.MEDIA_ROOT, str(filename)))
            status_dict = []
            for file_line in file_str:
                user_str = file_line.split(',')
                try:
                    if EMAIL_REGEX.match(user_str[2]):
                        try:
                            password = User.objects.make_random_password()
                            user = User.objects.create_user(
                                user_str[2], user_str[2], password)
                            user.first_name = user_str[0]
                            user.last_name = user_str[1]
                            user.save()
                            email = EmailMessage(
                                'Registration at shaurman',
                                'Registration at shaurman is complete. Your Username is {} Your password is {}'
                                .format(user_str[2], password),
                                to=[user_str[2]])
                            email.send()
                        except IntegrityError:
                            status_dict.append(user_str[2] + " user is exist")
                except IndexError:
                    status_dict.append(file_line + " bad line format")
            if len(status_dict) > 0:
                return render(request, 'adm/upload.html',
                              {'errors': status_dict})
            return render(request, 'adm/upload.html',
                          {'successes': "all users added successful"})
    except MultiValueDictKeyError:
        pass
    return render(request, 'adm/upload.html')
def save(self):
    """Validate the uploaded background image (size/aspect ratio) via a
    temp file, then create and return the Communities instance.

    Raises serializers.ValidationError on missing fields or a bad image.
    """
    try:
        backgroundimage = self.validated_data['backgroundimage']
        avatarimage = self.validated_data['avatarimage']
        name = self.validated_data['name']
        description = self.validated_data['description']
        community = Communities(
            name=name,
            backgroundimage=backgroundimage,
            avatarimage=avatarimage,
            description=description,
        )
        # Spool the image to TEMP so the size/aspect helpers can inspect it.
        url = os.path.join(settings.TEMP, str(backgroundimage))
        storage = FileSystemStorage(location=url)
        with storage.open('', 'wb+') as destination:
            for chunk in backgroundimage.chunks():
                destination.write(chunk)
            # (removed the redundant close(); the `with` block closes it)
        if not is_image_size_valid(url, IMAGE_SIZE_MAX_BYTES):
            os.remove(url)
            raise serializers.ValidationError(
                {"response": "That image is too large. Images must be less than 3 MB. Try a different image."})
        if not is_image_aspect_ratio_valid(url):
            os.remove(url)
            raise serializers.ValidationError(
                {"response": "Image height must not exceed image width. Try a different image."})
        os.remove(url)
        community.save()
        return community
    except KeyError:
        raise serializers.ValidationError(
            {"response": "You must have name, backgroundimage, avatarimage and description for community."})
def download(request):
    """Render the newsletter, convert it to an image via the HCTI API, and
    return the PNG as an attachment."""
    html_email = get_template('newsletter/newsletter.html')
    cnt = {
        'news1': News.objects.all(),
        'release1': Release.objects.all(),
        'future1': Future.objects.all(),
        'Installs1': Installs.objects.all(),
        'date': DATE,
    }
    # renders html email with the given context
    html_cont = html_email.render(cnt)
    # optimizes the email by removing spaces and comments
    html_cont = htmlmin.minify(html_cont, remove_all_empty_space=True,
                               remove_comments=True)
    HCTI_API_ENDPOINT = "https://hcti.io/v1/image"
    HCTI_API_USER_ID = '374023f3-5b29-4508-b7eb-d265edf16713'
    HCTI_API_KEY = '27c9a5d8-2687-40ca-8584-de2237f52003'
    data = {'html': html_cont}
    image = requests.post(url=HCTI_API_ENDPOINT, data=data,
                          auth=(HCTI_API_USER_ID, HCTI_API_KEY))
    url = image.json()['url']
    download = requests.get(url)
    # `with` closes the handle; the original leaked it via open(...).write().
    with open("newsletter.png", 'wb') as png:
        png.write(download.content)
    fs = FileSystemStorage()
    filename = 'newsletter.png'
    with fs.open(filename) as pdf:
        # 'image/png' is the registered MIME type ('application/png' is not).
        response = HttpResponse(pdf, content_type='image/png')
        response[
            'Content-Disposition'] = 'attachment; filename="newsletter.png"'
        return response
def index(request):
    """Classify an uploaded image with InceptionV3 and render the result."""
    if request.method == 'POST' and request.FILES['myfile']:
        myfile = request.FILES['myfile']
        fs = FileSystemStorage()
        filename = fs.save(myfile.name, myfile)
        uploaded_file_url = fs.url(filename)
        img = fs.open(filename)
        img = tf.keras.preprocessing.image.load_img(img, target_size=(224, 224))
        img = tf.keras.preprocessing.image.img_to_array(img)
        img = np.expand_dims(img, axis=0)
        img = tf.keras.applications.inception_v3.preprocess_input(img)
        try:
            model = load_model()
            prediction = tf.keras.applications.inception_v3.decode_predictions(
                model.predict(img))[0][0][1]
        except Exception as e:
            # str(e): concatenating str + Exception raises TypeError on Py3.
            prediction = "# Something went worng #\n" + str(e)
        return render(request, 'ImageRecognition/index.html', {
            'uploaded_file_url': uploaded_file_url,
            'prediction': prediction
        })
    return render(request, 'ImageRecognition/index.html')
def pdf_view(request, pdf_filename, json_filename):
    """Return the sanitized PDF; when the VirusTotal JSON also exists,
    bundle both into a zip instead. 404s when the PDF is missing."""
    fs = FileSystemStorage()
    if fs.exists(pdf_filename):
        if fs.exists(json_filename):
            z = zipstream.ZipFile()
            z.write(pdf_filename, 'safe-output-compressed.pdf')
            z.write(json_filename, 'virustotal-output.json')
            zip_filename = 'test.zip'
            with open(zip_filename, 'wb') as f:
                for data in z:
                    f.write(data)
            # Read via a context manager so the handle is closed (the
            # original's inline open(...) inside HttpResponse leaked it).
            with open(zip_filename, 'rb') as zipped:
                response = HttpResponse(zipped.read(),
                                        content_type='application/zip')
            return response
        else:
            with fs.open(pdf_filename) as pdf:
                response = HttpResponse(pdf, content_type='application/pdf')
                response[
                    'Content-Disposition'] = 'attachment; filename="safe.pdf"'
                return response
    else:
        return HttpResponseNotFound('Not Found!!!')
def editor(request):
    """Spreadsheet editor view: update fields, download the result, or load
    the previously uploaded xlsx for editing."""
    context = {}
    if request.method == 'POST' and "update" in request.POST:
        xls_data = get_form_data_from_request(request)
        context['items'] = xls_data
        context['message'] = "Поля обновлены."
    elif request.method == 'POST' and 'download' in request.POST:
        xls_data = get_form_data_from_request(request)
        request.session['xls_data'] = xls_data
        return download(request)
    else:
        fs = FileSystemStorage()
        # The original's `if file:` could never be false (open() raises
        # instead of returning a falsy value), so its error branch was
        # unreachable. Trap the failure explicitly and close the handle
        # with a context manager.
        try:
            with fs.open(request.session['filepath'], mode='rb') as xlsx:
                context['items'] = load_data_from_xlsx_file(xlsx)
        except (KeyError, OSError):
            context['error'] = 'No xls_file uploaded. Upload xls_file first.'
    return render(request, 'excelprocesser/editor.html', context)
def infoSisbenPdf(request):
    """Render the Sisben beneficiaries report for the [fi, ff] date range
    to a PDF and return it."""
    fi = request.GET['fi']
    ff = request.GET['ff']
    beneficiarios = Carnet.objects.filter(fechaExpide__range=[fi, ff])
    fin = parse_date(fi)
    ffn = parse_date(ff)
    # Call count() explicitly instead of passing the bound method and
    # relying on the template engine to auto-call it.
    tl = beneficiarios.count()
    html_string = render_to_string('infoSisbenPdf.html', {
        'beneficiarios': beneficiarios,
        'fi': fin,
        'ff': ffn,
        'tl': tl
    })
    html = HTML(string=html_string)
    html.write_pdf(target='/tmp/infoSisben.pdf')
    fs = FileSystemStorage('/tmp')
    with fs.open('infoSisben.pdf') as pdf:
        response = HttpResponse(pdf, content_type='application/pdf')
        # Dropped the no-op .format("order.id") -- the string has no
        # placeholder, so the call did nothing.
        response['Content-Disposition'] = 'filename="infoSisben.pdf"'
        return response
def list(self, request, *args, **kwargs):
    """Render the client's active invoices since `date__gte` into a PDF
    statement and return it as an attachment."""
    queryset = self.filter_queryset(
        self.get_queryset()).filter(is_active=True)
    start_date = self.request.query_params.get('date__gte')[:10]
    context = {
        'client': self.request.profile.client,
        'invoices': queryset,
        'start_date': start_date
    }
    html_string = render_to_string(self.pdf_template, context)
    html = HTML(string=html_string)
    html.write_pdf(
        target='/tmp/mypdf.pdf',
        stylesheets=[CSS(string='@page { size: A4; margin: 0.5cm }')])
    fs = FileSystemStorage('/tmp')
    filename = 'estados de cuenta.pdf'
    with fs.open('mypdf.pdf') as pdf:
        response = HttpResponse(pdf, content_type='application/pdf')
        # Use the prepared download name; the original left `filename`
        # unused and sent a hard-coded placeholder string instead.
        response[
            'Content-Disposition'] = f'attachment; filename="{filename}"'
        return response
def external(request):
    """Save the uploaded video, run the external SIH2020 model script on it,
    and render the script's stdout as the edited-video URL."""
    video = request.FILES['video']
    print("video is ", video)
    fs = FileSystemStorage()
    filename = fs.save(video.name, video)
    # NOTE(review): fs.open() returns an open File object that is only
    # str()-ed below to build a subprocess argument -- fs.path(filename)
    # would avoid the dangling handle. Confirm before changing.
    fileurl = fs.open(filename)
    templateurl = fs.url(filename)
    print("file raw url", filename)
    print("file full url", fileurl)
    print("template url", templateurl)
    # Hard-coded developer path; runs the model in a separate interpreter.
    video = run([
        sys.executable, 'C://Users//ishit//Downloads//sih2020_model.py',
        str(fileurl),
        str(filename)
    ],
                shell=False,
                stdout=PIPE,
                encoding='utf-8')
    print(video.stdout)
    return render(request, 'home.html', {
        'raw_url': templateurl,
        'edit_url': video.stdout
    })
def get(self, request, *args, **kwargs):
    """Render a demo word list to PDF via WeasyPrint and return it."""
    words = ['Hello', 'World', 'Foo', 'Bar']
    rendered = render_to_string('core/weasyprint/base_pdf.html',
                                {'text': words})
    HTML(string=rendered).write_pdf(target='/tmp/report2.pdf')
    # Django built-in storage rooted at /tmp/.
    storage = FileSystemStorage('/tmp/')
    with storage.open('report2.pdf') as pdf:
        response = HttpResponse(pdf, content_type='application/pdf')
        # No 'attachment', so browsers open the file inline (e.g. to print).
        response['Content-Disposition'] = 'filename="report2.pdf"'
        return response
def checkout(request):
    """Render the checkout page with the cart contents and total read from
    the shopping_info.json file in media storage."""
    context_data = {_TITLE_KEY: "Checkout", "cart_size": getNumOfItemsInCart()}
    cart_file = FileSystemStorage(location=MEDIA_PATH)
    cart = {}
    item_sum = 0
    with cart_file.open("shopping_info.json", "r") as f:
        try:
            cart = json.load(f)
        except ValueError:
            # Malformed/empty cart file. Default to an empty cart *with*
            # the "cart" key -- the original fell back to {} and then
            # crashed with KeyError on cart["cart"] below.
            cart = {"cart": {}}
    context_data["cart"] = []
    for key in cart["cart"]:
        item = cart["cart"][key]
        db_item = ShopItem.objects.filter(image=item["name"])[0]
        item_sum += float(db_item.price) * float(item["amount"])
        context_data["cart"].append(generateItemInfo(db_item))
    context_data["total"] = round(item_sum, 2)
    recordUrl(redirect("shop-checkout").url)
    return render(request, 'shopcenter/checkout.html', context_data)
def gerar_pdf_descartes(request):
    """Generate the discarded-production PDF report for a date range,
    optionally filtered to one animal ('cabra')."""
    cabra = request.POST.get('cabra')
    inicio = request.POST.get('inicio')
    fim = request.POST.get('fim')
    # Parameterized queries: the original interpolated POST values straight
    # into the SQL (injection risk) and referenced an undefined `animal`
    # variable instead of `cabra` (NameError at runtime).
    if cabra != '0':
        producao = Producao.objects.raw(
            "select * from animais_producao"
            " inner join animais_animal on animais_producao.id_cabra_id = animais_animal.id"
            " where animais_producao.id_cabra_id = %s"
            " and animais_producao.data_producao between %s and %s"
            " and animais_producao.descarte_producao = 1"
            " order by animais_producao.data_producao desc;",
            [cabra, inicio, fim])
    else:
        producao = Producao.objects.raw(
            "select * from animais_producao"
            " inner join animais_animal on animais_producao.id_cabra_id = animais_animal.id"
            " where animais_producao.data_producao between %s and %s"
            " and animais_producao.descarte_producao = 1"
            " order by animais_producao.data_producao desc;",
            [inicio, fim])
    dados = {'producao': producao, 'inicio': inicio, 'fim': fim}
    html_string = render_to_string('relatorios/tabela_descarte.html', dados)
    html = HTML(string=html_string)
    html.write_pdf(target='/tmp/descarte.pdf')
    fs = FileSystemStorage('/tmp')
    with fs.open('descarte.pdf') as pdf:
        response = HttpResponse(pdf, content_type='application/pdf')
        response['Content-Disposition'] = 'attachment; filename="descarte.pdf"'
        return response
    # (removed the unreachable trailing `return response`)
def gerar_pdf_medicacoes(request):
    """Generate the medications PDF report for a date range, optionally
    filtered to one animal."""
    animal = request.POST.get('animal')
    inicio = request.POST.get('inicio')
    fim = request.POST.get('fim')
    # Parameterized queries: the original interpolated POST values straight
    # into the SQL, an injection risk.
    if animal != '0':
        medicacoes = Medicacao.objects.raw(
            "select * from animais_medicacao"
            " inner join animais_tipomedicacao on animais_tipomedicacao.id = animais_medicacao.medicacao_id"
            " where animais_medicacao.id_animal_id = %s"
            " and animais_medicacao.data_medicacao between %s and %s"
            " order by animais_medicacao.id desc;",
            [animal, inicio, fim])
    else:
        medicacoes = Medicacao.objects.raw(
            "select * from animais_medicacao"
            " inner join animais_tipomedicacao on animais_tipomedicacao.id = animais_medicacao.medicacao_id"
            " where animais_medicacao.data_medicacao between %s and %s"
            " order by animais_medicacao.id desc;",
            [inicio, fim])
    dados = {'medicacoes': medicacoes, 'inicio': inicio, 'fim': fim}
    html_string = render_to_string('relatorios/tabela_medicacao.html', dados)
    html = HTML(string=html_string)
    html.write_pdf(target='/tmp/medicacoes.pdf')
    fs = FileSystemStorage('/tmp')
    with fs.open('medicacoes.pdf') as pdf:
        response = HttpResponse(pdf, content_type='application/pdf')
        response[
            'Content-Disposition'] = 'attachment; filename="medicacoes.pdf"'
        return response
    # (removed the unreachable trailing `return response`)
class DefaultStorageManager(StorageManagerInterface):
    """StorageManagerInterface backed by Django's FileSystemStorage, with a
    plain-filesystem fallback for paths the storage rejects."""

    def __init__(self):
        self._fsm = FileSystemStorage()

    def _get_concrete_manager(self):
        # Fresh instance per call; state lives entirely in FileSystemStorage.
        return DefaultStorageManager()

    def delete(self, name):
        return self._fsm.delete(name)

    def exists(self, name):
        return self._fsm.exists(name)

    def listdir(self, path):
        return self._fsm.listdir(path)

    def open(self, name, mode='rb'):
        # Paths outside the storage root make FileSystemStorage raise
        # SuspiciousFileOperation; fall back to opening them directly.
        try:
            return self._fsm.open(name, mode=mode)
        except SuspiciousFileOperation:
            return open(name, mode=mode)

    def path(self, name):
        return self._fsm.path(name)

    def save(self, name, content, max_length=None):
        return self._fsm.save(name, content, max_length=max_length)

    def size(self, name):
        return self._fsm.size(name)

    def url(self, name):
        return self._fsm.url(name)

    def generate_filename(self, filename):
        return self._fsm.generate_filename(filename)
def validate(self, blog_post):
    """DRF validator: enforce minimum title/body lengths and, when an image
    is supplied, validate its size and aspect ratio via a temp file.

    Missing keys are ignored so partial updates still validate.
    """
    try:
        title = blog_post['title']
        if len(title) < MIN_TITLE_LENGTH:
            raise serializers.ValidationError(
                {"response": "Enter a title longer than " + str(MIN_TITLE_LENGTH) + " characters."})
        body = blog_post['body']
        if len(body) < MIN_BODY_LENGTH:
            raise serializers.ValidationError(
                {"response": "Enter a body longer than " + str(MIN_BODY_LENGTH) + " characters."})
        image = blog_post['image']
        # Spool the image to TEMP so the size/aspect helpers can inspect it.
        url = os.path.join(settings.TEMP, str(image))
        storage = FileSystemStorage(location=url)
        with storage.open('', 'wb+') as destination:
            for chunk in image.chunks():
                destination.write(chunk)
            # (removed the redundant close(); the `with` block closes it)
        # Check image size
        if not is_image_size_valid(url, IMAGE_SIZE_MAX_BYTES):
            os.remove(url)
            raise serializers.ValidationError(
                {"response": "That image is too large. Images must be less than 2 MB. Try a different image."})
        # Check image aspect ratio
        if not is_image_aspect_ratio_valid(url):
            os.remove(url)
            raise serializers.ValidationError(
                {"response": "Image height must not exceed image width. Try a different image."})
        os.remove(url)
    except KeyError:
        # Field absent -> skip the remaining checks (deliberate best-effort).
        pass
    return blog_post
def generate(request, commande_ref):
    """Generate (or fetch) the invoice for a command and return its PDF."""
    commande = get_object_or_404(Commandes, numero_commande=commande_ref)
    try:
        facture = Facture.objects.get(commande=commande)
    except Facture.DoesNotExist:
        facture = Facture.objects.create(commande=commande, status="Payé")
    societe = get_object_or_404(Societe, id=1)
    filename = 'facture_' + commande_ref + '.pdf'
    html_string = render_to_string('commandes/facture.html', {
        'facture': facture,
        'societe': societe
    })
    html = HTML(string=html_string)
    html.write_pdf(target='/tmp/' + filename)
    fs = FileSystemStorage('/tmp')
    with fs.open(filename) as pdf:
        response = HttpResponse(pdf, content_type='application/pdf')
        #response['Content-Disposition'] = 'attachment; filename="mypdf.pdf"'
        return response
    # (removed the unreachable trailing `return response`)
class FileLikeObjectTestCase(LiveServerBase):
    """
    Test file-like objects (#15644).
    """

    def setUp(self):
        self.temp_dir = tempfile.mkdtemp()
        self.storage = FileSystemStorage(location=self.temp_dir)

    def tearDown(self):
        shutil.rmtree(self.temp_dir)

    def test_urllib2_urlopen(self):
        """
        Test the File storage API with a file like object coming from
        urllib2.urlopen()
        """
        file_like_object = self.urlopen('/example_view/')
        f = File(file_like_object)
        stored_filename = self.storage.save("remote_file.html", f)
        remote_file = self.urlopen('/example_view/')
        # Close the stored file deterministically; the original left the
        # handle open for the rest of the test run.
        with self.storage.open(stored_filename) as stored_file:
            self.assertEqual(stored_file.read(), remote_file.read())
def csv_json(request, pk):
    """Load the CSV referenced by FileData `pk` and return chart-ready JSON:
    labels from column 0 and a transposed dataset from columns 1-2."""
    obj = FileData.objects.get(pk=pk)
    fs = FileSystemStorage()
    # Renamed from `csv` (which shadowed the stdlib module name) and closed
    # via a context manager (the original leaked the handle). Debug prints
    # removed.
    with fs.open(obj.file_title) as csv_file:
        df = pd.read_csv(csv_file)
    df = df.dropna(axis='columns')
    dataset = df.values.tolist()
    label = [row[0] for row in dataset]
    data = [[row[1], row[2]] for row in dataset]
    data = np.array(data).transpose().tolist()
    return JsonResponse({
        'lables': label,  # (typo kept: the frontend expects this exact key)
        'dataset': [{
            "label": list(df),
            'data': data,
            'colors': colors[:len(data[0])]
        }]
    })
def test1(request):
    """Save the uploaded essay file, run the external checker script on it
    with the given keyword, and render the script's stdout."""
    inp = request.FILES['myfile']
    inp1 = request.POST.get('mykwrd')
    fs = FileSystemStorage()
    filename = fs.save(inp.name, inp)
    # NOTE(review): fs.open() returns an open File object that is only
    # str()-ed below to build a subprocess argument -- fs.path(filename)
    # would avoid keeping the handle open. Confirm before changing.
    fileurl = fs.open(filename)
    templateurl = fs.url(filename)
    print(filename)
    print('file raw url', filename)
    print('file full url', fileurl)
    print('template url is ', templateurl)
    # Hard-coded developer path; runs the checker in a separate interpreter.
    inp = run([
        sys.executable,
        'C:\\Users\\DIVESH\\projects\\Essay checker\\account\\check1.py',
        str(fileurl),
        str(filename), inp1
    ],
              shell=False,
              stdout=PIPE)
    return render(request, 'index.html', {
        'raw_url': templateurl,
        'edit_url': inp.stdout
    })
def put_article(request):
    """Create or update an Article from POST data; when a photo is
    uploaded, scale it to fit within MAX_IMAGE_SIZE and store it."""
    if 'pk' in request.POST:
        article = Article.objects.get(pk=request.POST['pk'])
    else:
        article = Article.objects.create()
    photoFile = request.FILES.get('photo', None)
    if photoFile:
        im = Image.open(photoFile)
        width, height = im.size
        maxDimension = MAX_IMAGE_SIZE
        # Scale so BOTH dimensions fit. The original recomputed
        # `scaling = maxDimension / width` right after the min(),
        # discarding the height constraint entirely.
        scaling = min(maxDimension / width, maxDimension / height)
        if scaling < 1.0:
            im.thumbnail((scaling * width, scaling * height), Image.ANTIALIAS)
        fs = FileSystemStorage()
        with fs.open(photoFile.name, 'wb') as pf:
            im.save(pf)
        article.photo = urllib.unquote(fs.url(photoFile.name))
    for key in request.POST.keys():
        if key != 'pk':
            setattr(article, key, request.POST[key])
    article.save()
    return HttpResponse(json.dumps(article.forJSON_V2()))
def form_valid(self, form):
    """Render the invoice form data to a PDF and return it as a download.

    Overrides the usual form-valid redirect: the HTTP response IS the
    generated PDF. The previous trailing `return super().form_valid(form)`
    was unreachable (the `with` block always returns) and has been removed.
    """
    invoice_number = form.cleaned_data['invoice_number']
    seller_company_name = form.cleaned_data['seller_company_name']
    data = {
        'invoice_number': invoice_number,
        'date': form.cleaned_data['date'],
        'seller_company_name': seller_company_name,
        'address': form.cleaned_data['address'],
        'inn_kpp_seller': form.cleaned_data['inn_kpp_seller'],
        'shipper_address': form.cleaned_data['shipper_address']
    }
    html_string = render_to_string('documents/invoice_template.html', data)
    html = HTML(string=html_string)
    # NOTE(review): a predictable, user-influenced filename in /tmp is
    # race/overwrite-prone; consider tempfile.NamedTemporaryFile.
    filename = '{}_{}.pdf'.format(invoice_number, seller_company_name)
    html.write_pdf(target='/tmp/{}'.format(filename))
    fs = FileSystemStorage('/tmp')
    with fs.open(filename) as pdf:
        response = HttpResponse(pdf, content_type='application/pdf')
        response['Content-Disposition'] = (
            'attachment; filename="{}"'.format(filename))
        return response
def cart(request):
    """Render the shopping cart page from the JSON cart file on disk.

    A malformed or empty cart file degrades gracefully to an empty cart
    instead of raising.
    """
    cart_file = FileSystemStorage(location=MEDIA_PATH)
    cart = None
    context = {
        _TITLE_KEY: "Cart",
        "cart_size": getNumOfItemsInCart(),
        "cart": [],
        "static": STATIC_PATH,
    }
    with cart_file.open("shopping_info.json", "r") as f:
        try:
            cart = json.load(f)
        except ValueError:
            # Narrowed from a bare `except:`; json.JSONDecodeError subclasses
            # ValueError, so this catches exactly "file isn't valid JSON".
            cart = {}
    # .get() avoids a KeyError when the fallback empty dict was used.
    items = cart.get("cart", {})
    for key in items:
        item = ShopItem.objects.filter(image=items[key]["name"])[0]
        amt = items[key]["amount"]
        context["cart"].append({"amt": amt, "item": generateItemInfo(item)})
    recordUrl(redirect("shop-cart").url)
    return render(request, 'shopcenter/cart.html', context)
def genInvoiceStock(request): print "Enter to Generate Invoic Stock" data = {} try: if request.method == 'POST': pdfName = generateInvoiceHandler(request) fs = FileSystemStorage("tradeStockControlApp/static/invoice") with fs.open(pdfName) as pdf: response = HttpResponse(pdf, content_type='application/pdf') response[ 'Content-Disposition'] = 'attachment; filename="' + pdfName + '"' response['PDF-NAME'] = pdfName response.write(pdf) return response except Exception as e: print e data['message'] = "Server: Generate Invoice Fail, {}".format(e) return JsonResponse(data, status=400) data['message'] = "Server: Generate Invoice Fail." return JsonResponse(data, status=400)
def analysis_pdf(request, id):
    """Render a per-question response analysis for a course as a PDF download.

    For every question of the course, counts 'Average'/'High'/'Low' responses
    and feeds the tallies into 'teacher/analysis.html', which WeasyPrint
    converts to a PDF. The unreachable duplicate `return response` after the
    `with` block and the debug print() have been removed.
    """
    course_obj = Course.objects.filter(id=id).first()
    questions = Question.objects.filter(course=course_obj)
    analysis = []
    for question in questions:
        average = len(
            Response.objects.filter(question=question, answer='Average'))
        high = len(Response.objects.filter(question=question, answer='High'))
        low = len(Response.objects.filter(question=question, answer='Low'))
        total = average + high + low
        analysis.append((average, high, low, question, total))
    html_string = render_to_string('teacher/analysis.html', {
        'analysis': analysis,
        'course': course_obj
    })
    html = HTML(string=html_string, base_url=request.build_absolute_uri())
    # NOTE(review): a fixed relative output path is not concurrency-safe —
    # two simultaneous requests will clobber each other's file.
    fs = FileSystemStorage('./')
    html.write_pdf(target='./mypdf.pdf')
    with fs.open('mypdf.pdf') as pdf:
        response = HttpResponse(pdf, content_type='application/pdf')
        response['Content-Disposition'] = 'attachment; filename="mypdf.pdf"'
        return response
class ExecutableCodeCheck(BaseSecurityCheck):
    # Security check: verifies that user-uploaded files are served as static
    # content and not executed by the web server (e.g. a stray PHP handler).
    name = _("Checking that uploaded files won't be executed by the server")
    desc = _('A misconfiguration in the web server can cause files attached '
             'to review requests to be executed as code. The file types '
             'checked in this test are: .html, .htm, .shtml, .php, .php3, '
             '.php4, .php5, .phps, .asp, .pl, .py, .fcgi, .cgi, .phtml, '
             '.pht, .jsp, .sh, and .rb.')
    fix_info = _('For instructions on how to fix this problem, please visit '
                 '<a href="http://support.beanbaginc.com/support/solutions/'
                 'articles/110173-securing-file-attachments">'
                 'http://support.beanbaginc.com/support/solutions/articles/'
                 '110173-securing-file-attachments</a>')

    def __init__(self):
        # Probe files go into the same storage location real uploads use, so
        # the download URLs exercise the actual serving path.
        loc = os.path.join(settings.MEDIA_ROOT, 'uploaded', 'files')
        self.storage = FileSystemStorage(location=loc)
        self.directory = settings.MEDIA_URL + 'uploaded/files/'

        # (extensions, content) pairs. Each content is valid code in its
        # language, so an executing server would return program output rather
        # than the literal file text.
        self.file_checks = [
            (
                ['.php', '.php3', '.php4', '.php5', '.phps', '.phtml',
                 '.phtm'],
                '<?php echo "Hello, World!"; ?>'
            ),
            (
                ['.pl', '.py'],
                'print "Hello, World!"'
            ),
            (
                ['.html', '.htm', '.shtml', '.pht'],
                ('<HTML>\n'
                 '<HEAD>\n'
                 '<TITLE>Hello, world!</TITLE>\n'
                 '</HEAD>\n'
                 '<BODY>\n'
                 '<H1>Hello, world!</H1>\n'
                 '<!--#echo var="LAST_MODIFIED" -->\n'
                 '<!--#exec cmd="echo HI!" -->\n'
                 '</BODY>\n'
                 '</HTML>')
            ),
            (
                ['.jsp'],
                '<%= new String("Hello!") %>'
            ),
            (
                ['.asp'],
                '<%="Hello World!"%>'
            ),
            (
                ['.fcgi', '.cgi', '.sh'],
                ('#!/bin/sh\n'
                 'echo "Hello World!"')
            ),
            (
                ['.rb'],
                'puts "Hello world!"'
            )
        ]

    def setUp(self):
        # Create one probe file per extension. Extensions the server refuses
        # to save at all are dropped from the later download comparison.
        if self._using_default_storage():
            for i, file_check in enumerate(self.file_checks):
                extensions_list, content = file_check
                bad_extensions = []

                for ext in extensions_list:
                    try:
                        self.storage.save('exec_check' + ext,
                                          ContentFile(content))
                    except OSError:
                        # Some web server configurations prevent even saving
                        # files with certain extensions. In this case, things
                        # will definitely succeed.
                        bad_extensions.append(ext)

                # Filter out any extensions that we failed to save, because we
                # don't need to check that they downloaded properly.
                extensions_list = [ext for ext in extensions_list
                                   if ext not in bad_extensions]
                self.file_checks[i] = extensions_list, content

    def execute(self):
        # Download each probe and compare against the stored bytes; any
        # mismatch means the server executed (or rewrote) the file.
        error_msg = ''
        ext_result = True
        final_result = True
        failed_exts = []

        if self._using_default_storage():
            for extensions_list, content in self.file_checks:
                for ext in extensions_list:
                    try:
                        ext_result = self.download_and_compare(
                            'exec_check' + ext)
                        if final_result and not ext_result:
                            final_result = False
                    except Exception as e:
                        return (False,
                                _('Uncaught exception during test: %s') % e)

                    if not ext_result:
                        failed_exts.append(ext)

            if not final_result:
                error_msg = _(
                    ngettext(
                        'The web server incorrectly executed these file types: %s',
                        'The web server incorrectly executed this file type: %s',
                        len(failed_exts)) % ', '.join(failed_exts))

        return final_result, error_msg

    def tearDown(self):
        # Delete every probe file created in setUp.
        if self._using_default_storage():
            for extensions_list, content in self.file_checks:
                for ext in extensions_list:
                    self.storage.delete('exec_check' + ext)

    def download_and_compare(self, to_download):
        # True when the served bytes equal the stored file's contents.
        data = urlopen(_get_url(self.directory) + to_download).read()

        with self.storage.open(to_download, 'r') as f:
            return data == f.read()

    def _using_default_storage(self):
        # The probe strategy only makes sense for local filesystem storage.
        return (settings.DEFAULT_FILE_STORAGE ==
                'django.core.files.storage.FileSystemStorage')
class RemoteFinder(BaseFinder):
    """Staticfiles finder that mirrors remote resources into a local cache.

    Resources are declared in settings.REMOTE_FINDER_RESOURCES as
    (path, url, "hash_type:hexdigest") tuples; each file is downloaded on
    demand and verified against its expected digest.
    """

    def __init__(self):
        self.always_verify = getattr(settings, "REMOTE_FINDER_ALWAYS_VERIFY",
                                     False)
        self.cache_dir = getattr(settings, "REMOTE_FINDER_CACHE_DIR", None)
        if not self.cache_dir:
            raise ImproperlyConfigured(
                "settings.REMOTE_FINDER_CACHE_DIR must point to a cache directory.")
        self.storage = FileSystemStorage(self.cache_dir)

        try:
            resources_setting = settings.REMOTE_FINDER_RESOURCES
        except AttributeError:
            logger.warning("RemoteFinder is enabled, but settings.REMOTE_FINDER_RESOURCES is not defined.")
            resources_setting = ()
        if not isinstance(resources_setting, (list, tuple)):
            raise ImproperlyConfigured("settings.REMOTE_FINDER_RESOURCES must be a list or tuple")

        # Validate each declared resource eagerly so misconfiguration fails
        # at startup rather than at first fetch.
        resources = {}
        for resource in resources_setting:
            try:
                path, url, cksm = resource
            except ValueError:
                raise ImproperlyConfigured("Each item in settings.REMOTE_FINDER_RESOURCES must be a tuple of three elements (path, url, cksm).")
            try:
                hash_type, expected_hexdigest = cksm.split(':')
            except ValueError:
                raise ImproperlyConfigured("RemoteFinder checksum `%s` is not in `hash_type:hexdigest` format." % cksm)
            try:
                hash_func = hash_func_map[hash_type]
            except KeyError:
                raise ImproperlyConfigured("RemoteFinder: hash type `%s` unknown" % hash_type)
            try:
                expected_digest = bytearray.fromhex(expected_hexdigest)
            except ValueError:
                raise ImproperlyConfigured("Cannot parse hex string in settings.REMOTE_FINDER_RESOURCES: `%s`" % expected_hexdigest)
            if len(expected_digest) != hash_func().digest_size:
                raise ImproperlyConfigured("settings.REMOTE_FINDER_RESOURCES: %s digest expected %d bytes but %d provided: `%s`" % (hash_type, hash_func().digest_size, len(expected_digest), expected_hexdigest))
            resources[path] = _ResourceInfo(url, hash_func, expected_digest)
        self.resources = resources

    def find(self, path, all=False):
        """Finder API: fetch the resource if needed, return its local path."""
        try:
            resource_info = self.resources[path]
        except KeyError:
            return []
        self.fetch(path, resource_info)
        match = self.storage.path(path)
        if all:
            return [match]
        else:
            return match

    def fetch(self, path, resource_info):
        """Ensure a digest-verified local copy of `path` exists in the cache.

        Raises RuntimeError if the downloaded content does not match the
        configured digest.
        """
        if self.storage.exists(path):
            # Check whether the hash has already been verified in the
            # lifetime of this process.
            if resource_info.hash_verified and not self.always_verify:
                return

            # Verify the hash of the cached copy.
            f = self.storage.open(path)
            try:
                content = f.read()
            finally:
                f.close()
            digest = resource_info.hash_func(content).digest()
            if digest == resource_info.expected_digest:
                resource_info.hash_verified = True
                return

            # Hash verification failed, so delete it from storage and
            # re-download the file.
            logger.info("Hash verification failed, so deleting %s from storage", path)

            # The following line does /not/ raise an exception if the file is
            # already deleted, which is desirable for us as it prevents an
            # error in the case of a race condition.
            self.storage.delete(path)

        # Download the file.
        logger.info("Downloading %s", resource_info.url)
        f = urlopen(resource_info.url)
        try:
            content = f.read()
        finally:
            f.close()

        # Check its hash.
        digest = resource_info.hash_func(content).digest()
        if digest != resource_info.expected_digest:
            # BUG FIX: the URL was previously passed as a second constructor
            # argument (logging style), so it never appeared in the message.
            raise RuntimeError("Digest for %s does not match expected value given in settings.REMOTE_FINDER_RESOURCES" % resource_info.url)

        # Save it.
        name = self.storage.save(path, ContentFile(content))
        if name == path:
            resource_info.hash_verified = True
        else:
            logger.warning("Save failed: %r != %r", name, path)

    def list(self, ignore_patterns):
        """Finder API: yield every managed resource, fetching as needed."""
        for path, resource_info in self.resources.items():
            if matches_patterns(path, ignore_patterns):
                continue
            self.fetch(path, resource_info)
            yield path, self.storage
class WebcamStorage:
    # Filesystem layout manager for webcam data: one HDF5 dataset per webcam
    # plus directories of raw pictures and prediction images.
    PICTURE_DIR = 'pics'
    PREDICTION_DIR = 'predictions'

    def __init__(self, location):
        self.fs = FileSystemStorage(location)

    def dataset_path(self, webcam_id):
        # Absolute path of the webcam's HDF5 dataset file.
        return self.fs.path(webcam_id + '.h5')

    def _image_path(self, webcam_id, img_dir, timestamp=None):
        # Relative path <webcam_id>/<img_dir>[/<timestamped subpath>].
        rel_path = os.path.join(webcam_id, img_dir)
        if timestamp is not None:
            # settings.PICTURE_PATH uses '%t' as a timestamp placeholder and
            # strftime codes for the date parts.
            path_format = settings.PICTURE_PATH.replace('%t', str(timestamp))
            pic_date = datetime.fromtimestamp(timestamp)
            pic_path = pic_date.strftime(path_format)
            rel_path = os.path.join(rel_path, pic_path)
        return rel_path

    def picture_path(self, webcam_id, timestamp=None):
        """
        Return the path of a picture from the given webcam and taken at given
        `timestamp`. If `timestamp` is None, return the path of the directory.
        """
        return self._image_path(webcam_id, self.PICTURE_DIR, timestamp)

    def prediction_path(self, webcam_id, params_name, timestamp=None):
        """
        Return the path of a prediction image for the given webcam, the
        prediction parameters named `params_name` and made at `timestamp`.
        If `timestamp` is None, return the path of the directory.
        """
        img_dir = os.path.join(self.PREDICTION_DIR, params_name)
        return self._image_path(webcam_id, img_dir, timestamp)

    def get_pixels(self, img, webcam_id):
        """
        Return the pixels data of an image, as given by
        `ImageSet.pixels_from_file()`.

        Parameters
        ----------
        img : int or str
            If a number, it is assumed to be a timestamp of when the picture
            was taken by the webcam. It will be read from the images set.
            If a string, it is assumed to be a path, it will be read from the
            file at this path.
        webcam_id : str
            The id of the webcam that took the picture
        """
        with self.get_dataset(webcam_id) as dataset:
            if isinstance(img, Number):
                img_dict = dataset.imgset.get_image(img, False)
                return img_dict['pixels']
            else:
                # Relative paths are resolved against the storage root.
                if not os.path.isabs(img):
                    img = self.fs.path(img)
                return dataset.imgset.pixels_from_file(img)

    def add_webcam(self, webcam_id):
        """ Create the required files and directories for a new webcam """
        logger.info("Creating webcam %s on file system", webcam_id)

        # Create pytables files. Shapes are (height, width, channels).
        hdf5_path = self.dataset_path(webcam_id)
        w, h = settings.WEBCAM_SIZE
        img_shape = h, w, 3
        w, h = settings.DEFAULT_FEATURES_SIZE
        feat_shape = h, w, 3

        with ImageSet.create(hdf5_path, img_shape) as imageset:
            extractor = RawFeatures(feat_shape, img_shape)
            imageset.add_feature_set(extractor)
            DataSet.create(imageset).close()

        # Create directories for pictures.
        pics_path = self.fs.path(self.picture_path(webcam_id))
        os.makedirs(pics_path)

    def delete_webcam(self, webcam_id):
        """ Delete the files and directories related to a webcam """
        logger.info("Deleting webcam %s from file system", webcam_id)
        hdf5_path = self.dataset_path(webcam_id)
        os.remove(hdf5_path)
        shutil.rmtree(self.fs.path(webcam_id))

    def add_feature_set(self, feature_set_model):
        # Compute and register a new feature extractor for the webcam's
        # dataset. The heavy computation runs in a background thread.
        feat_type = feature_set_model.extract_type
        webcam_id = feature_set_model.webcam.webcam_id
        if feat_type not in features_extractors:
            raise ValueError("No features extractor named %s" % feat_type)

        def task(webcam_id, feat_type):
            with self.open_dataset(webcam_id) as dataset:
                if feat_type == 'raw':
                    img_shape = dataset.imgset.img_shape
                    w, h = settings.DEFAULT_FEATURES_SIZE
                    feat_shape = h, w, 3
                    extractor = RawFeatures(feat_shape, img_shape)
                elif feat_type == 'pca':
                    logger.info("Starting computation of a PCM model")
                    sample = dataset.imgset.sample()
                    extractor = PCAFeatures.create(sample)
                else:  # rbm
                    logger.info("Starting computation of a RBM model")
                    sample = dataset.imgset.sample()
                    extractor = RBMFeatures.create(sample)

                if extractor.name in dataset.imgset.feature_sets:
                    raise ValueError("The feature set %s already exists"
                                     % extractor.name)
                logger.info("Adding a new set of features %s to webcam %s",
                            feat_type, webcam_id)
                dataset.imgset.add_feature_set(extractor)

        t = threading.Thread(target=task, args=[webcam_id, feat_type])
        t.start()
        return feat_type  # FIXME an event should send name to model

    def delete_feature_set(self, feature_set_model):
        logger.info("Deleting feature set %s", feature_set_model.name)
        webcam_id = feature_set_model.webcam.webcam_id
        with self.get_dataset(webcam_id) as dataset:
            dataset.imgset.remove_feature_set(feature_set_model.name)
            # Repack to reclaim the space freed in the HDF5 file.
            dataset.repack()

    def open_dataset(self, webcam_id):
        # Open a fresh (uncached) handle on the webcam's dataset.
        hdf5_path = self.dataset_path(webcam_id)
        return DataSet.open(hdf5_path)

    @contextmanager
    def get_dataset(self, webcam_id):
        """
        Returns the dataset of `webcam_id`, cached for the current request.
        This method should be called only in a request thread.
        Returns a dummy context manager for backward compatibility
        """
        cache = get_dataset_cache()
        yield cache[self.dataset_path(webcam_id)]

    def add_picture(self, webcam, timestamp, fp):
        """
        Add a new picture associated to `webcam`

        Parameters
        ----------
        webcam : models.Webcam instance
        timestamp : str or int
            The UNIX Epoch of when the picture was taken
        fp : str or `file-like` object
            The filename or pointer to the file of the image.
            If a file object, it must be accepted by Pillow
        """
        # Read and resize the image to the configured webcam size.
        img = Image.open(fp)
        if img.size != settings.WEBCAM_SIZE:
            img = img.resize(settings.WEBCAM_SIZE)

        # Store the image in file.
        filepath = self.picture_path(webcam.webcam_id, timestamp)
        dirname = self.fs.path(os.path.dirname(filepath))
        if not os.path.exists(dirname):
            os.makedirs(dirname)
        with self.fs.open(filepath, mode='wb') as fp_res:
            img.save(fp_res)

        # Store the image in dataset.
        with self.get_dataset(webcam.webcam_id) as dataset:
            # FIXME give directly PIL reference
            abspath = self.fs.path(filepath)
            img_dict = dataset.add_image(abspath)
        return img_dict

    def add_prediction(self, prediction):
        # Write the prediction image bytes to its computed path.
        dirname = os.path.dirname(self.fs.path(prediction.path))
        if not os.path.exists(dirname):
            os.makedirs(dirname)
        with self.fs.open(prediction.path, 'w') as fp:
            plt.imsave(fp, prediction.sci_bytes)

    def add_examples_set(self, params):
        """ Create the directories and pytables group for a set of examples """
        def task(params):
            logger.info("Creating example set %s on file system", params.name)
            cam_id = params.webcam.webcam_id
            pred_path = self.fs.path(self.prediction_path(cam_id,
                                                          params.name))
            try:
                # Make sure the directory is empty if it exists
                shutil.rmtree(pred_path, ignore_errors=True)
                os.makedirs(pred_path)
                with self.open_dataset(cam_id) as dataset:
                    dataset.make_set(params.name, params.intervals,
                                     params.features.name)
            except Exception:
                logger.exception("Error while creating set %s", params.name)
            else:
                logger.info("Done creating the set %s", params.name)

        # Heavy dataset work runs in a background thread.
        t = threading.Thread(target=task, args=[params])
        t.start()

    def delete_examples_set(self, params):
        """ Remove the directories and pytables group for a set of examples """
        logger.info("Deleting example set %s from file system", params.name)
        cam_id = params.webcam.webcam_id
        pred_path = self.fs.path(self.prediction_path(cam_id, params.name))
        try:
            shutil.rmtree(pred_path)
            with self.get_dataset(cam_id) as dataset:
                dataset.delete_set(params.name)
                dataset.repack()
        except Exception:
            logger.exception("Error while deleting set %s", params.name)
        else:
            logger.info("Done deleting the set %s", params.name)
class SitemapGenerator(object):
    """Write sitemap page files plus a sitemap.xml index under ROOT_DIR.

    MD5 hashes of the old and new files are compared so `has_changes` reports
    whether anything actually changed since the last run.
    """

    def __init__(self):
        self.has_changes = False
        self.storage = FileSystemStorage(location=ROOT_DIR)
        self.sitemaps = ROOT_SITEMAP

    @staticmethod
    def get_hash(bytestream):
        return hashlib.md5(bytestream).digest()

    def read_hash(self, path):
        """Return the MD5 digest of the stored file at `path`."""
        with self.storage.open(path) as f:
            result = self.get_hash(f.read())
        return result

    def write(self):
        """Write every sitemap page, then the index, updating has_changes."""
        old_index_md5 = None
        baseurl = _url
        parts = []

        # Collect all pages and write them.
        for section, site in self.sitemaps.items():
            if callable(site):
                pages = site().paginator.num_pages
            else:
                pages = site.paginator.num_pages
            for page in range(1, pages + 1):
                filename = FILENAME_TEMPLATE % {'section': section,
                                                'page': page}
                lastmod = self.write_page(site, page, filename)
                parts.append({
                    'location': '%s%s' % (baseurl, filename),
                    'lastmod': lastmod
                })

        path = os.path.join(ROOT_DIR, 'sitemap.xml')
        if self.storage.exists(path):
            old_index_md5 = self.read_hash(path)
            self.storage.delete(path)

        output = loader.render_to_string(INDEX_TEMPLATE, {'sitemaps': parts})
        self.storage.save(path, ContentFile(output))

        # Flag a change when the freshly written index differs from the old.
        with self.storage.open(path) as sitemap_index:
            if self.get_hash(sitemap_index.read()) != old_index_md5:
                self.has_changes = True

    def write_page(self, site, page, filename):
        """Write one sitemap page; return the newest lastmod of its URLs."""
        old_page_md5 = None
        urls = []
        try:
            if callable(site):
                urls.extend(site().get_urls(page))
            else:
                urls.extend(site.get_urls(page))
        except EmptyPage:
            self.out("Page %s empty" % page)
        except PageNotAnInteger:
            self.out("No page '%s'" % page)

        lastmods = [lastmod for lastmod in
                    [u.get('lastmod') for u in urls]
                    if lastmod is not None]
        file_lastmod = max(lastmods) if len(lastmods) > 0 else None
        path = os.path.join(ROOT_DIR, filename)
        template = getattr(site, 'sitemap_template', 'sitemap.xml')

        if self.storage.exists(path):
            old_page_md5 = self.read_hash(path)
            self.storage.delete(path)

        output = smart_str(loader.render_to_string(template,
                                                   {'urlset': urls}))
        self.storage.save(path, ContentFile(output))

        with self.storage.open(path) as sitemap_page:
            if old_page_md5 != self.get_hash(sitemap_page.read()):
                self.has_changes = True

        # BUG FIX: file_lastmod was computed but never returned, so write()
        # always recorded 'lastmod': None in the index.
        return file_lastmod
class SchizophreniaStorage(Storage):
    # Dual-backend storage: every save goes to both a source and a target
    # storage; reads prefer the target once a file's sync has been verified.
    # Per-file sync state is tracked in the Django cache.
    SYNCED = 'synced'
    VERIFIED = 'verified'

    def __init__(self, source=None, target=None):
        if not source:
            source = settings.SCHIZOPHRENIA_SOURCE_STORAGE
        if not target:
            target = settings.SCHIZOPHRENIA_TARGET_STORAGE
        self.source = get_storage(source)()
        self.target = get_storage(target)()
        # Local scratch area for files pulled down from the source storage.
        self.downloads = FileSystemStorage(settings.SCHIZOPHRENIA_CACHE_DIR)

    def download(self, name):
        """Download file and return instance of local File"""
        if self.downloads.exists(name):
            return self.downloads.open(name)
        remote_file = self.source.open(name)
        self.downloads.save(name, remote_file)
        return self.downloads.open(name)

    def _get_file_cache_key(self, name):
        # Cache key under which this file's sync state is stored.
        return 'schizophrenia_state_%s' % name

    def sync(self, name, verify=False):
        """Get file from source storage and upload to target"""
        logger.debug('Checking cached state ...')

        # Check cached state, return if synced
        cache_key = self._get_file_cache_key(name)
        cached_state = cache.get(cache_key, None)
        if cached_state == self.VERIFIED:
            logger.info('File was verified, skipping')
            cache.set(cache_key, self.VERIFIED)
            return True
        elif cached_state == self.SYNCED and not verify:
            logger.info('File was synced, skipping because verify=False')
            return True
        elif cached_state == self.SYNCED or self.target.exists(name):
            logger.info('File was synced, verifying ...')
            # If file exists on target, verify. Return if synced
            try:
                self.verify(name)
            except VerificationException:
                logger.info("File didn't verify, syncing again ...")
                cached_state = None
                cache.delete(cache_key)
            else:
                logger.info('File verified OK')
                cache.set(cache_key, self.VERIFIED)
                return True

        # Sync
        logger.debug('Downloading source file ...')
        local_file = self.download(name)
        logger.debug('Uploading to target storage ...')
        self.target.save(name, local_file)
        cache.set(cache_key, self.SYNCED)

        # Verify
        if verify:
            logger.debug('Verifying ...')
            try:
                self.verify(name)
                logger.debug('Verified OK')
                cache.set(cache_key, self.VERIFIED)
            except VerificationException:
                raise
            finally:
                self.downloads.delete(name)

        # NOTE(review): when verify=True the download was already deleted in
        # the finally above, so this deletes a second time — confirm the
        # backend tolerates deleting a missing file.
        self.downloads.delete(name)
        return True

    def issynced(self, name):
        """Does the file exist on target storage?"""
        return self.target.exists(name)

    def verify(self, name):
        # Byte-for-byte comparison of target copy against a fresh download.
        if self.target.open(name).read() != self.download(name).read():
            raise VerificationException("Sync verification failed for '%s'"
                                        % name)
        return True

    def cleanup(self, force=False):
        """Cleanup empty directories that might be left over from downloads"""
        if force:
            shutil.rmtree(settings.SCHIZOPHRENIA_CACHE_DIR)
        else:
            self._remove_empty_folders(settings.SCHIZOPHRENIA_CACHE_DIR)

    def _remove_empty_folders(self, path):
        # Depth-first removal of empty directories under `path`.
        if not os.path.isdir(path):
            return

        # remove empty subfolders
        files = os.listdir(path)
        if len(files):
            for f in files:
                fullpath = os.path.join(path, f)
                if os.path.isdir(fullpath):
                    self._remove_empty_folders(fullpath)

        # if folder empty, delete it
        files = os.listdir(path)
        if len(files) == 0:
            os.rmdir(path)

    def _open(self, name, *args, **kwargs):
        """Reads from target storage if verified, otherwise source"""
        storage = self._get_verified_storage(name)
        return storage.open(name, *args, **kwargs)

    def _storage_save(self, storage, name, content):
        """Save to storage"""
        try:
            name = storage.save(name, content)
        except TypeError:
            # Backend rejected the raw content object; retry with the
            # compatibility wrapper.
            content = CompatibleFile(file=content)
            name = storage.save(name, content)
        return name

    def _save(self, name, content):
        """Saves both source and target but returns value of target storage"""
        source_name = self._storage_save(self.source, name, content)
        target_name = self._storage_save(self.target, name, content)
        if source_name != target_name:
            raise ValueError("Storages saved with different names")
        return target_name

    def get_available_name(self, name):
        source_name = self.source.get_available_name(name)
        target_name = self.target.get_available_name(source_name)
        if source_name != target_name:
            raise ValueError("Storages returned different values from "
                             "get_available_name.")
        return target_name

    def get_valid_name(self, name):
        source_name = self.source.get_valid_name(name)
        target_name = self.target.get_valid_name(name)
        if source_name != target_name:
            raise ValueError("Storages returned different values from "
                             "get_valid_name.")
        return target_name

    def _get_verified_storage(self, name):
        # Target only once the cache marks this file VERIFIED.
        if cache.get(self._get_file_cache_key(name), None) == self.VERIFIED:
            storage = self.target
        else:
            storage = self.source
        return storage

    def delete(self, name):
        self.target.delete(name)
        return self.source.delete(name)

    def exists(self, name):
        storage = self._get_verified_storage(name)
        return storage.exists(name)

    def listdir(self, path):
        return self.source.listdir(path)

    def size(self, name):
        storage = self._get_verified_storage(name)
        return storage.size(name)

    def url(self, name):
        storage = self._get_verified_storage(name)
        return storage.url(name)
def open(self, name, mode="r"):
    # Delegate straight to FileSystemStorage.open (explicit base-class call,
    # skipping any intermediate override in the MRO); defaults to text read.
    return FileSystemStorage.open(self, name, mode)
class PeruimUtils(object): def __init__(self): self.fs = FileSystemStorage(location=settings.PERUIM_FILE_PATH) self.samples_dir = "droidbot_samples" self.report_dir = "peruim_user_reports" def get_perm_desc(self, report_path): """ get the permission description of given package :param report_path: report path of target app :return: """ available_reports = self.get_available_reports() if report_path not in available_reports: return None perm_desc_path = "%s/perm_desc.json" % (report_path) storage = self.fs.open(perm_desc_path, 'r') perm_desc = json.load(storage) return perm_desc def get_app_info(self, report_path): app_info = {} app_info['Package'] = report_path app_info['Name'] = 'Unknown' app_info['Category'] = 'Unknown' return app_info def get_state_image(self, report_path, state_tag): image_path = "%s/device_states/snapshot_%s.png" % (report_path, state_tag) # print image_path if self.fs.exists(image_path): image = self.fs.open(image_path, "rb").read() else: image = None return image def get_component_image(self, report_path, component_id): image_path = "%s/components/%s.jpg" % (report_path, component_id) # print image_path if self.fs.exists(image_path): image = self.fs.open(image_path, "rb").read() else: image = None return image def save_report(self, report): print report tag = self.get_time_tag() report_path = "%s/%s_%s.json" % (self.report_dir, tag, report['user_name']) report_file = self.fs.open(report_path, 'w') json.dump(report, report_file) def get_time_tag(self): from datetime import datetime tag = datetime.now().strftime("%Y-%m-%d_%H%M%S") return tag def get_available_reports(self): available_reports = [] base_path = self.fs.path('.') if self.fs.exists(self.samples_dir): import os for root, dirs, files in os.walk(base_path): for f in files: if f == "perm_desc.json": report_path = os.path.relpath(root, base_path) # print report_path available_reports.append(report_path) # available_packages = self.fs.listdir(self.samples_dir) # print available_packages 
# for package in available_packages[0]: # perm_desc_path = "%s/%s/perm_desc.json" % (self.samples_dir, package) # if self.fs.exists(perm_desc_path): # available_reports.append(package) return available_reports
class ExecutableCodeCheck(BaseSecurityCheck):
    """Check that uploaded files don't get executed.

    Web servers like to run code in files named things like .php or .shtml.
    This check makes sure that user-uploaded files do not get executed when
    loading them via their URL.
    """

    name = _("Checking that uploaded files won't be executed by the server")
    desc = _('A misconfiguration in the web server can cause files attached '
             'to review requests to be executed as code. The file types '
             'checked in this test are: .html, .htm, .shtml, .php, .php3, '
             '.php4, .php5, .phps, .asp, .pl, .py, .fcgi, .cgi, .phtml, '
             '.phtm, .pht, .jsp, .sh, and .rb.')
    fix_info = _('For instructions on how to fix this problem, please visit '
                 '<a href="http://support.beanbaginc.com/support/solutions/'
                 'articles/110173-securing-file-attachments">'
                 'http://support.beanbaginc.com/support/solutions/articles/'
                 '110173-securing-file-attachments</a>')

    def __init__(self):
        """Initialize the security check."""
        # Probe files go into the same storage location real uploads use, so
        # the download URLs exercise the actual serving path.
        loc = os.path.join(settings.MEDIA_ROOT, 'uploaded', 'files')
        self.storage = FileSystemStorage(location=loc)
        self.directory = settings.MEDIA_URL + 'uploaded/files/'

        # (extensions, content) pairs. Each content is valid code in its
        # language, so an executing server would return program output rather
        # than the literal file text.
        self.file_checks = [
            (
                ['.php', '.php3', '.php4', '.php5', '.phps', '.phtml',
                 '.phtm'],
                '<?php echo "Hello, World!"; ?>'
            ),
            (
                ['.pl', '.py'],
                'print "Hello, World!"'
            ),
            (
                ['.html', '.htm', '.shtml', '.pht'],
                ('<HTML>\n'
                 '<HEAD>\n'
                 '<TITLE>Hello, world!</TITLE>\n'
                 '</HEAD>\n'
                 '<BODY>\n'
                 '<H1>Hello, world!</H1>\n'
                 '<!--#echo var="LAST_MODIFIED" -->\n'
                 '<!--#exec cmd="echo HI!" -->\n'
                 '</BODY>\n'
                 '</HTML>')
            ),
            (
                ['.jsp'],
                '<%= new String("Hello!") %>'
            ),
            (
                ['.asp'],
                '<%="Hello World!"%>'
            ),
            (
                ['.fcgi', '.cgi', '.sh'],
                ('#!/bin/sh\n'
                 'echo "Hello World!"')
            ),
            (
                ['.rb'],
                'puts "Hello world!"'
            )
        ]

    def setUp(self):
        """Set up the security check.

        This will go through the various file extensions that we intend to
        check and create file attachments with the given content.
        """
        if self._using_default_storage():
            for i, file_check in enumerate(self.file_checks):
                extensions_list, content = file_check
                bad_extensions = []

                for ext in extensions_list:
                    try:
                        self.storage.save('exec_check' + ext,
                                          ContentFile(content))
                    except OSError:
                        # Some web server configurations prevent even saving
                        # files with certain extensions. In this case, things
                        # will definitely succeed.
                        bad_extensions.append(ext)

                # Filter out any extensions that we failed to save, because we
                # don't need to check that they downloaded properly.
                extensions_list = [ext for ext in extensions_list
                                   if ext not in bad_extensions]
                self.file_checks[i] = extensions_list, content

    def execute(self):
        """Execute the security check.

        This will download each file that was created in ``setUp`` and check
        that the content matches what we expect.
        """
        error_msg = ''
        ext_result = True
        final_result = True
        failed_exts = []

        if self._using_default_storage():
            for extensions_list, content in self.file_checks:
                for ext in extensions_list:
                    try:
                        ext_result = self.download_and_compare(
                            'exec_check' + ext)
                        if final_result and not ext_result:
                            final_result = False
                    except Exception as e:
                        return (False,
                                _('Uncaught exception during test: %s') % e)

                    if not ext_result:
                        failed_exts.append(ext)

            if not final_result:
                error_msg = _(
                    ngettext(
                        'The web server incorrectly executed these file types: %s',
                        'The web server incorrectly executed this file type: %s',
                        len(failed_exts)) % ', '.join(failed_exts))

        return final_result, error_msg

    def tearDown(self):
        """Tear down the security check.

        This will delete all of the files created in ``setUp``.
        """
        if self._using_default_storage():
            for extensions_list, content in self.file_checks:
                for ext in extensions_list:
                    self.storage.delete('exec_check' + ext)

    def download_and_compare(self, to_download):
        """Download a file and compare the resulting response to the file.

        This makes sure that when we fetch a file via its URL, the returned
        contents are identical to the file contents.

        This returns True if the file contents match, and False otherwise.
        """
        try:
            data = urlopen(build_server_url(self.directory,
                                            to_download)).read()
        except HTTPError as e:
            # An HTTP 403 is also an acceptable response
            if e.code == 403:
                return True
            else:
                raise e

        with self.storage.open(to_download, 'r') as f:
            return data == f.read()

    def _using_default_storage(self):
        """Return whether the server is using the built-in file storage."""
        return (settings.DEFAULT_FILE_STORAGE ==
                'django.core.files.storage.FileSystemStorage')