def item_extra_kwargs(self, item):
    """Build the extra feed-entry kwargs for *item*.

    Includes a human-readable 'when' string, the website URL, thumbnail
    URLs (made absolute against the current site when relative), resized
    thumbnails for every configured size, and the embed code.
    """
    kwargs = {
        'when': '%s %s ago' % (
            item.when_prefix(), simpletimesince(item.when()))
    }
    if item.website_url:
        kwargs['website_url'] = iri_to_uri(item.website_url)
    if item.has_thumbnail:
        site = Site.objects.get_current()
        if item.thumbnail_url:
            kwargs['thumbnail'] = iri_to_uri(item.thumbnail_url)
        else:
            default_url = default_storage.url(
                item.get_resized_thumb_storage_path(375, 295))
            if not (default_url.startswith('http://')
                    or default_url.startswith('https://')):
                default_url = 'http://%s%s' % (site.domain, default_url)
            kwargs['thumbnail'] = default_url
        kwargs['thumbnails_resized'] = resized = {}
        for size in Video.THUMB_SIZES:
            url = default_storage.url(
                item.get_resized_thumb_storage_path(*size))
            # BUG FIX: this check previously tested 'http://' twice, so
            # https:// URLs were wrongly prefixed with the site domain.
            if not (url.startswith('http://') or url.startswith('https://')):
                url = 'http://%s%s' % (site.domain, url)
            resized[size] = url
    if item.embed_code:
        kwargs['embed_code'] = item.embed_code
    return kwargs
def check_availability(self):
    """ Perform check against Default Storage. """
    try:
        name = default_storage.get_valid_name('Informer Storage')
        # Save a small probe file.
        content = ContentFile('File used by StorageInformer checking.')
        path = default_storage.save(name, content)
        # Exercise every metadata accessor on the saved file, in the same
        # order the original check used.
        accessors = (
            default_storage.size,
            default_storage.url,
            default_storage.path,
            default_storage.get_accessed_time,
            default_storage.get_available_name,
            default_storage.get_created_time,
            default_storage.get_modified_time,
            default_storage.get_valid_name,
        )
        for accessor in accessors:
            accessor(path)
        # And remove the probe again.
        default_storage.delete(path)
        storage = default_storage.__class__.__name__
    except Exception as error:
        raise InformerException(
            f'An error occurred when trying to use your Storage: {error}')
    else:
        return True, f'Your {storage} is operational.'
def retrieve(self, request, *args, **kwargs):
    """Return the replay player type and source URL for a session.

    Looks in default storage first; otherwise tries each configured
    external replay storage, downloading the file next to default
    storage so its URL resolves.
    """
    session_id = kwargs.get('pk')
    self.session = get_object_or_404(Session, id=session_id)
    path = self.gen_session_path()
    data = {
        # Guacamole player for RDP recordings, JSON player otherwise.
        'type': 'guacamole' if self.session.protocol == 'rdp' else 'json',
        'src': '',
    }
    if default_storage.exists(path):
        url = default_storage.url(path)
        data['src'] = url
        return Response(data)
    else:
        configs = settings.TERMINAL_REPLAY_STORAGE.items()
        if not configs:
            return HttpResponseNotFound()
        # Try each external storage in turn; first hit wins.
        for name, config in configs:
            client = jms_storage.init(config)
            date = self.session.date_start.strftime('%Y-%m-%d')
            file_path = os.path.join(date, str(self.session.id) + '.replay.gz')
            # Download target inside default storage's base location.
            target_path = default_storage.base_location + '/' + path
            if client and client.has_file(file_path) and \
                    client.download_file(file_path, target_path):
                url = default_storage.url(path)
                data['src'] = url
                return Response(data)
        return HttpResponseNotFound()
def retrieve(self, request, *args, **kwargs):
    """Locate a session replay file and redirect to its URL, downloading
    it from external storage into default storage first if necessary.
    """
    session_id = kwargs.get('pk')
    self.session = get_object_or_404(Session, id=session_id)
    # New and old terminal versions use different replay file suffixes.
    session_path = self.get_session_path()  # path on the external storage
    local_path = self.get_local_path()
    local_path_v1 = self.get_local_path(version=1)
    # Look in default storage first (new path, legacy v1 path, raw path).
    for _local_path in (local_path, local_path_v1, session_path):
        if default_storage.exists(_local_path):
            url = default_storage.url(_local_path)
            return redirect(url)
    # Fall back to the configured external storages ('server' type excluded
    # because that is this server itself).
    configs = settings.TERMINAL_REPLAY_STORAGE
    configs = {k: v for k, v in configs.items() if v['TYPE'] != 'server'}
    if not configs:
        return HttpResponseNotFound()
    # Local filesystem path the downloaded replay is saved to.
    target_path = os.path.join(default_storage.base_location, local_path)
    target_dir = os.path.dirname(target_path)
    if not os.path.isdir(target_dir):
        os.makedirs(target_dir, exist_ok=True)
    storage = jms_storage.get_multi_object_storage(configs)
    ok, err = storage.download(session_path, target_path)
    if not ok:
        logger.error("Failed download replay file: {}".format(err))
        return HttpResponseNotFound()
    return redirect(default_storage.url(local_path))
def _mp_dict(mp, mp_fractions=None):
    """Serialize one MP record for JSON output.

    ``mp_fractions`` maps MP pk -> fraction dict; when the MP's fraction is
    present, fraction fields are merged into the result.  The previous
    mutable ``{}`` default argument was replaced with ``None``
    (behaviorally identical, avoids the shared-mutable-default pitfall).
    """
    if mp_fractions is None:
        mp_fractions = {}
    data = {
        'first_name': mp['first_name'],
        'last_name': mp['last_name'],
        'full_name': ' '.join([mp['first_name'], mp['last_name']]),
        'slug': mp['slug'],
        'url': reverse('mp_profile', kwargs={'mp_slug': mp['slug']}),
        'photo': default_storage.url(mp['photo']),
        'statement_count': int(mp['statement_count']),
        'long_statement_count': mp['long_statement_count'],
        'vote_percentage': round(mp['vote_percentage']),
        'proposed_law_project_count': mp['proposed_law_project_count'],
        'passed_law_project_count': mp['passed_law_project_count'],
        'passed_law_project_ratio': round(mp['passed_law_project_ratio']),
    }
    fraction = mp_fractions.get(mp['pk'])
    if fraction:
        data.update({
            'fraction_name': fraction['name'],
            'fraction_url': reverse('mp_fraction', kwargs={
                'fraction_slug': fraction['slug']
            }),
            'fraction_slug': fraction['slug'],
            'fraction_logo_url': default_storage.url(fraction['logo']),
        })
    return data
def retrieve(self, request, *args, **kwargs):
    """Redirect to a session replay, downloading it from external storage
    into default storage first if it is not already there.
    """
    session_id = kwargs.get('pk')
    self.session = get_object_or_404(Session, id=session_id)
    path = self.gen_session_path()
    if default_storage.exists(path):
        url = default_storage.url(path)
        return redirect(url)
    # Keep only non-'server' replay storages.  Values are deep-copied, as
    # before, so the settings dicts are never mutated downstream.
    # (Replaces the original deepcopy + ``configs.__delitem__`` loop.)
    config = settings.TERMINAL_REPLAY_STORAGE
    configs = {name: copy.deepcopy(cfg)
               for name, cfg in config.items() if cfg['TYPE'] != 'server'}
    if not configs:
        return HttpResponseNotFound()
    date = self.session.date_start.strftime('%Y-%m-%d')
    file_path = os.path.join(date, str(self.session.id) + '.replay.gz')
    # Download target inside default storage's base location.
    target_path = default_storage.base_location + '/' + path
    storage = jms_storage.get_multi_object_storage(configs)
    ok, err = storage.download(file_path, target_path)
    if ok:
        return redirect(default_storage.url(path))
    logger.error("Failed download replay file: {}".format(err))
    return HttpResponseNotFound()
def recent_photos(request):
    ''' returns all the images from the data base '''
    imgs = []
    for obj in Image_File.objects.filter(is_image=True).order_by("-date_created"):
        # NOTE(review): ``obj.upload.url`` is already a URL, yet it is passed
        # through ``default_storage.url`` again -- confirm this double mapping
        # is intentional for this project's storage backend.
        uploaded_url = default_storage.url(obj.upload.url)
        thumburl = default_storage.url(obj.thumbnail.url)
        imgs.append({'src': uploaded_url, 'thumb': thumburl, 'is_image': True})
    return render(request, 'admin/browse.html', {'files': imgs})
def _unserialize(self, value: str, as_type: type) -> Any:
    """Convert a stored string back into a value of ``as_type``.

    A ``None`` ``as_type`` combined with a ``file://`` value is treated as
    a File lookup.  Unknown/unmatched types fall through and return
    ``value`` unchanged.  Branch order matters: the bool/'True'/'False'
    branch also fires for untyped string values.
    """
    if as_type is None and value is not None and value.startswith('file://'):
        as_type = File
    if as_type is not None and isinstance(value, as_type):
        return value
    elif value is None:
        return None
    elif as_type == int or as_type == float or as_type == decimal.Decimal:
        return as_type(value)
    elif as_type == dict or as_type == list:
        return json.loads(value)
    elif as_type == bool or value in ('True', 'False'):
        return value == 'True'
    elif as_type == File:
        try:
            # Strip the 7-char 'file://' prefix to get the storage name.
            fi = default_storage.open(value[7:], 'r')
            fi.url = default_storage.url(value[7:])
            return fi
        except OSError:
            # Missing file: signal failure with False rather than raising.
            return False
    elif as_type == datetime:
        return dateutil.parser.parse(value)
    elif as_type == date:
        return dateutil.parser.parse(value).date()
    elif as_type == time:
        return dateutil.parser.parse(value).time()
    elif as_type == LazyI18nString and not isinstance(value, LazyI18nString):
        try:
            return LazyI18nString(json.loads(value))
        except ValueError:
            # Not JSON: wrap the raw string as a single-language value.
            return LazyI18nString(str(value))
    elif as_type is not None and issubclass(as_type, Model):
        return as_type.objects.get(pk=value)
    return value
def redactor_upload(request, upload_to=None, form_class=ImageForm,
                    response=lambda name, url: url):
    """Handle a redactor editor upload.

    Images get a 500x375 scaled thumbnail and are returned as a
    ``filelink``/``file`` JSON list; other files are returned with their
    storage URL.  Invalid forms get a 403.  (Dead commented-out code
    removed.)
    """
    form = form_class(request.POST, request.FILES)
    if form.is_valid():
        file_ = form.cleaned_data['file']
        # Random file name; keep the original extension.
        filename = "%s.%s" % (uuid.uuid4(), file_.name.split('.')[-1])
        path = os.path.join(upload_to or UPLOAD_PATH, filename)
        real_path = default_storage.save(path, file_)
        if form_class == ImageForm:
            redim = get_thumbnailer(real_path).get_thumbnail(
                {'size': (500, 375), 'crop': "scale"}).url
            images = [{"filelink": redim, "file": real_path}]
            return HttpResponse(
                json.dumps(images), mimetype="application/json"
            )
        else:
            files = [{'filelink': default_storage.url(real_path)}]
            return HttpResponse(
                json.dumps(files), mimetype="application/json"
            )
    return HttpResponse(status=403)
def handle(self, *args, **kwargs):
    """Move Spirit comment images referenced under MEDIA into default
    storage and rewrite every affected comment to the new URLs.
    """
    media_folder = os.path.join(settings.MEDIA_URL, 'spirit', 'images')
    comments = Comment.objects.filter(comment__contains=media_folder).order_by('pk')
    for comment in comments:
        comment_text = comment.comment
        comment_html = comment.comment_html
        # Markdown-style image links: "(<media_folder><relative>)".
        relative_file_urls = re.findall(
            r'\({}([^(]*)\)'.format(media_folder), comment_text)
        for relative_file_url in relative_file_urls:
            local_file_url = u"{}{}".format(media_folder, relative_file_url)
            full_file_name = u"{}{}".format(settings.BASE_DIR, local_file_url)
            if not os.path.exists(full_file_name):
                continue
            # Read in binary mode: these are image files (was text mode).
            with open(full_file_name, 'rb') as fp:
                raw_file_data = StringIO(fp.read())
            file_name = os.path.basename(full_file_name)
            default_storage.save(file_name, raw_file_data)
            comment_image_url = default_storage.url(file_name)
            comment_text = comment_text.replace(local_file_url, comment_image_url)
            comment_html = comment_html.replace(local_file_url, comment_image_url)
        # BUG FIX: write back to the field actually read above; the original
        # assigned ``comment.comment_text``, which is not a model field here,
        # so the text rewrite was silently lost on save().
        comment.comment = comment_text
        comment.comment_html = comment_html
        comment.save()
def upload_report(request):
    """Accept a zipped test-report upload, extract its statistics, record
    a ``Report`` row and redirect to the success page; otherwise render
    the upload form.  (Large block of dead commented-out code removed.)
    """
    if request.method == 'POST':
        form = ReportForm(request.POST, request.FILES)
        if form.is_valid():
            data = form.cleaned_data
            report_path, total_num, pass_num = handle_uploaded_file(
                request.FILES['zip'], data['system'], data['province'],
                data['reporter'])
            report_url = default_storage.url(report_path)
            instance = Report(reporter=data['reporter'],
                              system=data['system'],
                              province=data['province'],
                              city=data['city'],
                              total_num=total_num,
                              pass_num=pass_num,
                              report_path=report_url)
            instance.save()
            return HttpResponseRedirect('/success/')
    else:
        form = ReportForm()
    return render_to_response('upload.html', {'form': form})
def get_browser_and_captcha(username):
    """Open the delay-repay form, cache the mechanize browser globally for
    *username* (the captcha only matches when the same browser is reused),
    save the captcha image to storage, and return a tuple of the
    base64-encoded page data and the captcha image's storage URL.
    """
    br = mechanize.Browser()
    url = 'http://www.southernrailway.com/your-journey/customer-services/delay-repay/delay-repay-form'
    response = br.open(url)
    global GLOBAL_BROWSER
    print "\n\nSETTING GLOBAL BROWSER"
    GLOBAL_BROWSER[username] = br
    print GLOBAL_BROWSER[username]
    print
    soup = BeautifulSoup(response.get_data(), "html.parser")
    imgs = soup.find_all('img')
    img = None
    # The captcha <img> is the one whose markup mentions 'CAP'.
    for imgage in imgs:
        if 'CAP' in str(imgage):
            img = imgage
    image_response = br.open_novisit(img['src'])
    im = Image.open(image_response)
    # Persist the captcha so the caller can display it via its URL.
    fh = storage.open('captcha.png', "w")
    im.save(fh, 'png')
    fh.close()
    response_data = base64.urlsafe_b64encode(response.read())
    return response_data, storage.url('captcha.png')
def dump_project(self, user, project):
    """Render *project* to a JSON export file in default storage and email
    the download URL to *user*; on any failure email an error report and
    log the exception instead.
    """
    mbuilder = MagicMailBuilder(template_mail_cls=InlineCSSTemplateMail)
    # One export file per Celery task id, namespaced by project pk.
    path = "exports/{}/{}-{}.json".format(project.pk, project.slug, self.request.id)
    try:
        content = ExportRenderer().render(project_to_dict(project),
                                          renderer_context={"indent": 4})
        content = content.decode('utf-8')
        content = ContentFile(content)
        default_storage.save(path, content)
        url = default_storage.url(path)
    except Exception:
        ctx = {
            "user": user,
            "error_subject": _("Error generating project dump"),
            "error_message": _("Error generating project dump"),
            "project": project
        }
        email = mbuilder.export_error(user, ctx)
        email.send()
        logger.error('Error generating dump %s (by %s)', project.slug, user,
                     exc_info=sys.exc_info())
        return
    # Tell the user when the dump will be deleted again.
    deletion_date = timezone.now() + datetime.timedelta(seconds=settings.EXPORTS_TTL)
    ctx = {
        "url": url,
        "project": project,
        "user": user,
        "deletion_date": deletion_date
    }
    email = mbuilder.dump_project(user, ctx)
    email.send()
def render(self, name, value, attrs=None):
    """Render the ajax image upload widget HTML for this form field."""
    final_attrs = self.build_attrs(attrs)
    element_id = final_attrs.get("id")
    kwargs = {
        "upload_to": self.upload_to,
        "max_width": self.max_width,
        "max_height": self.max_height,
        "crop": self.crop,
    }
    upload_url = reverse("ajaximage", kwargs=kwargs)
    # NB convert to string and do not rely on value.url
    # value.url fails when rendering form with validation errors because form value is not a FieldFile
    # Use storage.url and file_path - works with FieldFile instances and string formdata
    file_path = str(value) if value else ""
    file_url = default_storage.url(file_path) if value else ""
    file_name = os.path.basename(file_url)
    output = HTML.format(
        upload_url=upload_url,
        file_url=file_url,
        file_name=file_name,
        file_path=file_path,
        element_id=element_id,
        name=name,
    )
    return mark_safe(unicode(output))
def new_media_object(blog_id, username, password, media):
    """metaWeblog.newMediaObject(blog_id, username, password, media) => media structure"""
    authenticate(username, password)
    # Store the raw bytes under the upload directory and hand back the URL.
    target = os.path.join(UPLOAD_TO, media['name'])
    saved_name = default_storage.save(target, ContentFile(media['bits'].data))
    return {'url': default_storage.url(saved_name)}
def dump_project(self, user, project):
    """Stream a JSON dump of *project* into default storage and email the
    URL to *user*; on any failure email an error report and bail out.
    """
    mbuilder = MagicMailBuilder(template_mail_cls=InlineCSSTemplateMail)
    path = "exports/{}/{}-{}.json".format(project.pk, project.slug, self.request.id)
    # NOTE(review): default_storage.open() is handed the filesystem path
    # from default_storage.path() rather than the storage name -- this only
    # works on file-system backends; confirm no remote backend is used.
    storage_path = default_storage.path(path)
    try:
        url = default_storage.url(path)
        with default_storage.open(storage_path, mode="w") as outfile:
            render_project(project, outfile)
    except Exception:
        ctx = {
            "user": user,
            "error_subject": _("Error generating project dump"),
            "error_message": _("Error generating project dump"),
            "project": project
        }
        email = mbuilder.export_error(user, ctx)
        email.send()
        logger.error('Error generating dump %s (by %s)', project.slug, user,
                     exc_info=sys.exc_info())
        return
    # Tell the user when the dump will be deleted again.
    deletion_date = timezone.now() + datetime.timedelta(seconds=settings.EXPORTS_TTL)
    ctx = {
        "url": url,
        "project": project,
        "user": user,
        "deletion_date": deletion_date
    }
    email = mbuilder.dump_project(user, ctx)
    email.send()
def _unserialize(self, value, as_type):
    """Convert a stored string back into a value of ``as_type``.

    Branch order matters: the bool/'True'/'False' branch also fires for
    untyped string values; Versionable is checked before plain Model.
    Unmatched types return ``value`` unchanged.
    """
    if as_type is not None and isinstance(value, as_type):
        return value
    elif value is None:
        return None
    elif as_type == int or as_type == float or as_type == decimal.Decimal:
        return as_type(value)
    elif as_type == dict or as_type == list:
        return json.loads(value)
    elif as_type == bool or value in ('True', 'False'):
        return value == 'True'
    elif as_type == File:
        try:
            # Strip the 7-char 'file://' prefix to get the storage name.
            fi = default_storage.open(value[7:], 'r')
            fi.url = default_storage.url(value[7:])
            return fi
        except OSError:
            # Missing file: signal failure with False rather than raising.
            return False
    elif as_type == datetime:
        return dateutil.parser.parse(value)
    elif as_type == date:
        return dateutil.parser.parse(value).date()
    elif as_type == time:
        return dateutil.parser.parse(value).time()
    elif as_type is not None and issubclass(as_type, Versionable):
        return as_type.objects.current.get(identity=value)
    elif as_type is not None and issubclass(as_type, Model):
        return as_type.objects.get(pk=value)
    return value
def render(self, name, data, attrs=None):
    """Render 120x120 thumbnail previews (plus optional help text) for the
    instance's images inside the admin form; empty when a preview requires
    an instance and none is bound yet.
    """
    from easy_thumbnails.files import get_thumbnailer
    from easy_thumbnails.files import Thumbnailer
    if attrs is None:
        attrs = {}
    if not self.form.preview_instance_required or self.instance is not None:
        images = self.form.get_images(self.instance)
        options = dict(size=(120, 120), crop=False)
        html = '<div class="adminboost-preview">'
        for image in images:
            thumbnail = get_thumbnailer(image.file).get_thumbnail(options)
            # Thumbnailer instances need their URL built via storage.
            if isinstance(image.file, Thumbnailer):
                image_url = default_storage.url(force_unicode(image.file.name))
            else:
                image_url = image.file.url
            html += '<div class="adminboost-preview-thumbnail">' \
                    '<a href="%(image_url)s" target="_blank">' \
                    '<img src="%(thumbnail_url)s"/></a></div>' % {
                        'image_url': image_url,
                        'thumbnail_url': thumbnail.url
                    }
        help_text = self.form.get_preview_help_text(self.instance)
        if help_text is not None:
            html += '<p class="help">%s</p>' % force_unicode(help_text)
        html += '</div>'
        return mark_safe(unicode(html))
    else:
        return u''
def retrieve(self, request, pk, *args, **kwargs):
    """Export a project dump.

    With Celery enabled the export runs asynchronously and an export id is
    returned; otherwise the dump is written to storage synchronously and
    its URL returned.  ``dump_format`` query param selects plain JSON or
    gzip.  Rate-limited via the import/dump throttle.
    """
    throttle = throttling.ImportDumpModeRateThrottle()
    if not throttle.allow_request(request, self):
        self.throttled(request, throttle.wait())
    project = get_object_or_404(self.get_queryset(), pk=pk)
    self.check_permissions(request, 'export_project', project)
    dump_format = request.QUERY_PARAMS.get("dump_format", "plain")
    if settings.CELERY_ENABLED:
        task = tasks.dump_project.delay(request.user, project, dump_format)
        # Schedule deletion of the dump once its TTL expires.
        tasks.delete_project_dump.apply_async(
            (project.pk, project.slug, task.id, dump_format),
            countdown=settings.EXPORTS_TTL)
        return response.Accepted({"export_id": task.id})
    if dump_format == "gzip":
        path = "exports/{}/{}-{}.json.gz".format(project.pk, project.slug,
                                                 uuid.uuid4().hex)
        storage_path = default_storage.path(path)
        with default_storage.open(storage_path, mode="wb") as outfile:
            services.render_project(project, gzip.GzipFile(fileobj=outfile))
    else:
        path = "exports/{}/{}-{}.json".format(project.pk, project.slug,
                                              uuid.uuid4().hex)
        storage_path = default_storage.path(path)
        with default_storage.open(storage_path, mode="wb") as outfile:
            services.render_project(project, outfile)
    response_data = {
        "url": default_storage.url(path)
    }
    return response.Ok(response_data)
def upload_photos(request):
    """Store an image posted by the CKEditor upload dialog, generate a
    128x128 thumbnail for it, and answer with the callback script CKEditor
    expects.  Returns None when no 'upload' file is present (as before).
    """
    if request.FILES.get("upload"):
        from django.core.files import File
        uploaded_file = request.FILES.get("upload")
        stored_image = Image_File.objects.create(upload=uploaded_file, is_image=True)
        size = (128, 128)
        file_name = uploaded_file.name
        thumb_file_name = 'thumb' + uploaded_file.name
        # Write the upload to a local temp file -- binary mode (was text
        # mode, which corrupts images) and closed deterministically.
        with open(file_name, 'wb') as temp_file:
            for chunk in uploaded_file.chunks():
                temp_file.write(chunk)
        im = Image.open(file_name)
        im.thumbnail(size)
        im.save(thumb_file_name)
        # BUG FIX: the original wrapped the handle in the undefined name
        # ``fle(...)`` (NameError); wrap it in django.core.files.File.
        with open(thumb_file_name, 'rb') as imdata:
            stored_image.thumbnail.save(thumb_file_name, File(imdata))
        os.remove(file_name)
        os.remove(thumb_file_name)
        uploaded_url = default_storage.url(stored_image.upload.url)
        uploaded_url = '/' + uploaded_url
        return HttpResponse("""
            <script type='text/javascript'>
                window.parent.CKEDITOR.tools.callFunction({0}, '{1}');
            </script>""".format(request.GET['CKEditorFuncNum'], uploaded_url)
        )
def file_upload(request):
    """Save an uploaded Froala editor file and respond with its URL as JSON."""
    if 'file' not in request.FILES:
        # Mirror the original fall-through: no response for other requests.
        return None
    the_file = request.FILES['file']
    upload_to = getattr(settings, 'FROALA_UPLOAD_PATH', 'uploads/froala_editor/fies/')
    saved_path = default_storage.save(os.path.join(upload_to, the_file.name), the_file)
    payload = {'link': default_storage.url(saved_path)}
    return HttpResponse(json.dumps(payload), content_type="application/json")
def render(self, name, value, attrs=None):
    """Render the ajax image upload widget HTML for this form field."""
    final_attrs = self.build_attrs(attrs)
    element_id = final_attrs.get('id')
    kwargs = {'upload_to': self.upload_to, 'max_width': self.max_width,
              'max_height': self.max_height, 'crop': self.crop}
    upload_url = reverse('ajaximage', kwargs=kwargs)
    # NB convert to string and do not rely on value.url
    # value.url fails when rendering form with validation errors because
    # form value is not a FieldFile. Use storage.url and file_path - works
    # with FieldFile instances and string formdata
    file_path = str(value) if value else ''
    file_url = default_storage.url(file_path) if value else ''
    file_name = os.path.basename(file_url)
    output = SafeUnicode(self.html.format(upload_url=upload_url,
                                          file_url=file_url,
                                          file_name=file_name,
                                          file_path=file_path,
                                          element_id=element_id,
                                          name=name))
    return mark_safe(output)
def get_thumbnail_url(self, video):
    """Return the thumbnail URL for *video* at this widget's size, with a
    cache-busting key when the source tracks a latest-change field."""
    if isinstance(video, MetasearchVideo):
        return video.thumbnail_url
    # Pick the first related object that actually carries a thumbnail.
    if video.has_thumbnail:
        source = video
    elif video.feed and video.feed.has_thumbnail:
        source = video.feed
    elif video.search and video.search.has_thumbnail:
        source = video.search
    else:
        source = None
    if source is None:
        return '/images/default_vid.gif'
    url = default_storage.url(
        source.get_resized_thumb_storage_path(*self.size))
    latest_field = source._meta.get_latest_by
    if latest_field:
        cache_key = hex(hash(getattr(source, latest_field)))[-8:]
        return '%s?%s' % (url, cache_key)
    return url
def run_test(self, filename, content='Lorem ipsum dolar sit amet'):
    """Round-trip *content* through default storage under *filename*:
    save, verify existence/size/mtime/URL, then delete it again.
    """
    content = UnicodeContentFile(content)
    filename = default_storage.save(filename, content)
    self.assert_(default_storage.exists(filename))
    self.assertEqual(default_storage.size(filename), content.size)
    # The modification time should land within five minutes of "now".
    now = datetime.utcnow()
    delta = timedelta(minutes=5)
    mtime = default_storage.getmtime(filename)
    self.assert_(mtime > mktime((now - delta).timetuple()))
    self.assert_(mtime < mktime((now + delta).timetuple()))
    file = default_storage.open(filename)
    self.assertEqual(file.size, content.size)
    # Normalise the file's path into a URL fragment for comparison.
    fileurl = force_unicode(file).replace('\\', '/')
    fileurl = urlquote_plus(fileurl, '/')
    if fileurl.startswith('/'):
        fileurl = fileurl[1:]
    self.assertEqual(
        MEDIA_URL+fileurl,
        default_storage.url(filename)
    )
    file.close()
    default_storage.delete(filename)
    self.assert_(not default_storage.exists(filename))
def attachment(request):
    """Handle a Sir Trevor attachment upload and return its metadata
    (type, dimensions for images, storage path/url/name/size) as JSON.
    """
    # Remap Sir Trevor's field name to the one AttachmentForm expects.
    request.FILES['attachment'] = request.FILES['attachment[file]']
    form = AttachmentForm(request.POST, request.FILES)
    if form.is_valid():
        file_ = form.cleaned_data['attachment']
        file_name, extension = os.path.splitext(file_.name)
        safe_name = '{0}{1}'.format(slugify(file_name), extension)
        name = os.path.join(settings.SIRTREVOR_UPLOAD_PATH, safe_name)
        data = {'type': file_.content_type}
        image_types = ['image/png', 'image/jpg', 'image/jpeg', 'image/pjpeg',
                       'image/gif']
        if file_.content_type in image_types:
            if settings.SIRTREVOR_ATTACHMENT_PROCESSOR is not None:
                # The processor may be a callable or a dotted path to one.
                if callable(settings.SIRTREVOR_ATTACHMENT_PROCESSOR):
                    processor = settings.SIRTREVOR_ATTACHMENT_PROCESSOR
                else:
                    module, func = settings.SIRTREVOR_ATTACHMENT_PROCESSOR.rsplit('.', 1)
                    processor = getattr(importlib.import_module(module), func)
                file_ = processor(file_)
            # Best-effort: some images may not be readable by PIL.
            # Narrowed from a bare ``except:`` so SystemExit and
            # KeyboardInterrupt are no longer swallowed.
            try:
                data['dimensions'] = Image.open(file_).size
            except Exception:
                pass
        data['path'] = default_storage.save(name, file_)
        data['url'] = default_storage.url(data['path'])
        data['name'] = os.path.split(data['path'])[1]
        data['size'] = file_.size
        return HttpResponse(json.dumps({'file': data}),
                            content_type='application/javascript; charset=utf8')
    else:
        return HttpResponse('Error')
def to_python(self, value):
    """Normalise *value* according to ``self.to``.

    'file': fetch a URL into an InMemoryUploadedFile; 'url': store an
    uploaded file and return its storage URL; otherwise pass through.
    Raises ValidationError when the remote fetch fails.
    """
    value = super(FileOrURLField, self).to_python(value)
    if self.to is None:
        return value
    elif self.to == 'file' and not isinstance(value, UploadedFile):
        try:
            resp = requests.get(value)
        except requests.RequestException:  # narrowed from a bare except
            raise ValidationError(self.url_fetch_error)
        if not (200 <= resp.status_code < 400):
            raise ValidationError(self.url_fetch_error)
        io = StringIO(unicode(resp.content))
        # BUG FIX: ``io.seek(os.SEEK_END)`` seeks to absolute offset 2
        # (os.SEEK_END == 2), so ``size`` was always reported as 2.
        # Seeking relative to the end needs the whence argument.
        io.seek(0, os.SEEK_END)
        size = io.tell()
        io.seek(0)
        return InMemoryUploadedFile(
            io, None, posixpath.basename(value), resp.headers['content-type'],
            size, None)
    elif self.to == 'url' and isinstance(value, UploadedFile):
        path = default_storage.save(
            posixpath.join(self.upload_to, value.name),
            ContentFile(value.read()))
        if self.no_aws_qs:
            # Disable querystring auth so the URL stays shareable.
            default_storage.querystring_auth = False
        return default_storage.url(path)
    return value
def contextprocessor(request):
    """ Adds data to all template contexts """
    try:
        url = resolve(request.path_info)
    except Resolver404:
        return {}
    # Only decorate presale (customer-facing) pages.
    if url.namespace != 'presale':
        return {}
    ctx = {
        'css_file': None
    }
    _html_head = []
    _footer = []
    if hasattr(request, 'event'):
        # Let plugins contribute extra <head> markup and footer links.
        for receiver, response in html_head.send(request.event, request=request):
            _html_head.append(response)
        for receiver, response in footer_link.send(request.event, request=request):
            _footer.append(response)
        if request.event.settings.presale_css_file:
            ctx['css_file'] = default_storage.url(request.event.settings.presale_css_file)
        # Setting value is stored as 'file://<name>'; strip the 7-char scheme.
        ctx['event_logo'] = request.event.settings.get('logo_image', as_type=str, default='')[7:]
    ctx['html_head'] = "".join(_html_head)
    ctx['footer'] = _footer
    ctx['site_url'] = settings.SITE_URL
    return ctx
def get_thumbnail_url(self, video, context):
    """Return the thumbnail URL for *video*, made absolute against the
    current site when ``self.absolute`` is set and the URL is relative.
    """
    # Unsaved videos only carry a remote thumbnail URL.
    if video.pk is None:
        return video.thumbnail_url
    thumbnail = None
    if video.has_thumbnail:
        thumbnail = video
    elif video.feed and video.feed.has_thumbnail:
        thumbnail = video.feed
    elif video.search and video.search.has_thumbnail:
        thumbnail = video.search
    if not thumbnail:
        return settings.STATIC_URL + 'localtv/images/default_vid.gif'
    url = default_storage.url(
        thumbnail.get_resized_thumb_storage_path(*self.size))
    if thumbnail._meta.get_latest_by:
        # Cache-busting key derived from the latest-change field value.
        key = hex(hash(getattr(thumbnail,
                               thumbnail._meta.get_latest_by)))[-8:]
        url = '%s?%s' % (url, key)
    if not self.absolute or url.startswith(('http://', 'https://')):
        # full URL, return it
        return url
    else:
        # add the domain
        if 'request' in context:
            request = context['request']
            scheme = 'https' if request.is_secure() else 'http'
        else:
            scheme = 'http'
        domain = Site.objects.get_current().domain
        return '%s://%s%s' % (scheme, domain, url)
def upload(request):
    """File-browser upload endpoint: store the posted file and describe it
    (URL, link/image type, base filename) as JSON; 400 on any failure.
    """
    try:
        if request.method == 'POST':
            f = request.FILES['data']
            pth = request.POST['path']
            path = "file_browser/%s/%s" % (pth, f.name)
            # Only save when the name is free; otherwise reuse the old file.
            if not default_storage.exists(path):
                path = default_storage.save(path, ContentFile(f.read()))
            url = default_storage.url(path)
            # type of file
            link_type = "link"
            try:
                # checking for image
                im = Image.open(default_storage.open(path))
                im.verify()
                link_type = "image"
            except:
                # Not a readable image: keep it a plain link.
                pass
            r = {
                'url': url,
                'type': link_type,
                'filename': os.path.splitext(os.path.basename(url))[0],
            }
            return HttpResponse(json.dumps(r), content_type="application/json")
    except BaseException as e:
        print e
        return HttpResponseBadRequest()
def image_ratio_ajax(request, upload_to=None, form_class=FileForm):
    """
    Processes ajax post from imagescaler.

    Saves the uploaded image, optionally compresses PNGs via TinyPNG, and
    returns its URL plus width/height as JSON; 403 on bad input.
    """
    form = form_class(request.POST, request.FILES)
    if form.is_valid():
        uploaded_file = request.FILES['file']
        if uploaded_file.content_type in IMAGE_TYPES:
            file_name, extension = os.path.splitext(uploaded_file.name)
            safe_name = '{0}{1}'.format(FILENAME_NORMALIZER(file_name), extension)
            name = os.path.join(upload_to or UPLOAD_PATH, safe_name)
            path = default_storage.save(name, uploaded_file)
            full_path = default_storage.path(path)
            # Make the file group-writable; failures are non-fatal.
            try:
                os.chmod(full_path, 0660)
            except Exception:
                print sys.exc_info()
            if settings.TINY_PNG_ENABLED is True and uploaded_file.content_type in \
                    IMAGE_TYPES_FOR_TINY_PNG:
                compress_image(default_storage.path(path))
            size = get_image_dimensions(default_storage.path(path), True)
            # NOTE(review): if ``size`` is falsy, width/height are unbound
            # and the response below raises NameError -- confirm intended.
            if size:
                width, height = size
            return HttpResponse(json.dumps({
                'url': default_storage.url(path),
                'filename': path,
                'data': {'width': width, 'height': height}
            }))
        return HttpResponse(status=403, content='Bad image format')
    return HttpResponse(status=403)
def student_view_data(self):
    """
    Inform REST api clients about original file location and it's "freshness".
    Make sure to include `student_view_data=openedxscorm` to URL params in the request.
    """
    if not self.index_page_url:
        return {}
    meta = self.package_meta
    return {
        "last_modified": meta.get("last_updated", ""),
        "scorm_data": default_storage.url(self.package_path),
        "size": meta.get("size", 0),
        "index_page": self.index_page_path,
    }
def store_doab_cover(doab_id, redo=False):
    """
    returns tuple: 1) cover URL, 2) whether newly created (boolean)
    """
    cover_file_name = '/doab/%s/cover' % (doab_id)
    # if we don't want to redo and the cover exists, return the URL of the cover
    if not redo and default_storage.exists(cover_file_name):
        return (default_storage.url(cover_file_name), False)
    # download cover image to cover_file
    url = "http://www.doabooks.org/doab?func=cover&rid={0}".format(doab_id)
    try:
        r = requests.get(
            url, allow_redirects=False)  # requests doesn't handle ftp redirects.
        if r.status_code == 302:
            redirurl = r.headers['Location']
            if redirurl.startswith(u'ftp'):
                # Springer covers live behind an ftp redirect; map to the
                # equivalent http image URL when the pattern matches.
                springerftp = SPRINGER_COVER.match(redirurl)
                if springerftp:
                    redirurl = SPRINGER_IMAGE.format(springerftp.groups(1))
                    r = requests.get(redirurl)
                else:
                    r = requests.get(url)
            else:
                r = requests.get(url)
        else:
            r = requests.get(url)
        cover_file = ContentFile(r.content)
        cover_file.content_type = r.headers.get('content-type', '')
        default_storage.save(cover_file_name, cover_file)
        return (default_storage.url(cover_file_name), True)
    except Exception, e:
        # if there is a problem, return None for cover URL
        logger.warning('Failed to make cover image for doab_id={}: {}'.format(
            doab_id, e))
        return (None, False)
def predict(file):
    """Classify the pattern of an uploaded image with the pretrained model
    and return the predicted class name.
    """
    file_name = default_storage.save(file.name, file)
    file_url = default_storage.url(file_name)
    # Model expects 64x64 input images.
    test_image = image.load_img(os.getcwd()+file_url, target_size=(64, 64))
    test_image = image.img_to_array(test_image)
    test_image = np.expand_dims(test_image, axis=0)
    loaded_model = load_model_and_pretrained_weights()
    result = loaded_model.predict(test_image)
    classes_array = ['OTHER', 'animal', 'cartoon', 'chevron', 'floral',
                     'geometry', 'houndstooth', 'ikat', 'letter_numb',
                     'plain', 'polka dot', 'scales', 'skull', 'squares',
                     'stars', 'stripes', 'tribal']
    # NOTE(review): this assumes the model emits an exact one-hot vector;
    # if it outputs probabilities, np.where(... == 1) can be empty and
    # raise IndexError -- consider np.argmax. TODO confirm model output.
    return classes_array[np.where(result[0] == 1)[0][0]]
def get(self, request, *args, **kwargs):
    """Serve a carving session's archive: either redirect to its storage
    URL or stream it directly as a tar download."""
    carving_session = get_object_or_404(
        FileCarvingSession, pk=kwargs["pk"], archive__isnull=False)
    if not self._redirect_to_files:
        return FileResponse(
            carving_session.archive,
            content_type='application/x-tar',
            as_attachment=True,
            filename=carving_session.get_archive_name())
    return HttpResponseRedirect(
        default_storage.url(carving_session.archive.name))
def redactor_upload(request, upload_to=None, form_class=ImageForm,
                    response=lambda name, url: url):
    """Store an uploaded editor file and reply using the ``response``
    callback; 403 when the form is invalid."""
    form = form_class(request.POST, request.FILES)
    if not form.is_valid():
        return HttpResponse(status=403)
    uploaded = form.cleaned_data['file']
    target = os.path.join(upload_to or UPLOAD_PATH,
                          GENERATE_FILENAME(uploaded.name))
    stored_path = default_storage.save(target, uploaded)
    return HttpResponse(
        response(uploaded.name, default_storage.url(stored_path)))
def retrieve(self, request, *args, **kwargs):
    """Return the replay player type and source URL for a session,
    fetching the replay from external storage when needed.
    """
    session_id = kwargs.get('pk')
    session = get_object_or_404(Session, id=session_id)
    data = {
        # Guacamole player for RDP recordings, JSON player otherwise.
        'type': 'guacamole' if session.protocol == 'rdp' else 'json',
        'src': '',
    }
    # New and old terminal versions use different replay file suffixes.
    session_path = session.get_rel_replay_path()  # path on the external storage
    local_path = session.get_local_path()
    local_path_v1 = session.get_local_path(version=1)
    # Look in default storage first (new path, legacy v1 path, raw path).
    for _local_path in (local_path, local_path_v1, session_path):
        if default_storage.exists(_local_path):
            url = default_storage.url(_local_path)
            data['src'] = url
            return Response(data)
    # Fall back to the configured external storages ('server' type excluded
    # because that is this server itself).
    configs = settings.TERMINAL_REPLAY_STORAGE
    configs = {k: v for k, v in configs.items() if v['TYPE'] != 'server'}
    if not configs:
        return HttpResponseNotFound()
    # Local filesystem path the downloaded replay is saved to.
    target_path = os.path.join(default_storage.base_location, local_path)
    target_dir = os.path.dirname(target_path)
    if not os.path.isdir(target_dir):
        os.makedirs(target_dir, exist_ok=True)
    storage = jms_storage.get_multi_object_storage(configs)
    ok, err = storage.download(session_path, target_path)
    if not ok:
        logger.error("Failed download replay file: {}".format(err))
        return HttpResponseNotFound()
    data['src'] = default_storage.url(local_path)
    return Response(data)
def predict(request):
    """Save the uploaded video, run ``finalVid`` on it, and render the
    result page with the stored file's URL.

    Cleanup: removed leftover debug ``print`` calls and an unused
    ``default_storage.open(name)`` whose handle was never closed (leak).
    """
    uploaded = request.FILES['file1']
    # ``name`` is the storage name finalVid() operates on; the URL is only
    # used for display in the template.
    name = default_storage.save(uploaded.name, uploaded)
    file_path_name = default_storage.url(name)
    finalVid(name)
    context = {'name': file_path_name}
    return render(request, 'index.html', context)
def student_view_data(self):
    """
    Inform REST api clients about original file location and it's "freshness".
    Make sure to include `student_view_data=scormxblock` to URL params in the request.
    """
    if not (self.scorm_file and self.scorm_file_meta):
        return {}
    meta = self.scorm_file_meta
    return {
        'last_modified': meta.get('last_updated', ''),
        'scorm_data': default_storage.url(self._file_storage_path()),
        'size': meta.get('size', 0),
        'index_page': self.path_index_page,
    }
def index_page_url(self):
    """Return the storage URL of the package's index page, or '' when the
    package metadata or index path is missing."""
    if not self.package_meta or not self.index_page_path:
        return ""
    legacy_index = os.path.join(self.extract_folder_base_path,
                                self.index_page_path)
    if default_storage.exists(legacy_index):
        # For backward-compatibility, we must handle the case when the
        # xblock data is stored in the base folder.
        folder = self.extract_folder_base_path
        logger.warning("Serving SCORM content from old-style path: %s", folder)
    else:
        folder = self.extract_folder_path
    return default_storage.url(os.path.join(folder, self.index_page_path))
def get_root_account_file_url(*, address=None):
    """Return root account file URL.

    When *address* is not supplied, it is derived from this node's own
    configuration (protocol, IP address, port).
    """
    if not address:
        config = get_self_configuration(exception_class=RuntimeError)
        address = format_address(
            ip_address=config.ip_address,
            port=config.port,
            protocol=config.protocol,
        )
    file_url = default_storage.url(settings.ROOT_ACCOUNT_FILE_PATH)
    return address + file_url
def get_thumbnail_picture_url(self):
    """Return the URL of the cached thumbnail for ``self.picture``.

    Falls back to the original picture's URL when no ``*_thumbnail.jpg``
    companion file exists, and to "" when there is no picture at all.
    """
    # Cheap check first, before importing the storage backend.
    if not self.picture:
        return ""
    # Removed an unused ``from PIL import Image`` left over from earlier
    # resizing code.
    from django.core.files.storage import default_storage as storage
    filename_base, _ = os.path.splitext(self.picture.name)
    thumbnail_file_path = "%s_thumbnail.jpg" % filename_base
    if storage.exists(thumbnail_file_path):
        # A pre-generated thumbnail exists; serve it.
        return storage.url(thumbnail_file_path)
    # No thumbnail yet: fall back to the original image.
    return self.picture.url
def get_file(self, request, pk=None):
    """Redirect to the storage URL of the file named by the ``file`` query
    parameter, located under the bucket directory for object ``pk``.

    Raises 404 when the object does not exist and enforces object-level
    permissions before redirecting.
    """
    obj = get_object_or_404(self.get_queryset(), id=pk)
    self.check_object_permissions(request, obj)
    requested_name = request.query_params.get("file", "")
    bucket_dir = "{pk}".format(pk=pk)
    target = os.path.join(bucket_dir, requested_name)
    return HttpResponseRedirect(redirect_to=default_storage.url(target))
def contextprocessor(request):
    """
    Adds data to all template contexts
    """
    # Control-panel pages get their own context; skip entirely.
    if request.path.startswith('/control'):
        return {}
    ctx = {
        'css_file': None,
        'DEBUG': settings.DEBUG,
    }
    _html_head = []
    _footer = []
    # Settings resolution order: event > organizer > global defaults.
    if hasattr(request, 'event'):
        pretix_settings = request.event.settings
    elif hasattr(request, 'organizer'):
        pretix_settings = request.organizer.settings
    else:
        pretix_settings = GlobalSettingsObject().settings
    text = str(pretix_settings.get('footer_text', as_type=LazyI18nString))
    link = str(pretix_settings.get('footer_link', as_type=LazyI18nString))
    # Footer text with a link becomes a footer entry; without a link it is
    # exposed as plain text.
    if text:
        if link:
            _footer.append({'url': link, 'label': text})
        else:
            ctx['footer_text'] = text
    if hasattr(request, 'event'):
        # Let plugins contribute <head> markup and footer links via signals.
        for receiver, response in html_head.send(request.event,
                                                 request=request):
            _html_head.append(response)
        for receiver, response in footer_link.send(request.event,
                                                   request=request):
            _footer.append(response)
        if request.event.settings.presale_css_file:
            ctx['css_file'] = default_storage.url(
                request.event.settings.presale_css_file)
        # [7:] strips a fixed prefix from the stored logo path
        # (presumably the 'file://'-style prefix — TODO confirm).
        ctx['event_logo'] = request.event.settings.get('logo_image',
                                                       as_type=str,
                                                       default='')[7:]
        ctx['event'] = request.event
    ctx['html_head'] = "".join(_html_head)
    ctx['footer'] = _footer
    ctx['site_url'] = settings.SITE_URL
    return ctx
def save_image(self, f):
    """Persist the uploaded image ``f`` and record it as an ImageInfo row.

    Stores the file under ``ori_images/<camera_id>/``, creates an ImageInfo
    linked to this camera, and remembers the new image id on ``self``.
    """
    width, height = get_image_dimensions(f)
    requested_path = 'ori_images/' + str(self.camera_id) + '/' + f.name
    # save() may deduplicate and return a name different from the one
    # requested; the old code rebuilt the requested path for the URL and
    # could therefore point at the wrong file. Also removed debug prints.
    stored_path = default_storage.save(requested_path, f)
    file_url = default_storage.url(stored_path)
    camera = CameraInfo.objects.get(camera_id=self.camera_id)
    image = ImageInfo(ori_file_path=file_url, size=f.size, width=width,
                      height=height, taken_camera=camera,
                      taken_time=self.last_active_time)
    image.save()
    self.image_id = image.image_id
def get_download_url(self, key):
    """
    Return the django storage download URL for the given key.

    Returns None if no file exists at that location.
    """
    file_path = self._get_file_path(key)
    if not default_storage.exists(file_path):
        return None
    # Join onto LMS_ROOT_URL so a relative storage path becomes absolute.
    base_url = getattr(settings, 'LMS_ROOT_URL', '')
    return urljoin(base_url, default_storage.url(file_path))
def retrieve(self, request, *args, **kwargs):
    """Redirect to the replay file for a session, downloading it from a
    configured external replay storage into default storage if needed."""
    session_id = kwargs.get('pk')
    self.session = get_object_or_404(Session, id=session_id)
    path = self.gen_session_path()
    if default_storage.exists(path):
        url = default_storage.url(path)
        return redirect(url)
    else:
        configs = settings.TERMINAL_REPLAY_STORAGE.items()
        if not configs:
            return HttpResponseNotFound()
        # Try each configured external storage in turn.
        for name, config in configs:
            client = jms_storage.init(config)
            # External storages key replays by <date>/<session_id>.replay.gz
            date = self.session.date_start.strftime('%Y-%m-%d')
            file_path = os.path.join(date,
                                     str(self.session.id) + '.replay.gz')
            # Local destination inside default storage's base location.
            target_path = default_storage.base_location + '/' + path
            if client and client.has_file(file_path) and \
                    client.download_file(file_path, target_path):
                return redirect(default_storage.url(path))
        return HttpResponseNotFound()
def get_thumbnail_picture_url(self):
    """Return an absolute http URL for this file's thumbnail, or ''.

    Returns '' both when there is no file and when no thumbnail has been
    generated yet. (The old code implicitly returned None in the
    missing-thumbnail case, inconsistent with the '' used elsewhere.)
    """
    if self is None or not self.file:
        return ''
    thumbnail_file_path = get_thumb_from_file_name(self.file.name)
    if storage.exists(thumbnail_file_path):
        # The thumbnail exists: build a fully-qualified URL for it.
        return "".join([
            "http://",
            get_current_site(1).domain,
            storage.url(thumbnail_file_path),
        ])
    # No thumbnail generated yet.
    return ''
def _save(self, image, file_name, commit):
    """
    Final saving step, run internally after processing tasks complete.

    :param image: Prepared image.
    :type image: django.core.files.uploadedfile.InMemoryUploadedFile
    :param file_name: Name under which the image is to be saved.
    :type file_name: str
    :param commit: If ``True``, persist the image to storage.
    :type commit: bool
    :return: URL of the uploaded image when ``commit=True``, otherwise a
        namedtuple ``(path, image)`` of the generated path and the
        prepared, unsaved image.
    :rtype: str, namedtuple

    NOTE: copied from the original markdownx form with two modifications:
    1) with MARKDOWNX_USE_ORIGINAL_IMAGE_NAME we keep the original file
       name and, if the file already exists, skip saving and just return
       its URL (set the flag to restore upstream's unique-name default);
    2) an optional sanitised ``field_image_prefix`` (provided by the model
       and passed via ajax javascript to the view) is injected into the
       storage path.
    """
    keep_original_name = MARKDOWNX_USE_ORIGINAL_IMAGE_NAME
    target_name = (
        file_name if keep_original_name
        else self.get_unique_file_name(file_name)
    )
    prefix = self.cleaned_data['field_image_prefix'] or ''
    full_path = path.join(MARKDOWNX_MEDIA_PATH, prefix, target_name)
    if commit:
        # Skip the write when original names are reused and the file is
        # already present; otherwise store the prepared image.
        already_stored = (keep_original_name
                          and default_storage.exists(full_path))
        if not already_stored:
            default_storage.save(full_path, image)
        return default_storage.url(full_path)
    # commit=False: hand back the computed path and the in-memory image.
    image_data = namedtuple('image_data', ['path', 'image'])
    return image_data(path=full_path, image=image)
def get_formatted_answer(self, instance):
    """Serialize ``instance.answer`` according to its declared answer type.

    Image answers become absolute URLs (single or list); geometry answers
    are parsed as WGS84 geometries; anything else is passed through the
    mapped serializer field. Returns None for unmapped types or a missing
    answer value.
    """
    serializer_class = self.answer_type_serializer_mapping[
        instance.answer_type]
    if serializer_class is None:
        return None
    if serializer_class == ImageField:
        # Single stored image path -> absolute URL.
        return self.context["request"].build_absolute_uri(
            default_storage.url(instance.answer))
    if serializer_class == ImageListField:
        # Comma-separated stored image paths -> list of absolute URLs.
        request = self.context["request"]
        return [
            request.build_absolute_uri(default_storage.url(image_path))
            for image_path in instance.answer.split(",")
        ]
    if serializer_class == GeometryField:
        value = GEOSGeometry(instance.answer, srid=4326)
    else:
        value = instance.answer
    if value is None:
        return None
    return serializer_class().to_representation(value)
def image_upload(request):
    """Optimize an uploaded image, store it under the user's folder and
    return its URL as JSON with status 201."""
    image_file = request.FILES['file']
    # Resize/crop to a fixed 778x519 cover before storing.
    image_file = image_optimizer(image_data=image_file,
                                 output_size=(778, 519),
                                 resize_method='cover')
    image_directory = f"uploads/{request.user}"
    image_path = os.path.join(image_directory, image_file.name)
    default_storage.save(image_path, image_file)
    # Removed an unused hard-coded S3 URL (dead local variable);
    # default_storage.url() already produces the public URL.
    file_url = default_storage.url(image_path)
    return JsonResponse({'location': f"{file_url}"}, status=201)
def testUrl(self):
    """Signed storage URLs grant access; stripping the signature must not."""
    with self.save_file():
        url = default_storage.url("foo.txt")
        # A signed URL carries query string authentication.
        self.assertTrue(urlsplit(url).query)
        # The signed URL is accessible even though the object is private.
        response = requests.get(url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b"foo")
        self.assertEqual(response.headers["cache-control"],
                         "private,max-age=3600")
        # With the query string removed, access must be denied.
        stripped = urlsplit(url)[:3] + ("", "",)
        url_unauthenticated = urlunsplit(stripped)
        response_unauthenticated = requests.get(url_unauthenticated)
        self.assertEqual(response_unauthenticated.status_code, 403)
def browse_images(request):
    """Return a JSON listing of images in the configured browse folders,
    each with title, thumbnail and originating folder name."""
    entries = []
    for folder_name, folder in BROWSE_FOLDERS.items():
        # listdir() returns (directories, files); only files matter here.
        for base_name in default_storage.listdir(folder)[1]:
            full_path = os.path.join(folder, base_name)
            entries.append({
                'image': default_storage.url(full_path),
                'title': base_name,
                'thumb': get_thumbnail(full_path, '150x150',
                                       upscale=False).url,
                'folder': folder_name,
            })
    return HttpResponse(content_type='text/javascript',
                        content=json.dumps(entries))
def upload_file(request, upload_path=None, upload_link=None):
    """Handle a file upload and answer with its storage URL as JSON.

    Responds 403 with an empty JSON object when the form is invalid. When
    ``upload_link`` is truthy only ``filelink`` is returned; otherwise the
    original filename is included as well.
    """
    form = FileForm(request.POST, request.FILES)
    if not form.is_valid():
        return JsonResponse({}, status=403)
    uploaded_file = form.cleaned_data['file']
    destination = os.path.join(upload_path or UPLOAD_PATH,
                               uploaded_file.name)
    stored_name = default_storage.save(destination, uploaded_file)
    file_url = default_storage.url(stored_name)
    if upload_link:
        return JsonResponse({'filelink': file_url})
    return JsonResponse({'filelink': file_url,
                         'filename': uploaded_file.name})
def get_archive_path():
    """Root storage location for warcs, based on default_storage.

    This is where pywb can find warcs, e.g. 'file://generated/' or
    'http://perma.s3.amazonaws.com/generated/'. We obtain it by asking
    default_storage for the location of a blank file; since storage may be
    disk- or network-backed depending on config, we probe path() first and
    fall back to url().
    """
    try:
        # Disk-backed storage exposes a filesystem path.
        root = 'file://' + default_storage.path('') + '/'
    except NotImplementedError:
        # Network-backed storage: take the URL and drop any query params.
        root = default_storage.url('/')
        root = root.split('?', 1)[0]
    # Must be ascii, for some reason, else you'll get
    # 'unicode' object has no attribute 'get'.
    return root.encode('ascii', 'ignore')
def resolve(self, user, pk) -> tuple:
    """
    Returns a tuple of the asset object, location and download filename of
    the identified asset. If user does not have access to the asset, an
    exception is raised.
    """
    asset = self.derive_asset(pk)
    if not user.has_org_perm(asset.org, self.permission):  # pragma: needs cover
        raise AssetAccessDenied()
    if not self.is_asset_ready(asset):
        raise AssetFileNotFound()
    path = self.derive_path(asset.org, asset.uuid)
    if not default_storage.exists(path):  # pragma: needs cover
        raise AssetFileNotFound()
    # create a more friendly download filename
    remainder, extension = path.rsplit(".", 1)
    filename = f"{self.key}_{pk}_{slugify(asset.org.name)}.{extension}"
    # if our storage backend is S3, ask for a signed URL that forces a
    # download under the friendly filename
    if settings.DEFAULT_FILE_STORAGE == "storages.backends.s3boto3.S3Boto3Storage":  # pragma: needs cover
        url = default_storage.url(
            path,
            parameters=dict(
                # Fixed: the Content-Disposition previously contained a
                # literal placeholder instead of the computed filename.
                ResponseContentDisposition=f"attachment;filename={filename}"
            ),
            http_method="GET",
        )
    # otherwise, let the backend generate the URL
    else:
        url = default_storage.url(path)
    return asset, url, filename
def testSyncMeta(self):
    """Check that sync_meta() re-applies changed storage settings
    (metadata, ACL, storage class, encryption) to an existing object."""
    with self.save_file(name="foo/bar.txt", content=b"foo" * 1000):
        # Defaults: private auth caching, gzip (compressible payload),
        # and none of the optional metadata set.
        meta = default_storage.meta("foo/bar.txt")
        self.assertEqual(meta["CacheControl"], "private,max-age=3600")
        self.assertEqual(meta["ContentType"], "text/plain")
        self.assertEqual(meta["ContentEncoding"], "gzip")
        self.assertEqual(meta.get("ContentDisposition"), None)
        self.assertEqual(meta.get("ContentLanguage"), None)
        self.assertEqual(meta["Metadata"], {})
        self.assertEqual(meta.get("StorageClass"), None)
        self.assertEqual(meta.get("ServerSideEncryption"), None)
        # Store new metadata. Callables receive the object name.
        with self.settings(
                AWS_S3_BUCKET_AUTH=False,
                AWS_S3_MAX_AGE_SECONDS=9999,
                AWS_S3_CONTENT_DISPOSITION=lambda name:
                    "attachment; filename={}".format(name),
                AWS_S3_CONTENT_LANGUAGE="eo",
                AWS_S3_METADATA={
                    "foo": "bar",
                    "baz": lambda name: name,
                },
                AWS_S3_REDUCED_REDUNDANCY=True,
                AWS_S3_ENCRYPT_KEY=True,
        ):
            default_storage.sync_meta()
            # Check metadata changed: public caching, per-name
            # disposition/metadata, reduced redundancy, SSE enabled.
            meta = default_storage.meta("foo/bar.txt")
            self.assertEqual(meta["CacheControl"], "public,max-age=9999")
            self.assertEqual(meta["ContentType"], "text/plain")
            self.assertEqual(meta["ContentEncoding"], "gzip")
            self.assertEqual(meta.get("ContentDisposition"),
                             "attachment; filename=foo/bar.txt")
            self.assertEqual(meta.get("ContentLanguage"), "eo")
            self.assertEqual(meta.get("Metadata"), {
                "foo": "bar",
                "baz": "foo/bar.txt",
            })
            self.assertEqual(meta["StorageClass"], "REDUCED_REDUNDANCY")
            self.assertEqual(meta["ServerSideEncryption"], "AES256")
            # Check ACL changed to public by removing the query string:
            # the unauthenticated URL should now be readable.
            url_unauthenticated = urlunsplit(
                urlsplit(default_storage.url("foo/bar.txt"))[:3] + (
                    "",
                    "",
                ))
            response = requests.get(url_unauthenticated)
            self.assertEqual(response.status_code, 200)
            self.assertEqual(response.content, b"foo" * 1000)
def fromUrl(url, path):
    """Download a file from http/s url and place the contents in a file.

    Arguments:
        url {string} -- the url of the file
        path {string} -- the storage path; the extension derived from the
            response's content-type is appended

    Returns:
        string -- the URL of the stored file
    """
    response = get(url)
    ftype = response.headers['content-type']
    # mimetypes.types_map_inv is not a module-level attribute (it lives on
    # MimeTypes instances), so the old lookup raised AttributeError.
    # guess_extension() is the supported API; fall back to no extension
    # for unknown content types.
    ext = mimetypes.guess_extension(ftype) or ''
    file_name = default_storage.save(path + ext,
                                     ContentFile(response.content))
    return default_storage.url(file_name)
def image_upload(request):
    """Froala editor image upload endpoint.

    Only image content types are accepted: non-images get a JSON error,
    stored images get ``{'link': <url>}``. When no 'file' is posted the
    function falls through (returns None), as before.
    """
    if 'file' not in request.FILES:
        return None
    the_file = request.FILES['file']
    allowed_types = ['image/jpeg', 'image/jpg', 'image/pjpeg',
                     'image/x-png', 'image/png', 'image/gif']
    if the_file.content_type not in allowed_types:
        payload = json.dumps({'error': _('You can only upload images.')})
        return HttpResponse(payload, content_type="application/json")
    # request.FILES entries also expose filesize / content-type metadata.
    upload_to = getattr(settings, 'FROALA_UPLOAD_PATH',
                        'uploads/froala_editor/images/')
    stored = default_storage.save(os.path.join(upload_to, the_file.name),
                                  the_file)
    link = default_storage.url(stored)
    return HttpResponse(json.dumps({'link': link}),
                        content_type="application/json")
def _save(self, image, file_name):
    """
    Final saving step, called internally once the image has been processed.

    Stores the image on disk under a universally unique name and returns a
    namedtuple of (url, content_type, file_name).
    """
    stored_name = self.get_unique_file_name(file_name)
    full_path = path.join(MARKDOWNX_MEDIA_PATH, stored_name)
    default_storage.save(full_path, image)
    result = namedtuple('image_data', ['url', 'content_type', 'file_name'])
    return result(
        url=default_storage.url(full_path),
        content_type=image.content_type,
        file_name=file_name,
    )
def test_template_media_file(self):
    """Media URLs rendered through templates point at the private container
    with query-string auth, while static files resolve publicly."""
    template = Template('{{ file_url }}')
    rendered = template.render(
        Context({'file_url': default_storage.url('foo.txt')})).strip()
    self.assertTrue(
        "https://127.0.0.1:10000/devstoreaccount1/test_private/foo.txt?"
        in rendered)
    self.assertTrue("&" in rendered)
    # check static files still work
    template = Template('{% load static from staticfiles %}'
                        '{% static "foo.txt" %}')
    self.assertEqual(
        template.render(Context({})).strip(),
        "https://127.0.0.1:10000/devstoreaccount1/test/foo.txt")