def run(self, file_name, source, target, **kwargs):
    """Copy ``file_name`` from the ``source`` storage backend to ``target``.

    A cache-based lock keyed on the file name ensures only one worker
    copies a given file at a time; returns silently when another worker
    holds the lock or the file already exists on the target.
    """
    if not file_name:
        return
    logger = self.get_logger(**kwargs)
    source_storage = get_storage_class(source)()
    target_storage = get_storage_class(target)()
    lock_id = "%s-lock-%s" % (self.name, hash(file_name))

    # Named functions instead of lambda assignments (PEP 8 E731).
    def is_locked():
        return str(cache.get(lock_id)) == "true"

    def acquire_lock():
        cache.set(lock_id, "true", LOCK_EXPIRE)

    def release_lock():
        # memcache delete is very slow, so we'd rather set a false value
        # with a very low expiry time.
        cache.set(lock_id, "nil", 1)

    logger.debug("Copying %s source:%s target:%s" % (file_name, source, target))
    if is_locked():
        logger.debug("%s is already being copied by another worker" % file_name)
        return
    acquire_lock()
    try:
        # file_name is known truthy here (checked at entry), so the
        # original redundant re-check is dropped.
        if not target_storage.exists(file_name):
            target_storage.save(file_name, source_storage.open(file_name))
    finally:
        release_lock()
def test_get_nonexisting_storage_module(self):
    """
    get_storage_class raises an error if the requested module don't exist.
    """
    # Error message may or may not be the fully qualified path.
    expected_message = (
        "No module named '?(django.core.files.)?non_existing_storage'?")
    with six.assertRaisesRegex(self, ImportError, expected_message):
        get_storage_class(
            'django.core.files.non_existing_storage.NonExistingStorage')
def test_get_invalid_storage_module(self):
    """
    get_storage_class raises an error if the requested import don't exist.
    """
    expected_message = (
        "Error importing module storage: \"No module named '?storage'?\"")
    with six.assertRaisesRegex(self, ImproperlyConfigured, expected_message):
        get_storage_class('storage.NonExistingStorage')
def image_url(attachment, suffix):
    '''Return url of an image given size(@param suffix) e.g large, medium,
    small, or generate required thumbnail
    '''
    url = attachment.media_file.url
    # The original media URL is served as-is.
    if suffix == 'original':
        return url
    default_storage = get_storage_class()()
    fs = get_storage_class('django.core.files.storage.FileSystemStorage')()
    if suffix in settings.THUMB_CONF:
        size = settings.THUMB_CONF[suffix]['suffix']
        filename = attachment.media_file.name
        if not default_storage.exists(filename):
            # Source image is gone; nothing to serve.
            return None
        thumb_path = get_path(filename, size)
        if default_storage.exists(thumb_path) and \
                default_storage.size(thumb_path) > 0:
            url = default_storage.url(thumb_path)
        else:
            # Thumbnail missing or empty: (re)generate it, then retry.
            if default_storage.__class__ != fs.__class__:
                resize(filename, extension=attachment.extension)
            else:
                resize_local_env(filename, extension=attachment.extension)
            return image_url(attachment, suffix)
    return url
def handle(self, *args, **kwargs): try: fs = get_storage_class('django.core.files.storage.FileSystemStorage')() s3 = get_storage_class('storages.backends.s3boto.S3BotoStorage')() except: print _(u"Missing necessary libraries. Try running: pip install -r requirements-s3.pip") sys.exit(1) default_storage = get_storage_class()() if default_storage.__class__ != s3.__class__: print _(u"You must first set your default storage to s3 in your local_settings.py file.") sys.exit(1) classes_to_move = [ (Attachment, 'media_file', attachment_upload_to), (XForm, 'xls', xform_upload_to), ] for cls, file_field, upload_to in classes_to_move: print "Moving %ss to s3..." % cls.__name__ for i in cls.objects.all(): f = getattr(i, file_field) old_filename = f.name if f.name and fs.exists(f.name) and not s3.exists(upload_to(i, f.name)): f.save(fs.path(f.name), fs.open(fs.path(f.name))) print "\t+ '%s'\n\t---> '%s'" % (fs.path(old_filename), f.url) else: print "\t- (f.name=%s, fs.exists(f.name)=%s, not s3.exists(upload_to(i, f.name))=%s)" \ % (f.name, fs.exists(f.name), not s3.exists(upload_to(i, f.name)))
def __init__(self, *args, **kwargs):
    """Prepare local (STATIC_ROOT) and remote (S3) storages for syncing."""
    # NOTE(review): super() is called with NoArgsCommand — presumably the
    # parent class of this command; confirm against the enclosing class.
    super(NoArgsCommand, self).__init__(*args, **kwargs)
    self.uploaded_files = []
    self.skipped_files = []
    static_root = getattr(settings, 'STATIC_ROOT', None)
    static_url = getattr(settings, 'STATIC_URL', None)
    self.local_storage = get_storage_class(
        'django.core.files.storage.FileSystemStorage')(
            location=static_root, base_url=static_url)
    self.remote_storage = get_storage_class(
        'storages.backends.s3boto.S3BotoStorage')()
def __init__(self, local, remote,
             cache_prefix=QUEUED_REMOTE_STORAGE_CACHE_KEY_PREFIX,
             task=None):
    """Pair a local and a remote storage backend behind one interface."""
    self.local_class = local
    self.remote_class = remote
    self.local = get_storage_class(self.local_class)()
    self.remote = get_storage_class(self.remote_class)()
    self.cache_prefix = cache_prefix
    # allow users to override the task that uploads the image to the
    # remote server
    self.task = task or SaveToRemoteTask
def get_storage():
    """Return the module-level storage instance, creating it lazily."""
    global STORAGE
    if not STORAGE:
        # Prefer the dedicated image-cache backend when configured.
        if IMAGE_CACHE_STORAGE:
            storage_cls = get_storage_class(IMAGE_CACHE_STORAGE)
        else:
            storage_cls = get_storage_class()
        STORAGE = storage_cls()
    return STORAGE
def test_get_nonexisting_storage_module(self):
    """
    get_storage_class raises an error if the requested module don't exist.
    """
    # Error message may or may not be the fully qualified path.
    expected_message = (
        "Error importing module django.core.files.non_existing_storage: "
        "\"No module named '?(django.core.files.)?non_existing_storage'?\""
    )
    with six.assertRaisesRegex(self, ImproperlyConfigured, expected_message):
        get_storage_class(
            "django.core.files.non_existing_storage.NonExistingStorage")
def get_video_transcript_storage():
    """
    Return the configured django storage backend for video transcripts.
    """
    # during edx-platform loading this method gets called but settings are
    # not ready yet so in that case we will return
    # default(FileSystemStorage) storage class instance
    if not hasattr(settings, 'VIDEO_TRANSCRIPTS_SETTINGS'):
        return get_storage_class()()
    transcript_conf = settings.VIDEO_TRANSCRIPTS_SETTINGS
    storage_cls = get_storage_class(transcript_conf.get('STORAGE_CLASS'))
    return storage_cls(**transcript_conf.get('STORAGE_KWARGS', {}))
def handle(self, *args, **kwargs):
    """Regenerate attachment image thumbnails.

    Optionally filters by ``username`` / ``id_string``; ``force`` deletes
    any existing small/medium/large thumbnails first.
    """
    attachments_qs = Attachment.objects.select_related(
        'instance', 'instance__xform')
    if kwargs.get('username'):
        username = kwargs.get('username')
        try:
            user = User.objects.get(username=username)
        except User.DoesNotExist:
            raise CommandError(
                "Error: username %(username)s does not exist"
                % {'username': username}
            )
        attachments_qs = attachments_qs.filter(instance__user=user)
    if kwargs.get('id_string'):
        id_string = kwargs.get('id_string')
        try:
            xform = XForm.objects.get(id_string=id_string)
        except XForm.DoesNotExist:
            raise CommandError(
                "Error: Form with id_string %(id_string)s does not exist"
                % {'id_string': id_string}
            )
        attachments_qs = attachments_qs.filter(instance__xform=xform)
    fs = get_storage_class('django.core.files.storage.FileSystemStorage')()
    for att in queryset_iterator(attachments_qs):
        filename = att.media_file.name
        default_storage = get_storage_class()()
        full_path = get_path(filename,
                             settings.THUMB_CONF['small']['suffix'])
        if kwargs.get('force') is not None:
            # Force mode: remove any existing thumbnails so they are
            # regenerated below.
            for s in ['small', 'medium', 'large']:
                fp = get_path(filename, settings.THUMB_CONF[s]['suffix'])
                if default_storage.exists(fp):
                    default_storage.delete(fp)
        if not default_storage.exists(full_path):
            try:
                # Non-filesystem (e.g. remote) backends use resize();
                # the local filesystem has its own path.
                if default_storage.__class__ != fs.__class__:
                    resize(filename)
                else:
                    resize_local_env(filename)
                # Success is judged by the small thumbnail existing.
                if default_storage.exists(get_path(
                        filename,
                        '%s' % settings.THUMB_CONF['small']['suffix'])):
                    print (_(u'Thumbnails created for %(file)s')
                           % {'file': filename})
                else:
                    print (_(u'Problem with the file %(file)s')
                           % {'file': filename})
            except (IOError, OSError), e:
                print _(u'Error on %(filename)s: %(error)s') \
                    % {'filename': filename, 'error': e}
def _save_thumbnails(image, path, size, suffix, filename=None):
    """Resize ``image`` in place and persist the thumbnail.

    When ``filename`` is given the thumbnail written locally under
    ``path`` is additionally copied to the default storage (e.g. s3)
    under ``filename``.
    """
    image.thumbnail(get_dimensions(image.size, size), Image.ANTIALIAS)
    image.save(get_path(path, suffix))
    # If filename is present, resize on s3 fs
    if filename:
        default_storage = get_storage_class()()
        fs = get_storage_class(
            'django.core.files.storage.FileSystemStorage')()
        default_storage.save(get_path(filename, suffix),
                             fs.open(get_path(path, suffix)))
def run(self, name, local, remote, cache_key, **kwargs):
    """Upload file ``name`` from the local storage to the remote one.

    Sets ``cache_key`` to True on success and returns True; on failure
    schedules a retry and returns False.
    """
    local_storage = get_storage_class(local)()
    remote_storage = get_storage_class(remote)()
    try:
        remote_storage.save(name, local_storage.open(name))
    except Exception:
        # Was a bare ``except:``, which also swallowed SystemExit and
        # KeyboardInterrupt; retry only on genuine errors.
        # something went wrong while uploading the file, retry
        self.retry([name, local, remote, cache_key], **kwargs)
        return False
    cache.set(cache_key, True)
    return True
def resize(filename):
    """Generate every configured thumbnail for ``filename``.

    Downloads the image from the default (remote) storage URL, then
    writes one thumbnail per entry in settings.THUMB_CONF.
    """
    default_storage = get_storage_class()()
    path = default_storage.url(filename)
    img_file = urllib.urlopen(path)
    try:
        im = StringIO(img_file.read())
    finally:
        # Close the handle even when the read raises (previously leaked).
        img_file.close()
    image = Image.open(im)
    conf = settings.THUMB_CONF
    fs = get_storage_class('django.core.files.storage.FileSystemStorage')()
    loc_path = fs.path(filename)
    # A list comprehension was used purely for side effects; a plain loop
    # states the intent.
    for key in conf.keys():
        _save_thumbnails(image, loc_path, conf[key]['size'],
                         conf[key]['suffix'], filename=filename)
def resize(filename):
    """Generate thumbnails for ``filename`` via a local scratch file.

    Fetches the image over HTTP from the default storage URL; silently
    does nothing on a non-200 response (preserved behaviour).
    """
    default_storage = get_storage_class()()
    path = default_storage.url(filename)
    req = requests.get(path)
    if req.status_code == 200:
        im = StringIO(req.content)
        image = Image.open(im)
        conf = settings.THUMB_CONF
        fs = get_storage_class(
            'django.core.files.storage.FileSystemStorage')()
        if not os.path.exists(os.path.abspath(settings.MEDIA_ROOT)):
            os.makedirs(os.path.abspath(settings.MEDIA_ROOT))
        loc_path = fs.path('dummy.%s' % settings.IMG_FILE_TYPE)
        # Plain loop instead of a side-effect list comprehension.
        for key in settings.THUMB_ORDER:
            _save_thumbnails(image, loc_path, conf[key]['size'],
                             conf[key]['suffix'], filename=filename)
def test_get_filesystem_storage(self):
    """
    get_storage_class returns the class for a storage backend name/path.
    """
    storage_cls = get_storage_class(
        'django.core.files.storage.FileSystemStorage')
    self.assertEqual(storage_cls, FileSystemStorage)
def _get_csv_data(self, filepath):
    """Return the first data row of the CSV at ``filepath`` as a dict."""
    storage = get_storage_class()()
    csv_file = storage.open(filepath)
    try:
        reader = csv.DictReader(csv_file)
        data = reader.next()
    finally:
        # Previously the handle leaked when reading raised.
        csv_file.close()
    return data
def response_with_mimetype_and_name(
        mimetype, name, extension=None, show_date=True, file_path=None,
        use_local_filesystem=False, full_mime=False):
    """Build an HttpResponse serving ``file_path`` (or an empty body)
    with a content-disposition filename.

    ``mimetype`` is wrapped as ``application/<mimetype>`` unless
    ``full_mime`` is set; a missing file yields a 404 response.
    """
    if extension is None:
        extension = mimetype
    if not full_mime:
        mimetype = "application/%s" % mimetype
    if file_path:
        try:
            if not use_local_filesystem:
                default_storage = get_storage_class()()
                wrapper = FileWrapper(default_storage.open(file_path))
                response = HttpResponse(wrapper, mimetype=mimetype)
                response['Content-Length'] = default_storage.size(file_path)
            else:
                # ``open`` replaces the deprecated py2-only ``file``
                # builtin; default mode is unchanged.
                wrapper = FileWrapper(open(file_path))
                response = HttpResponse(wrapper, mimetype=mimetype)
                response['Content-Length'] = os.path.getsize(file_path)
        except IOError:
            response = HttpResponseNotFound(
                _(u"The requested file could not be found."))
    else:
        response = HttpResponse(mimetype=mimetype)
    response['Content-Disposition'] = disposition_ext_and_date(
        name, extension, show_date)
    return response
def create_website_qrcode(self):
    """Render ``self.website`` as a QR code PNG and save it to storage."""
    storage = get_storage_class(settings.DEFAULT_FILE_STORAGE)()
    # NOTE: 'webite' typo kept — it is part of the stored filename and
    # existing objects may reference it.
    img_name = '%s-webite-qrcode.png' % self.id
    img_file = BytesIO()
    img = qrcode.make(self.website, image_factory=PymagingImage)
    img.save(img_file)
    # Rewind the buffer: after save() the position is at EOF and storage
    # backends read from the current position, so an empty file would be
    # stored otherwise.
    img_file.seek(0)
    storage.save(img_name, img_file)
def download_metadata(request, username, id_string, data_id):
    """Serve a MetaData document attached to a form, with audit logging.

    Allowed for the form owner or, when the form is shared, any user;
    otherwise returns 403. Missing files return 404.
    """
    xform = get_object_or_404(XForm, user__username=username,
                              id_string=id_string)
    owner = xform.user
    if username == request.user.username or xform.shared:
        data = get_object_or_404(MetaData, pk=data_id)
        file_path = data.data_file.name
        # Split "dir/name.ext" into name and bare extension.
        filename, extension = os.path.splitext(file_path.split('/')[-1])
        extension = extension.strip('.')
        dfs = get_storage_class()()
        if dfs.exists(file_path):
            audit = {
                'xform': xform.id_string
            }
            audit_log(
                Actions.FORM_UPDATED, request.user, owner,
                _("Document '%(filename)s' for '%(id_string)s' downloaded.")
                % {
                    'id_string': xform.id_string,
                    'filename': "%s.%s" % (filename, extension)
                }, audit, request)
            response = response_with_mimetype_and_name(
                data.data_file_type, filename, extension=extension,
                show_date=False, file_path=file_path)
            return response
        else:
            return HttpResponseNotFound()
    return HttpResponseForbidden(_(u'Permission denied.'))
def update(self, uploaded_file): """uploaded_file is an instance of django UploadedFile object """ #0) initialize file storage file_storage_class = storage.get_storage_class() storage_settings = {} if django_settings.DEFAULT_FILE_STORAGE == \ 'django.core.files.storage.FileSystemStorage': storage_settings = { 'location': self.upload_directory, 'base_url': self.upload_url } file_storage = file_storage_class(**storage_settings) #1) come up with a file name #todo: need better function here to calc name file_name = file_storage.get_available_name(uploaded_file.name) file_storage.save(file_name, uploaded_file) url = file_storage.url(file_name) old_file = self.value old_file = old_file.replace(self.upload_url, '', 1) old_file_path = os.path.join(self.upload_directory, old_file) if os.path.isfile(old_file_path): os.unlink(old_file_path) #saved file path is relative to the upload_directory #so that things could be easily relocated super(ImageValue, self).update(url)
def handle(self, *args, **kwargs):
    """Apply an S3 canned ACL to every object in the configured bucket.

    The single positional argument must be one of 'private',
    'public-read' or 'authenticated-read'.
    """
    permissions = ('private', 'public-read', 'authenticated-read')
    if len(args) < 1:
        raise CommandError(_("Missing permission argument"))
    permission = args[0]
    if permission not in permissions:
        raise CommandError(_(
            "Expected %s as permission") % ' or '.join(permissions))
    try:
        s3 = get_storage_class('storages.backends.s3boto.S3BotoStorage')()
    except Exception:
        self.stderr.write(_(
            u"Missing necessary libraries. Try running: pip install "
            "-r requirements-s3.pip"))
        sys.exit(1)
    else:
        all_files = s3.bucket.list()
        # Track the count explicitly: the original reused the loop index
        # after the loop, which raised NameError on an empty bucket and
        # under-reported the total by one.
        num_processed = 0
        for i, f in enumerate(all_files):
            f.set_acl(permission)
            if i % 1000 == 0:
                self.stdout.write(_(
                    "%s file objects processed" % i))
            num_processed = i + 1
        self.stdout.write(_(
            "A total of %s file objects processed" % num_processed))
def _get_existing_thumb(self):
    """Return the stored thumbnail path, or None if it does not exist."""
    path = get_image_path(self, '.png')
    storage = get_storage_class()()
    if storage.exists(path):
        return path
    return None
def download_xlsform(request, username, id_string):
    """Serve the XLS source file for a form, with audit logging.

    Requires view permission on the form; redirects with a warning
    message when the form has no stored XLS file.
    """
    xform = get_object_or_404(XForm, user__username=username,
                              id_string=id_string)
    owner = User.objects.get(username=username)
    helper_auth_helper(request)
    if not has_permission(xform, owner, request, xform.shared):
        return HttpResponseForbidden('Not shared.')
    file_path = xform.xls.name
    default_storage = get_storage_class()()
    if default_storage.exists(file_path):
        audit = {
            "xform": xform.id_string
        }
        audit_log(
            Actions.FORM_XLS_DOWNLOADED, request.user, xform.user,
            _("Downloaded XLS file for form '%(id_string)s'.") % {
                "id_string": xform.id_string
            }, audit, request)
        # Derive the extension from the stored name, defaulting to 'xls'
        # when the name has no extension separator.
        split_path = file_path.split(os.extsep)
        extension = 'xls'
        if len(split_path) > 1:
            extension = split_path[len(split_path) - 1]
        response = response_with_mimetype_and_name(
            'vnd.ms-excel', id_string, show_date=False,
            extension=extension, file_path=file_path)
        return response
    else:
        messages.add_message(request, messages.WARNING,
                             _(u'No XLS file for your form '
                               u'<strong>%(id)s</strong>')
                             % {'id': id_string})
        return HttpResponseRedirect("/%s" % username)
def fragment_view(self):
    """
    Returns the view that will be used to render the fragment.
    """
    # Instantiate lazily on first access and cache on the instance.
    if not self._fragment_view:
        view_cls = get_storage_class(self.fragment_view_name)
        self._fragment_view = view_cls()
    return self._fragment_view
def setUp(self):
    """Build a wrapped-storage mock, backend instance and test context."""
    storage_cls = get_storage_class()
    self.storage_mock = Mock(wraps=storage_cls)()
    self.storage_mock.save = Mock()
    self.backend = self.template_backend_klass()
    self.context = {
        'username': '******',
        'joindate': date(2016, 8, 22),
        'full_name': 'Foo Bar',
    }
def download_excel_analyser(request, username, form_id_string):
    """Generate and stream an "Excel Analyser" workbook for a form.

    Combines the form's XLSForm with its most recent XLS data export;
    requires view permission and an existing XLS export (404 otherwise).
    """
    xform = get_object_or_404(XForm, user__username__iexact=username,
                              id_string__exact=form_id_string)
    owner = User.objects.get(username__iexact=username)
    helper_auth_helper(request)
    if not has_permission(xform, owner, request, xform.shared):
        return HttpResponseForbidden('Not shared.')
    # Get the XLSForm.
    xlsform_io = _get_xlsform(request, username, form_id_string)
    # FIXME: Really don't like this overloading...
    # _get_xlsform returns an HttpResponse on failure instead of raising.
    if isinstance(xlsform_io, HttpResponse):
        return xlsform_io
    # Get the data.
    data_export = Export.objects.filter(
        xform=xform,
        export_type=Export.XLS_EXPORT).order_by('-created_on').first()
    if not data_export:
        raise Http404('Please generate an XLS export of your data before generating an Excel Analyser copy.')
    analyser_filename = os.path.splitext(
        data_export.filename)[0] + '_EXCEL_ANALYSER.xlsx'
    with get_storage_class()().open(data_export.filepath) as data_file_xlsx:
        analyser_io = generate_analyser(xlsform_io, data_file_xlsx)
    response = StreamingHttpResponse(
        FileWrapper(analyser_io),
        content_type='application/vnd.ms-excel; charset=utf-8')
    response['Content-Disposition'] = 'attachment; filename={}'.format(
        analyser_filename)
    return response
def get_underlying_storage():
    """Return the real storage class named by IMPOSTORAGE_BACKEND."""
    if not hasattr(settings, 'IMPOSTORAGE_BACKEND'):
        raise ImproperlyConfigured(
            "To use `impostorage` as your file storage backend, you must "
            "set `IMPOSTORAGE_BACKEND` to the dotted path to the real "
            "storage backend you use.")
    return get_storage_class(import_path=settings.IMPOSTORAGE_BACKEND)
def _get_xlsform(request, username, form_id_string):
    """Load a form's XLSForm as a BytesIO, converting CSV sources to XLS.

    On permission failure or a missing file an HttpResponse is returned
    instead (callers must check the type); unexpected errors yield a 500.
    """
    xform = get_object_or_404(XForm, user__username__iexact=username,
                              id_string__exact=form_id_string)
    owner = User.objects.get(username__iexact=username)
    helper_auth_helper(request)
    if not has_permission(xform, owner, request, xform.shared):
        # FIXME: Is there not a 403 exception equivalent to `Http404`?
        return HttpResponseForbidden('Not shared.')
    file_path = xform.xls.name
    default_storage = get_storage_class()()
    try:
        if file_path != '' and default_storage.exists(file_path):
            with default_storage.open(file_path) as xlsform_file:
                if file_path.endswith('.csv'):
                    xlsform_io = convert_csv_to_xls(xlsform_file.read())
                else:
                    xlsform_io = io.BytesIO(xlsform_file.read())
            return xlsform_io
        else:
            messages.add_message(request, messages.WARNING,
                                 _(u'No XLS file for your form '
                                   u'<strong>%(id)s</strong>')
                                 % {'id': form_id_string})
            return HttpResponseRedirect("/%s" % username)
    except Exception:
        # Was a bare ``except:``, which also intercepted SystemExit and
        # KeyboardInterrupt; kept as a deliberate catch-all for errors.
        return HttpResponseServerError('Error retrieving XLSForm.')
def tearDown(self):
    """Disconnect the signal handler and restore thumbnail storage."""
    signals.saved_file.disconnect(self.get_signal_handler(), sender=Profile)
    super(GenerationBase, self).tearDown()
    # Revert the thumbnail storage location.
    storage_cls = get_storage_class(settings.THUMBNAIL_DEFAULT_STORAGE)
    files.DEFAULT_THUMBNAIL_STORAGE = storage_cls()
def _setup(self):
    # Lazily instantiate the storage configured for distributions.
    storage_cls = get_storage_class(settings.LOCALSHOP_DISTRIBUTION_STORAGE)
    self._wrapped = storage_cls()
def __init__(self):
    """Attach the default storage backend, then delegate upwards."""
    self.storage = get_storage_class()()
    super(MediaStorageMixin, self).__init__()
raise ImproperlyConfigured("IMAGE_CACHE_ROOT not defined.") super(ImageCacheStorage, self).__init__(location, base_url, *args, **kwargs) def path(self, name): if not self.location: raise ImproperlyConfigured("IMAGE_CACHE_ROOT not defined.") return super(ImageCacheStorage, self).path(name) def save(self, name, content, max_length=None): super(ImageCacheStorage, self).save(name, ContentFile(content), max_length=max_length) def get_storage(): global STORAGE if STORAGE: return STORAGE if settings_IMAGE_CACHE_STORAGE: storage_class = get_storage_class(settings_IMAGE_CACHE_STORAGE) else: storage_class = get_storage_class() STORAGE = storage_class() return STORAGE IMAGE_CACHE_STORAGE = get_storage() MEDIA_STORAGE = get_storage_class()() STATIC_STORAGE = get_storage_class(STATICFILES_STORAGE)()
from django.contrib.contenttypes.models import ContentType from functools import reduce try: from django.contrib.contenttypes.fields import GenericForeignKey except ImportError: from django.contrib.contenttypes.generic import GenericForeignKey from django.core.files.storage import get_storage_class from django.utils.translation import ugettext_lazy as _ from .settings import (RELATION_MODELS, RELATIONS, THUMBNAIL_UPLOAD_PATH, THUMBNAIL_STORAGE) from .base import CategoryBase STORAGE = get_storage_class(THUMBNAIL_STORAGE) class Category(CategoryBase): thumbnail = models.FileField( upload_to=THUMBNAIL_UPLOAD_PATH, null=True, blank=True, storage=STORAGE(), ) thumbnail_width = models.IntegerField(blank=True, null=True) thumbnail_height = models.IntegerField(blank=True, null=True) order = models.IntegerField(default=0) alternate_title = models.CharField( blank=True, default="",
import zipfile from io import BytesIO import numpy as np from django.conf import settings from django.core.exceptions import ValidationError from django.core.files.storage import get_storage_class from django.db.models import IntegerField from django.template.defaultfilters import filesizeformat from django.utils.translation import gettext_lazy as _ from src.apps.games.models.abstract_game import AbstractGame from src.contrib.validators import FileValidator from src.contrib.variable_storage_file_field import VariableStorageFileField npz_filestorage_class = get_storage_class(settings.NPZ_FILE_STORAGE) validate_zip = FileValidator(max_size=1024 * 500, content_types=["application/zip"]) def validate_game_npzdata(training_data_file, run): npz_file = training_data_file max_size = 1024 * 500 max_unzipped_size = 1024 * 1024 * 20 data_board_len = run.data_board_len if npz_file.size > max_size: params = { "max_size": filesizeformat(max_size),
def setUp(self):
    """Swap default storage for the gzip compressor storage in tests."""
    # Remember the original so tearDown can restore it.
    self._storage = base.default_storage
    gzip_cls = get_storage_class(
        'compressor.storage.GzipCompressorFileStorage')
    base.default_storage = gzip_cls()
    settings.COMPRESS_ENABLED = True
def _setup(self):
    # Fall back to DEFAULT_FILE_STORAGE when AVATAR_FILE_STORAGE is unset.
    backend_path = getattr(settings, 'AVATAR_FILE_STORAGE',
                           settings.DEFAULT_FILE_STORAGE)
    self._wrapped = get_storage_class(backend_path)()
def generate_export(export_type, extension, username, id_string,
                    export_id=None, filter_query=None, group_delimiter='/',
                    split_select_multiples=True,
                    binary_select_multiples=False):
    """
    Create appropriate export object given the export type
    """
    export_type_func_map = {
        Export.XLS_EXPORT: 'to_xls_export',
        Export.CSV_EXPORT: 'to_flat_csv_export',
        Export.CSV_ZIP_EXPORT: 'to_zipped_csv',
        Export.SAV_ZIP_EXPORT: 'to_zipped_sav',
    }
    xform = XForm.objects.get(user__username__iexact=username,
                              id_string__exact=id_string)
    # query mongo for the cursor
    records = query_mongo(username, id_string, filter_query)
    export_builder = ExportBuilder()
    export_builder.GROUP_DELIMITER = group_delimiter
    export_builder.SPLIT_SELECT_MULTIPLES = split_select_multiples
    export_builder.BINARY_SELECT_MULTIPLES = binary_select_multiples
    export_builder.set_survey(xform.data_dictionary().survey)
    prefix = slugify('{}_export__{}__{}'.format(
        export_type, username, id_string))
    temp_file = NamedTemporaryFile(prefix=prefix, suffix=("." + extension))
    # get the export function by export type; call it directly rather
    # than through the redundant ``func.__call__``
    func = getattr(export_builder, export_type_func_map[export_type])
    func(temp_file.name, records, username, id_string, filter_query)
    # generate filename
    basename = "%s_%s" % (
        id_string, datetime.now().strftime("%Y_%m_%d_%H_%M_%S"))
    filename = basename + "." + extension
    # check filename is unique
    while not Export.is_filename_unique(xform, filename):
        filename = increment_index_in_filename(filename)
    file_path = os.path.join(username, 'exports', id_string, export_type,
                             filename)
    # TODO: if s3 storage, make private - how will we protect local
    # storage??
    storage = get_storage_class()()
    # seek to the beginning as required by storage classes
    temp_file.seek(0)
    export_filename = storage.save(file_path, File(temp_file, file_path))
    temp_file.close()
    dir_name, basename = os.path.split(export_filename)
    # get or create export object
    if export_id:
        export = Export.objects.get(id=export_id)
    else:
        export = Export(xform=xform, export_type=export_type)
    export.filedir = dir_name
    export.filename = basename
    export.internal_status = Export.SUCCESSFUL
    # dont persist exports that have a filter
    if filter_query is None:
        export.save()
    return export
def generate_export(export_type, extension, username, id_string,
                    export_id=None, filter_query=None, group_delimiter='/',
                    split_select_multiples=True,
                    binary_select_multiples=False, start=None, end=None,
                    remove_group_name=False):
    """
    Create appropriate export object given the export type
    """
    # TODO resolve circular import
    from onadata.apps.viewer.models.export import Export
    export_type_func_map = {
        Export.XLS_EXPORT: 'to_xls_export',
        Export.CSV_EXPORT: 'to_flat_csv_export',
        Export.CSV_ZIP_EXPORT: 'to_zipped_csv',
        Export.SAV_ZIP_EXPORT: 'to_zipped_sav',
    }
    xform = XForm.objects.get(user__username__iexact=username,
                              id_string__iexact=id_string)
    records = ParsedInstance.query_data(xform, query=filter_query,
                                        start=start, end=end)
    export_builder = ExportBuilder()
    export_builder.TRUNCATE_GROUP_TITLE = remove_group_name
    export_builder.GROUP_DELIMITER = group_delimiter
    export_builder.SPLIT_SELECT_MULTIPLES = split_select_multiples
    export_builder.BINARY_SELECT_MULTIPLES = binary_select_multiples
    export_builder.set_survey(xform.data_dictionary().survey)
    temp_file = NamedTemporaryFile(suffix=("." + extension))
    # get the export function by export type; call it directly rather
    # than through the redundant ``func.__call__``
    func = getattr(export_builder, export_type_func_map[export_type])
    try:
        func(temp_file.name, records, username, id_string, filter_query,
             start=start, end=end)
    except NoRecordsFoundError:
        pass
    # generate filename
    basename = "%s_%s" % (
        id_string, datetime.now().strftime("%Y_%m_%d_%H_%M_%S"))
    filename = basename + "." + extension
    # check filename is unique
    while not Export.is_filename_unique(xform, filename):
        filename = increment_index_in_filename(filename)
    file_path = os.path.join(username, 'exports', id_string, export_type,
                             filename)
    # TODO: if s3 storage, make private - how will we protect local
    # storage??
    storage = get_storage_class()()
    # seek to the beginning as required by storage classes
    temp_file.seek(0)
    export_filename = storage.save(file_path, File(temp_file, file_path))
    temp_file.close()
    dir_name, basename = os.path.split(export_filename)
    # get or create export object
    if export_id:
        export = Export.objects.get(id=export_id)
    else:
        export = Export(xform=xform, export_type=export_type)
    export.filedir = dir_name
    export.filename = basename
    export.internal_status = Export.SUCCESSFUL
    # dont persist exports that have a filter
    if filter_query is None and start is None and end is None:
        export.save()
    return export
def _setup(self):
    # Lazily create the brotli compressor file storage.
    storage_cls = get_storage_class(
        'compressor.storage.BrotliCompressorFileStorage')
    self._wrapped = storage_cls()
def __init__(self, *args, **kwargs):
    """Initialise S3 storage plus a local compressor file storage."""
    super(CachedS3BotoStorage, self).__init__(*args, **kwargs)
    local_cls = get_storage_class('compressor.storage.CompressorFileStorage')
    self.local_storage = local_cls()
def __init__(self, *args, **kwargs):
    """Initialise S3 storage plus a local filesystem storage mirror."""
    super(CachedS3BotoStorage, self).__init__(*args, **kwargs)
    local_cls = get_storage_class(
        "django.core.files.storage.FileSystemStorage")
    self.local_storage = local_cls()
from django.core.files.base import File, ContentFile from django.core.files.storage import get_storage_class, default_storage, \ Storage from django.db.models.fields.files import ImageFieldFile, FieldFile from django.utils.html import escape from django.utils.safestring import mark_safe from easy_thumbnails import engine, models, utils import datetime import os from django.utils.http import urlquote DEFAULT_THUMBNAIL_STORAGE = get_storage_class( utils.get_setting('DEFAULT_STORAGE'))() def get_thumbnailer(object, relative_name=None): """ Get a :class:`Thumbnailer` for a source file. The ``object`` argument is usually either one of the following: * ``FieldFile`` instance (i.e. a model instance file/image field property). * ``File`` or ``Storage`` instance, and for both of these cases the ``relative_name`` argument must also be provided * A string, which will be used as the relative name (the source will be set to the default storage) For rarer needed cases, it can also be one of the following:
def get(self, request, proxito_path, template_name='404.html'):
    """
    Handler for 404 pages on subdomains.

    This does a couple things:

    * Handles directory indexing for URLs that don't end in a slash
    * Handles directory indexing for README.html (for now)
    * Handles custom 404 serving

    For 404's, first search for a 404 page in the current version, then
    continues with the default version and finally, if none of them are
    found, the Read the Docs default page (Maze Found) is rendered by
    Django and served.
    """
    # pylint: disable=too-many-locals
    log.info('Executing 404 handler. proxito_path=%s', proxito_path)
    # Parse the URL using the normal urlconf, so we get proper subdomain/translation data
    _, __, kwargs = url_resolve(
        proxito_path,
        urlconf='readthedocs.proxito.urls',
    )
    version_slug = kwargs.get('version_slug')
    version_slug = self.get_version_from_host(request, version_slug)
    final_project, lang_slug, version_slug, filename = _get_project_data_from_request(  # noqa
        request,
        project_slug=kwargs.get('project_slug'),
        subproject_slug=kwargs.get('subproject_slug'),
        lang_slug=kwargs.get('lang_slug'),
        version_slug=version_slug,
        filename=kwargs.get('filename', ''),
    )
    storage_root_path = final_project.get_storage_path(
        type_='html',
        version_slug=version_slug,
        include_file=False,
        version_type=self.version_type,
    )
    # Storage backend holding the built HTML for this project/version.
    storage = get_storage_class(settings.RTD_BUILD_MEDIA_STORAGE)()
    # First, check for dirhtml with slash
    for tryfile in ('index.html', 'README.html'):
        storage_filename_path = os.path.join(storage_root_path, filename,
                                             tryfile)
        log.debug(
            'Trying index filename: project=%s version=%s, file=%s',
            final_project.slug,
            version_slug,
            storage_filename_path,
        )
        if storage.exists(storage_filename_path):
            log.info(
                'Redirecting to index file: project=%s version=%s, storage_path=%s',
                final_project.slug,
                version_slug,
                storage_filename_path,
            )
            # Use urlparse so that we maintain GET args in our redirect
            parts = urlparse(proxito_path)
            if tryfile == 'README.html':
                new_path = os.path.join(parts.path, tryfile)
            else:
                new_path = parts.path.rstrip('/') + '/'
            new_parts = parts._replace(path=new_path)
            redirect_url = new_parts.geturl()
            # TODO: decide if we need to check for infinite redirect here
            # (from URL == to URL)
            return HttpResponseRedirect(redirect_url)
    # ``redirect_filename`` is the path without ``/<lang>/<version>`` and
    # without query, starting with a ``/``. This matches our old logic:
    # https://github.com/readthedocs/readthedocs.org/blob/4b09c7a0ab45cd894c3373f7f07bad7161e4b223/readthedocs/redirects/utils.py#L60
    # We parse ``filename`` to remove the query from it
    schema, netloc, path, params, query, fragments = urlparse(filename)
    redirect_filename = path
    # we can't check for lang and version here to decide if we need to add
    # the ``/`` or not because ``/install.html`` is a valid path to use as
    # redirect and does not include lang and version on it. It should be
    # fine always adding the ``/`` to the beginning.
    redirect_filename = '/' + redirect_filename.lstrip('/')
    # Check and perform redirects on 404 handler
    # NOTE: this redirect check must be done after trying files like
    # ``index.html`` and ``README.html`` to emulate the behavior we had when
    # serving directly from NGINX without passing through Python.
    redirect_path, http_status = self.get_redirect(
        project=final_project,
        lang_slug=lang_slug,
        version_slug=version_slug,
        filename=redirect_filename,
        full_path=proxito_path,
    )
    if redirect_path and http_status:
        try:
            return self.get_redirect_response(request, redirect_path,
                                              proxito_path, http_status)
        except InfiniteRedirectException:
            # Continue with our normal 404 handling in this case
            pass
    # If that doesn't work, attempt to serve the 404 of the current version (version_slug)
    # Secondly, try to serve the 404 page for the default version
    # (project.get_default_version())
    for version_slug_404 in [
            version_slug,
            final_project.get_default_version()
    ]:
        for tryfile in ('404.html', '404/index.html'):
            storage_root_path = final_project.get_storage_path(
                type_='html',
                version_slug=version_slug_404,
                include_file=False,
                version_type=self.version_type,
            )
            storage_filename_path = os.path.join(storage_root_path, tryfile)
            if storage.exists(storage_filename_path):
                log.info(
                    'Serving custom 404.html page: [project: %s] [version: %s]',
                    final_project.slug,
                    version_slug_404,
                )
                resp = HttpResponse(
                    storage.open(storage_filename_path).read())
                resp.status_code = 404
                return resp
    raise Http404('No custom 404 page found.')
def get_storage():
    """ Get the default storage """
    pdf_conf = settings.PDF_STORAGE_CLASS
    storage_cls = get_storage_class(pdf_conf['class'])
    return storage_cls(**pdf_conf['options'])
def get(
        self,
        request,
        project_slug=None,
        subproject_slug=None,
        lang_slug=None,
        version_slug=None,
        filename='',
):  # noqa
    """Take the incoming parsed URL's and figure out what file to serve."""
    # External versions encode the version in the host name; that takes
    # precedence over whatever came in via the URL path.
    version_slug = self.get_version_from_host(request, version_slug)
    final_project, lang_slug, version_slug, filename = _get_project_data_from_request(  # noqa
        request,
        project_slug=project_slug,
        subproject_slug=subproject_slug,
        lang_slug=lang_slug,
        version_slug=version_slug,
        filename=filename,
    )
    log.info(
        'Serving docs: project=%s, subproject=%s, lang_slug=%s, version_slug=%s, filename=%s',
        final_project.slug, subproject_slug, lang_slug, version_slug, filename)

    # Handle a / redirect when we aren't a single version
    if all([
            lang_slug is None,
            # External versions/builds will always have a version,
            # because it is taken from the host name
            version_slug is None or hasattr(request, 'external_domain'),
            filename == '',
            not final_project.single_version,
    ]):
        return self.system_redirect(request, final_project, lang_slug,
                                    version_slug, filename)

    # A multi-version project URL must carry both language and version;
    # anything else is malformed (external versions excepted).
    if all([
            (lang_slug is None or version_slug is None),
            not final_project.single_version,
            self.version_type != EXTERNAL,
    ]):
        log.warning(
            'Invalid URL for project with versions. '
            'url=%s, project=%s',
            filename, final_project.slug)
        raise Http404('Invalid URL for project with versions')

    # TODO: un-comment when ready to perform redirect here
    # redirect_path, http_status = self.get_redirect(
    #     final_project,
    #     lang_slug,
    #     version_slug,
    #     filename,
    #     request.path,
    # )
    # if redirect_path and http_status:
    #     return self.get_redirect_response(request, redirect_path, http_status)

    # Check user permissions and return an unauthed response if needed
    if not self.allowed_user(request, final_project, version_slug):
        return self.get_unauthed_response(request, final_project)

    storage_path = final_project.get_storage_path(
        type_='html',
        version_slug=version_slug,
        include_file=False,
        version_type=self.version_type,
    )
    storage = get_storage_class(settings.RTD_BUILD_MEDIA_STORAGE)()

    # If ``filename`` is ``''`` it leaves a trailing slash
    path = os.path.join(storage_path, filename)
    # Handle our backend storage not supporting directory indexes,
    # so we need to append index.html when appropriate.
    if path[-1] == '/':
        # We need to add the index.html before ``storage.url`` since the
        # Signature and Expire time is calculated per file.
        path += 'index.html'
    storage_url = storage.url(path)  # this will remove the trailing slash

    # URL without scheme and domain to perform an NGINX internal redirect
    parsed_url = urlparse(storage_url)._replace(scheme='', netloc='')
    final_url = parsed_url.geturl()

    return self._serve_docs(
        request,
        final_project=final_project,
        path=final_url,
    )
IntegerField, BooleanField, F, ManyToManyField, OneToOneField,
    FloatField, FileField)
from django.utils import timezone
from django.db import transaction
from uuid import uuid4
import sqlparse
from django.utils.safestring import mark_safe
from silk.utils.profile_parser import parse_profile
from silk.config import SilkyConfig

# Django 1.8 removes commit_on_success, django 1.5 does not have atomic
atomic = getattr(transaction, 'atomic', None) or getattr(
    transaction, 'commit_on_success')

# Module-level storage instance, resolved once from the dotted path in the
# Silky configuration.
silk_storage = get_storage_class(SilkyConfig().SILKY_STORAGE_CLASS)()


# Seperated out so can use in tests w/o models
def _time_taken(start_time, end_time):
    # Milliseconds between the two datetimes.
    # NOTE(review): only ``seconds`` and ``microseconds`` of the timedelta are
    # used, so spans of a day or more under-report — presumably durations here
    # are always short; confirm before relying on this for long intervals.
    d = end_time - start_time
    return d.seconds * 1000 + d.microseconds / 1000


def time_taken(self):
    # Milliseconds between this object's recorded start and end times.
    return _time_taken(self.start_time, self.end_time)


class CaseInsensitiveDictionary(dict):
    # Dict whose key lookups are lower-cased before delegating to ``dict``.
    def __getitem__(self, key):
        return super(CaseInsensitiveDictionary, self).__getitem__(key.lower())
def _setup(self):
    """Resolve and instantiate the staticfiles storage backend on first use."""
    storage_cls = get_storage_class(settings.STATICFILES_STORAGE)
    self._wrapped = storage_cls()
""" Atlas Views Configuration """ from django.shortcuts import render from .models import Kingdoms, Locations import json from collections import defaultdict from django.conf import settings from django.core.files.storage import get_storage_class storage_class = get_storage_class(settings.STATICFILES_STORAGE) # Adjusting view of map as per the kingdom location def find_view_value(pk): if pk == "1": return ([10, 14, 6]) elif pk == "2": return ([12, 8, 7]) elif pk == "3": return ([35, 18, 6]) elif pk == "5": return ([25, 18, 5]) elif pk == "6": return ([-7, 18, 6]) elif pk == "7": return ([0, 20, 6]) elif pk == "8": return ([14, 20, 6])
def setUp(self):
    # Re-initialize storage
    # Various tests override either this setting or various aspects of the storage engine
    # By resetting it every test case, we avoid this caching (which is a huge benefit in prod)
    serve.build_media_storage = get_storage_class(
        settings.RTD_BUILD_MEDIA_STORAGE)()

    # User who owns every project created below.
    self.eric = fixture.get(User, username='******')
    self.eric.set_password('eric')
    self.eric.save()

    # Public top-level project; all its versions are made public too.
    self.project = fixture.get(
        Project,
        slug='project',
        privacy_level=PUBLIC,
        external_builds_privacy_level=PUBLIC,
        users=[self.eric],
        main_language_project=None,
    )
    self.project.versions.update(privacy_level=PUBLIC)

    # Public subproject attached to ``self.project``.
    self.subproject = fixture.get(
        Project,
        slug='subproject',
        users=[self.eric],
        main_language_project=None,
        privacy_level=PUBLIC,
        external_builds_privacy_level=PUBLIC,
    )
    self.subproject.versions.update(privacy_level=PUBLIC)
    self.project.add_subproject(self.subproject)

    # Spanish translation of the main project.
    self.translation = fixture.get(
        Project,
        language='es',
        slug='translation',
        users=[self.eric],
        privacy_level=PUBLIC,
        external_builds_privacy_level=PUBLIC,
        main_language_project=self.project,
    )
    self.translation.versions.update(privacy_level=PUBLIC)

    # Spanish translation of the subproject.
    self.subproject_translation = fixture.get(
        Project,
        language='es',
        slug='subproject-translation',
        users=[self.eric],
        main_language_project=self.subproject,
        privacy_level=PUBLIC,
        external_builds_privacy_level=PUBLIC,
    )
    self.subproject_translation.versions.update(privacy_level=PUBLIC)

    # Subproject registered under an explicit alias on the main project.
    self.subproject_alias = fixture.get(
        Project,
        language='en',
        slug='subproject-alias',
        users=[self.eric],
        privacy_level=PUBLIC,
        external_builds_privacy_level=PUBLIC,
    )
    self.subproject_alias.versions.update(privacy_level=PUBLIC)
    self.project.add_subproject(self.subproject_alias, alias='this-is-an-alias')

    # These can be set to canonical as needed in specific tests
    self.domain = fixture.get(Domain, project=self.project,
                              domain='docs1.example.com', https=True)
    self.domain2 = fixture.get(Domain, project=self.project,
                               domain='docs2.example.com', https=True)
# -*- coding: utf-8 -*- # import os from django.db import models from django.conf import settings from django.core.files.storage import get_storage_class from utils.utils import get_image_path import logging logger = logging.getLogger('solrindexer') global_storage = get_storage_class(settings.GLOBAL_FILE_STORAGE)() def get_image_path2(instance, filename): extra_path = 'special_offer/logos/%s/' % (instance.id or 0) return get_image_path(instance, filename, extra_path=extra_path) class ExtraClass(models.Model): name = models.CharField(max_length=255) key = models.SlugField() description = models.CharField(max_length=255, blank=True) def __unicode__(self): return self.name class SpecialOffer(models.Model): logo_image = models.ImageField(upload_to=get_image_path2, blank=True, null=True, storage=global_storage) title = models.CharField(max_length=255) shop_name = models.CharField(max_length=255)
def __init__(self):
    """Attach an instance of the configured public-site storage backend."""
    self.storage = get_storage_class(settings.PUBLIC_SITE_STORAGE)()
    super(PublicStorageMixin, self).__init__()
def get_storage(self):
    """Return the storage backend, instantiating it from a dotted-path
    string when ``self.storage`` is configured as one."""
    storage = self.storage
    if isinstance(storage, str):
        return get_storage_class(storage)()
    return storage
logger = olympia.core.logger.getLogger('z.apps.stats.views')

# Valid grouping granularities and output formats for time-series queries.
SERIES_GROUPS = ('day', 'week', 'month')
SERIES_GROUPS_DATE = ('date', 'week', 'month')  # Backwards compat.
SERIES_FORMATS = ('json', 'csv')
SERIES = ('downloads', 'usage', 'overview', 'sources', 'os',
          'locales', 'statuses', 'versions', 'apps')
GLOBAL_SERIES = ('addons_in_use', 'addons_updated', 'addons_downloaded',
                 'collections_created', 'reviews_created', 'addons_created',
                 'users_created', 'my_apps')

# Default storage backend instance shared by the views in this module.
storage = get_storage_class()()


@non_atomic_requests
def dashboard(request):
    # Render the site-wide stats dashboard page.
    stats_base_url = reverse('stats.dashboard')
    view = get_report_view(request)
    return render(request, 'stats/dashboard.html',
                  {'report': 'site',
                   'view': view,
                   'stats_base_url': stats_base_url})


def get_series(model, extra_field=None, source=None, **filters):
    """
    Get a generator of dicts for the stats model given by the filters.
def _setup(self):
    """Instantiate the configured COMPRESS_STORAGE backend lazily."""
    storage_cls = get_storage_class(settings.COMPRESS_STORAGE)
    self._wrapped = storage_cls()
def export_delete_callback(sender, **kwargs):
    """Signal handler: when an Export row is deleted, remove its backing
    file from the default storage (if one exists)."""
    export = kwargs['instance']
    storage = get_storage_class()()
    path = export.filepath
    if path and storage.exists(path):
        storage.delete(path)
return name

def delete(self, name):
    """
    Handle deletion race condition present in Django prior to 1.4
    https://code.djangoproject.com/ticket/16108
    """
    try:
        super(CompressorFileStorage, self).delete(name)
    except OSError as e:
        # Deleting an already-missing file is fine; re-raise anything else.
        if e.errno != errno.ENOENT:
            raise


# Lazily-constructed storage instance; the class is resolved and instantiated
# only when the object is first used.
compressor_file_storage = SimpleLazyObject(
    lambda: get_storage_class("compressor.storage.CompressorFileStorage")())


class GzipCompressorFileStorage(CompressorFileStorage):
    """
    The standard compressor file system storage that gzips storage files
    additionally to the usual files.
    """
    def save(self, filename, content):
        # Save normally first, then write a gzipped sibling next to it.
        filename = super(GzipCompressorFileStorage, self).save(filename, content)
        orig_path = self.path(filename)
        compressed_path = "%s.gz" % orig_path
        with open(orig_path, "rb") as f_in, open(compressed_path, "wb") as f_out:
return name

def delete(self, name):
    """
    Handle deletion race condition present in Django prior to 1.4
    https://code.djangoproject.com/ticket/16108
    """
    try:
        super(CompressorFileStorage, self).delete(name)
    except OSError as e:
        # A missing file is not an error here; anything else propagates.
        if e.errno != errno.ENOENT:
            raise


# Lazy singleton: the storage class is imported and instantiated on first use.
compressor_file_storage = SimpleLazyObject(
    lambda: get_storage_class('compressor.storage.CompressorFileStorage')())


class GzipCompressorFileStorage(CompressorFileStorage):
    """
    The standard compressor file system storage that gzips storage files
    additionally to the usual files.
    """
    def save(self, filename, content):
        # Save via the parent storage, then emit a ``.gz`` companion file.
        filename = super(GzipCompressorFileStorage, self).save(filename, content)
        orig_path = self.path(filename)
        compressed_path = '%s.gz' % orig_path
        with open(orig_path, 'rb') as f_in, open(compressed_path, 'wb') as f_out:
def test_get_nonexisting_storage_class(self):
    """
    get_storage_class must raise ImportError when the named class is
    missing from an otherwise valid module.
    """
    self.assertRaises(
        ImportError,
        get_storage_class,
        'django.core.files.storage.NonExistingStorage',
    )
def __init__(self):
    """Bind an instance of the STATICFILES_STORAGE backend to ``self.storage``."""
    self.storage = get_storage_class(settings.STATICFILES_STORAGE)()
    super(StaticStorageMixin, self).__init__()