def variativeStorage():
    """Return S3Boto3Storage if access key is defined.

    Fallback to default storage.
    """
    # getattr with a default collapses the hasattr + truthiness
    # double-check into a single lookup.
    if getattr(settings, 'AWS_ACCESS_KEY_ID', None):
        return S3Boto3Storage()
    return default_storage
class ShrubsImage(models.Model):
    """Image file attached one-to-one to a Shrubs record, stored on S3."""

    shrubs = models.OneToOneField(
        "Shrubs", on_delete=models.CASCADE, primary_key=True)
    original = models.FileField(
        storage=S3Boto3Storage(), upload_to=upload_to, null=True, blank=True)

    def __str__(self):
        return f"shrubs{self.shrubs}"
class ExportRunFile(UIDMixin, TimeStampedModelMixin):
    """
    The ExportRunFile stores additional files to be added to each ExportRun zip archive.
    """

    # Storage backend is chosen once at class-definition time.
    if settings.USE_S3:
        storage = S3Boto3Storage()
    else:
        storage = FileSystemStorage(
            location=settings.EXPORT_RUN_FILES,
            base_url=settings.EXPORT_RUN_FILES_DOWNLOAD)

    file = models.FileField(verbose_name="File", storage=storage)
    directory = models.CharField(
        max_length=100, null=True, blank=True,
        help_text="An optional directory name to store the file in.")
    provider = models.ForeignKey(
        DataProvider,
        on_delete=models.CASCADE,
        related_name="file_provider",
        null=True,
        blank=True,
        help_text="An optional data provider to associate the file with.",
    )

    def save(self, *args, **kwargs):
        """Delete the previously stored blob when the file is replaced."""
        if self.pk:
            export_run_file = ExportRunFile.objects.get(id=self.id)
            if export_run_file.file != self.file:
                # save=False avoids a redundant DB write for the old row.
                export_run_file.file.delete(save=False)
        super().save(*args, **kwargs)
def setUpClass(cls):
    """Set up shared fixtures: the pixelmatch binary, temp/screenshot
    directories, optional S3 storage, and a recursive map of every
    admin-registered model (including nested inlines)."""
    if six.PY2:
        raise SkipTest("Skipping redundant test")
    cls.pixelmatch_bin = os.environ.get('PIXELMATCH_BIN')
    if not cls.pixelmatch_bin:
        cls.pixelmatch_bin = find_executable('pixelmatch')
    if not cls.pixelmatch_bin or not os.path.exists(cls.pixelmatch_bin):
        raise SkipTest("pixelmatch not installed")
    cls.screenshot_output_dir = os.environ.get('SCREENSHOT_OUTPUT_DIR')
    super(BaseNestedAdminTestCase, cls).setUpClass()
    cls.root_temp_dir = tempfile.mkdtemp()
    if os.environ.get('TRAVIS_BUILD_NUMBER'):
        # For some reason these tests fail on travis when Django > 1.11
        if django.VERSION > (1, 11):
            raise SkipTest("Issue with travis and Django >= 1.11")
        cls.path_prefix = "travis_%s" % os.environ['TRAVIS_BUILD_NUMBER']
    else:
        cls.path_prefix = "local"
    # NOTE(review): cls.temp_dir is never assigned here (the mkdtemp line
    # below is commented out) -- presumably a superclass sets it; verify.
    # cls.path_prefix = "local_%s" % datetime.now().strftime('%Y%m%dT%H%M%S')
    # cls.temp_dir = tempfile.mkdtemp(dir=cls.root_temp_dir)
    os.makedirs(os.path.join(cls.temp_dir, cls.path_prefix))
    if cls.screenshot_output_dir:
        screenshot_path = os.path.join(cls.screenshot_output_dir, cls.path_prefix)
        if not os.path.exists(screenshot_path):
            os.makedirs(screenshot_path)
    if all(os.environ.get(k) for k in ['AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY']):
        try:
            storage = S3Boto3Storage()
            bucket = storage.bucket  # noqa
        except Exception:
            # Best effort: if the credentials are present but unusable we
            # simply skip S3 setup.  (Was a bare ``except:``, which would
            # also have swallowed KeyboardInterrupt/SystemExit.)
            pass
        else:
            cls.storage = storage
    cls.all_models = {}
    cls.all_model_names = {}
    for root_model in cls.root_models:
        root_admin = admin_site._registry[root_model]

        def descend_admin_inlines(admin):
            # Returns [model, [nested inline data, ...]] recursively.
            data = [admin.model, []]
            for inline in (getattr(admin, 'inlines', None) or []):
                data[1].append(descend_admin_inlines(inline))
            return data

        cls.all_models[root_model] = models = descend_admin_inlines(root_admin)

        def recursive_map_model_names(data):
            # Mirrors the nested list structure, replacing models by names.
            if isinstance(data, list):
                return [m for m in map(recursive_map_model_names, data)]
            else:
                return get_model_name(data)

        cls.all_model_names[root_model] = recursive_map_model_names(models)
class Photo(models.Model):
    """A photo belonging to a post, with an optional thumbnail, on S3."""

    uuid = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    post = models.ForeignKey(Post, on_delete=models.CASCADE, related_name="photos")
    original = models.FileField(
        max_length=2000, storage=S3Boto3Storage(), upload_to=upload_to
    )
    thumbnail = models.FileField(
        blank=True,
        max_length=2000,
        null=True,
        storage=S3Boto3Storage(),
        upload_to=upload_to,
    )

    def __str__(self):
        return f'Photo {self.pk} for Post {self.post.pk} - "{self.post.title}"'
def get_manifest():
    """
    Download and parse manifest from arxiv S3 bucket.
    """
    # Requester-pays bucket: the caller's account is billed for transfer.
    s3_object = S3Boto3Storage().connection.Object(
        "arxiv", "src/arXiv_src_manifest.xml")
    body = s3_object.get(RequestPayer="requester")["Body"].read()
    return parse_manifest(body)
def get_storage():
    """Get the custom storage for files.

    Returns an S3 backend when an AWS access key is configured,
    otherwise Django's default storage.
    """
    if not getattr(settings, 'AWS_ACCESS_KEY_ID', None):
        return DefaultStorage()
    # Imported lazily so environments without django-storages still work.
    from storages.backends.s3boto3 import S3Boto3Storage
    return S3Boto3Storage()
class BusinessTier(models.Model):
    """A named business tier with an optional URL and S3-hosted logo."""

    name = models.CharField(max_length=255, unique=True)
    url = models.TextField(null=True)
    # NOTE(review): newer django-storages spells this kwarg ``bucket_name``;
    # ``bucket`` is the legacy spelling -- confirm the installed version.
    logo = models.ImageField(
        upload_to=image_directory_path,
        null=True,
        blank=True,
        storage=S3Boto3Storage(bucket=settings.AWS_STORAGE_BUCKET_NAME))
class Photo(models.Model):
    """An S3-stored photo attached to a post."""

    uuid = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    post = models.ForeignKey(Post, on_delete=models.CASCADE, related_name="photos")
    original = models.FileField(storage=S3Boto3Storage())
class QRCode(models.Model):
    """QR-code image generated for a Link (exactly one per link)."""

    link = models.OneToOneField(Link, on_delete=models.CASCADE, primary_key=True)
    original = models.FileField(
        storage=S3Boto3Storage(), upload_to=upload_to, null=True, blank=True
    )

    class Meta:
        verbose_name_plural = "qrcode"
def get_manifest():
    """
    Download and parse manifest from arxiv S3 bucket.
    """
    conn = S3Boto3Storage().connection
    manifest_obj = conn.Object('arxiv', 'src/arXiv_src_manifest.xml')
    # Requester-pays bucket: transfer is billed to the caller's account.
    raw = manifest_obj.get(RequestPayer='requester')['Body'].read()
    return parse_manifest(raw)
def send_notification(self):
    """Render the broadcast email from an S3-hosted template and send one
    personalised message per subscribed user.

    Returns early (sending nothing) when there are no predictions for the
    schedule.  Raises ImproperlyConfigured when the template is missing
    from S3.
    """
    predictions = self.predictions(self.schedule)
    if not predictions:
        return
    # Record the dispatch time before actually sending.
    self.datetime_sent = timezone.now()
    self.save()
    from_email = settings.DEFAULT_FROM_EMAIL
    text_content = ''
    if settings.USE_MANDRILL:
        connection = get_connection(
            'anymail.backends.mandrill.MandrillBackend')
    else:
        connection = get_connection()
    context = {
        'origin': self.schedule.origin.name,
        'destination': self.schedule.destination.name,
        'departure': arrow.get(self.schedule.departure)
        .format('MMMM D, HH:mm'),
        'arrival': arrow.get(self.schedule.arrival)
        .format('MMMM D, HH:mm'),
        'departure_time': arrow.get(self.schedule.departure)
        .format('HH:mm'),
        'arrival_time': arrow.get(self.schedule.arrival)
        .format('HH:mm'),
    }
    context.update(predictions)
    subject = settings.BROADCASTS_SUBJECT.format(
        origin=context['origin'],
        destination=context['destination'],
        departure=context['departure'])
    # The HTML body template lives on S3, not on local disk.
    template_path = settings.BROADCASTS_TEMPLATE_PATH
    s3 = S3Boto3Storage()
    if s3.exists(template_path):
        template_html = s3.open(template_path).read()
    else:
        raise ImproperlyConfigured("Unable to get template from S3")
    template = Template(template_html)
    login_url = os.environ.get('BROADCASTS_LOGIN_URL',
                               '/route/{route}?token={token}')
    for user in self.users.all():
        # Each recipient gets their own login token embedded in the link.
        token = TokenBackend.create_token(user)
        context['login_url'] = login_url.format(route=self.schedule.code,
                                                token=token.code)
        html_content = template.render(Context(context))
        to_email = user.email
        msg = EmailMultiAlternatives(subject, text_content, from_email,
                                     [to_email], connection=connection)
        msg.attach_alternative(html_content, "text/html")
        msg.send()
    connection.close()
def _setup(self):
    """Resolve the wrapped storage backend on first access: S3 with
    unsigned URLs when a public bucket is configured, default otherwise."""
    backend = DefaultStorage()
    public_bucket = settings.AWS_STORAGE_BUCKET_NAME
    if public_bucket:  # pragma: no cover
        # querystring_auth=False serves plain (unsigned) public URLs.
        backend = S3Boto3Storage(bucket=public_bucket,
                                 querystring_auth=False)
    self._wrapped = backend
class CallEvaluation(CreateModifyOn):
    """Evaluation of a Call: panel meeting date, post-panel management
    spreadsheet, and closing metadata.

    Closing the evaluation creates one Project per board-approved proposal
    (see close()).
    """
    call = models.OneToOneField(Call, on_delete=models.PROTECT)
    panel_date = models.DateField(help_text='Date on which the panel review meeting will take place')
    post_panel_management_table = models.FileField(
        storage=S3Boto3Storage(),
        upload_to=post_panel_management_table_rename,
        help_text='File in which the panel review information is contained',
        blank=True, null=True)
    closed_date = models.DateTimeField(blank=True, null=True)
    closed_user = models.ForeignKey(User,
                                    help_text='User by which the Call Evaluation was closed',
                                    blank=True, null=True,
                                    on_delete=models.PROTECT)
    history = HistoricalRecords()

    def __str__(self):
        return f'CallEvaluation: {self.id} for call: {self.call.little_name()}'

    @staticmethod
    def comment_object():
        # Imported locally to avoid a circular import with the comments app.
        from comments.models import CallEvaluationComment
        return CallEvaluationComment

    @staticmethod
    def attachment_object():
        # Call evaluations do not support attachments.
        return None

    def comments(self):
        return self.callevaluationcomment_set.all().order_by('created_on')

    def attachments(self):
        return []

    def is_closed(self):
        return self.closed_date is not None

    def is_open(self):
        return self.closed_date is None

    def close(self, user_closing_call_evaluation):
        """ It creates the projects and closes the call. """
        created_projects = []
        with transaction.atomic():
            # Random ordering ('?') assigns project numbers independently
            # of submission order.
            for proposal in Proposal.objects.filter(call=self.call).filter(
                    proposalevaluation__board_decision=ProposalEvaluation.BOARD_DECISION_FUND).order_by('?'):
                project = Project.create_from_proposal(proposal, len(created_projects) + 1)
                created_projects.append(project)
            self.closed_date = timezone.now()
            self.closed_user = user_closing_call_evaluation
            self.save()
        return created_projects
class Proposta(models.Model):
    """A proposal with a title, number, and an S3-hosted image."""

    titulo = models.CharField(max_length=110, null=False)
    numero = models.IntegerField(null=False)
    imagem = models.ImageField(null=False, upload_to='propostas',
                               storage=S3Boto3Storage())
    em_uso = models.BooleanField(default=False)

    def __str__(self):
        return self.titulo
def __init__(self, bucket=settings.AWS_USER_DATA_BUCKET_NAME, verbose_name=None,
             name=None, upload_to='', storage=None, acl='public-read', **kwargs):
    """File field that swaps in an S3 backend when USE_AMAZON_S3 is set."""
    self.bucket_name = bucket
    if settings.USE_AMAZON_S3:
        # Unsigned public URLs; the ACL is applied per uploaded object.
        storage = S3Boto3Storage(bucket_name=self.bucket_name,
                                 default_acl=acl,
                                 querystring_auth=False)
    super(S3EnabledFileField, self).__init__(
        verbose_name, name, upload_to, storage, **kwargs)
class PrivateS3BotoStorage(S3Boto3Storage):
    """
    Private storage bucket for S3
    """
    # Extra credential setting names checked before the public ones.
    access_key_names = ['AWS_PRIVATE_S3_ACCESS_KEY_ID', 'AWS_PRIVATE_ACCESS_KEY_ID'] + S3Boto3Storage.access_key_names
    secret_key_names = ['AWS_PRIVATE_S3_SECRET_ACCESS_KEY', 'AWS_PRIVATE_SECRET_ACCESS_KEY'] + S3Boto3Storage.secret_key_names

    # Since this class inherits the default storage, it shares many parameters with the base class.
    # Thus, redefine the setting name that is used to read these values, so almost all settings are not shared.
    # NOTE(review): each S3Boto3Storage() here instantiates the base storage
    # at class-definition time just to read its default -- confirm this has
    # no unwanted side effects with the installed django-storages version.
    access_key = setting('AWS_PRIVATE_S3_ACCESS_KEY_ID', setting('AWS_PRIVATE_ACCESS_KEY_ID', S3Boto3Storage().access_key))
    secret_key = setting('AWS_PRIVATE_S3_SECRET_ACCESS_KEY', setting('AWS_PRIVATE_SECRET_ACCESS_KEY', S3Boto3Storage().secret_key))
    file_overwrite = setting('AWS_PRIVATE_S3_FILE_OVERWRITE', False)  # false, differ from base class
    object_parameters = setting('AWS_PRIVATE_S3_OBJECT_PARAMETERS', {})
    bucket_name = setting('AWS_PRIVATE_STORAGE_BUCKET_NAME')
    auto_create_bucket = setting('AWS_PRIVATE_AUTO_CREATE_BUCKET', False)
    default_acl = setting('AWS_PRIVATE_DEFAULT_ACL', 'private')  # differ from base class
    bucket_acl = setting('AWS_PRIVATE_BUCKET_ACL', default_acl)
    querystring_auth = setting('AWS_PRIVATE_QUERYSTRING_AUTH', True)
    querystring_expire = setting('AWS_PRIVATE_QUERYSTRING_EXPIRE', 3600)
    signature_version = setting('AWS_PRIVATE_S3_SIGNATURE_VERSION')
    reduced_redundancy = setting('AWS_PRIVATE_REDUCED_REDUNDANCY', False)
    location = setting('AWS_PRIVATE_LOCATION', '')
    encryption = setting('AWS_PRIVATE_S3_ENCRYPTION', False)
    custom_domain = setting('AWS_PRIVATE_S3_CUSTOM_DOMAIN')
    addressing_style = setting('AWS_PRIVATE_S3_ADDRESSING_STYLE')
    secure_urls = setting('AWS_PRIVATE_S3_SECURE_URLS', True)
    file_name_charset = setting('AWS_PRIVATE_S3_FILE_NAME_CHARSET', 'utf-8')
    gzip = setting('AWS_PRIVATE_IS_GZIPPED', S3Boto3Storage().gzip)  # fallback to default
    preload_metadata = setting('AWS_PRIVATE_PRELOAD_METADATA', False)
    url_protocol = setting('AWS_PRIVATE_S3_URL_PROTOCOL', S3Boto3Storage().url_protocol)  # fallback to default
    endpoint_url = setting('AWS_PRIVATE_S3_ENDPOINT_URL', None)
    region_name = setting('AWS_PRIVATE_S3_REGION_NAME', S3Boto3Storage().region_name)  # fallback to default
    use_ssl = setting('AWS_PRIVATE_S3_USE_SSL', True)

    def url(self, name, *args, **kwargs):
        """Return a URL for *name*: the reverse-proxy streaming view when
        configured (or when querystring auth is off), else a presigned URL."""
        if appconfig.PRIVATE_STORAGE_S3_REVERSE_PROXY or not self.querystring_auth:
            # There is no direct URL possible, return our streaming view instead.
            return reverse('serve_private_file', kwargs={'path': name})
        else:
            # The S3Boto3Storage can generate a presigned URL that is temporary available.
            return super(PrivateS3BotoStorage, self).url(name, *args, **kwargs)
class Photo(models.Model):
    """A user photo in a group (auto-transposed, with a 400x400 thumbnail),
    stored in the secure S3 bucket."""

    owner = models.ForeignKey(User, on_delete=models.CASCADE)
    group = models.ForeignKey(Group, on_delete=models.CASCADE)
    date_added = models.DateTimeField(auto_now_add=True)
    caption = models.CharField(max_length=60, blank=True)
    image_width = models.PositiveIntegerField()
    image_height = models.PositiveIntegerField()
    image = ProcessedImageField(
        upload_to='images',
        processors=[Transpose()],
        storage=S3Boto3Storage(bucket=settings.AWS_SECURE_STORAGE_BUCKET_NAME))
    thumbnail = ProcessedImageField(
        upload_to='image_thumbnails',
        processors=[Transpose(), ResizeToFit(400, 400)],
        storage=S3Boto3Storage(bucket=settings.AWS_SECURE_STORAGE_BUCKET_NAME))

    def __str__(self):
        return self.image.name
class Avatar(models.Model):
    """Profile avatar image stored on S3 (one per profile)."""

    profile = models.OneToOneField("Profile", on_delete=models.CASCADE,
                                   primary_key=True)
    original = models.FileField(storage=S3Boto3Storage(), upload_to=upload_to,
                                null=True, blank=True)

    def __str__(self):
        return f"{self.profile}"
class Medium(CreateModifyOn):
    """A photo/video attached to a project, stored on S3, with licensing
    and attribution metadata.  Deletions are journalled via MediumDeleted."""
    project = models.ForeignKey(Project,
                                help_text='Project that this medium belongs to',
                                on_delete=models.PROTECT)
    received_date = models.DateField(help_text='Date that the medium was received')
    photographer = models.ForeignKey(PhysicalPerson,
                                     help_text='Person who took the photo/video',
                                     on_delete=models.PROTECT)
    license = models.ForeignKey(License, help_text='License',
                                on_delete=models.PROTECT, null=True, blank=True)
    copyright = models.CharField(
        max_length=1024,
        help_text='Owner of copyright if it is not the photographer (e.g. institution)',
        null=True, blank=True)
    file = models.FileField(storage=S3Boto3Storage(),
                            upload_to=medium_file_rename,
                            validators=[*management_file_validator()])
    # MD5 checksum of the file, recomputed on every save (see save()).
    file_md5 = models.CharField(max_length=32, null=True, blank=True)
    blog_posts = models.ManyToManyField(
        BlogPost,
        help_text='Which blog posts this image belongs to',
        blank=True)
    descriptive_text = models.TextField(
        help_text='Description of this media, if provided. Where was it taken, context, etc.',
        null=True, blank=True)

    class Meta:
        verbose_name_plural = 'Media'

    def __str__(self):
        return f'{self.project}-{self.photographer}'

    def save(self, *args, **kwargs):
        # Keep the stored checksum in sync with the file contents.
        self.file_md5 = calculate_md5_from_file_field(self.file)
        super().save(*args, **kwargs)

    def delete(self, *args, **kwargs):
        # Record the deletion and remove the row in one transaction.
        with transaction.atomic():
            MediumDeleted.objects.create(**{'original_id': self.id})
            delete_result = super().delete(*args, **kwargs)
        return delete_result
def __init__(self, bucket=settings.AWS_USER_DATA_BUCKET_NAME, thumb_sizes=THUMB_SIZES,
             verbose_name=None, name=None, width_field=None, height_field=None,
             legacy_filenames=True, acl='public-read', **kwargs):
    """Image field that stores on S3 when USE_AMAZON_S3 is enabled,
    tracking thumbnail sizes and legacy filename handling."""
    self.thumb_sizes = thumb_sizes
    self.bucket_name = bucket
    self.legacy_filenames = legacy_filenames
    if settings.USE_AMAZON_S3:
        # Unsigned public URLs; ACL applied per uploaded object.
        kwargs['storage'] = S3Boto3Storage(bucket_name=self.bucket_name,
                                           default_acl=acl,
                                           querystring_auth=False)
    super(S3EnabledImageField, self).__init__(
        verbose_name, name, width_field, height_field, **kwargs)
class Lecture(models.Model):
    """A numbered lecture in a course, with its presentation file on S3."""

    number = models.PositiveIntegerField()
    topic = models.CharField(max_length=255)
    presentation = models.FileField(storage=S3Boto3Storage())
    course = models.ForeignKey(Course, on_delete=models.CASCADE,
                               related_name="lecture")
    creator = models.ForeignKey(User, on_delete=models.CASCADE,
                                related_name="lecture_creator")

    def __str__(self):
        return f"Lecture №{ self.number } - '{ self.topic }'"
class SubProduct(TimestampedModel):
    """A sub-product of a Product, with original and thumbnail images
    stored in the public S3 bucket."""

    sub_product_code = models.CharField(max_length=20, blank=True, null=True, unique=True)
    # NOTE(review): no on_delete -- required on Django >= 2.0; confirm the
    # project's Django version before adding one.
    product = models.ForeignKey(Product, related_name='sub_products')
    name = models.CharField(max_length=50)
    display_name = models.CharField(max_length=50, blank=True, null=True)
    description = models.CharField(max_length=100, blank=True, null=True)
    img_orig = models.ImageField(
        upload_to=upload_sub_product_orig_image_path,
        null=True,
        blank=True,
        storage=S3Boto3Storage(bucket=settings.AWS_STORAGE_PUBLIC_BUCKET_NAME))
    img_thumb = models.ImageField(
        upload_to=upload_sub_product_thumb_image_path,
        null=True,
        blank=True,
        storage=S3Boto3Storage(bucket=settings.AWS_STORAGE_PUBLIC_BUCKET_NAME))

    class Meta:
        db_table = "sub_product"
        verbose_name = "Sub Product"
        verbose_name_plural = "Sub products"

    def _image_url(self, field):
        """Shared helper for get_img_orig/get_img_thumb: a public S3 access
        URL in production, the raw stored name otherwise ('' when unset)."""
        if field.name and utils.is_current_settings('production'):
            return utils.generate_public_s3_access_url(field.name)
        return str(field.name) if field.name else ''

    def get_img_orig(self):
        return self._image_url(self.img_orig)

    def get_img_thumb(self):
        return self._image_url(self.img_thumb)

    def __str__(self):
        return "{0} #{1} #{2}".format(self.product.product_category.name,
                                      self.product.name, self.name)
class Avatar(models.Model):
    """User avatar tied one-to-one to a Profile, stored on S3."""

    profile = models.OneToOneField(Profile, on_delete=models.CASCADE,
                                   primary_key=True)
    original = models.FileField(storage=S3Boto3Storage(), upload_to=upload_to,
                                null=True, blank=True)

    class Meta:
        verbose_name_plural = "avatar"

    def __str__(self):
        return f"Avatar(user = {self.profile})"
def __init__(self, bucket_name, *args, **kwargs):
    """Wrap real S3 access when a bucket and credentials are configured;
    otherwise fall back to a local FileSystemStorage mock."""
    if bucket_name and settings.AWS_ACCESS_KEY_ID:
        # True S3 access.
        kwargs['bucket'] = bucket_name
        self.storage = S3Boto3Storage(*args, **kwargs)
        self.bucket_name = bucket_name
        self.s3_client = boto3.client(
            's3',
            aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
            aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY)
    else:
        # Mocked S3 access.
        self.storage = FileSystemStorage()
        self.s3_client = None
class ProjectAttachment(AbstractAttachment):
    """Categorised file attached to a project, stored on S3."""

    file = models.FileField(storage=S3Boto3Storage(),
                            upload_to=project_attachment_rename,
                            validators=[*management_file_validator()])
    project = models.ForeignKey(Project,
                                help_text='Project that this attachment belongs to',
                                on_delete=models.PROTECT)
    category = models.ForeignKey(ProjectAttachmentCategory,
                                 help_text='Category of the attachment',
                                 on_delete=models.PROTECT)

    def set_parent(self, parent):
        self.project = parent

    @staticmethod
    def category_queryset():
        return ProjectAttachmentCategory.objects.all()
def read_theme_file_from_s3(file_path):
    """
    Read a theme file from S3.

    The file_path should be the relative path in the media directory.

    Example:
        file_content = read_theme_file_from_s3('themename/templates/default.html')
    """
    # the DEFAULT_S3_PATH is where the media files are stored.
    file_path = '%s/%s' % (settings.THEME_S3_PATH, str(file_path).lstrip('/'))
    storage = S3Boto3Storage()
    # Context manager guarantees the handle is closed even if read()
    # raises; the original manual close() leaked it on error.
    with S3Boto3StorageFile(file_path, 'r', storage) as f:
        content = f.read()
    return content
class Backups(models.Model):
    """A backup archive tracked in S3 (under 'backups/'), optionally
    mirrored to Glacier."""

    location = models.ForeignKey('BackupLocation', on_delete=models.CASCADE)
    archived = models.BooleanField('Archived (Do Not Delete from AWS)', default=False)
    file = models.FileField(max_length=255, storage=S3Boto3Storage(location='backups'))
    glacier_archive_id = models.CharField(max_length=500, blank=True, null=True)
    off_site_backup_date = models.DateTimeField(blank=True, null=True)
    last_seen_date = models.DateTimeField()

    def delete(self, *args, **kwargs):
        """Remove the S3 blob when the model record is deleted."""
        self.file.delete()
        super().delete(*args, **kwargs)
def isfile(self, name):
    """Return True if *name* appears to be a file rather than a
    "directory" object.

    Any name containing a dot is assumed to be a file without hitting S3.
    Results are memoised in self.isfilecached; any lookup failure is
    treated as not-a-file.
    """
    if len(name.split('.')) > 1:
        return True
    try:
        name = self._normalize_name(self._clean_name(name))
        if self.isfilecached.get(name) is not None:
            return self.isfilecached.get(name)
        # NOTE(review): this constructs S3Boto3Storage with file-like
        # arguments -- presumably S3Boto3StorageFile(name, 'rb', self)
        # was intended; as written this call likely raises and falls
        # into the except branch, classifying the name as not-a-file.
        # Verify against the intended behavior before changing.
        f = S3Boto3Storage(name, 'rb', self)
        if "directory" in f.obj.content_type:
            isfile = False
        else:
            isfile = True
    except Exception:
        isfile = False
    self.isfilecached[name] = isfile
    return isfile
def test_requires_s3_storage():
    """The decorator passes S3-backed views through untouched and replaces
    non-S3 ones with a view that raises Http404."""
    HANDLING_VIEW = 123
    s3_storage = S3Boto3Storage()
    file_storage = FileSystemStorage()

    def _decorate(backend):
        return storage_views.requires_s3_storage(backend)(HANDLING_VIEW)

    assert _decorate(s3_storage) == HANDLING_VIEW
    wrapped_view = _decorate(file_storage)
    assert wrapped_view != HANDLING_VIEW
    with pytest.raises(Http404):
        wrapped_view()