Example no. 1
	def __init__(self):
		self.host = 's3-us-west-2.amazonaws.com'
		self.bucket_name = 'snapshots.stencila.io'
		# See note above about the following settings
		self.url_protocol = 'https'
		self.custom_domain = 's3-us-west-2.amazonaws.com/snapshots.stencila.io'
		self.secure_urls = True
		S3BotoStorage.__init__(self)
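This __init__ is only meaningful as an override inside an S3BotoStorage subclass (the same applies to Example no. 34 at the end). A minimal sketch of the enclosing class, with a hypothetical class name:

from storages.backends.s3boto import S3BotoStorage

class SnapshotsStorage(S3BotoStorage):  # hypothetical name, not from the source
	def __init__(self):
		self.bucket_name = 'snapshots.stencila.io'
		# ... remaining attributes as in the snippet above ...
		S3BotoStorage.__init__(self)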
Example no. 2
class CartaMagicPy(models.Model):
    """
    A card with a saved idea
    """
    # , storage=S3BotoStorage(bucket=settings.AWS_STORAGE_BUCKET_NAME)
    imagen_url = models.URLField(blank=True)
    nombre_carta_magic = models.CharField(max_length=255,
                                          blank=True,
                                          unique=True)
    imagen = models.ImageField(
        null=True,
        upload_to=ubicar_magicpy,
        storage=S3BotoStorage(bucket=settings.AWS_STORAGE_BUCKET_NAME))
    imagen_base = models.ImageField(
        null=True,
        upload_to=ubicar_img_base,
        storage=S3BotoStorage(bucket=settings.AWS_STORAGE_BUCKET_NAME))
    grupo = models.ForeignKey(GrupoMagicPy, null=True)
    nombre = models.CharField(max_length=50, blank=True)
    descripcion = models.CharField(max_length=600, blank=True)
    ultima_revision = models.DateTimeField(null=True)
    eliminada = models.BooleanField(default=False)

    @property
    def hace_cuanto_revise(self):
        # Returns a string describing the time elapsed since the last review

        # Guard against an unset ultima_revision (the field is nullable)
        if self.ultima_revision is None:
            return ""

        diferencia_tiempo = datetime.today() - self.ultima_revision
        dias = diferencia_tiempo.days

        if dias == 0:
            return "Hoy"

        # Whole months, with the remainder expressed in days
        meses = dias // 30
        if meses > 0:
            return "%s meses - %s dias" % (meses, dias % 30)
        else:
            return "%s dias" % dias

    @property
    def url_imagen_recortada(self):
        # Returns the image URL if it exists; avoids errors when no file is set

        if self.imagen and hasattr(self.imagen, 'url'):
            return self.imagen.url
        else:
            return None

    class Meta:
        ordering = ["-ultima_revision"]

    def __unicode__(self):
        return self.nombre
Example no. 3
 def __init__(self, *args, **kwargs):
     CachedFilesMixin.__init__(self, *args, **kwargs)
     # Force the connection details from settings, overriding anything
     # passed in by the caller
     kwargs.update(
         bucket=settings.STATIC_BUCKET,
         access_key=settings.AWS_ACCESS_KEY_ID,
         secret_key=settings.AWS_SECRET_ACCESS_KEY,
         headers=settings.STATIC_HEADERS,
         location=settings.STATIC_LOCATION,
         custom_domain=settings.STATIC_DOMAIN,
         gzip=True)
     S3BotoStorage.__init__(self, *args, **kwargs)
Example no. 4
def import_files():
    """
    Go through the stored files and ensure that each one has a Document model supporting it.
    Create or use categories matching the document folder structure unless the folder is numeric.
    :return:
    """
    storage = S3BotoStorage()
    for key in storage.bucket.list():
        if not dm.Document.objects.filter(
                source_file=key.name):  # No existing metadata object
            title = os.path.splitext(os.path.basename(key.name))[0]
            if title:  # ignore .xxx 'hidden' files
                document = dm.Document(source_file=key.name, title=title)
                document.save()  # save here so relations are possible

                filename, created = dm.DocumentFileName.objects.get_or_create(
                    document=document, name=key.name)
                if created:
                    filename.save()

                path = os.path.split(key.name)[0]
                if path:
                    # S3 key names always use '/' as the separator,
                    # regardless of os.path.sep on the host
                    category_names = path.split('/')
                    categories = dm.verify_categories(category_names,
                                                      create_if_absent=True)
                    document.categories.add(categories[-1])
Example no. 5
class PostTopic(models.Model):
    name = models.CharField(max_length=24)
    image = models.ImageField(storage=S3BotoStorage(),
                              upload_to=post_topic_icon_location)

    def __str__(self):
        return self.name
Example no. 6
class SectionAttachment(CreatedModifiedModel):

    title = models.CharField(max_length=255,
                             null=False,
                             blank=True,
                             default='')
    text = models.TextField(null=False, blank=True)
    html = models.TextField(null=False,
                            blank=True,
                            help_text="This is auto generated from Text.")
    file = models.FileField(null=True,
                            blank=True,
                            upload_to='course_section_attachments/',
                            storage=S3BotoStorage(
                                bucket=AWS_STORAGE_BUCKET_NAME,
                                acl=AWS_DEFAULT_ACL))

    def __str__(self):
        return u"%s Text Section" % self.title

    def get_type(self):
        return "attachment"

    def save(self, *args, **kwargs):
        self.html = make_markdown(self.text)
        super(SectionAttachment, self).save(*args, **kwargs)
Example no. 7
class FeralSpirit(models.Model):
    # , storage=S3BotoStorage(bucket=settings.AWS_STORAGE_BUCKET_NAME)
    fireball = models.ForeignKey(Fireball)
    tipo = models.CharField(max_length=60)
    texto = models.CharField(max_length=150, blank=True)
    url = models.URLField(blank=True)
    imagen = models.ImageField(
        null=True,
        blank=True,
        upload_to=ubicar_imagen_feral,
        storage=S3BotoStorage(bucket=settings.AWS_STORAGE_BUCKET_NAME))
    tema = models.CharField(max_length=150, blank=True)
    contador = models.PositiveIntegerField(default=0)
    ultima_publicacion = models.DateTimeField(auto_now_add=True)
    activo = models.BooleanField(
        default=True)  # Only active ferals get tweeted
    eliminado = models.BooleanField(
        default=False)  # deleted ferals do not appear in contenido_extra

    def aumentar_contador(self):
        self.contador += 1
        self.save()
        return self.contador

    @classmethod
    def ultimo_id(cls):
        # Gets the id of the last feral created, used to name the files in storage
        ultimo_feral = cls.objects.filter(eliminado=False).latest('id')
        return ultimo_feral.id

    def __unicode__(self):
        return "%s : %s " % (self.fireball.nombre, self.tipo)

    class Meta:
        ordering = ['-ultima_publicacion']
Example no. 8
class EncodedVideo(pr_models.OwnedPRModel):
    """This is a user Task to view a video."""

    #: used to make the encoding.com job request
    audio_codec = models.CharField(max_length=31, null=False)
    #: The bitrate of the EncodedVideo, in kilobytes per second
    bitrate = models.CharField(max_length=31, null=False)
    #: used to make the encoding.com job request
    output = models.CharField(max_length=31)
    #: used to make the encoding.com job request
    size = models.CharField(max_length=15)
    #: The video that this encoding is for
    video = pr_models.PRForeignKey(Video,
                                   null=False,
                                   related_name='encoded_videos')
    # Different codecs can result in different stream URLs,
    # so capture the codec for when those URLs are generated
    video_codec = models.CharField(max_length=31)
    #: Represents the encoded video stored in S3
    file = models.FileField(upload_to='video/', storage=S3BotoStorage())

    @property
    def http_url(self):
        if self.file.name:
            return self.file.url

    @property
    def url(self):
        if self.file.name:
            return awsutils.CloudFrontStreamingObject(
                self.file.name).generate_url()

    class Meta:
        ordering = ('id', )
Example no. 9
def main(request, course_prefix, course_suffix):
    secure_file_storage = S3BotoStorage(bucket=AWS_SECURE_STORAGE_BUCKET_NAME,
                                        access_key=AWS_ACCESS_KEY_ID,
                                        secret_key=AWS_SECRET_ACCESS_KEY)

    # Read a list of all reports for that course that are on the server
    dashboard_csv_reports = secure_file_storage.listdir(
        "%s/%s/reports/dashboard/csv" % (course_prefix, course_suffix))
    dashboard_txt_reports = secure_file_storage.listdir(
        "%s/%s/reports/dashboard/txt" % (course_prefix, course_suffix))
    course_quizzes_csv_reports = secure_file_storage.listdir(
        "%s/%s/reports/course_quizzes/csv" % (course_prefix, course_suffix))
    course_quizzes_txt_reports = secure_file_storage.listdir(
        "%s/%s/reports/course_quizzes/txt" % (course_prefix, course_suffix))
    quiz_data_csv_reports = secure_file_storage.listdir(
        "%s/%s/reports/quiz_data/csv" % (course_prefix, course_suffix))
    quiz_data_txt_reports = secure_file_storage.listdir(
        "%s/%s/reports/quiz_data/txt" % (course_prefix, course_suffix))

    # Lists handed to the template; populating them from the listdir
    # results above is omitted in this snippet
    dashboard_reports = []
    course_quizzes_reports = []
    quiz_data_reports = []

    return render_to_response('reports/main.html', {
        'common_page_data': request.common_page_data,
        'dashboard_reports': dashboard_reports,
        'course_quizzes_reports': course_quizzes_reports,
        'quiz_data_reports': quiz_data_reports
    },
                              context_instance=RequestContext(request))
Example no. 10
    def _setup(self):
        storage = DefaultStorage()

        private_bucket = settings.AWS_STORAGE_BUCKET_NAME
        if private_bucket:  # pragma: no cover
            storage = S3BotoStorage(bucket=private_bucket)

        self._wrapped = storage
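For context, _setup and self._wrapped belong to Django's LazyObject protocol: the wrapped storage is only built on first attribute access, so no S3 connection is made at import time. A minimal sketch of how a method like this is typically hosted (the class and instance names are assumptions); the same shape applies to Example no. 13:

from django.conf import settings
from django.core.files.storage import DefaultStorage
from django.utils.functional import LazyObject
from storages.backends.s3boto import S3BotoStorage


class PrivateStorage(LazyObject):  # hypothetical name
    def _setup(self):
        storage = DefaultStorage()
        if settings.AWS_STORAGE_BUCKET_NAME:
            storage = S3BotoStorage(bucket=settings.AWS_STORAGE_BUCKET_NAME)
        self._wrapped = storage


# No S3 connection until the first attribute access:
private_storage = PrivateStorage()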
Example no. 11
 def setUp(self):
     self._file_field = self.model._meta.get_field_by_name(
         'upload_file_minidump')[0]
     self._archive_field = self.model._meta.get_field_by_name('archive')[0]
     self._default_storage = self._file_field.storage
     test_storage = S3BotoStorage()
     self._file_field.storage = test_storage
     self._archive_field.storage = test_storage
Example no. 12
 def __init__(self, save_to_s3_arg, s3_filepath=''):
     self.save_to_s3 = save_to_s3_arg
     self.report_str = StringIO()
     self.csv_writer = csv.writer(self.report_str)

     if self.save_to_s3:
         self.s3_filepath = s3_filepath
         if AWS_SECURE_STORAGE_BUCKET_NAME == 'local':
             self.secure_file_storage = default_storage
         else:
             self.secure_file_storage = S3BotoStorage(
                 bucket=AWS_SECURE_STORAGE_BUCKET_NAME,
                 access_key=AWS_ACCESS_KEY_ID,
                 secret_key=AWS_SECRET_ACCESS_KEY)
Example no. 13
    def _setup(self):
        storage = DefaultStorage()

        public_bucket = settings.AWS_STORAGE_PUBLIC_BUCKET_NAME
        if public_bucket:  # pragma: no cover
            storage = S3BotoStorage(
                bucket=public_bucket, querystring_auth=False)

        self._wrapped = storage
Example no. 14
 def __init__(self, course, save_to_s3_arg, s3_filepath):
     self.save_to_s3 = save_to_s3_arg
     if self.save_to_s3:
         if AWS_SECURE_STORAGE_BUCKET_NAME == 'local':
             secure_file_storage = default_storage
         else:
             secure_file_storage = S3BotoStorage(
                 bucket=AWS_SECURE_STORAGE_BUCKET_NAME,
                 access_key=AWS_ACCESS_KEY_ID,
                 secret_key=AWS_SECRET_ACCESS_KEY)

         self.csv_file = secure_file_storage.open(s3_filepath, 'wb')
         self.csv_writer = csv.writer(self.csv_file)
Example no. 15
class Video(BaseModel):
    url = models.URLField()
    title = models.CharField(max_length=256, blank=True)
    description = models.TextField(blank=True)
    author = models.CharField(max_length=256, blank=True)
    duration = models.TimeField(blank=True)

    file = models.FileField(
        null=True, blank=True,
        storage=S3BotoStorage(bucket="zebra-video"))  # upload_to=s3_file_name,
Example no. 16
def page_storage():
    if settings.MANGOPAY_PAGE_DEFAULT_STORAGE:
        return default_storage
    else:
        from storages.backends.s3boto import S3BotoStorage
        return S3BotoStorage(acl='private',
                             headers={
                                 'Content-Disposition': 'attachment',
                                 'X-Robots-Tag':
                                 'noindex, nofollow, noimageindex'
                             },
                             bucket=settings.AWS_MEDIA_BUCKET_NAME,
                             custom_domain=settings.AWS_MEDIA_CUSTOM_DOMAIN)
Example no. 17
 def compile_microsite_sass(self):
     css_output = compile_sass('main.scss',
                               custom_branding=self._sass_var_override)
     file_name = self.get_value('css_overrides_file')
     if settings.USE_S3_FOR_CUSTOMER_THEMES:
         storage = S3BotoStorage(location="customer_themes")
         with storage.open(file_name, 'w') as f:
             f.write(css_output.encode('utf-8'))
     else:
         theme_folder = os.path.join(settings.COMPREHENSIVE_THEME_DIRS[0],
                                     'customer_themes')
         theme_file = os.path.join(theme_folder, file_name)
         with open(theme_file, 'w') as f:
             f.write(css_output.encode('utf-8'))
Example no. 18
def view_csv_grades(request, course_prefix, course_suffix, exam_slug):
    course = request.common_page_data['course']

    try:
        exam = Exam.objects.get(course=course, is_deleted=0, slug=exam_slug)
    except Exam.DoesNotExist:
        raise Http404

    if course.mode == "draft":
        course = course.image

    if exam.mode == "draft":
        exam = exam.image

    graded_students = ExamScore.objects.filter(
        course=course, exam=exam).values('student',
                                         'student__username').distinct()
    fname = "%s-%s-%s-grades-%s.csv" % (
        course_prefix, course_suffix, exam_slug,
        datetime.datetime.now().strftime("%Y-%m-%d-%H:%M:%S"))
    outfile = open(FILE_DIR + "/" + fname, "w+")

    could_not_parse = ""

    for s in graded_students:  #yes, there is sql in a loop here.  We'll optimize later
        #print(s)
        score_obj = ExamScore.objects.get(course=course,
                                          exam=exam,
                                          student=s['student'])
        subscores = ExamScoreField.objects.filter(parent=score_obj)
        for field in subscores:
            outstring = '"%s","%s","%s"\n' % (
                s['student__username'], field.field_name, str(field.subscore))
            outfile.write(outstring)

    outfile.write("\n")

    #write to S3
    secure_file_storage = S3BotoStorage(bucket=AWS_SECURE_STORAGE_BUCKET_NAME,
                                        access_key=AWS_ACCESS_KEY_ID,
                                        secret_key=AWS_SECRET_ACCESS_KEY)
    s3file = secure_file_storage.open(
        "/%s/%s/reports/exams/%s" % (course_prefix, course_suffix, fname), 'w')
    outfile.seek(0)
    s3file.write(outfile.read())
    s3file.close()
    outfile.close()
    return HttpResponseRedirect(
        secure_file_storage.url(
            "/%s/%s/reports/exams/%s" % (course_prefix, course_suffix, fname),
            response_headers={'response-content-disposition': 'attachment'}))
Example no. 19
def upload_files(source_path, root_path='./'):
    """
    Copy files and folders in source path up to storage, preserving the folder structure
    :param source_path: path to search
    :param root_path: part of source_path that should be dropped from the name
    :return:
    """
    storage = S3BotoStorage()
    for dirpath, dirnames, filenames in os.walk(source_path):
        for filename in filenames:
            source_fpath = os.path.join(dirpath, filename)
            # str.lstrip() strips a character set, not a prefix, so drop
            # root_path explicitly
            if source_fpath.startswith(root_path):
                target_fpath = source_fpath[len(root_path):]
            else:
                target_fpath = source_fpath
            if os.path.splitext(source_fpath)[1] != '.py':
                print("{0} -> {1}".format(source_fpath, target_fpath))
                with open(source_fpath, 'rb') as f:
                    storage.save(target_fpath, f)
Example no. 20
def read_media_file_from_s3(file_path):
    """
    Read a media file from S3.
    The file_path should be the relative path in the media directory.

    Example:
    file_content = read_media_file_from_s3('/files/99/Earth-and-Moon.gif')
    """
    # the DEFAULT_S3_PATH is where the media files are stored.
    file_path = '%s/%s' % (settings.DEFAULT_S3_PATH, str(file_path).lstrip('/'))
    storage = S3BotoStorage()
    f = S3BotoStorageFile(file_path, 'r', storage)
    content = f.read()
    f.close()

    return content
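The same read can also go through the generic storage API instead of constructing S3BotoStorageFile directly; a minimal alternative sketch under the same DEFAULT_S3_PATH assumption:

def read_media_file_from_s3_via_open(file_path):
    # Equivalent sketch using Storage.open(); the helper name is hypothetical
    full_path = '%s/%s' % (settings.DEFAULT_S3_PATH, str(file_path).lstrip('/'))
    storage = S3BotoStorage()
    f = storage.open(full_path, 'r')
    try:
        return f.read()
    finally:
        f.close()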
Example no. 21
def read_theme_file_from_s3(file_path):
    """
    Read a theme file from S3.
    The file_path should be the relative path under THEME_S3_PATH.

    Example:
    file_content = read_theme_file_from_s3('themename/templates/default.html')
    """
    # the THEME_S3_PATH is where the theme files are stored.
    file_path = '%s/%s' % (settings.THEME_S3_PATH, str(file_path).lstrip('/'))
    storage = S3BotoStorage()
    f = S3BotoStorageFile(file_path, 'r', storage)
    content = f.read()
    f.close()

    return content
Example no. 22
    def __init__(self, conf, callback, error_callback, parent_logger=None):
        Transporter.__init__(self, callback, error_callback, parent_logger)
        default_acl = 'public-read'
        default_bucket = conf['AWS_STORAGE_BUCKET_NAME']
        default_querystring_auth = False
        if not conf['IS_PUBLIC']:
            default_acl = 'private'
            default_bucket = conf['AWS_STORAGE_BUCKET_PVT_NAME']
            default_querystring_auth = True

        try:
            self.storage = S3BotoStorage(
                acl=default_acl,
                bucket=default_bucket.encode('utf-8'),
                querystring_auth=default_querystring_auth)
        except Exception as e:
            raise ConnectionError(e)
Example no. 23
    def __init__(self, settings, callback, error_callback, parent_logger=None):
        Transporter.__init__(self, settings, callback, error_callback,
                             parent_logger)

        # Fill out defaults if necessary.
        if "bucket_prefix" not in self.settings:
            self.settings["bucket_prefix"] = ""

        # Map the settings to the format expected by S3Storage.
        try:
            self.storage = S3BotoStorage(self.settings["bucket_name"],
                                         self.settings["bucket_prefix"],
                                         self.settings["access_key_id"],
                                         self.settings["secret_access_key"],
                                         "public-read", self.__class__.headers)
        except Exception as e:
            raise ConnectionError(e)
Example no. 24
class Image(CreatedModifiedModel):
    class Meta:
        ordering = ["-created_on",]

    image = models.ImageField(
        null=True, blank=True,
        width_field='width', height_field='height',
        upload_to=content_file_name,
        storage=S3BotoStorage(bucket=AWS_STORAGE_BUCKET_NAME,
                              acl=AWS_DEFAULT_ACL))
    width = models.PositiveIntegerField(default=0, null=False, blank=True)
    height = models.PositiveIntegerField(default=0, null=False, blank=True)
    color = models.CharField(default='rgb(255,255,255)', max_length=20, blank=False, null=False)
    display_position = models.CharField(default='center', max_length=10, blank=False, null=False)
    face_data = JSONField(default=None, null=True, blank=True)

    def get_api_url(self):
        return '%s%s:%s/api/v1/images/%s/view/' % (SERVER_PROTOCOL, SERVER_NAME, SERVER_PORT, self.id)

    def __str__(self):
        return '%s-%s' % (self.image, self.created_on)

    def get_face_data(self):
        client = boto3.client('rekognition', region_name='us-east-1', aws_access_key_id=AWS_ACCESS_KEY_ID,
                              aws_secret_access_key=AWS_SECRET_ACCESS_KEY)
        rekog_response = client.detect_faces(
            Image={
                'Bytes': self.image.read(),
            },
            Attributes=[
                'ALL'
            ]
        )
        display_position = 'center'
        face_data = None
        if 'FaceDetails' in rekog_response:
            if len(rekog_response['FaceDetails']) > 0:
                if rekog_response['FaceDetails'][0]['BoundingBox']['Top'] < .33:
                    display_position = 'top'
                elif rekog_response['FaceDetails'][0]['BoundingBox']['Top'] > .6:
                    display_position = 'bottom'

            face_data = rekog_response['FaceDetails']

            self.display_position = display_position
            self.face_data = face_data
            self.save()
Example no. 25
def upload_image(local_file_path, external_file_path, bucket):
    """
    Uploads an image to amazon S3

    Arguments
        local_file_path: the place on the local file system the file exists
        external_file_path: the file path from the root of the bucket where the file is going
        bucket: the amazon s3 bucket it's going into
    """
    # create the connection
    storage = S3BotoStorage()
    conn = storage.connection
    bucket = conn.get_bucket(bucket)

    # new key for the bucket (Key is boto.s3.key.Key)
    k = Key(bucket)
    k.key = external_file_path
    k.set_contents_from_filename(local_file_path)
    k.set_acl('public-read')

    # remove the tmp file
    os.remove(local_file_path)
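A hypothetical invocation of the helper above; the paths and bucket name are placeholders, not values from the original code:

# Placeholder paths and bucket, for illustration only
upload_image('/tmp/resized/avatar.png', 'images/avatars/avatar.png',
             'my-media-bucket')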
Example no. 26
def as_signed_url(location, request):
    parts = urlparse(location)
    bucket_name = parts.netloc.split('.')[0]
    key_name = parts.path
    if bucket_name.startswith('s3-'):
        name_parts = key_name.split('/')
        if name_parts and not name_parts[0]:
            name_parts.pop(0)
        bucket_name = name_parts[0]
        key_name = '/'.join(name_parts[1:])
    if key_name.startswith('/'):
        # we strip the leading '/' because otherwise S3 copy triggers a 404:
        # it creates a URL containing '//'.
        key_name = key_name[1:]
    kwargs = {}
    for key in ['access_key', 'secret_key', 'security_token']:
        if key in request.session:
            kwargs[key] = request.session[key]
    if not kwargs:
        LOGGER.error("called `as_signed_url(bucket_name=%s, key_name=%s)`"\
            " with no credentials.", bucket_name, key_name)
    s3_storage = S3BotoStorage(bucket=bucket_name, **kwargs)
    return s3_storage.url(key_name)
Example no. 27
    def __init__(self, cdn_type, *args, **kwargs):
        self._parent = super(AbstractStorage, self)
        self._cdn_type = cdn_type
        self._cdn_version_str = kwargs.pop('cdn_version_str', None)

        aws_headers = getattr(settings, 'AWS_HEADERS', {})
        headers = aws_headers.copy()

        age = self._cdn_settings('EXPIRY_AGE')
        if age > 0:
            expiry_headers = Util.get_expiry_headers(age=age)
            headers.update(expiry_headers)

        for key, val in headers.items():
            # need to encode into bytes, otherwise boto will url encode
            # the header value (see boto/connection.py/HTTPRequest/authorize)
            headers[key] = val.encode('utf8')

        gzip_types = list(getattr(settings, 'GZIP_CONTENT_TYPES', ()))
        com_types = self._cdn_settings('COMPRESSED_TYPES')
        for ext in com_types:
            mime = mimetypes.types_map.get('.' + ext, None)
            if mime:
                gzip_types.append(mime)

        # Fix the issue when the mime type is not found
        if 'js' in com_types:
            gzip_types += ('text/javascript', 'application/javascript',
                           'application/x-javascript')

        self._cdn_gzip_storage = S3BotoStorage(*args,
                                               headers=headers,
                                               gzip=True,
                                               gzip_content_types=gzip_types,
                                               **kwargs)
        self._parent.__init__(*args, headers=headers, gzip=False, **kwargs)
Example no. 28
# TODO See: https://github.com/jschneier/django-storages/issues/47
# Revert the following and use str after the above-mentioned bug is fixed in
# either django-storage-redux or boto
AWS_HEADERS = {
    'Cache-Control':
    six.b('max-age=%d, s-maxage=%d, must-revalidate' %
          (AWS_EXPIRY, AWS_EXPIRY))
}

# URL that handles the media served from MEDIA_ROOT, used for managing
# stored files.

# See: http://stackoverflow.com/questions/10390244/
from storages.backends.s3boto import S3BotoStorage
StaticRootS3BotoStorage = lambda: S3BotoStorage(location='static')
MediaRootS3BotoStorage = lambda: S3BotoStorage(location='media')
DEFAULT_FILE_STORAGE = 'config.settings.production.MediaRootS3BotoStorage'

MEDIA_URL = 'https://s3.amazonaws.com/%s/media/' % AWS_STORAGE_BUCKET_NAME

# Static Assets
# ------------------------

STATIC_URL = 'https://s3.amazonaws.com/%s/static/' % AWS_STORAGE_BUCKET_NAME
STATICFILES_STORAGE = 'config.settings.production.StaticRootS3BotoStorage'
# See: https://github.com/antonagestam/collectfast
# For Django 1.7+, 'collectfast' should come before
# 'django.contrib.staticfiles'
AWS_PRELOAD_METADATA = True
INSTALLED_APPS = ('collectfast', ) + INSTALLED_APPS
Example no. 29
"""Custom S3 storage backends to store files in subfolders."""
from storages.backends.s3boto import S3BotoStorage

MediaRootS3BotoStorage = lambda: S3BotoStorage(location='media')
Example no. 30
import os
from datetime import datetime
from purplebit_site.settings import AWS_STORAGE_BUCKET_NAME
from storages.backends.s3boto import S3BotoStorage
from django.utils.functional import SimpleLazyObject

StaticRootS3BotoStorage = lambda: S3BotoStorage(location='static')

# For media uploads:
S3URL = 'https://s3.amazonaws.com/'
S3Bucket = AWS_STORAGE_BUCKET_NAME
S3BucketBasePath = '/media/'


def stripBaseURL(url):
    if url == "":
        return url
    parts = url.split(S3URL + S3Bucket + S3BucketBasePath)
    if len(parts) != 2 or parts[0] != '':
        raise Exception('stripBaseURL: Illegal baseURL sent')
    return parts[1]


def addBaseURL(url):
    if url == "":
        return url
    return S3URL + S3Bucket + S3BucketBasePath + url
Example no. 31
from storages.backends.s3boto import S3BotoStorage

StaticS3BotoStorage = lambda: S3BotoStorage(bucket='travelsite',
                                            location='static')
MediaS3BotoStorage = lambda: S3BotoStorage(bucket='travelsite',
                                           location='media')
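As Example no. 28 shows, these zero-argument callables are referenced from settings by dotted path and called to build the storage instance. A minimal wiring sketch, assuming this module lives at myproject/s3utils.py (an invented path):

# Hypothetical settings wiring; 'myproject.s3utils' is an assumption
STATICFILES_STORAGE = 'myproject.s3utils.StaticS3BotoStorage'
DEFAULT_FILE_STORAGE = 'myproject.s3utils.MediaS3BotoStorage'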
Example no. 32
    AWS_PRELOAD_METADATA = True
    AWS_IS_GZIPPED = True

    AWS_EXPIRY = 60 * 60 * 24 * 7
    AWS_HEADERS = {
        'Cache-Control': 'max-age=%d, s-maxage=%d, must-revalidate' % (AWS_EXPIRY, AWS_EXPIRY)
    }

    # Using django-pipeline along with S3 storage for staticfiles
    # https://django-pipeline.readthedocs.org/en/latest/storages.html#using-with-other-storages
    from django.contrib.staticfiles.storage import CachedFilesMixin
    from pipeline.storage import PipelineMixin
    from storages.backends.s3boto import S3BotoStorage

    class S3PipelineCachedStorage(PipelineMixin, CachedFilesMixin, S3BotoStorage):
        pass

    # Separate buckets for static files and media files
    AWS_STATIC_STORAGE_BUCKET_NAME = '%s-static' % PROJECT_NAME.lower()
    AWS_MEDIA_STORAGE_BUCKET_NAME = '%s-media' % PROJECT_NAME.lower()

    STATIC_URL = '//%s.s3.amazonaws.com/' % AWS_STATIC_STORAGE_BUCKET_NAME
    MEDIA_URL = '//%s.s3.amazonaws.com/' % AWS_MEDIA_STORAGE_BUCKET_NAME

    StaticRootS3BotoStorage = lambda: S3PipelineCachedStorage(bucket=AWS_STATIC_STORAGE_BUCKET_NAME)
    MediaRootS3BotoStorage = lambda: S3BotoStorage(bucket=AWS_MEDIA_STORAGE_BUCKET_NAME)

    STATICFILES_STORAGE = 'config.settings.prod.StaticRootS3BotoStorage'
    DEFAULT_FILE_STORAGE = 'config.settings.prod.MediaRootS3BotoStorage'
########## END AMAZON S3 CONFIGURATION
Example no. 33
    def __init__(self, **kwargs):
        S3BotoStorage.__init__(self, **kwargs)

        # Override the settings-derived defaults after the parent __init__
        self.querystring_auth = True
        self.querystring_expire = 3600
        self.acl = 'private'
Example no. 34
	def __init__(self):
		self.host = 's3-us-west-2.amazonaws.com'
		self.bucket_name = 'static.stenci.la'
		self.secure_urls = False
		S3BotoStorage.__init__(self)