Example #1
    def upload_file(self, abspath, cloud_filename):
        """
        Uploads a file to the container.
        """
        if not self.test_run:
            headers = None
            contents = open(abspath, "rb")
            size = os.stat(abspath).st_size

            mime_type, encoding = mimetypes.guess_type(abspath)
            if mime_type in CUMULUS.get("GZIP_CONTENT_TYPES", []):
                headers = {'Content-Encoding': 'gzip'}
                contents = get_gzipped_contents(contents)
                size = contents.size

            self.conn.put_object(container=self.container_name,
                                 obj=cloud_filename,
                                 contents=contents,
                                 content_length=size,
                                 etag=None,
                                 content_type=mime_type,
                                 headers=headers)
            # TODO syncheaders
            #sync_headers(cloud_obj)
        self.create_count += 1
        if not self.quiet or self.verbosity > 1:
            print("Uploaded: {0}".format(cloud_filename))
Example #2
    def upload_file(self, abspath, cloud_filename):
        """
        Uploads a file to the container.
        """
        if not self.test_run:
            headers = None
            contents = open(abspath, "rb")
            size = os.stat(abspath).st_size

            mime_type, encoding = mimetypes.guess_type(abspath)
            if mime_type in CUMULUS.get("GZIP_CONTENT_TYPES", []):
                headers = {"Content-Encoding": "gzip"}
                contents = get_gzipped_contents(contents)
                size = contents.size

            self.conn.put_object(container=self.container_name,
                                 obj=cloud_filename,
                                 contents=contents,
                                 content_length=size,
                                 etag=None,
                                 content_type=mime_type,
                                 headers=headers)
            # TODO syncheaders
            #from cumulus.storage import sync_headers
            #sync_headers(cloud_obj)
        self.upload_count += 1
        if not self.quiet or self.verbosity > 1:
            print("Uploaded: {0}".format(cloud_filename))
Example #3
def get_headers(name, content_type):
    headers = {"Content-Type": content_type}
    # gzip the file if it's of the right content type
    if content_type in CUMULUS.get("GZIP_CONTENT_TYPES", []):
        headers["Content-Encoding"] = "gzip"
    if CUMULUS["HEADERS"]:
        for pattern, pattern_headers in HEADER_PATTERNS:
            if pattern.match(name):
                headers.update(pattern_headers.copy())
    return headers
Example #4
def cdn_url(request):
    """
    A context processor to expose the full cdn url in templates.

    """
    cloudfiles_storage = CloudFilesStorage()
    container_url = cloudfiles_storage._get_container_url()
    cdn_url = container_url + CUMULUS.get('STATIC_PREFIX', '')

    return {'CDN_URL': cdn_url}
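The cdn_url context processor only takes effect once it is registered with Django's template engine. A hedged sketch, assuming the modern TEMPLATES setting (older projects used the TEMPLATE_CONTEXT_PROCESSORS tuple instead) and assuming the function lives at cumulus.context_processors.cdn_url:

# settings.py (sketch; the dotted path below is an assumption)
TEMPLATES = [
    {
        "BACKEND": "django.template.backends.django.DjangoTemplates",
        "APP_DIRS": True,
        "OPTIONS": {
            "context_processors": [
                "django.template.context_processors.request",
                # makes {{ CDN_URL }} available in every template
                "cumulus.context_processors.cdn_url",
            ],
        },
    },
]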
Example #5
def get_headers(name, content_type):
    headers = {"Content-Type": content_type}
    # gzip the file if it's of the right content type
    if content_type in CUMULUS.get("GZIP_CONTENT_TYPES", []):
        headers["Content-Encoding"] = "gzip"
    if CUMULUS["HEADERS"]:
        for pattern, pattern_headers in HEADER_PATTERNS:
            if pattern.match(name):
                headers.update(pattern_headers.copy())
    return headers
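get_headers() depends on the CUMULUS settings dict and the compiled HEADER_PATTERNS shown in the later examples. A usage sketch with made-up setting values, assuming it is placed in the same module as get_headers so the globals the function reads are these ones:

import re

# Illustrative values only -- not the project's real settings
CUMULUS = {
    "GZIP_CONTENT_TYPES": ["text/css", "application/javascript"],
    "HEADERS": (
        (r".*\.(css|js)$", {"Cache-Control": "max-age=86400"}),
    ),
}
HEADER_PATTERNS = tuple((re.compile(p), h) for p, h in CUMULUS.get("HEADERS", {}))

print(get_headers("static/js/app.js", "application/javascript"))
# {'Content-Type': 'application/javascript',
#  'Content-Encoding': 'gzip',
#  'Cache-Control': 'max-age=86400'}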
Example #6
    def _save(self, name, content):
        """
        Uses the Swiftclient service to write ``content`` to a remote
        file (called ``name``).
        """
        # Checks if the content_type is already set.
        # Otherwise uses the mimetypes library to guess.
        if hasattr(content.file, "content_type"):
            content_type = content.file.content_type
        else:
            mime_type, encoding = mimetypes.guess_type(name)
            content_type = mime_type

        headers = {"Content-Type": content_type}

        # gzip the file if it's of the right content type
        if content_type in CUMULUS.get("GZIP_CONTENT_TYPES", []):
            content_encoding = headers["Content-Encoding"] = "gzip"
        else:
            content_encoding = None

        if CUMULUS["USE_PYRAX"]:
            if content_encoding == "gzip":
                content = get_gzipped_contents(content)
            self.connection.store_object(container=self.container_name,
                                         obj_name=name,
                                         data=content.read(),
                                         content_type=content_type,
                                         content_encoding=content_encoding,
                                         ttl=CUMULUS["FILE_TTL"],
                                         etag=None)
            if CUMULUS["HEADERS"]:
                # set headers/object metadata
                metadata = {}
                for pattern, pattern_headers in HEADER_PATTERNS:
                    if pattern.match(name):
                        metadata.update(pattern_headers.copy())
                self.connection.set_object_metadata(container=self.container_name,
                                                    obj=name,
                                                    metadata=metadata,
                                                    prefix='')
        else:
            # TODO gzipped content when using swift client
            self.connection.put_object(self.container_name, name,
                                       content, headers=headers)

        return name
Example #7
    def _save(self, name, content):
        """
        Uses the Swiftclient service to write ``content`` to a remote
        file (called ``name``).
        """
        # Checks if the content_type is already set.
        # Otherwise uses the mimetypes library to guess.
        if hasattr(content.file, "content_type"):
            content_type = content.file.content_type
        else:
            mime_type, encoding = mimetypes.guess_type(name)
            content_type = mime_type

        headers = {"Content-Type": content_type}

        # gzip the file if it's of the right content type
        if content_type in CUMULUS.get("GZIP_CONTENT_TYPES", []):
            content_encoding = headers["Content-Encoding"] = "gzip"
        else:
            content_encoding = None

        if CUMULUS["USE_PYRAX"]:
            # TODO set headers
            if content_encoding == "gzip":
                content = get_gzipped_contents(content)
            self.connection.store_object(container=self.container_name,
                                         obj_name=name,
                                         data=content.read(),
                                         content_type=content_type,
                                         content_encoding=content_encoding,
                                         etag=None)
        else:
            # TODO gzipped content when using swift client
            self.connection.put_object(self.container_name,
                                       name,
                                       content,
                                       headers=headers)

        return name
Example #8
    def _save(self, name, content):
        """
        Uses the Swiftclient service to write ``content`` to a remote
        file (called ``name``).
        """
        # Checks if the content_type is already set.
        # Otherwise uses the mimetypes library to guess.
        if hasattr(content.file, "content_type"):
            content_type = content.file.content_type
        else:
            mime_type, encoding = mimetypes.guess_type(name)
            content_type = mime_type

        headers = {"Content-Type": content_type}

        # gzip the file if it's of the right content type
        if content_type in CUMULUS.get("GZIP_CONTENT_TYPES", []):
            content_encoding = headers["Content-Encoding"] = "gzip"
        else:
            content_encoding = None

        if CUMULUS["USE_PYRAX"]:
            # TODO set headers
            if content_encoding == "gzip":
                content = get_gzipped_contents(content)
            self.connection.store_object(
                container=self.container_name,
                obj_name=name,
                data=content.read(),
                content_type=content_type,
                content_encoding=content_encoding,
                etag=None,
            )
        else:
            # TODO gzipped content when using swift client
            self.connection.put_object(self.container_name, name, content, headers=headers)

        return name
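The _save() variants above are not called directly; Django's Storage API goes through save(), which delegates to _save(). A usage sketch, where the SwiftclientStorage import path is an assumption about how the project exposes the class:

from django.core.files.base import ContentFile

# Hypothetical import path -- adjust to whatever the project actually exposes.
from cumulus.storage import SwiftclientStorage

storage = SwiftclientStorage()
# save() normalizes the name and then calls the _save() shown above.
stored_name = storage.save("css/site.css", ContentFile(b"body { margin: 0; }"))
print("stored as:", stored_name)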
Example #9
import mimetypes
import pyrax
import re
import swiftclient
from gzip import GzipFile
from StringIO import StringIO

from django.core.files.base import File, ContentFile
from django.core.files.storage import Storage

from cumulus.settings import CUMULUS


HEADER_PATTERNS = tuple((re.compile(p), h) for p, h in CUMULUS.get("HEADERS", {}))


def sync_headers(cloud_obj, headers={}, header_patterns=HEADER_PATTERNS):
    """
    Overwrites the given cloud_obj's headers with the ones given as ``headers``
    and adds additional headers as defined in the HEADERS setting depending on
    the cloud_obj's file name.
    """
    # don't set headers on directories
    content_type = getattr(cloud_obj, "content_type", None)
    if content_type == "application/directory":
        return
    matched_headers = {}
    for pattern, pattern_headers in header_patterns:
        if pattern.match(cloud_obj.name):
            matched_headers.update(pattern_headers.copy())
    # preserve headers already set
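The excerpt above is cut off after the "# preserve headers already set" comment. A hedged guess at how the function plausibly continues: fold in the headers already present on the object, let explicitly passed headers win, and only push an update when something changed. The cloud_obj.headers attribute and sync_metadata() call are assumptions, not confirmed by this excerpt:

    # (assumed continuation -- not part of the excerpt above)
    matched_headers.update(cloud_obj.headers)
    # explicitly passed headers override both pattern matches and existing values
    matched_headers.update(headers)
    if matched_headers != cloud_obj.headers:
        cloud_obj.headers = matched_headers
        cloud_obj.sync_metadata()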
Example #10
import mimetypes
import re
from gzip import GzipFile

try:
    from cStringIO import StringIO
except ImportError:
    from StringIO import StringIO

from django.core.files.base import File, ContentFile
from django.core.files.storage import Storage

from cumulus.authentication import Auth
from cumulus.settings import CUMULUS


HEADER_PATTERNS = tuple((re.compile(p), h) for p, h in CUMULUS.get("HEADERS", {}))


def get_content_type(name, content):
    """
    Checks if the content_type is already set.
    Otherwise uses the mimetypes library to guess.
    """
    if hasattr(content, "content_type"):
        content_type = content.content_type
    else:
        mime_type, encoding = mimetypes.guess_type(name)
        content_type = mime_type
    return content_type
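A quick usage sketch for get_content_type(), contrasting an object that already carries a content_type attribute with one that forces the mimetypes fallback; FakeUpload is a stand-in for Django's UploadedFile:

from django.core.files.base import ContentFile

print(get_content_type("index.html", ContentFile(b"<html></html>")))
# no content_type attribute, so mimetypes guesses -> "text/html"

class FakeUpload(object):
    content_type = "application/pdf"

print(get_content_type("report.bin", FakeUpload()))
# attribute wins over the file extension -> "application/pdf"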

Example #11
import mimetypes
import re
import warnings
from gzip import GzipFile

try:
    from cStringIO import StringIO
except ImportError:
    from StringIO import StringIO

from django.core.files.base import File, ContentFile
from django.core.files.storage import Storage

from cumulus.authentication import Auth
from cumulus.settings import CUMULUS

HEADER_PATTERNS = tuple(
    (re.compile(p), h) for p, h in CUMULUS.get("HEADERS", {}))


def get_content_type(name, content):
    """
    Checks if the content_type is already set.
    Otherwise uses the mimetypes library to guess.
    """
    if hasattr(content, "content_type"):
        content_type = content.content_type
    else:
        mime_type, encoding = mimetypes.guess_type(name)
        content_type = mime_type
    return content_type

import pyrax
import re
import swiftclient
import newrelic
from datetime import datetime

from django.core.files.base import File
from django.core.files.storage import Storage
from django.utils.encoding import force_text

from cumulus.settings import CUMULUS
from cumulus.utils import (get_digest, gzip_content, read_gzipped_content,
                           get_content_type)


HEADER_PATTERNS = tuple((re.compile(p), h) for p, h in CUMULUS.get("HEADERS", {}))


def sync_headers(cloud_obj, headers={}, header_patterns=HEADER_PATTERNS):
    """
    Overwrites the given cloud_obj's headers with the ones given as ``headers``
    and adds additional headers as defined in the HEADERS setting depending on
    the cloud_obj's file name.
    """
    # don't set headers on directories
    content_type = getattr(cloud_obj, "content_type", None)
    if content_type == "application/directory":
        return
    matched_headers = {}
    for pattern, pattern_headers in header_patterns:
        if pattern.match(cloud_obj.name):