Code example #1
from django.conf import settings
from django.contrib.staticfiles import utils
from django.contrib.staticfiles.storage import (
    StaticFilesStorage,
    staticfiles_storage,
)
from django.core.management.base import BaseCommand


class Command(BaseCommand):
    help = "Upload non-compiled static files to the remote storage backend."

    def handle(self, *args, **options):
        local_storage = StaticFilesStorage()

        # Skip files matching our ignore patterns
        ignore_patterns = getattr(settings, 'SYNCSTATIC_IGNORE_PATTERNS', None)
        files = set(
            utils.get_files(local_storage, ignore_patterns=ignore_patterns))

        # Remove any files that went into compilation
        files -= set(settings.PIPELINE_JS['main']['source_filenames'])
        files -= set(settings.PIPELINE_CSS['main']['source_filenames'])

        for name in files:
            print('syncing to s3: %s' % name)
            # Open in binary mode; static files may be images, fonts, etc.
            staticfiles_storage.save(name, local_storage.open(name, 'rb'))
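
The command above reads django-pipeline style settings plus one custom setting. Here is a minimal sketch of what they might look like; the setting names come from the code above, but every value is an illustrative assumption, not taken from the original project:

# Hypothetical settings excerpt; values are illustrative assumptions.
SYNCSTATIC_IGNORE_PATTERNS = ['*.scss', '*.less', 'CVS', '.*']

PIPELINE_JS = {
    'main': {
        'source_filenames': ('js/app.js', 'js/widgets.js'),
        'output_filename': 'js/main.min.js',
    },
}

PIPELINE_CSS = {
    'main': {
        'source_filenames': ('css/site.css',),
        'output_filename': 'css/main.min.css',
    },
}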
Code example #2
import copy

from django.contrib.staticfiles.storage import StaticFilesStorage
from django.core.files.base import ContentFile


class S3LocalCachedMixin(object):
    """
    Mixin that adds local caching to an S3 storage backend.
    """
    def __init__(self, *args, **kwargs):
        super(S3LocalCachedMixin, self).__init__(*args, **kwargs)
        # CONVOY_LOCAL_CACHE_ROOT (the cache directory) is assumed to be
        # defined elsewhere, e.g. imported from the project settings.
        self._local = StaticFilesStorage(location=CONVOY_LOCAL_CACHE_ROOT)

    def save(self, name, content, *args, **kwargs):
        # Storage.save() expects a File-like object with chunks().
        if not hasattr(content, 'chunks'):
            content = ContentFile(content)
        return self._save(name, content, *args, **kwargs)

    def _save(self, name, content, *args, **kwargs):
        # Some configurations of the S3 backend mutate the content in place,
        # especially when AWS_IS_GZIPPED = True. Keep a pre-mutation copy for
        # the local cache so we don't save garbage to disk.
        orig_content = copy.copy(content)
        super(S3LocalCachedMixin, self)._save(name, content, *args, **kwargs)
        if self._local.exists(name):
            self._local.delete(name)
        self._local._save(name, orig_content, *args, **kwargs)
        return name

    def delete(self, *args, **kwargs):
        if self._local.exists(*args, **kwargs):
            self._local.delete(*args, **kwargs)
        return super(S3LocalCachedMixin, self).delete(*args, **kwargs)

    def open(self, name, *args, **kwargs):
        if self._local.exists(name):
            # Cache hit: read from local disk.
            return self._local.open(name, *args, **kwargs)
        else:
            # Cache miss: read from the network, then save the file locally
            # for future reads.
            the_file = super(S3LocalCachedMixin, self).open(name, *args, **kwargs)
            self._local.save(name, the_file)
            # Rewind so callers can read the file from the start.
            if hasattr(the_file, "seek"):
                the_file.seek(0)
            return the_file

    def local_path(self, *args, **kwargs):
        return self._local.path(*args, **kwargs)
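
To put the mixin in front of a real backend, compose it with a concrete S3 storage class. A minimal sketch, assuming the old boto-based backend from django-storages; the composed class name and the dotted settings path in the comment are hypothetical:

from storages.backends.s3boto import S3BotoStorage


class S3LocalCachedStorage(S3LocalCachedMixin, S3BotoStorage):
    """S3 storage with a local read-through cache."""
    pass

# In settings (dotted path is hypothetical):
# STATICFILES_STORAGE = 'myproject.storage.S3LocalCachedStorage'

Listing the mixin first in the bases matters: its save(), open(), and delete() run first, and their super() calls fall through to the S3 backend.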