import copy

from django.contrib.staticfiles.storage import StaticFilesStorage
from django.core.files.base import ContentFile


class S3LocalCachedMixin(object):
    """
    Mixin that adds a local, on-disk cache to an S3 storage backend.
    """
    def __init__(self, *args, **kwargs):
        super(S3LocalCachedMixin, self).__init__(*args, **kwargs)
        # Local mirror of everything pushed to S3. CONVOY_LOCAL_CACHE_ROOT
        # is assumed to be defined elsewhere in this module (e.g. pulled
        # from the project settings).
        self._local = StaticFilesStorage(location=CONVOY_LOCAL_CACHE_ROOT)

    def save(self, name, content, *args, **kwargs):
        # Accept raw strings/bytes as well as File objects; the storage
        # API downstream expects something with chunks().
        if not hasattr(content, 'chunks'):
            content = ContentFile(content)
        return self._save(name, content, *args, **kwargs)

    def _save(self, name, content, *args, **kwargs):
        # Some configurations of the S3 backend mutate the content in
        # place, especially when AWS_IS_GZIPPED = True. Keep a pre-mutation
        # copy for the local cache so we don't write gzipped bytes to disk.
        orig_content = copy.copy(content)
        super(S3LocalCachedMixin, self)._save(name, content, *args, **kwargs)
        # Refresh the local mirror with the unmutated copy.
        if self._local.exists(name):
            self._local.delete(name)
        self._local._save(name, orig_content, *args, **kwargs)
        return name

    def delete(self, *args, **kwargs):
        # Evict the local copy first, then delete from S3.
        if self._local.exists(*args, **kwargs):
            self._local.delete(*args, **kwargs)
        return super(S3LocalCachedMixin, self).delete(*args, **kwargs)

    def open(self, name, *args, **kwargs):
        if self._local.exists(name):
            # Cache hit: serve the file straight from local disk.
            return self._local.open(name, *args, **kwargs)
        else:
            # Cache miss: fetch the file from S3 over the network...
            the_file = super(S3LocalCachedMixin,
                             self).open(name, *args, **kwargs)
            # ...and save it locally so future opens hit the cache.
            self._local.save(name, the_file)
            # Saving consumed the stream; rewind before handing it back.
            if hasattr(the_file, "seek"):
                the_file.seek(0)
            return the_file

    def local_path(self, *args, **kwargs):
        # Filesystem path of the locally cached copy.
        return self._local.path(*args, **kwargs)
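

# A minimal composition sketch (illustrative, not part of the original
# module): pair the mixin with django-storages' S3 backend. The mixin must
# come first in the MRO so its save/open/delete overrides wrap the S3
# implementations.
from storages.backends.s3boto import S3BotoStorage


class CachedS3BotoStorage(S3LocalCachedMixin, S3BotoStorage):
    """S3 storage that transparently maintains a local on-disk cache."""
    pass

# The composed class would then be wired up in settings, e.g. (the module
# path here is hypothetical):
#     DEFAULT_FILE_STORAGE = 'myapp.storage.CachedS3BotoStorage'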