Code Example #1
File: s3.py  Project: benchoufi/django-mediasync
    def put(self, filedata, content_type, remote_path, force=False):

        now = datetime.datetime.utcnow()
        then = now + datetime.timedelta(self.expiration_days)
        expires = then.strftime("%a, %d %b %Y %H:%M:%S GMT")

        if self.aws_prefix:
            remote_path = "%s/%s" % (self.aws_prefix, remote_path)

        (hexdigest, b64digest) = mediasync.checksum(filedata)
        raw_b64digest = b64digest  # store raw b64digest to add as file metadata

        # create initial set of headers
        headers = {
            "x-amz-acl": "public-read",
            "Content-Type": content_type,
            "Expires": expires,
            "Cache-Control": 'max-age=%d, public' % (self.expiration_days * 24 * 3600),
        }

        key = self._bucket.get_key(remote_path)

        if key is None:
            key = Key(self._bucket, remote_path)

        key_meta = key.get_metadata('mediasync-checksum') or ''
        s3_checksum = key_meta.replace(' ', '+')
        if force or s3_checksum != raw_b64digest:

            key.set_metadata('mediasync-checksum', raw_b64digest)
            key.set_contents_from_string(filedata,
                                         headers=headers,
                                         md5=(hexdigest, b64digest))

            # gzip and re-upload the file if its content type is marked as
            # compressible (no minimum file size check is performed)
            if content_type in TYPES_TO_COMPRESS:

                key = Key(self._bucket, "%s.gz" % remote_path)

                filedata = mediasync.compress(filedata)
                # update the checksum to reflect the compressed data
                (hexdigest, b64digest) = mediasync.checksum(filedata)
                headers["Content-Disposition"] = 'inline; filename="%sgz"' % remote_path.split('/')[-1]
                headers["Content-Encoding"] = 'gzip'

                key.set_metadata('mediasync-checksum', raw_b64digest)
                key.set_contents_from_string(filedata,
                                             headers=headers,
                                             md5=(hexdigest, b64digest))

            return True
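
The put() method above relies on names defined elsewhere in the module. The sketch below reconstructs the assumed context; the exact import paths are inferences from what the snippet references, not verbatim source.

import datetime

from boto.s3.key import Key   # boto's S3 key abstraction
import mediasync              # assumed home of the checksum() and compress() helpers
from mediasync import TYPES_TO_COMPRESS   # content types worth gzipping

# The method itself is assumed to live on a storage backend class whose
# open() sets self._bucket (a boto Bucket), self.aws_prefix and
# self.expiration_days.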
Code Example #2
File: s3.py  Project: aburan28/django-mediasync
    def put(self, filedata, content_type, remote_path, force=False):

        now = datetime.datetime.utcnow()
        then = now + datetime.timedelta(self.expiration_days)
        expires = then.strftime("%a, %d %b %Y %H:%M:%S GMT")
        
        if self.aws_prefix:
            remote_path = "%s/%s" % (self.aws_prefix, remote_path)
            
        (hexdigest, b64digest) = mediasync.checksum(filedata)
        raw_b64digest = b64digest # store raw b64digest to add as file metadata

        # create initial set of headers
        headers = {
            "x-amz-acl": "public-read",
            "Content-Type": content_type,
            "Expires": expires,
            "Cache-Control": 'max-age=%d, public' % (self.expiration_days * 24 * 3600),
        }
        
        key = self._bucket.get_key(remote_path)
        
        if key is None:
            key = Key(self._bucket, remote_path)
        
        key_meta = key.get_metadata('mediasync-checksum') or ''
        s3_checksum = key_meta.replace(' ', '+')
        if force or s3_checksum != raw_b64digest:
            
            key.set_metadata('mediasync-checksum', raw_b64digest)
            key.set_contents_from_string(filedata, headers=headers, md5=(hexdigest, b64digest))
        
            # gzip and re-upload the file if its content type is marked as
            # compressible (no minimum file size check is performed)
            if content_type in TYPES_TO_COMPRESS:
                # Use a .gzt extension to avoid issues with Safari on OSX
                key = Key(self._bucket, "%s.gzt" % remote_path)
                
                filedata = mediasync.compress(filedata)
                (hexdigest, b64digest) = mediasync.checksum(filedata) # update checksum with compressed data
                headers["Content-Disposition"] = 'inline; filename="%sgzt"' % remote_path.split('/')[-1]
                headers["Content-Encoding"] = 'gzip'
                
                key.set_metadata('mediasync-checksum', raw_b64digest)
                key.set_contents_from_string(filedata, headers=headers, md5=(hexdigest, b64digest))
            
            return True
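
A hedged usage sketch follows; the Client name and setup line are assumptions, and only put()'s signature comes from the snippets above. Note that the two forks differ only in the extension given to the compressed copy: ".gz" in the first, ".gzt" in the second to sidestep a Safari on OS X issue, with the Content-Disposition filename adjusted to match.

# Hypothetical call site for the backend's put() method.
client = Client()   # assumed backend class, already open()ed against a bucket
with open('css/style.css') as f:
    changed = client.put(f.read(), 'text/css', 'css/style.css')
if changed:
    # assuming 'text/css' is in TYPES_TO_COMPRESS, a gzipped copy was written too
    print 'uploaded'

Note that put() returns True only when it actually uploads; when the stored mediasync-checksum matches the local digest and force is False, it falls through and implicitly returns None. The gzipped copy deliberately stores the uncompressed file's checksum in its metadata, so later syncs compare against the source file rather than the compressed bytes.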
Code Example #3
File: tests.py  Project: JDrosdeck/django-mediasync
    def testSync(self):
        
        # calculate cache control
        cc = "max-age=%i, public" % (self.client.expiration_days * 24 * 3600)
        
        # do a sync then reopen client
        mediasync.sync(self.client, force=True, verbose=False)
        self.client.open()
        conn = self.client.get_connection()
        
        # setup http connection
        http_conn = httplib.HTTPSConnection('s3.amazonaws.com')
        
        # test synced files then delete them
        bucket = conn.get_bucket(self.bucket_name)
        
        static_paths = mediasync.listdir_recursive(os.path.join(PWD, 'media'))
        joined_paths = msettings['JOINED'].iterkeys()
        
        for path in itertools.chain(static_paths, joined_paths):
            
            key = bucket.get_key(path)
            
            if path in msettings['JOINED']:
                args = [PWD, 'media', '_test', path.split('/')[1]]
            else:
                args = [PWD, 'media'] + path.split('/')
            local_content = readfile(os.path.join(*args))

            # compare file content
            self.assertEqual(key.read(), local_content)
            
            # verify checksum
            key_meta = key.get_metadata('mediasync-checksum') or ''
            s3_checksum = key_meta.replace(' ', '+')
            (hexdigest, b64digest) = mediasync.checksum(local_content)
            self.assertEqual(s3_checksum, b64digest)
            
            # do a HEAD request on the file
            http_conn.request('HEAD', "/%s/%s" % (self.bucket_name, path))
            response = http_conn.getresponse()
            response.read()
            
            # verify valid content type
            content_type = mimetypes.guess_type(path)[0] or msettings['DEFAULT_MIMETYPE']
            self.assertEqual(response.getheader("Content-Type", None), content_type)
            
            # check for valid expires headers
            expires = response.getheader("Expires", None)
            self.assertRegexpMatches(expires, EXPIRES_RE)
            
            # check for valid cache control header
            cc_header = response.getheader("Cache-Control", None)
            self.assertEqual(cc_header, cc)
            
            # done with the file, delete it from S3
            key.delete()
            
            if content_type in mediasync.TYPES_TO_COMPRESS:
                
                key = bucket.get_key("%s.gz" % path)
                
                # do a HEAD request on the file
                http_conn.request('HEAD', "/%s/%s.gz" % (self.bucket_name, path))
                response = http_conn.getresponse()
                response.read()
                
                key_meta = key.get_metadata('mediasync-checksum') or ''
                s3_checksum = key_meta.replace(' ', '+')
                self.assertEqual(s3_checksum, b64digest)
                
                key.delete()
        
        http_conn.close()
        
        # wait a moment then delete temporary bucket
        time.sleep(2)
        conn.delete_bucket(self.bucket_name)
        
        # close client
        self.client.close()
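
Both put() and testSync() treat mediasync.checksum() as returning a (hexdigest, b64digest) pair that boto accepts for its md5= argument. A plausible reconstruction, assuming an MD5 digest (an inference from boto's API, not the package's actual source):

import base64
import hashlib

def checksum(filedata):
    # returns (hexdigest, b64digest) in the shape boto's
    # set_contents_from_string(..., md5=...) expects
    digest = hashlib.md5(filedata)
    return digest.hexdigest(), base64.b64encode(digest.digest())

The base64 form is what ends up in the mediasync-checksum metadata; the tests' replace(' ', '+') calls compensate for '+' characters coming back as spaces when the metadata round-trips through S3's headers.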
Code Example #4
File: tests.py  Project: benchoufi/django-mediasync
    def testSync(self):

        # calculate cache control
        cc = "max-age=%i, public" % (self.client.expiration_days * 24 * 3600)

        # do a sync then reopen client
        mediasync.sync(self.client, force=True, verbose=False)
        self.client.open()
        conn = self.client.get_connection()

        # setup http connection
        http_conn = httplib.HTTPSConnection('s3.amazonaws.com')

        # test synced files then delete them
        bucket = conn.get_bucket(self.bucket_name)

        static_paths = mediasync.listdir_recursive(os.path.join(PWD, 'media'))
        joined_paths = msettings['JOINED'].iterkeys()

        for path in itertools.chain(static_paths, joined_paths):

            key = bucket.get_key(path)

            if path in msettings['JOINED']:
                args = [PWD, 'media', '_test', path.split('/')[1]]
            else:
                args = [PWD, 'media'] + path.split('/')
            local_content = readfile(os.path.join(*args))

            # compare file content
            self.assertEqual(key.read(), local_content)

            # verify checksum
            key_meta = key.get_metadata('mediasync-checksum') or ''
            s3_checksum = key_meta.replace(' ', '+')
            (hexdigest, b64digest) = mediasync.checksum(local_content)
            self.assertEqual(s3_checksum, b64digest)

            # do a HEAD request on the file
            http_conn.request('HEAD', "/%s/%s" % (self.bucket_name, path))
            response = http_conn.getresponse()
            response.read()

            # verify valid content type
            content_type = mimetypes.guess_type(path)[0] or msettings['DEFAULT_MIMETYPE']
            self.assertEqual(response.getheader("Content-Type", None), content_type)

            # check for valid expires headers
            expires = response.getheader("Expires", None)
            self.assertRegexpMatches(expires, EXPIRES_RE)

            # check for valid cache control header
            cc_header = response.getheader("Cache-Control", None)
            self.assertEqual(cc_header, cc)

            # done with the file, delete it from S3
            key.delete()

            if content_type in mediasync.TYPES_TO_COMPRESS:

                key = bucket.get_key("%s.gz" % path)

                # do a HEAD request on the file
                http_conn.request('HEAD', "/%s/%s.gz" % (self.bucket_name, path))
                response = http_conn.getresponse()
                response.read()

                key_meta = key.get_metadata('mediasync-checksum') or ''
                s3_checksum = key_meta.replace(' ', '+')
                self.assertEqual(s3_checksum, b64digest)

                key.delete()

        http_conn.close()

        # wait a moment then delete temporary bucket
        time.sleep(2)
        conn.delete_bucket(self.bucket_name)

        # close client
        self.client.close()
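
testSync() also references an EXPIRES_RE constant that is not shown. A plausible sketch, matching the RFC 1123 timestamp that put() emits via strftime("%a, %d %b %Y %H:%M:%S GMT") (the actual pattern in tests.py may differ):

import re

EXPIRES_RE = re.compile(
    r'^[A-Z][a-z]{2}, \d{2} [A-Z][a-z]{2} \d{4} \d{2}:\d{2}:\d{2} GMT$')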