Example #1
    def testDirectoryListing(self):
        # listdir_recursive should yield slash-separated paths relative
        # to media_dir, in exactly this order.
        allowed_files = [
            'css/1.css',
            'css/2.css',
            'img/black.png',
            'js/1.js',
            'js/2.js',
        ]
        media_dir = os.path.join(PWD, 'media')
        listed_files = list(mediasync.listdir_recursive(media_dir))
        self.assertListEqual(allowed_files, listed_files)
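The assertion pins down listdir_recursive's contract: it yields file paths relative to the directory it is given, slash-separated, walking subdirectories in sorted order. A minimal generator satisfying that contract (an illustration of the expected behavior, not mediasync's actual implementation):

import os

def listdir_recursive(path):
    for root, dirs, files in os.walk(path):
        dirs.sort()  # recurse into subdirectories in a stable order
        for name in sorted(files):
            rel = os.path.relpath(os.path.join(root, name), path)
            yield rel.replace(os.sep, '/')  # POSIX-style separators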
Example #2
import os
import subprocess

# msettings and listdir_recursive are the helpers django-mediasync
# provides for reading mediasync settings and walking STATIC_ROOT.
from mediasync import listdir_recursive
from mediasync.conf import msettings


def sass_receiver(sender, **kwargs):
    # Compile every Sass/SCSS source under STATIC_ROOT to CSS so the
    # generated files are in place before mediasync uploads them.
    sass_cmd = msettings.get("SASS_COMMAND", "sass")
    root = msettings["STATIC_ROOT"]

    for filename in listdir_recursive(root):
        if filename.endswith((".sass", ".scss")):
            sass_path = os.path.join(root, filename)
            # Swap the 4-letter extension for "css": foo.scss -> foo.css
            css_path = sass_path[:-4] + "css"
            # Pass an argument list instead of splitting a formatted
            # string, so paths containing spaces are handled correctly.
            subprocess.call([sass_cmd, sass_path, css_path])
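For this receiver to run, it has to be registered with mediasync's pre-sync hook, assuming the pre_sync signal that django-mediasync exposes in mediasync.signals:

from mediasync.signals import pre_sync

# Compile Sass sources to CSS right before each sync runs.
pre_sync.connect(sass_receiver)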
Example #3
    def testSync(self):
        
        # calculate cache control
        cc = "max-age=%i, public" % (self.client.expiration_days * 24 * 3600)
        
        # do a sync then reopen client
        mediasync.sync(self.client, force=True, verbose=False)
        self.client.open()
        conn = self.client.get_connection()
        
        # setup http connection
        http_conn = httplib.HTTPSConnection('s3.amazonaws.com')
        
        # test synced files then delete them
        bucket = conn.get_bucket(self.bucket_name)
        
        static_paths = mediasync.listdir_recursive(os.path.join(PWD, 'media'))
        joined_paths = msettings['JOINED'].iterkeys()
        
        for path in itertools.chain(static_paths, joined_paths):
            
            key = bucket.get_key(path)
            
            if path in msettings['JOINED']:
                args = [PWD, 'media', '_test', path.split('/')[1]]
            else:
                args = [PWD, 'media'] + path.split('/')
            local_content = readfile(os.path.join(*args))

            # compare file content
            self.assertEqual(key.read(), local_content)
            
            # verify checksum
            key_meta = key.get_metadata('mediasync-checksum') or ''
            s3_checksum = key_meta.replace(' ', '+')
            (hexdigest, b64digest) = mediasync.checksum(local_content)
            self.assertEqual(s3_checksum, b64digest)
            
            # do a HEAD request on the file
            http_conn.request('HEAD', "/%s/%s" % (self.bucket_name, path))
            response = http_conn.getresponse()
            response.read()
            
            # verify valid content type
            content_type = mimetypes.guess_type(path)[0] or msettings['DEFAULT_MIMETYPE']
            self.assertEqual(response.getheader("Content-Type", None), content_type)
            
            # check for valid expires headers
            expires = response.getheader("Expires", None)
            self.assertRegexpMatches(expires, EXPIRES_RE)
            
            # check for valid cache control header
            cc_header = response.getheader("Cache-Control", None)
            self.assertEqual(cc_header, cc)
            
            # done with the file, delete it from S3
            key.delete()
            
            if content_type in mediasync.TYPES_TO_COMPRESS:
                
                key = bucket.get_key("%s.gz" % path)
                
                # do a HEAD request on the file
                http_conn.request('HEAD', "/%s/%s.gz" % (self.bucket_name, path))
                response = http_conn.getresponse()
                response.read()
                
                key_meta = key.get_metadata('mediasync-checksum') or ''
                s3_checksum = key_meta.replace(' ', '+')
                self.assertEqual(s3_checksum, b64digest)
                
                key.delete()
        
        http_conn.close()
        
        # wait a moment then delete temporary bucket
        time.sleep(2)
        conn.delete_bucket(self.bucket_name)
        
        # close client
        self.client.close()
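testSync leans on mediasync.checksum returning a (hexdigest, b64digest) pair for the file contents; the test's replace(' ', '+') undoes S3 metadata mangling, since '+' characters in the stored base64 digest can come back as spaces. A minimal sketch of a function with that contract, assuming plain MD5 (an illustration, not necessarily mediasync's exact code):

import base64
import hashlib

def checksum(data):
    # MD5 the raw bytes once; expose both encodings of the digest.
    digest = hashlib.md5(data)
    hexdigest = digest.hexdigest()                 # handy for logging/comparison
    b64digest = base64.b64encode(digest.digest())  # the form S3 Content-MD5 expects
    return (hexdigest, b64digest)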
Example #4
    def testDirectoryListing(self):
        allowed_files = ["css/1.css", "css/2.css", "css/3.scss",
                         "img/black.png", "js/1.js", "js/2.js"]
        media_dir = os.path.join(PWD, "media")
        listed_files = list(mediasync.listdir_recursive(media_dir))
        self.assertListEqual(allowed_files, listed_files)
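Both directory-listing tests expect a media/ fixture tree next to the test module (PWD). For local experiments, an equivalent throwaway tree can be created like this (make_fixture is a hypothetical helper, not part of the test suite):

import os

def make_fixture(base):
    # Lay out the empty files the listing tests expect under base/media.
    for rel in ['css/1.css', 'css/2.css', 'css/3.scss',
                'img/black.png', 'js/1.js', 'js/2.js']:
        path = os.path.join(base, 'media', *rel.split('/'))
        d = os.path.dirname(path)
        if not os.path.isdir(d):
            os.makedirs(d)
        open(path, 'w').close()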