Example #1
    def testSyncableFile(self):
        # not syncable
        self.assertFalse(mediasync.is_syncable_file(".test"))
        self.assertFalse(mediasync.is_syncable_file("_test"))
        # syncable
        self.assertTrue(mediasync.is_syncable_file("test"))
        self.assertTrue(mediasync.is_syncable_file("1234"))
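
For reference, here is a minimal sketch of the behavior `is_syncable_file` is presumably expected to have, judging only by the assertions above (the actual mediasync implementation may differ):

    def is_syncable_file(filename):
        # Hypothetical sketch: names beginning with a dot or an underscore
        # (e.g. ".test", "_test") are skipped; everything else is synced.
        return not filename.startswith((".", "_"))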
Example #2
    def testSync(self):
        
        # calculate cache control
        cc = "max-age=%i, public" % (self.client.expiration_days * 24 * 3600)
        
        # do a sync then reopen client
        mediasync.sync(self.client, force=True)
        self.client.open()
        conn = self.client.get_connection()
        
        # setup http connection
        http_conn = httplib.HTTPSConnection('s3.amazonaws.com')
        
        # test synced files then delete them
        bucket = conn.get_bucket(self.bucket_name)
        
        static_paths = mediasync.listdir_recursive(os.path.join(PWD, 'media'))
        joined_paths = msettings['JOINED'].iterkeys()
        
        for path in itertools.chain(static_paths, joined_paths):
            
            filename = path.split('/')[-1]
            key = bucket.get_key(path)
            
            if not mediasync.is_syncable_file(filename):
                
                # file isn't syncable so it shouldn't be on S3
                self.assertIsNone(key)
                
            else:
                
                # file is syncable so make sure it's on S3
                # and has correct permissions and attributes
                
                if path in msettings['JOINED']:
                    args = [PWD, 'media', '_test', path.split('/')[1]]
                else:
                    args = [PWD, 'media'] + path.split('/')
                local_content = readfile(os.path.join(*args))

                # compare file content
                self.assertEqual(key.read(), local_content)
            
                # verify checksum
                key_meta = key.get_metadata('mediasync-checksum') or ''
                s3_checksum = key_meta.replace(' ', '+')
                (hexdigest, b64digest) = mediasync.checksum(local_content)
                self.assertEqual(s3_checksum, b64digest)
            
                # do a HEAD request on the file
                http_conn.request('HEAD', "/%s/%s" % (self.bucket_name, path))
                response = http_conn.getresponse()
                response.read()
            
                # verify valid content type
                content_type = mimetypes.guess_type(path)[0] or msettings['DEFAULT_MIMETYPE']
                self.assertEqual(response.getheader("Content-Type", None), content_type)
            
                # check for valid expires headers
                expires = response.getheader("Expires", None)
                self.assertRegexpMatches(expires, EXPIRES_RE)
            
                # check for valid cache control header
                cc_header = response.getheader("Cache-Control", None)
                self.assertEqual(cc_header, cc)
            
                # done with the file, delete it from S3
                key.delete()
            
                if content_type in mediasync.TYPES_TO_COMPRESS:
                
                    key = bucket.get_key("%s.gz" % path)
                
                    # do a HEAD request on the file
                    http_conn.request('HEAD', "/%s/%s.gz" % (self.bucket_name, path))
                    response = http_conn.getresponse()
                    response.read()
                
                    key_meta = key.get_metadata('mediasync-checksum') or ''
                    s3_checksum = key_meta.replace(' ', '+')
                    self.assertEqual(s3_checksum, b64digest)
                
                    key.delete()
        
        http_conn.close()
        
        # wait a moment then delete temporary bucket
        time.sleep(2)
        conn.delete_bucket(self.bucket_name)
        
        # close client
        self.client.close()
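
The checksum comparison in this test depends on `mediasync.checksum` returning a `(hexdigest, b64digest)` pair; the `replace(' ', '+')` step works around S3 decoding `+` characters in metadata values as spaces. A minimal sketch of a compatible helper, assuming an MD5-based digest (the real mediasync implementation may differ):

    import base64
    import hashlib

    def checksum(content):
        # Hypothetical sketch: hash the content once, then return the digest
        # as both hex and base64, matching the tuple unpacked in testSync.
        md5 = hashlib.md5(content)
        return md5.hexdigest(), base64.b64encode(md5.digest())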