def testSync(self):

    to_sync = {
        'css/1.css': 'text/css',
        'css/2.css': 'text/css',
        'css/joined.css': 'text/css',
        'img/black.png': 'image/png',
        'js/1.js': 'application/javascript',
        'js/2.js': 'application/javascript',
        'js/joined.js': 'application/javascript',
    }

    def generate_callback(is_forced):
        def myput(filedata, content_type, remote_path, force=is_forced):
            self.assertEqual(content_type, to_sync[remote_path])
            self.assertEqual(force, is_forced)
            if remote_path in msettings['JOINED']:
                original = readfile(os.path.join(PWD, 'media', '_test', remote_path.split('/')[1]))
            else:
                args = [PWD, 'media'] + remote_path.split('/')
                original = readfile(os.path.join(*args))
            self.assertEqual(filedata, original)
        return myput

    # normal sync
    self.client.put_callback = generate_callback(is_forced=False)
    mediasync.sync(self.client, force=False, verbose=False)

    # forced sync
    self.client.put_callback = generate_callback(is_forced=True)
    mediasync.sync(self.client, force=True, verbose=False)
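# The sync tests above assume a fake client whose put() simply hands each
# uploaded file to a test-supplied hook. A minimal sketch of such a client,
# assumed for illustration (the real one ships with the mediasync test suite):
class DummyClient(object):

    put_callback = None

    def put(self, filedata, content_type, remote_path, force=False):
        # delegate to the test's callback so the assertions run there
        if self.put_callback:
            return self.put_callback(filedata, content_type, remote_path, force=force)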
def testPush(self):

    # a no-op put callback is enough to exercise the push path end to end
    def callback(*args):
        pass

    self.client.put_callback = callback
    mediasync.sync(self.client)
def handle(self, *args, **options):
    force = options.get('force') or False
    try:
        mediasync.sync(force=force)
    except ValueError, ve:
        raise CommandError('%s\nUsage is mediasync %s' % (ve.message, self.args))
def testSassReceiver(self):

    pre_sync.connect(sass_receiver)

    mediasync.sync(self.client, force=True, verbose=False)

    # every .sass/.scss source under STATIC_ROOT should now have a
    # compiled .css sibling
    root = msettings['STATIC_ROOT']
    for sass_path in glob.glob(os.path.join(root, "*/*.s[ac]ss")):
        css_path = sass_path[:-4] + "css"
        self.assertTrue(os.path.exists(css_path))
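# A minimal sketch of what a pre_sync receiver like sass_receiver might look
# like -- assumed for illustration, not necessarily mediasync's shipped
# implementation. It compiles each .sass/.scss file under STATIC_ROOT to a
# sibling .css file by shelling out to the `sass` CLI.
import glob
import os
import subprocess

def sass_receiver(sender, **kwargs):
    root = msettings['STATIC_ROOT']
    for sass_path in glob.glob(os.path.join(root, "*/*.s[ac]ss")):
        css_path = sass_path[:-4] + "css"
        subprocess.call(['sass', sass_path, css_path])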
def handle(self, *args, **options):
    msettings['SERVE_REMOTE'] = True
    force = options.get('force') or False
    try:
        mediasync.sync(force=force)
    except ValueError, ve:
        raise CommandError('%s\nUsage is mediasync %s' % (ve.message, self.args))
def handle(self, *args, **options):
    msettings['SERVE_REMOTE'] = True
    msettings['VERBOSE'] = options.get('verbose')
    force = options.get('force') or False
    try:
        start_time = time.time()
        mediasync.sync(force=force)
        end_time = time.time()
        secs = (end_time - start_time)
        print 'sync finished in %0.3f seconds' % secs
    except ValueError, ve:
        raise CommandError('%s\nUsage is mediasync %s' % (ve.message, self.args))
def testSyncSignals(self):

    self.client.called_presync = False
    self.client.called_postsync = False

    @receiver(pre_sync, weak=False)
    def presync_receiver(sender, **kwargs):
        self.assertEqual(self.client, sender)
        sender.called_presync = True

    @receiver(post_sync, weak=False)
    def postsync_receiver(sender, **kwargs):
        self.assertEqual(self.client, sender)
        sender.called_postsync = True

    mediasync.sync(self.client, force=True, verbose=False)

    self.assertTrue(self.client.called_presync)
    self.assertTrue(self.client.called_postsync)
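# For reference, pre_sync and post_sync are presumably ordinary Django
# signals sent with the sync client as the sender -- a sketch under that
# assumption (the real definitions live in mediasync itself):
import django.dispatch

pre_sync = django.dispatch.Signal()
post_sync = django.dispatch.Signal()

def sync(client, force=False, verbose=True):
    pre_sync.send(sender=client)    # e.g. compile SASS before uploading
    # ... walk the media tree and upload each file via client.put() ...
    post_sync.send(sender=client)   # e.g. invalidate CDN caches afterwards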
def testSync(self): to_sync = { "css/1.css": "text/css", "css/2.css": "text/css", "css/3.scss": msettings["DEFAULT_MIMETYPE"], "css/joined.css": "text/css", "img/black.png": "image/png", "js/1.js": "application/javascript", "js/2.js": "application/javascript", "js/joined.js": "application/javascript", } def generate_callback(is_forced): def myput(filedata, content_type, remote_path, force=is_forced): self.assertEqual(content_type, to_sync[remote_path]) self.assertEqual(force, is_forced) if remote_path in msettings["JOINED"]: original = readfile(os.path.join(PWD, "media", "_test", remote_path.split("/")[1])) else: args = [PWD, "media"] + remote_path.split("/") original = readfile(os.path.join(*args)) self.assertEqual(filedata, original) return myput # normal sync self.client.put_callback = generate_callback(is_forced=False) mediasync.sync(self.client, force=False, verbose=False) # forced sync self.client.put_callback = generate_callback(is_forced=True) mediasync.sync(self.client, force=True, verbose=False)
def testSync(self):

    # calculate cache control
    cc = "max-age=%i, public" % (self.client.expiration_days * 24 * 3600)

    # do a sync then reopen client
    mediasync.sync(self.client, force=True, verbose=False)
    self.client.open()
    conn = self.client.get_connection()

    # setup http connection
    http_conn = httplib.HTTPSConnection('s3.amazonaws.com')

    # test synced files then delete them
    bucket = conn.get_bucket(self.bucket_name)

    static_paths = mediasync.listdir_recursive(os.path.join(PWD, 'media'))
    joined_paths = msettings['JOINED'].iterkeys()

    for path in itertools.chain(static_paths, joined_paths):

        key = bucket.get_key(path)

        if path in msettings['JOINED']:
            args = [PWD, 'media', '_test', path.split('/')[1]]
        else:
            args = [PWD, 'media'] + path.split('/')
        local_content = readfile(os.path.join(*args))

        # compare file content
        self.assertEqual(key.read(), local_content)

        # verify checksum
        key_meta = key.get_metadata('mediasync-checksum') or ''
        s3_checksum = key_meta.replace(' ', '+')
        (hexdigest, b64digest) = mediasync.checksum(local_content)
        self.assertEqual(s3_checksum, b64digest)

        # do a HEAD request on the file
        http_conn.request('HEAD', "/%s/%s" % (self.bucket_name, path))
        response = http_conn.getresponse()
        response.read()

        # verify valid content type
        content_type = mimetypes.guess_type(path)[0] or msettings['DEFAULT_MIMETYPE']
        self.assertEqual(response.getheader("Content-Type", None), content_type)

        # check for valid expires headers
        expires = response.getheader("Expires", None)
        self.assertRegexpMatches(expires, EXPIRES_RE)

        # check for valid cache control header
        cc_header = response.getheader("Cache-Control", None)
        self.assertEqual(cc_header, cc)

        # done with the file, delete it from S3
        key.delete()

        if content_type in mediasync.TYPES_TO_COMPRESS:

            key = bucket.get_key("%s.gz" % path)

            # do a HEAD request on the gzipped variant
            http_conn.request('HEAD', "/%s/%s.gz" % (self.bucket_name, path))
            response = http_conn.getresponse()
            response.read()

            key_meta = key.get_metadata('mediasync-checksum') or ''
            s3_checksum = key_meta.replace(' ', '+')
            self.assertEqual(s3_checksum, b64digest)

            key.delete()

    http_conn.close()

    # wait a moment then delete temporary bucket
    time.sleep(2)
    conn.delete_bucket(self.bucket_name)

    # close client
    self.client.close()
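# The Expires and Cache-Control values checked above are presumably computed
# at upload time along these lines -- a sketch under assumptions, not the
# client's verbatim code: an Expires date expiration_days in the future and
# a Cache-Control max-age covering the same span.
import datetime

def cache_headers(expiration_days):
    expires = datetime.datetime.utcnow() + datetime.timedelta(days=expiration_days)
    return {
        'Expires': expires.strftime('%a, %d %b %Y %H:%M:%S GMT'),
        'Cache-Control': 'max-age=%i, public' % (expiration_days * 24 * 3600),
    }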