def test_if_mod_304():
    base_dir = os.path.dirname(__file__)
    test_dir = os.path.join(base_dir, 'test-data', '304')

    cache_app = CacheFixtureApp()
    index_page = CacheFixtureResponseInfo(open(os.path.join(test_dir, 'index.html')).read())
    page1 = CacheFixtureResponseInfo(open(os.path.join(test_dir, 'page1.html')).read())
    page2 = CacheFixtureResponseInfo(open(os.path.join(test_dir, 'page2.html')).read())
    cache_app.map_url('/index.html', index_page)
    cache_app.map_url('/page1.html', page1)
    cache_app.map_url('/page2.html', page2)

    index_page.mod_time = 1000
    page1.mod_time = 1000
    page2.mod_time = 1000

    transcluder = TranscluderMiddleware(cache_app)
    test_app = TestApp(transcluder)

    # load up the deptracker
    result = test_app.get('/index.html',
                          extra_environ={'HTTP_IF_MODIFIED_SINCE': make_http_time(2000)})

    # and test it: an If-Modified-Since later than every dependency's
    # mod_time should produce a 304
    result = test_app.get('/index.html',
                          extra_environ={'HTTP_IF_MODIFIED_SINCE': make_http_time(2000)})
    assert result.status == 304

    # an If-Modified-Since earlier than the content should return the full page
    result = test_app.get('/index.html',
                          extra_environ={'HTTP_IF_MODIFIED_SINCE': make_http_time(500)})
    assert result.status == 200

    # touching a transcluded dependency must invalidate the 304
    page1.mod_time = 3000
    result = test_app.get('/index.html',
                          extra_environ={'HTTP_IF_MODIFIED_SINCE': make_http_time(2000)})
    assert result.status == 200
def test_etag_304():
    base_dir = os.path.dirname(__file__)
    test_dir = os.path.join(base_dir, 'test-data', '304')

    cache_app = CacheFixtureApp()
    index_page = CacheFixtureResponseInfo(open(os.path.join(test_dir, 'index.html')).read())
    page1 = CacheFixtureResponseInfo(open(os.path.join(test_dir, 'page1.html')).read())
    page2 = CacheFixtureResponseInfo(open(os.path.join(test_dir, 'page2.html')).read())
    cache_app.map_url('/index.html', index_page)
    cache_app.map_url('/page1.html', page1)
    cache_app.map_url('/page2.html', page2)

    index_page.etag = 'index'
    page1.etag = 'page1'
    page2.etag = 'page2'

    transcluder = TranscluderMiddleware(cache_app)
    test_app = TestApp(transcluder)

    # load up the deptracker
    result = test_app.get('/index.html')
    etag = header_value(result.headers, 'ETAG')
    assert etag is not None

    # an If-None-Match with the current composite ETag should produce a 304
    result = test_app.get('/index.html', extra_environ={'HTTP_IF_NONE_MATCH': etag})
    assert result.status == 304

    # changing any dependency's ETag must change the composite ETag
    page1.etag = 'page1.new'
    result = test_app.get('/index.html', extra_environ={'HTTP_IF_NONE_MATCH': etag})
    assert result.status == 200
    new_etag = header_value(result.headers, 'ETAG')
    assert new_etag != etag

    result = test_app.get('/index.html', extra_environ={'HTTP_IF_NONE_MATCH': new_etag})
    assert result.status == 304
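
# The two 304 tests above build identical fixtures (same test-data pages, same
# URL mappings). A shared helper along the following lines could remove that
# duplication. This is only a sketch: the helper name and its return shape are
# assumptions, not part of the existing test suite.
def _make_304_fixture():
    base_dir = os.path.dirname(__file__)
    test_dir = os.path.join(base_dir, 'test-data', '304')
    cache_app = CacheFixtureApp()
    pages = {}
    for name in ('index.html', 'page1.html', 'page2.html'):
        pages[name] = CacheFixtureResponseInfo(
            open(os.path.join(test_dir, name)).read())
        cache_app.map_url('/' + name, pages[name])
    return cache_app, pages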
def test_parallel_gets():
    base_dir = os.path.dirname(__file__)
    test_dir = os.path.join(base_dir, 'test-data', '304')
    sleep_time = 1

    cache_app = CacheFixtureApp()
    sleep_app = PausingMiddleware(cache_app, sleep_time)
    transcluder = TranscluderMiddleware(sleep_app, tasklist=the_tasklist)
    static_test_app = TestApp(cache_app)
    test_app = TestApp(transcluder)

    page_list = ['index.html', 'index2.html', 'page1.html', 'page2.html',
                 'page2_1.html', 'page3.html', 'page4.html', 'expected5.html']
    pages = {}
    for page in page_list:
        pages[page] = CacheFixtureResponseInfo(open(os.path.join(test_dir, page)).read())
        cache_app.map_url('/' + page, pages[page])
        pages[page].etag = page

    # load up the deptracker
    start = time.time()
    result = test_app.get('/index.html')
    end = time.time()
    #print "took %s sleep_times" % ((end - start) / sleep_time)
    assert 2*sleep_time <= end - start < 3*sleep_time, the_tasklist.doprint(2, end - start)

    etag = header_value(result.headers, 'ETAG')
    assert etag is not None

    # test parallel fetch from correctly tracked deps
    start = time.time()
    result = test_app.get('/index.html', extra_environ={'HTTP_IF_NONE_MATCH': etag})
    end = time.time()
    #print "took %s sleep_times" % ((end - start) / sleep_time)
    assert sleep_time <= end - start < 2*sleep_time, the_tasklist.doprint(1, end - start)
    assert result.status == 304

    pages['page1.html'].etag = 'page1.new'
    start = time.time()
    result = test_app.get('/index.html', extra_environ={'HTTP_IF_NONE_MATCH': etag})
    end = time.time()
    #print "took %s sleep_times" % ((end - start) / sleep_time)
    assert 2*sleep_time <= end - start < 3*sleep_time, the_tasklist.doprint(2, end - start)
    etag = header_value(result.headers, 'ETAG')
    assert result.status == 200

    # change the content of the index page; this makes it depend on page3
    cache_app.map_url('/index.html', pages['index2.html'])
    start = time.time()
    result = test_app.get('/index.html', extra_environ={'HTTP_IF_NONE_MATCH': etag})
    end = time.time()
    #print "took %s sleep_times" % ((end - start) / sleep_time)
    assert 2*sleep_time <= end - start < 3*sleep_time, the_tasklist.doprint(2, end - start)

    # change a dependency so that it has a dependency of its own
    cache_app.map_url('/page2.html', pages['page2_1.html'])
    start = time.time()
    result = test_app.get('/index.html', extra_environ={'HTTP_IF_NONE_MATCH': etag})
    expected = static_test_app.get('/expected5.html')
    html_string_compare(result.body, expected.body)
    end = time.time()
    #print "took %s sleep_times" % ((end - start) / sleep_time)
    assert 2*sleep_time <= end - start < 3*sleep_time, the_tasklist.doprint(2, end - start)
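
# The parallel test above repeats the same start/end/assert timing pattern for
# every request. A context-manager helper like the sketch below could express
# "this request should take N rounds of sleep_time" in one line. The helper
# name and the 'expected_rounds' parameter are assumptions for illustration,
# not existing test-suite API; only time and the_tasklist.doprint come from
# the code above.
from contextlib import contextmanager

@contextmanager
def _assert_rounds(expected_rounds, sleep_time, tasklist):
    # time the wrapped block and assert it took between expected_rounds and
    # expected_rounds + 1 multiples of sleep_time
    start = time.time()
    yield
    elapsed = time.time() - start
    assert (expected_rounds * sleep_time <= elapsed
            < (expected_rounds + 1) * sleep_time), tasklist.doprint(expected_rounds, elapsed)

# Hypothetical usage inside the test:
#     with _assert_rounds(2, sleep_time, the_tasklist):
#         result = test_app.get('/index.html')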