def test_headers(self):
    """Public Librarian files are served with long-lived cache headers.

    Uploads a sample file, pins its date_created to a known value
    (the on-disk timestamp cannot be trusted), then fetches the file
    over HTTP and checks the Cache-Control and Last-Modified headers.
    """
    client = LibrarianClient()
    # Upload a file so we can retrieve it.
    sample_data = 'blah'
    file_alias_id = client.addFile(
        'sample', len(sample_data), StringIO(sample_data),
        contentType='text/plain')
    url = client.getURLForAlias(file_alias_id)
    # Change the date_created to a known value that doesn't match
    # the disk timestamp.  The timestamp on disk cannot be trusted.
    file_alias = IMasterStore(LibraryFileAlias).get(
        LibraryFileAlias, file_alias_id)
    # Decimal literals: the original 01/30-style leading-zero integers
    # are a SyntaxError on Python 3.
    file_alias.date_created = datetime(
        2001, 1, 30, 13, 45, 59, tzinfo=pytz.utc)
    # Commit so the file is available from the Librarian.
    self.commit()
    # Fetch the file via HTTP, recording the interesting headers.
    result = urlopen(url)
    try:
        last_modified_header = result.info()['Last-Modified']
        cache_control_header = result.info()['Cache-Control']
    finally:
        # urlopen leaves the HTTP response open; close it so the
        # socket is not leaked.
        result.close()
    # URLs point to the same content for ever, so we have a hardcoded
    # 1 year max-age cache policy.  (assertEqual replaces the
    # deprecated failUnlessEqual alias, removed in Python 3.12.)
    self.assertEqual(cache_control_header, 'max-age=31536000, public')
    # And we should have a correct Last-Modified header too.
    self.assertEqual(
        last_modified_header, 'Tue, 30 Jan 2001 13:45:59 GMT')
def test_restricted_file_headers(self):
    """Restricted Librarian files are served with no-cache headers.

    Obtains a restricted file and a token-authenticated URL for it,
    pins its date_created to a known value, then fetches it over HTTP
    and checks the Cache-Control and Last-Modified headers.
    """
    fileAlias, url = self.get_restricted_file_and_public_url()
    token = TimeLimitedToken.allocate(url)
    url = url + "?token=%s" % token
    # Change the date_created to a known value for testing.
    file_alias = IMasterStore(LibraryFileAlias).get(
        LibraryFileAlias, fileAlias)
    # Decimal literals: the original 01/30-style leading-zero integers
    # are a SyntaxError on Python 3.
    file_alias.date_created = datetime(
        2001, 1, 30, 13, 45, 59, tzinfo=pytz.utc)
    # Commit the update.
    self.commit()
    # Fetch the file via HTTP, recording the interesting headers.
    result = urlopen(url)
    try:
        last_modified_header = result.info()['Last-Modified']
        cache_control_header = result.info()['Cache-Control']
    finally:
        # urlopen leaves the HTTP response open; close it so the
        # socket is not leaked.
        result.close()
    # No caching for restricted files.  (assertEqual replaces the
    # deprecated failUnlessEqual alias, removed in Python 3.12.)
    self.assertEqual(cache_control_header, 'max-age=0, private')
    # And we should have a correct Last-Modified header too.
    self.assertEqual(
        last_modified_header, 'Tue, 30 Jan 2001 13:45:59 GMT')
def test_headers(self):
    """Public Librarian files carry a one-year public cache policy.

    Uploads a sample file, pins its date_created to a known value
    (the on-disk timestamp cannot be trusted), then fetches the file
    over HTTP and checks the Cache-Control and Last-Modified headers.
    """
    client = LibrarianClient()
    # Upload a file so we can retrieve it.
    sample_data = b'blah'
    alias_id = client.addFile(
        'sample', len(sample_data), BytesIO(sample_data),
        contentType='text/plain')
    url = client.getURLForAlias(alias_id)
    # Pin date_created to a known value that doesn't match the disk
    # timestamp; the timestamp on disk cannot be trusted.
    alias = IMasterStore(LibraryFileAlias).get(LibraryFileAlias, alias_id)
    alias.date_created = datetime(
        2001, 1, 30, 13, 45, 59, tzinfo=pytz.utc)
    # Commit so the file is available from the Librarian.
    self.commit()
    # Fetch the file via HTTP and inspect the interesting headers.
    response = requests.get(url)
    response.raise_for_status()
    headers = response.headers
    # URLs point to the same content for ever, so a hardcoded one-year
    # max-age public cache policy applies.
    self.assertEqual('max-age=31536000, public', headers['Cache-Control'])
    # And the Last-Modified header reflects date_created.
    self.assertEqual(
        'Tue, 30 Jan 2001 13:45:59 GMT', headers['Last-Modified'])
def test_restricted_file_headers(self):
    """Restricted Librarian files are served with no-cache headers.

    Obtains a restricted file and a token-authenticated URL for it,
    pins its date_created to a known value, then fetches it over HTTP
    and checks the Cache-Control and Last-Modified headers.
    """
    fileAlias, url = self.get_restricted_file_and_public_url()
    token = TimeLimitedToken.allocate(url)
    # Change the date_created to a known value for testing.
    file_alias = IMasterStore(LibraryFileAlias).get(
        LibraryFileAlias, fileAlias)
    file_alias.date_created = datetime(
        2001, 1, 30, 13, 45, 59, tzinfo=pytz.utc)
    # Commit the update.
    self.commit()
    # Fetch the file via HTTP, recording the interesting headers.
    response = requests.get(url, params={"token": token})
    # Fail loudly on a non-2xx response (matching test_headers);
    # otherwise a 404/500 would surface as a confusing KeyError on
    # the header lookups below.
    response.raise_for_status()
    last_modified_header = response.headers['Last-Modified']
    cache_control_header = response.headers['Cache-Control']
    # No caching for restricted files.
    self.assertEqual(cache_control_header, 'max-age=0, private')
    # And we should have a correct Last-Modified header too.
    self.assertEqual(last_modified_header, 'Tue, 30 Jan 2001 13:45:59 GMT')