class AutomaticWebTest(AutoPopulateTest):
    """import this if you want automatic tests to be run"""

    tags = AutoPopulateTest.tags | Tags('web', 'generated')

    def setUp(self):
        if self.__class__ is AutomaticWebTest:
            # Prevent direct use of AutomaticWebTest to avoid database caching
            # issues.
            return
        super(AutomaticWebTest, self).setUp()
        # access self.app for proper initialization of the authentication
        # machinery (else some views may fail)
        self.app

    def test_one_each_config(self):
        self.auto_populate(1)
        for rset in self.iter_automatic_rsets(limit=1):
            self._test_everything_for(rset)

    def test_ten_each_config(self):
        self.auto_populate(10)
        for rset in self.iter_automatic_rsets(limit=10):
            self._test_everything_for(rset)

    def test_startup_views(self):
        for vid in self.list_startup_views():
            with self.admin_access.web_request() as req:
                with self.subTest(vid=vid):
                    self.view(vid, None, req)

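# Hedged usage sketch (not part of cubicweb itself): a cube's test module gets
# the generated view/action/box tests simply by subclassing AutomaticWebTest.
# The 'Conference' entity type below is an assumption made up for illustration.
class MyCubeAutomaticWebTest(AutomaticWebTest):
    """run the generated tests against this cube's schema only"""

    def to_test_etypes(self):
        # restrict auto-population and generated tests to one entity type
        return set(('Conference',))

    def list_startup_views(self):
        # no cube-specific startup view to validate in this sketch
        return ()
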
class StaticControllerCacheTC(staticfilespublishermixin, CubicWebTC):

    tags = CubicWebTC.tags | Tags('static_controller', 'cache', 'http')

    def test_static_file_are_cached(self):
        with self._publish_static_files('data/cubicweb.css') as req:
            self.assertEqual(200, req.status_out)
            self.assertIn('last-modified', req.headers_out)
        next_headers = {
            'if-modified-since': req.get_response_header('last-modified', raw=True),
        }
        with self._publish_static_files('data/cubicweb.css', next_headers) as req:
            self.assertEqual(304, req.status_out)

class TagTestTC(TestCase):
    tags = Tags(('one', 'two'))

    def test_one(self):
        self.assertTrue(True)

    @tag('two', 'three')
    def test_two(self):
        self.assertTrue(True)

    @tag('three')
    def test_three(self):
        self.assertTrue(True)

class Queens8_TC(TestCase):
    tags = Tags('slow')

    size = 8
    nb_sols = 92
    verbose = 0

    def setUp(self):
        variables = []
        domains = {}
        constraints = []
        for i in range(self.size):
            name = 'Q%d' % i
            variables.append(name)
            domains[name] = fd.FiniteDomain([(i, j) for j in range(self.size)])
        for q1 in variables:
            for q2 in variables:
                if q1 < q2:
                    c = fd.make_expression(
                        (q1, q2),
                        '%(q1)s[0] < %(q2)s[0] and '
                        '%(q1)s[1] != %(q2)s[1] and '
                        'abs(%(q1)s[0]-%(q2)s[0]) != '
                        'abs(%(q1)s[1]-%(q2)s[1])' % {'q1': q1, 'q2': q2})
                    constraints.append(c)
        self.repo = Repository(variables, domains, constraints,
                               printer=quiet_printer)
        sys.stdout = StringIO()

    def tearDown(self):
        sys.stdout = sys.__stdout__

    def testQueensWithEnumerator(self):
        solver = Solver(EnumeratorDistributor(), printer=quiet_printer)
        solutions = solver.solve(self.repo, verbose=self.verbose)
        self.assertEqual(len(solutions), self.nb_sols)

    def testQueensWithDefaultDistributor(self):
        solver = Solver(printer=quiet_printer)
        solutions = solver.solve(self.repo, verbose=self.verbose)
        self.assertEqual(len(solutions), self.nb_sols)

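# The same logilab.constraint API as above, on a much smaller problem -- a
# hedged sketch reusing the fd / Repository / Solver / quiet_printer names
# already imported by this test module; the toy 'x + y == 5' constraint is an
# assumption made up for illustration.
def _solve_toy_sum():
    variables = ('x', 'y')
    domains = dict((name, fd.FiniteDomain(list(range(10)))) for name in variables)
    constraints = [fd.make_expression(('x', 'y'), 'x + y == 5')]
    repo = Repository(variables, domains, constraints, printer=quiet_printer)
    # returns the list of {'x': ..., 'y': ...} solution dictionaries
    return Solver(printer=quiet_printer).solve(repo, verbose=0)
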
class DataControllerTC(staticfilespublishermixin, CubicWebTC):

    tags = CubicWebTC.tags | Tags('static_controller', 'data', 'http')

    def _check_datafile_ok(self, fname):
        with self._publish_static_files(fname) as req:
            self.assertEqual(200, req.status_out)
            self.assertIn('last-modified', req.headers_out)
            self.assertIn('expires', req.headers_out)
            self.assertEqual(req.get_response_header('cache-control'),
                             {'max-age': 604800})
        next_headers = {
            'if-modified-since': req.get_response_header('last-modified', raw=True),
        }
        with self._publish_static_files(fname, next_headers) as req:
            self.assertEqual(304, req.status_out)

    def _check_datafile_redirect(self, fname, expected):
        with self._publish_static_files(fname) as req:
            self.assertEqual(302, req.status_out)
            self.assertEqual(req.get_response_header('location'),
                             req.base_url() + expected)

    def _check_no_datafile(self, fname):
        with self._publish_static_files(fname) as req:
            self.assertEqual(404, req.status_out)

    def test_static_data_mode(self):
        hash = self.vreg.config.instance_md5_version()
        self.assertEqual(32, len(hash))

        with tempattr(self.vreg.config, 'mode', 'test'):
            self._check_datafile_ok('data/cubicweb.css')
            self._check_no_datafile('data/does/not/exist')
            self._check_no_datafile('data/%s/cubicweb.css' % ('0' * len(hash)))

        with tempattr(self.vreg.config, 'mode', 'notest'):
            self.config._init_base_url()  # reset config.datadir_url
            self._check_datafile_redirect('data/cubicweb.css',
                                          'data/%s/cubicweb.css' % hash)
            self._check_datafile_ok('data/%s/cubicweb.css' % hash)
            self._check_no_datafile('data/%s/does/not/exist' % hash)
            self._check_datafile_redirect(
                'data/%s/does/not/exist' % ('0' * len(hash)),
                'data/%s/%s/does/not/exist' % (hash, '0' * len(hash)))

class CWTIdentTC(CubicWebTestTC):
    test_db_id = 'webtest-ident'
    anonymous_allowed = False
    tags = CubicWebTestTC.tags | Tags(('auth',))

    def test_response_denied(self):
        res = self.webapp.get('/', expect_errors=True)
        self.assertEqual(http.client.FORBIDDEN, res.status_int)

    def test_login(self):
        res = self.webapp.get('/', expect_errors=True)
        self.assertEqual(http.client.FORBIDDEN, res.status_int)

        self.login(self.admlogin, self.admpassword)
        res = self.webapp.get('/')
        self.assertEqual(http.client.OK, res.status_int)

        self.logout()
        res = self.webapp.get('/', expect_errors=True)
        self.assertEqual(http.client.FORBIDDEN, res.status_int)

class WsgiCWIdentTC(CubicWebServerTC):
    test_db_id = 'httptest-cwident'
    anonymous_allowed = False
    tags = CubicWebServerTC.tags | Tags(('auth',))

    def test_response_denied(self):
        response = self.web_get()
        self.assertEqual(response.status, http.client.FORBIDDEN)

    def test_login(self):
        response = self.web_get()
        if response.status != http.client.FORBIDDEN:
            self.skipTest(
                'Already authenticated, "test_response_denied" must have failed')
        # login
        self.web_login(self.admlogin, self.admpassword)
        response = self.web_get()
        self.assertEqual(response.status, http.client.OK, response.body)
        # logout
        self.web_logout()
        response = self.web_get()
        self.assertEqual(response.status, http.client.FORBIDDEN, response.body)

class HTTPCache(TestCase):
    """Check that the http cache logic works as expected
    (as far as we understood the RFC)
    """
    tags = TestCase.tags | Tags('http', 'cache')

    def assertCache(self, expected, status, situation=''):
        """simple assert for nicer message"""
        if expected != status:
            if expected is None:
                expected = "MODIFIED"
            if status is None:
                status = "MODIFIED"
            msg = 'expected %r got %r' % (expected, status)
            if situation:
                msg = "%s - when: %s" % (msg, situation)
            self.fail(msg)

    def test_IN_none_OUT_none(self):
        #: test that no caching is requested when no data is available
        #: on either side
        req = _test_cache((), ())
        self.assertIsNone(req.status_out)

    def test_IN_Some_OUT_none(self):
        #: test that no caching is requested when no data is available
        #: on the server (origin) side
        hin = [
            ('if-modified-since', 'Sat, 14 Apr 2012 14:39:32 GM'),
        ]
        req = _test_cache(hin, ())
        self.assertIsNone(req.status_out)
        hin = [
            ('if-none-match', 'babar/huitre'),
        ]
        req = _test_cache(hin, ())
        self.assertIsNone(req.status_out)
        hin = [
            ('if-modified-since', 'Sat, 14 Apr 2012 14:39:32 GM'),
            ('if-none-match', 'babar/huitre'),
        ]
        req = _test_cache(hin, ())
        self.assertIsNone(req.status_out)

    def test_IN_none_OUT_Some(self):
        #: test that no caching is requested when no data is provided
        #: by the client
        hout = [
            ('last-modified', 'Sat, 14 Apr 2012 14:39:32 GM'),
        ]
        req = _test_cache((), hout)
        self.assertIsNone(req.status_out)
        hout = [
            ('etag', 'babar/huitre'),
        ]
        req = _test_cache((), hout)
        self.assertIsNone(req.status_out)
        hout = [
            ('last-modified', 'Sat, 14 Apr 2012 14:39:32 GM'),
            ('etag', 'babar/huitre'),
        ]
        req = _test_cache((), hout)
        self.assertIsNone(req.status_out)

    @tag('last_modified')
    def test_last_modified_newer(self):
        #: test the proper behavior of modification date only
        # newer
        hin = [
            ('if-modified-since', 'Sat, 13 Apr 2012 14:39:32 GM'),
        ]
        hout = [
            ('last-modified', 'Sat, 14 Apr 2012 14:39:32 GM'),
        ]
        req = _test_cache(hin, hout)
        self.assertCache(None, req.status_out, 'origin is newer than client')

    @tag('last_modified')
    def test_last_modified_older(self):
        # older
        hin = [
            ('if-modified-since', 'Sat, 15 Apr 2012 14:39:32 GM'),
        ]
        hout = [
            ('last-modified', 'Sat, 14 Apr 2012 14:39:32 GM'),
        ]
        req = _test_cache(hin, hout)
        self.assertCache(304, req.status_out, 'origin is older than client')

    @tag('last_modified')
    def test_last_modified_same(self):
        # same
        hin = [
            ('if-modified-since', 'Sat, 14 Apr 2012 14:39:32 GM'),
        ]
        hout = [
            ('last-modified', 'Sat, 14 Apr 2012 14:39:32 GM'),
        ]
        req = _test_cache(hin, hout)
        self.assertCache(304, req.status_out, 'origin is equal to client')

    @tag('etag')
    def test_etag_mismatch(self):
        #: test the proper behavior of etag only
        # etag mismatch
        hin = [
            ('if-none-match', 'babar'),
        ]
        hout = [
            ('etag', 'celestine'),
        ]
        req = _test_cache(hin, hout)
        self.assertCache(None, req.status_out, 'etag mismatch')

    @tag('etag')
    def test_etag_match(self):
        # etag match
        hin = [
            ('if-none-match', 'babar'),
        ]
        hout = [
            ('etag', 'babar'),
        ]
        req = _test_cache(hin, hout)
        self.assertCache(304, req.status_out, 'etag match')
        self.assertEqual(req.headers_out.getRawHeaders('etag'), ['babar'])
        # etag match in multiple
        hin = [
            ('if-none-match', 'loutre'),
            ('if-none-match', 'babar'),
        ]
        hout = [
            ('etag', 'babar'),
        ]
        req = _test_cache(hin, hout)
        self.assertCache(304, req.status_out, 'etag match in multiple')
        self.assertEqual(req.headers_out.getRawHeaders('etag'), ['babar'])
        # client use "*" as etag
        hin = [
            ('if-none-match', '*'),
        ]
        hout = [
            ('etag', 'babar'),
        ]
        req = _test_cache(hin, hout)
        self.assertCache(304, req.status_out, 'client use "*" as etag')
        self.assertEqual(req.headers_out.getRawHeaders('etag'), ['babar'])

    @tag('etag', 'last_modified')
    def test_both(self):
        #: test the proper behavior of etag and last-modified combined
        # both wrong
        hin = [
            ('if-none-match', 'babar'),
            ('if-modified-since', 'Sat, 14 Apr 2012 14:39:32 GM'),
        ]
        hout = [
            ('etag', 'loutre'),
            ('last-modified', 'Sat, 15 Apr 2012 14:39:32 GM'),
        ]
        req = _test_cache(hin, hout)
        self.assertCache(None, req.status_out, 'both wrong')

    @tag('etag', 'last_modified')
    def test_both_etag_mismatch(self):
        # both, etag mismatch
        hin = [
            ('if-none-match', 'babar'),
            ('if-modified-since', 'Sat, 14 Apr 2012 14:39:32 GM'),
        ]
        hout = [
            ('etag', 'loutre'),
            ('last-modified', 'Sat, 13 Apr 2012 14:39:32 GM'),
        ]
        req = _test_cache(hin, hout)
        self.assertCache(None, req.status_out, 'both but etag mismatch')

    @tag('etag', 'last_modified')
    def test_both_but_modified(self):
        # both but modified
        hin = [
            ('if-none-match', 'babar'),
            ('if-modified-since', 'Sat, 14 Apr 2012 14:39:32 GM'),
        ]
        hout = [
            ('etag', 'babar'),
            ('last-modified', 'Sat, 15 Apr 2012 14:39:32 GM'),
        ]
        req = _test_cache(hin, hout)
        self.assertCache(None, req.status_out, 'both but modified')

    @tag('etag', 'last_modified')
    def test_both_ok(self):
        # both ok
        hin = [
            ('if-none-match', 'babar'),
            ('if-modified-since', 'Sat, 14 Apr 2012 14:39:32 GM'),
        ]
        hout = [
            ('etag', 'babar'),
            ('last-modified', 'Sat, 13 Apr 2012 14:39:32 GM'),
        ]
        req = _test_cache(hin, hout)
        self.assertCache(304, req.status_out, 'both ok')
        self.assertEqual(req.headers_out.getRawHeaders('etag'), ['babar'])

    @tag('etag', 'HEAD')
    def test_head_verb(self):
        #: check that 200 FOUND is properly returned without content on a HEAD
        #: request. This logic does not really belong here :-/
        # modified
        hin = [
            ('if-none-match', 'babar'),
        ]
        hout = [
            ('etag', 'rhino/really-not-babar'),
        ]
        req = _test_cache(hin, hout, method='HEAD')
        self.assertCache(None, req.status_out, 'modified HEAD verb')
        # not modified
        hin = [
            ('if-none-match', 'babar'),
        ]
        hout = [
            ('etag', 'babar'),
        ]
        req = _test_cache(hin, hout, method='HEAD')
        self.assertCache(304, req.status_out, 'not modified HEAD verb')
        self.assertEqual(req.headers_out.getRawHeaders('etag'), ['babar'])

    @tag('etag', 'POST')
    def test_post_verb(self):
        # modified
        hin = [
            ('if-none-match', 'babar'),
        ]
        hout = [
            ('etag', 'rhino/really-not-babar'),
        ]
        req = _test_cache(hin, hout, method='POST')
        self.assertCache(None, req.status_out, 'modified POST verb')
        # not modified
        hin = [
            ('if-none-match', 'babar'),
        ]
        hout = [
            ('etag', 'babar'),
        ]
        req = _test_cache(hin, hout, method='POST')
        self.assertCache(412, req.status_out, 'not modified POST verb')

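# Not cubicweb's implementation -- just a compact restatement of the decision
# table the HTTPCache tests above encode, kept here as a reading aid.  Client
# headers hold the if-* validators, origin headers hold etag/last-modified;
# dates are assumed to be already parsed into comparable values.
def _expected_cache_status(client, origin, method='GET'):
    matched = False
    if 'if-none-match' in client and 'etag' in origin:
        etags = client['if-none-match']          # list of client etags
        if origin['etag'] not in etags and '*' not in etags:
            return None                          # mismatch: full response
        matched = True
    if 'if-modified-since' in client and 'last-modified' in origin:
        if origin['last-modified'] > client['if-modified-since']:
            return None                          # resource changed: full response
        matched = True
    if not matched:
        return None                              # no usable validator pair
    # cache validated: 304 for safe verbs (GET/HEAD), 412 for POST
    return 412 if method == 'POST' else 304
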
class StorageTC(CubicWebTC):
    tempdir = None
    tags = CubicWebTC.tags | Tags('Storage', 'BFSS')

    def setup_database(self):
        self.tempdir = tempfile.mkdtemp()
        bfs_storage = storages.BytesFileSystemStorage(self.tempdir)
        self.bfs_storage = bfs_storage
        storages.set_attribute_storage(self.repo, 'File', 'data', bfs_storage)
        storages.set_attribute_storage(self.repo, 'BFSSTestable', 'opt_attr', bfs_storage)

    def tearDown(self):
        super(StorageTC, self).tearDown()
        storages.unset_attribute_storage(self.repo, 'File', 'data')
        del self.bfs_storage
        shutil.rmtree(self.tempdir)

    def create_file(self, cnx, content=b'the-data'):
        return cnx.create_entity('File', data=Binary(content),
                                 data_format=u'text/plain',
                                 data_name=u'foo.pdf')

    def fspath(self, cnx, entity):
        fspath = cnx.execute('Any fspath(D) WHERE F eid %(f)s, F data D',
                             {'f': entity.eid})[0][0].getvalue()
        return fspath.decode('utf-8')

    def test_bfss_wrong_fspath_usage(self):
        with self.admin_access.repo_cnx() as cnx:
            f1 = self.create_file(cnx)
            cnx.execute('Any fspath(D) WHERE F eid %(f)s, F data D', {'f': f1.eid})
            with self.assertRaises(NotImplementedError) as cm:
                cnx.execute('Any fspath(F) WHERE F eid %(f)s', {'f': f1.eid})
            self.assertEqual(
                str(cm.exception),
                'This callback is only available for BytesFileSystemStorage '
                'managed attribute. Is FSPATH() argument BFSS managed?')

    def test_bfss_storage(self):
        with self.admin_access.web_request() as req:
            cnx = req.cnx
            f1 = self.create_file(req)
            filepaths = glob(osp.join(self.tempdir, '%s_data_*' % f1.eid))
            self.assertEqual(len(filepaths), 1, filepaths)
            expected_filepath = filepaths[0]
            # file should be read only
            self.assertFalse(os.access(expected_filepath, os.W_OK))
            self.assertEqual(open(expected_filepath).read(), 'the-data')
            cnx.rollback()
            self.assertFalse(osp.isfile(expected_filepath))
            filepaths = glob(osp.join(self.tempdir, '%s_data_*' % f1.eid))
            self.assertEqual(len(filepaths), 0, filepaths)
            f1 = self.create_file(req)
            cnx.commit()
            filepaths = glob(osp.join(self.tempdir, '%s_data_*' % f1.eid))
            self.assertEqual(len(filepaths), 1, filepaths)
            expected_filepath = filepaths[0]
            self.assertEqual(open(expected_filepath).read(), 'the-data')
            # add f1 back to the entity cache with req as _cw
            f1 = req.entity_from_eid(f1.eid)
            f1.cw_set(data=Binary(b'the new data'))
            cnx.rollback()
            self.assertEqual(open(expected_filepath).read(), 'the-data')
            f1.cw_delete()
            self.assertTrue(osp.isfile(expected_filepath))
            cnx.rollback()
            self.assertTrue(osp.isfile(expected_filepath))
            f1.cw_delete()
            cnx.commit()
            self.assertFalse(osp.isfile(expected_filepath))

    def test_bfss_sqlite_fspath(self):
        with self.admin_access.repo_cnx() as cnx:
            f1 = self.create_file(cnx)
            expected_filepath = osp.join(self.tempdir,
                                         '%s_data_%s' % (f1.eid, f1.data_name))
            base, ext = osp.splitext(expected_filepath)
            self.assertTrue(self.fspath(cnx, f1).startswith(base))
            self.assertTrue(self.fspath(cnx, f1).endswith(ext))

    def test_bfss_fs_importing_doesnt_touch_path(self):
        with self.admin_access.repo_cnx() as cnx:
            cnx.transaction_data['fs_importing'] = True
            filepath = osp.abspath(__file__)
            f1 = cnx.create_entity(
                'File',
                data=Binary(filepath.encode(sys.getfilesystemencoding())),
                data_format=u'text/plain', data_name=u'foo')
            self.assertEqual(self.fspath(cnx, f1), filepath)

    def test_source_storage_transparency(self):
        with self.admin_access.repo_cnx() as cnx:
            with self.temporary_appobjects(DummyBeforeHook, DummyAfterHook):
                self.create_file(cnx)

    def test_source_mapped_attribute_error_cases(self):
        with self.admin_access.repo_cnx() as cnx:
            with self.assertRaises(QueryError) as cm:
                cnx.execute('Any X WHERE X data ~= "hop", X is File')
            self.assertEqual(
                str(cm.exception),
                'can\'t use File.data (X data ILIKE "hop") in restriction')
            with self.assertRaises(QueryError) as cm:
                cnx.execute('Any X, Y WHERE X data D, Y data D, '
                            'NOT X identity Y, X is File, Y is File')
            self.assertEqual(str(cm.exception),
                             "can't use D as a restriction variable")
            # query returning mix of mapped / regular attributes (only file.data
            # mapped, not image.data for instance)
            with self.assertRaises(QueryError) as cm:
                cnx.execute('Any X WITH X BEING ('
                            ' (Any NULL)'
                            ' UNION '
                            ' (Any D WHERE X data D, X is File)'
                            ')')
            self.assertEqual(
                str(cm.exception),
                'query fetch some source mapped attribute, some not')
            with self.assertRaises(QueryError) as cm:
                cnx.execute('(Any D WHERE X data D, X is File)'
                            ' UNION '
                            '(Any D WHERE X title D, X is Bookmark)')
            self.assertEqual(
                str(cm.exception),
                'query fetch some source mapped attribute, some not')

            storages.set_attribute_storage(
                self.repo, 'State', 'name',
                storages.BytesFileSystemStorage(self.tempdir))
            try:
                with self.assertRaises(QueryError) as cm:
                    cnx.execute('Any D WHERE X name D, X is IN (State, Transition)')
                self.assertEqual(
                    str(cm.exception),
                    'query fetch some source mapped attribute, some not')
            finally:
                storages.unset_attribute_storage(self.repo, 'State', 'name')

    def test_source_mapped_attribute_advanced(self):
        with self.admin_access.repo_cnx() as cnx:
            f1 = self.create_file(cnx)
            rset = cnx.execute(
                'Any X,D WITH D,X BEING ('
                ' (Any D, X WHERE X eid %(x)s, X data D)'
                ' UNION '
                ' (Any D, X WHERE X eid %(x)s, X data D)'
                ')', {'x': f1.eid})
            self.assertEqual(len(rset), 2)
            self.assertEqual(rset[0][0], f1.eid)
            self.assertEqual(rset[1][0], f1.eid)
            self.assertEqual(rset[0][1].getvalue(), b'the-data')
            self.assertEqual(rset[1][1].getvalue(), b'the-data')
            rset = cnx.execute('Any X,LENGTH(D) WHERE X eid %(x)s, X data D',
                               {'x': f1.eid})
            self.assertEqual(len(rset), 1)
            self.assertEqual(rset[0][0], f1.eid)
            self.assertEqual(rset[0][1], len('the-data'))
            rset = cnx.execute(
                'Any X,LENGTH(D) WITH D,X BEING ('
                ' (Any D, X WHERE X eid %(x)s, X data D)'
                ' UNION '
                ' (Any D, X WHERE X eid %(x)s, X data D)'
                ')', {'x': f1.eid})
            self.assertEqual(len(rset), 2)
            self.assertEqual(rset[0][0], f1.eid)
            self.assertEqual(rset[1][0], f1.eid)
            self.assertEqual(rset[0][1], len('the-data'))
            self.assertEqual(rset[1][1], len('the-data'))
            with self.assertRaises(QueryError) as cm:
                cnx.execute('Any X,UPPER(D) WHERE X eid %(x)s, X data D',
                            {'x': f1.eid})
            self.assertEqual(str(cm.exception),
                             'UPPER can not be called on mapped attribute')

    def test_bfss_fs_importing_transparency(self):
        with self.admin_access.repo_cnx() as cnx:
            cnx.transaction_data['fs_importing'] = True
            filepath = osp.abspath(__file__)
            f1 = cnx.create_entity(
                'File',
                data=Binary(filepath.encode(sys.getfilesystemencoding())),
                data_format=u'text/plain', data_name=u'foo')
            cw_value = f1.data.getvalue()
            fs_value = open(filepath, 'rb').read()
            if cw_value != fs_value:
                self.fail('cw value %r is different from file content' % cw_value)

    @tag('update')
    def test_bfss_update_with_existing_data(self):
        with self.admin_access.repo_cnx() as cnx:
            f1 = cnx.create_entity('File', data=Binary(b'some data'),
                                   data_format=u'text/plain', data_name=u'foo')
            # NOTE: do not use cw_set() which would automatically
            # update f1's local dict. We want the pure rql version to work
            cnx.execute('SET F data %(d)s WHERE F eid %(f)s',
                        {'d': Binary(b'some other data'), 'f': f1.eid})
            self.assertEqual(f1.data.getvalue(), b'some other data')
            cnx.commit()
            f2 = cnx.execute('Any F WHERE F eid %(f)s, F is File',
                             {'f': f1.eid}).get_entity(0, 0)
            self.assertEqual(f2.data.getvalue(), b'some other data')

    @tag('update', 'extension', 'commit')
    def test_bfss_update_with_different_extension_commited(self):
        with self.admin_access.repo_cnx() as cnx:
            f1 = cnx.create_entity('File', data=Binary(b'some data'),
                                   data_format=u'text/plain',
                                   data_name=u'foo.txt')
            # NOTE: do not use cw_set() which would automatically
            # update f1's local dict. We want the pure rql version to work
            cnx.commit()
            old_path = self.fspath(cnx, f1)
            self.assertTrue(osp.isfile(old_path))
            self.assertEqual(osp.splitext(old_path)[1], '.txt')
            cnx.execute('SET F data %(d)s, F data_name %(dn)s, '
                        'F data_format %(df)s WHERE F eid %(f)s',
                        {'d': Binary(b'some other data'), 'f': f1.eid,
                         'dn': u'bar.jpg', 'df': u'image/jpeg'})
            cnx.commit()
            # the new file exists with correct extension
            # the old file is dead
            f2 = cnx.execute('Any F WHERE F eid %(f)s, F is File',
                             {'f': f1.eid}).get_entity(0, 0)
            new_path = self.fspath(cnx, f2)
            self.assertFalse(osp.isfile(old_path))
            self.assertTrue(osp.isfile(new_path))
            self.assertEqual(osp.splitext(new_path)[1], '.jpg')

    @tag('update', 'extension', 'rollback')
    def test_bfss_update_with_different_extension_rolled_back(self):
        with self.admin_access.repo_cnx() as cnx:
            f1 = cnx.create_entity('File', data=Binary(b'some data'),
                                   data_format=u'text/plain',
                                   data_name=u'foo.txt')
            # NOTE: do not use cw_set() which would automatically
            # update f1's local dict. We want the pure rql version to work
            cnx.commit()
            old_path = self.fspath(cnx, f1)
            old_data = f1.data.getvalue()
            self.assertTrue(osp.isfile(old_path))
            self.assertEqual(osp.splitext(old_path)[1], '.txt')
            cnx.execute('SET F data %(d)s, F data_name %(dn)s, '
                        'F data_format %(df)s WHERE F eid %(f)s',
                        {'d': Binary(b'some other data'), 'f': f1.eid,
                         'dn': u'bar.jpg', 'df': u'image/jpeg'})
            cnx.rollback()
            # after the rollback, the old file is still there with its
            # original extension and content
            f2 = cnx.execute('Any F WHERE F eid %(f)s, F is File',
                             {'f': f1.eid}).get_entity(0, 0)
            new_path = self.fspath(cnx, f2)
            new_data = f2.data.getvalue()
            self.assertTrue(osp.isfile(new_path))
            self.assertEqual(osp.splitext(new_path)[1], '.txt')
            self.assertEqual(old_path, new_path)
            self.assertEqual(old_data, new_data)

    @tag('update', 'NULL')
    def test_bfss_update_to_None(self):
        with self.admin_access.repo_cnx() as cnx:
            f = cnx.create_entity('Affaire', opt_attr=Binary(b'toto'))
            cnx.commit()
            f.cw_set(opt_attr=None)
            cnx.commit()

    @tag('fs_importing', 'update')
    def test_bfss_update_with_fs_importing(self):
        with self.admin_access.repo_cnx() as cnx:
            f1 = cnx.create_entity('File', data=Binary(b'some data'),
                                   data_format=u'text/plain', data_name=u'foo')
            old_fspath = self.fspath(cnx, f1)
            cnx.transaction_data['fs_importing'] = True
            new_fspath = osp.join(self.tempdir, 'newfile.txt')
            open(new_fspath, 'w').write('the new data')
            cnx.execute('SET F data %(d)s WHERE F eid %(f)s',
                        {'d': Binary(new_fspath.encode(sys.getfilesystemencoding())),
                         'f': f1.eid})
            cnx.commit()
            self.assertEqual(f1.data.getvalue(), b'the new data')
            self.assertEqual(self.fspath(cnx, f1), new_fspath)
            self.assertFalse(osp.isfile(old_fspath))

    @tag('fsimport')
    def test_clean(self):
        with self.admin_access.repo_cnx() as cnx:
            fsimport = storages.fsimport
            td = cnx.transaction_data
            self.assertNotIn('fs_importing', td)
            with fsimport(cnx):
                self.assertIn('fs_importing', td)
                self.assertTrue(td['fs_importing'])
            self.assertNotIn('fs_importing', td)

    @tag('fsimport')
    def test_true(self):
        with self.admin_access.repo_cnx() as cnx:
            fsimport = storages.fsimport
            td = cnx.transaction_data
            td['fs_importing'] = True
            with fsimport(cnx):
                self.assertIn('fs_importing', td)
                self.assertTrue(td['fs_importing'])
            self.assertTrue(td['fs_importing'])

    @tag('fsimport')
    def test_False(self):
        with self.admin_access.repo_cnx() as cnx:
            fsimport = storages.fsimport
            td = cnx.transaction_data
            td['fs_importing'] = False
            with fsimport(cnx):
                self.assertIn('fs_importing', td)
                self.assertTrue(td['fs_importing'])
            self.assertFalse(td['fs_importing'])

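# Outside of tests, the usual way to enable BFSS for an attribute is a server
# startup hook performing the same set_attribute_storage() call as
# setup_database() above.  Hedged sketch: the regid and the storage directory
# are assumptions made up for illustration.
from cubicweb.server import hook
from cubicweb.server.sources import storages


class BFSSStartupHook(hook.Hook):
    """install the bytes-on-filesystem storage for File.data at startup"""
    __regid__ = 'mycube.bfss-startup'
    events = ('server_startup', 'server_maintenance')

    def __call__(self):
        storage = storages.BytesFileSystemStorage('/var/lib/mycube/bfss')
        storages.set_attribute_storage(self.repo, 'File', 'data', storage)
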
class ConcatFilesTC(CubicWebTC):

    tags = CubicWebTC.tags | Tags('static_controller', 'concat')

    def tearDown(self):
        super(ConcatFilesTC, self).tearDown()
        self._cleanup_concat_cache()

    def _cleanup_concat_cache(self):
        uicachedir = osp.join(self.config.apphome, 'uicache')
        for fname in glob.glob(osp.join(uicachedir, 'cache_concat_*')):
            os.unlink(osp.join(uicachedir, fname))

    @contextmanager
    def _publish_js_files(self, js_files):
        with self.admin_access.web_request() as req:
            head = HTMLHead(req)
            url = head.concat_urls([req.data_url(js_file)
                                    for js_file in js_files])[len(req.base_url()):]
            req._url = url
            res = self.app_handle_request(req)
            yield res, req

    def expected_content(self, js_files):
        content = b''
        for js_file in js_files:
            dirpath, rid = self.config.locate_resource(js_file)
            if dirpath is not None:  # ignore resources not found
                with open(osp.join(dirpath, rid), 'rb') as f:
                    content += f.read() + b'\n'
        return content

    def test_cache(self):
        js_files = ('cubicweb.ajax.js', 'jquery.js')
        with self._publish_js_files(js_files) as (result, req):
            self.assertNotEqual(404, req.status_out)
            # check result content
            self.assertEqual(result, self.expected_content(js_files))
            # make sure we kept a cached version on filesystem
            concat_handler = ConcatFilesHandler(self.config)
            filepath = concat_handler.build_filepath(js_files)
            self.assertTrue(osp.isfile(filepath))

    def test_invalid_file_in_debug_mode(self):
        js_files = ('cubicweb.ajax.js', 'dummy.js')
        # in debug mode, an error is raised
        self.config.debugmode = True
        try:
            with self._publish_js_files(js_files) as (result, req):
                self.assertEqual(404, req.status_out)
        finally:
            self.config.debugmode = False

    def test_invalid_file_in_production_mode(self):
        js_files = ('cubicweb.ajax.js', 'dummy.js')
        with self._publish_js_files(js_files) as (result, req):
            self.assertNotEqual(404, req.status_out)
            # check result content
            self.assertEqual(result, self.expected_content(js_files))

class CubicWebTC(BaseTestCase):
    """abstract class for test using an apptest environment

    attributes:

    * `vreg`, the vregistry
    * `schema`, self.vreg.schema
    * `config`, cubicweb configuration
    * `cnx`, repoapi connection to the repository using an admin user
    * `session`, server side session associated to `cnx`
    * `app`, the cubicweb publisher (for web testing)
    * `repo`, the repository object
    * `admlogin`, login of the admin user
    * `admpassword`, password of the admin user
    * `shell`, create and use shell environment
    * `anonymous_allowed`: flag telling if anonymous browsing should be allowed
    """
    appid = 'data'
    configcls = devtools.ApptestConfiguration
    requestcls = fake.FakeRequest
    tags = Tags('cubicweb', 'cw_repo')
    test_db_id = DEFAULT_EMPTY_DB_ID

    # anonymous is logged by default in cubicweb test cases
    anonymous_allowed = True

    @classmethod
    def setUpClass(cls):
        test_module_file = sys.modules[cls.__module__].__file__
        assert 'config' not in cls.__dict__, (
            '%s has a config class attribute before entering setUpClass. '
            'Let CubicWebTC.setUpClass instantiate it and modify it afterwards.' % cls)
        cls.config = cls.configcls(cls.appid, test_module_file)
        cls.config.mode = 'test'

    def __init__(self, *args, **kwargs):
        self.repo = None
        self._open_access = set()
        super(CubicWebTC, self).__init__(*args, **kwargs)

    def run(self, *args, **kwds):
        testMethod = getattr(self, self._testMethodName)
        if isgeneratorfunction(testMethod):
            raise RuntimeError(
                '%s appears to be a generative test. This is not handled '
                'anymore, use subTest API instead.' % self)
        return super(CubicWebTC, self).run(*args, **kwds)

    # repository connection handling ###########################################

    def new_access(self, login):
        """provide a new RepoAccess object for a given user

        The access is automatically closed at the end of the test."""
        access = RepoAccess(self.repo, login, self.requestcls)
        self._open_access.add(access)
        return access

    def _close_access(self):
        while self._open_access:
            try:
                self._open_access.pop()
            except BadConnectionId:
                continue  # already closed

    def _init_repo(self):
        """init the repository and connection to it."""
        # get or restore a working db.
        db_handler = devtools.get_test_db_handler(self.config, self.init_config)
        db_handler.build_db_cache(self.test_db_id, self.pre_setup_database)
        db_handler.restore_database(self.test_db_id)
        self.repo = db_handler.get_repo(startup=True)
        # get an admin session (without actual login)
        login = db_handler.config.default_admin_config['login']
        self.admin_access = self.new_access(login)

    # config management ########################################################

    @classmethod  # XXX could be turned into a regular method
    def init_config(cls, config):
        """configuration initialization hooks.

        You may only want to override the configuration logic here.
        Otherwise, consider using a different :class:`ApptestConfiguration`
        defined in the `configcls` class attribute.

        This method will be called by the database handler once the config has
        been properly bootstrapped.
        """
        admincfg = config.default_admin_config
        cls.admlogin = admincfg['login']
        cls.admpassword = admincfg['password']
        # uncomment the line below if you want rql queries to be logged
        # config.global_set_option('query-log-file',
        #                          '/tmp/test_rql_log.' + str(os.getpid()))
        config.global_set_option('log-file', None)
        # set default-dest-addrs to a dumb email address to avoid mailbox or
        # mail queue pollution
        config.global_set_option('default-dest-addrs', ['whatever'])
        send_to = '*****@*****.**' % getlogin()
        config.global_set_option('sender-addr', send_to)
        config.global_set_option('default-dest-addrs', send_to)
        config.global_set_option('sender-name', 'cubicweb-test')
        config.global_set_option('sender-addr', '*****@*****.**')
        # default_base_url on config class isn't enough for TestServerConfiguration
        config.global_set_option('base-url', config.default_base_url())

    @property
    def vreg(self):
        return self.repo.vreg

    # global resources accessors ###############################################

    @property
    def schema(self):
        """return the application schema"""
        return self.vreg.schema

    def set_option(self, optname, value):
        self.config.global_set_option(optname, value)

    def set_debug(self, debugmode):
        server.set_debug(debugmode)

    def debugged(self, debugmode):
        return server.debugged(debugmode)

    # default test setup and teardown #########################################

    def setUp(self):
        assert hasattr(self, 'config'), (
            'It seems that CubicWebTC.setUpClass has not been called. '
            'Missing super() call in %s?' % self.setUpClass)
        # monkey patch send mail operation so emails are sent synchronously
        self._patch_SendMailOp()
        previous_failure = self.__class__.__dict__.get('_repo_init_failed')
        if previous_failure is not None:
            self.skipTest('repository is not initialised: %r' % previous_failure)
        try:
            self._init_repo()
        except Exception as ex:
            self.__class__._repo_init_failed = ex
            raise
        self.addCleanup(self._close_access)
        self.config.set_anonymous_allowed(self.anonymous_allowed)
        self.setup_database()
        MAILBOX[:] = []  # reset mailbox

    def tearDown(self):
        while self._cleanups:
            cleanup, args, kwargs = self._cleanups.pop(-1)
            cleanup(*args, **kwargs)
        self.repo.turn_repo_off()

    def _patch_SendMailOp(self):
        # monkey patch send mail operation so emails are sent synchronously
        _old_mail_postcommit_event = SendMailOp.postcommit_event
        SendMailOp.postcommit_event = SendMailOp.sendmails

        def reverse_SendMailOp_monkey_patch():
            SendMailOp.postcommit_event = _old_mail_postcommit_event

        self.addCleanup(reverse_SendMailOp_monkey_patch)

    def setup_database(self):
        """add your database setup code by overriding this method"""

    @classmethod
    def pre_setup_database(cls, cnx, config):
        """add your pre database setup code by overriding this method

        Do not forget to set the cls.test_db_id value to enable caching of the
        result.
        """

    # user / session management ###############################################

    @iclassmethod  # XXX turn into a class method
    def create_user(self, req, login=None, groups=('users',), password=None,
                    email=None, commit=True, **kwargs):
        """create and return a new user entity"""
        if password is None:
            password = login
        user = req.create_entity('CWUser', login=login,
                                 upassword=password, **kwargs)
        req.execute('SET X in_group G WHERE X eid %%(x)s, G name IN(%s)'
                    % ','.join(repr(str(g)) for g in groups),
                    {'x': user.eid})
        if email is not None:
            req.create_entity('EmailAddress', address=email,
                              reverse_primary_email=user)
        user.cw_clear_relation_cache('in_group', 'subject')
        if commit:
            getattr(req, 'cnx', req).commit()
        return user

    # other utilities #########################################################

    @contextmanager
    def temporary_appobjects(self, *appobjects):
        self.vreg._loadedmods.setdefault(self.__module__, {})
        for obj in appobjects:
            self.vreg.register(obj)
            registered = getattr(obj, '__registered__', None)
            if registered:
                for registry in obj.__registries__:
                    registered(self.vreg[registry])
        try:
            yield
        finally:
            for obj in appobjects:
                self.vreg.unregister(obj)

    @contextmanager
    def temporary_permissions(self, *perm_overrides, **perm_kwoverrides):
        """Set custom schema permissions within context.

        There are two ways to call this method, which may be used together:

        * using positional argument(s):

          .. sourcecode:: python

                rdef = self.schema['CWUser'].rdef('login')
                with self.temporary_permissions((rdef, {'read': ()})):
                    ...

        * using named argument(s):

          .. sourcecode:: python

                with self.temporary_permissions(CWUser={'read': ()}):
                    ...

        Usually the former will be preferred to override permissions on a
        relation definition, while the latter is well suited for entity types.

        The allowed keys in the permission dictionary depend on the schema type
        (entity type / relation definition). Resulting permissions will be
        similar to `orig_permissions.update(partial_perms)`.
        """
        torestore = []
        for erschema, etypeperms in chain(perm_overrides, perm_kwoverrides.items()):
            if isinstance(erschema, str):
                erschema = self.schema[erschema]
            for action, actionperms in etypeperms.items():
                origperms = erschema.permissions[action]
                erschema.set_action_permissions(action, actionperms)
                torestore.append([erschema, action, origperms])
        try:
            yield
        finally:
            for erschema, action, permissions in torestore:
                if action is None:
                    erschema.permissions = permissions
                else:
                    erschema.set_action_permissions(action, permissions)

    def assertModificationDateGreater(self, entity, olddate):
        entity.cw_attr_cache.pop('modification_date', None)
        self.assertGreater(entity.modification_date, olddate)

    def assertMessageEqual(self, req, params, expected_msg):
        msg = req.session.data[params['_cwmsgid']]
        self.assertEqual(expected_msg, msg)

    # workflow utilities #######################################################

    def assertPossibleTransitions(self, entity, expected):
        transitions = entity.cw_adapt_to('IWorkflowable').possible_transitions()
        self.assertListEqual(sorted(tr.name for tr in transitions),
                             sorted(expected))

    # views and actions registries inspection ##################################

    def pviews(self, req, rset):
        return sorted((a.__regid__, a.__class__)
                      for a in self.vreg['views'].possible_views(req, rset=rset))

    def pactions(self, req, rset,
                 skipcategories=('addrelated', 'siteactions', 'useractions',
                                 'footer', 'manage')):
        return [(a.__regid__, a.__class__)
                for a in self.vreg['actions'].poss_visible_objects(req, rset=rset)
                if a.category not in skipcategories]

    def pactions_by_cats(self, req, rset, categories=('addrelated',)):
        return [(a.__regid__, a.__class__)
                for a in self.vreg['actions'].poss_visible_objects(req, rset=rset)
                if a.category in categories]

    def pactionsdict(self, req, rset,
                     skipcategories=('addrelated', 'siteactions', 'useractions',
                                     'footer', 'manage')):
        res = {}
        for a in self.vreg['actions'].poss_visible_objects(req, rset=rset):
            if a.category not in skipcategories:
                res.setdefault(a.category, []).append(a.__class__)
        return res

    def action_submenu(self, req, rset, id):
        return self._test_action(self.vreg['actions'].select(id, req, rset=rset))

    def _test_action(self, action):
        class fake_menu(list):
            @property
            def items(self):
                return self

        class fake_box(object):
            def action_link(self, action, **kwargs):
                return (action.title, action.url())

        submenu = fake_menu()
        action.fill_menu(fake_box(), submenu)
        return submenu

    def list_views_for(self, rset):
        """returns the list of views that can be applied on `rset`"""
        req = rset.req
        only_once_vids = ('primary', 'secondary', 'text')
        req.data['ex'] = ValueError("whatever")
        viewsvreg = self.vreg['views']
        for vid, views in viewsvreg.items():
            if vid[0] == '_':
                continue
            if rset.rowcount > 1 and vid in only_once_vids:
                continue
            views = [view for view in views
                     if view.category != 'startupview'
                     and not issubclass(view, notification.NotificationView)
                     and not isinstance(view, class_deprecated)]
            if views:
                try:
                    view = viewsvreg._select_best(views, req, rset=rset)
                    if view is None:
                        raise NoSelectableObject((req,), {'rset': rset}, views)
                    if view.linkable():
                        yield view
                    else:
                        not_selected(self.vreg, view)
                    # else the view is expected to be used as subview and should
                    # not be tested directly
                except NoSelectableObject:
                    continue

    def list_actions_for(self, rset):
        """returns the list of actions that can be applied on `rset`"""
        req = rset.req
        for action in self.vreg['actions'].possible_objects(req, rset=rset):
            yield action

    def list_boxes_for(self, rset):
        """returns the list of boxes that can be applied on `rset`"""
        req = rset.req
        for box in self.vreg['ctxcomponents'].possible_objects(req, rset=rset,
                                                               view=None):
            yield box

    def list_startup_views(self):
        """returns the list of startup views"""
        with self.admin_access.web_request() as req:
            for view in self.vreg['views'].possible_views(req, None):
                if view.category == 'startupview':
                    yield view.__regid__
                else:
                    not_selected(self.vreg, view)

    # web ui testing utilities #################################################

    @property
    @cached
    def app(self):
        """return a cubicweb publisher"""
        publisher = application.CubicWebPublisher(self.repo, self.config)

        def raise_error_handler(*args, **kwargs):
            raise

        publisher.error_handler = raise_error_handler
        return publisher

    @contextmanager
    def remote_calling(self, fname, *args, **kwargs):
        """remote json call simulation"""
        args = [json.dumps(arg) for arg in args]
        with self.admin_access.web_request(fname=fname, pageid='123',
                                           arg=args, **kwargs) as req:
            ctrl = self.vreg['controllers'].select('ajax', req)
            yield ctrl.publish(), req

    def app_handle_request(self, req):
        return self.app.core_handle(req)

    def ctrl_publish(self, req, ctrl='edit', rset=None):
        """call the publish method of the edit controller"""
        ctrl = self.vreg['controllers'].select(ctrl, req, appli=self.app)
        try:
            result = ctrl.publish(rset)
            req.cnx.commit()
        except web.Redirect:
            req.cnx.commit()
            raise
        return result

    @staticmethod
    def fake_form(formid, field_dict=None, entity_field_dicts=()):
        """Build a _cw.form dictionary to fake posting of some standard
        cubicweb form

        * `formid`, the form id, usually form's __regid__

        * `field_dict`, dictionary of name:value for fields that are not tied
          to an entity

        * `entity_field_dicts`, list of (entity, dictionary) where dictionary
          contains name:value for fields that are tied to the given entity
        """
        assert field_dict or entity_field_dicts, \
            'field_dict and entity_field_dicts arguments must not be both unspecified'
        if field_dict is None:
            field_dict = {}
        form = {'__form_id': formid}
        fields = []
        for field, value in field_dict.items():
            fields.append(field)
            form[field] = value

        def _add_entity_field(entity, field, value):
            entity_fields.append(field)
            form[eid_param(field, entity.eid)] = value

        for entity, field_dict in entity_field_dicts:
            if '__maineid' not in form:
                form['__maineid'] = entity.eid
            entity_fields = []
            form.setdefault('eid', []).append(entity.eid)
            _add_entity_field(entity, '__type', entity.cw_etype)
            for field, value in field_dict.items():
                _add_entity_field(entity, field, value)
            if entity_fields:
                form[eid_param('_cw_entity_fields', entity.eid)] = ','.join(entity_fields)
        if fields:
            form['_cw_fields'] = ','.join(sorted(fields))
        return form

    @contextmanager
    def admin_request_from_url(self, url):
        """parses `url` and builds the corresponding CW-web request

        req.form will be setup using the url's query string
        """
        with self.admin_access.web_request(url=url) as req:
            if isinstance(url, str):
                url = url.encode(req.encoding)  # req.setup_params() expects encoded strings
            querystring = urlparse(url)[-2]
            params = parse_qs(querystring)
            req.setup_params(params)
            yield req

    def url_publish(self, url, data=None):
        """takes `url`, uses application's app_resolver to find the appropriate
        controller and result set, then publishes the result.

        To simulate post of www-form-encoded data, give a `data` dictionary
        containing desired key/value associations.

        This should pretty much correspond to what occurs in a real CW server
        except the apache-rewriter component is not called.
        """
        with self.admin_request_from_url(url) as req:
            if data is not None:
                req.form.update(data)
            ctrlid, rset = self.app.url_resolver.process(req, req.relative_path(False))
            return self.ctrl_publish(req, ctrlid, rset)

    def http_publish(self, url, data=None):
        """like `url_publish`, except this returns a http response, even in case
        of errors. You may give form parameters using the `data` argument.
        """
        with self.admin_request_from_url(url) as req:
            if data is not None:
                req.form.update(data)
            with real_error_handling(self.app):
                result = self.app_handle_request(req)
            return result, req

    @staticmethod
    def _parse_location(req, location):
        try:
            path, params = location.split('?', 1)
        except ValueError:
            path = location
            params = {}
        else:
            def cleanup(p):
                return (p[0], urlunquote(p[1]))

            params = dict(cleanup(p.split('=', 1)) for p in params.split('&') if p)
        if path.startswith(req.base_url()):  # may be relative
            path = path[len(req.base_url()):]
        return path, params

    def expect_redirect(self, callback, req):
        """call the given callback with req as argument, expecting to get a
        Redirect exception
        """
        try:
            callback(req)
        except Redirect as ex:
            return self._parse_location(req, ex.location)
        else:
            self.fail('expected a Redirect exception')

    def expect_redirect_handle_request(self, req, path='edit'):
        """call the publish method of the application publisher, expecting to
        get a Redirect exception
        """
        if req.relative_path(False) != path:
            req._url = path
        self.app_handle_request(req)
        self.assertTrue(300 <= req.status_out < 400, req.status_out)
        location = req.get_response_header('location')
        return self._parse_location(req, location)

    def set_auth_mode(self, authmode, anonuser=None):
        self.set_option('auth-mode', authmode)
        self.set_option('anonymous-user', anonuser)
        if anonuser is None:
            self.config.anonymous_credential = None
        else:
            self.config.anonymous_credential = (anonuser, anonuser)

    def init_authentication(self, authmode, anonuser=None):
        self.set_auth_mode(authmode, anonuser)
        req = self.requestcls(self.vreg, url='login')
        sh = self.app.session_handler
        authm = sh.session_manager.authmanager
        authm.anoninfo = self.vreg.config.anonymous_user()
        authm.anoninfo = authm.anoninfo[0], {'password': authm.anoninfo[1]}
        # not properly cleaned between tests
        self.open_sessions = sh.session_manager._sessions = {}
        return req

    def assertAuthSuccess(self, req, nbsessions=1):
        session = self.app.get_session(req)
        cnx = session.new_cnx()
        with cnx:
            req.set_cnx(cnx)
            self.assertEqual(len(self.open_sessions), nbsessions,
                             self.open_sessions)
            self.assertEqual(req.user.login, self.admlogin)
            self.assertEqual(session.anonymous_session, False)

    def assertAuthFailure(self, req, nbsessions=0):
        with self.assertRaises(AuthenticationError):
            self.app.get_session(req)
        # +0 since we do not track the opened session
        self.assertEqual(len(self.open_sessions), nbsessions)
        clear_cache(req, 'get_authorization')

    # content validation #######################################################

    # validators are used to validate (XML, DTD, whatever) view's content
    # available validators are:
    #   DTDValidator : validates XML + declared DTD
    #   SaxOnlyValidator : guarantees XML is well formed
    #   None : do not try to validate anything
    # validators used must be imported from cubicweb.devtools.htmlparser
    content_type_validators = {
        # maps MIME type : validator name
        #
        # do not set html validators here, we need HTMLValidator for html
        # snippets
        # 'text/html': DTDValidator,
        # 'application/xhtml+xml': DTDValidator,
        'application/xml': htmlparser.XMLValidator,
        'text/xml': htmlparser.XMLValidator,
        'application/json': JsonValidator,
        'text/plain': None,
        'text/comma-separated-values': None,
        'text/x-vcard': None,
        'text/calendar': None,
        'image/png': None,
    }
    # maps vid : validator name (override content_type_validators)
    vid_validators = dict((vid, htmlparser.VALMAP[valkey])
                          for vid, valkey in VIEW_VALIDATORS.items())

    def view(self, vid, rset=None, req=None, template='main-template', **kwargs):
        """This method tests the view `vid` on `rset` using `template`

        If no error occurred while rendering the view, the HTML is analyzed
        and parsed.

        :returns: an instance of `cubicweb.devtools.htmlparser.PageInfo`
                  encapsulating the generated HTML
        """
        if req is None:
            assert rset is not None, 'you must supply at least one of rset or req'
            req = rset.req
        req.form['vid'] = vid
        viewsreg = self.vreg['views']
        view = viewsreg.select(vid, req, rset=rset, **kwargs)
        if template is None:  # raw view testing, no template
            viewfunc = view.render
        else:
            kwargs['view'] = view

            def viewfunc(**k):
                return viewsreg.main_template(req, template, rset=rset, **kwargs)

        return self._test_view(viewfunc, view, template, kwargs)

    def _test_view(self, viewfunc, view, template='main-template', kwargs={}):
        """this method does the actual call to the view

        If no error occurred while rendering the view, the HTML is analyzed
        and parsed.

        :returns: an instance of `cubicweb.devtools.htmlparser.PageInfo`
                  encapsulating the generated HTML
        """
        try:
            output = viewfunc(**kwargs)
        except Exception:
            # hijack exception: generative tests stop when the exception
            # is not an AssertionError
            klass, exc, tcbk = sys.exc_info()
            try:
                msg = '[%s in %s] %s' % (klass, view.__regid__, exc)
            except Exception:
                msg = '[%s in %s] undisplayable exception' % (klass, view.__regid__)
            raise AssertionError(msg).with_traceback(sys.exc_info()[-1])
        return self._check_html(output, view, template)

    def get_validator(self, view=None, content_type=None, output=None):
        if view is not None:
            try:
                return self.vid_validators[view.__regid__]()
            except KeyError:
                if content_type is None:
                    content_type = view.content_type
        if content_type is None:
            content_type = 'text/html'
        if content_type in ('text/html', 'application/xhtml+xml') and output:
            if output.startswith(b'<!DOCTYPE html>'):
                # only check XML well-formness since HTMLValidator isn't html5
                # compatible and won't like various other extensions
                default_validator = htmlparser.XMLSyntaxValidator
            elif output.startswith(b'<?xml'):
                default_validator = htmlparser.DTDValidator
            else:
                default_validator = htmlparser.HTMLValidator
        else:
            default_validator = None
        validatorclass = self.content_type_validators.get(content_type,
                                                          default_validator)
        if validatorclass is None:
            return
        return validatorclass()

    @nocoverage
    def _check_html(self, output, view, template='main-template'):
        """raises an exception if the HTML is invalid"""
        output = output.strip()
        if isinstance(output, str):
            # XXX
            output = output.encode('utf-8')
        validator = self.get_validator(view, output=output)
        if validator is None:
            return output  # return raw output if no validator is defined
        if isinstance(validator, htmlparser.DTDValidator):
            # XXX remove <canvas> used in progress widget, unknown in html dtd
            output = re.sub('<canvas.*?></canvas>', '', output)
        return self.assertWellFormed(validator, output.strip(), context=view.__regid__)

    def assertWellFormed(self, validator, content, context=None):
        try:
            return validator.parse_string(content)
        except Exception:
            # hijack exception: generative tests stop when the exception
            # is not an AssertionError
            klass, exc, tcbk = sys.exc_info()
            if context is None:
                msg = u'[%s]' % (klass,)
            else:
                msg = u'[%s in %s]' % (klass, context)
            try:
                str_exc = str(exc)
            except Exception:
                str_exc = 'undisplayable exception'
            msg += str_exc
            if content is not None:
                position = getattr(exc, "position", (0,))[0]
                if position:
                    # define filter
                    if isinstance(content, bytes):
                        content = str(content, sys.getdefaultencoding(), 'replace')
                    content = validator.preprocess_data(content)
                    content = content.splitlines()
                    width = int(log(len(content), 10)) + 1
                    line_template = " %" + ("%i" % width) + "i: %s"
                    # XXX no need to iterate the whole file except to get
                    # the line number
                    content = u'\n'.join(line_template % (idx + 1, line)
                                         for idx, line in enumerate(content)
                                         if line_context_filter(idx + 1, position))
                    msg += u'\nfor content:\n%s' % content
            exc = AssertionError(msg)
            exc.__traceback__ = tcbk
            raise exc

    def assertDocTestFile(self, testfile):
        # doctest returns tuple (failure_count, test_count)
        with self.admin_access.shell() as mih:
            result = mih.process_script(testfile)
        if result[0] and result[1]:
            raise self.failureException("doctest file '%s' failed" % testfile)

    # notifications ############################################################

    def assertSentEmail(self, subject, recipients=None, nb_msgs=None):
        """test recipients in system mailbox for given email subject

        :param subject: email subject to find in mailbox
        :param recipients: list of email recipients
        :param nb_msgs: expected number of entries
        :returns: list of matched emails
        """
        messages = [email for email in MAILBOX
                    if email.message.get('Subject') == subject]
        if recipients is not None:
            sent_to = set()
            for msg in messages:
                sent_to.update(msg.recipients)
            self.assertSetEqual(set(recipients), sent_to)
        if nb_msgs is not None:
            self.assertEqual(len(MAILBOX), nb_msgs)
        return messages

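# Minimal CubicWebTC usage sketch: setup_database() populates the test
# database once, then tests open connections through admin_access /
# new_access.  The 'Blog' entity type and its 'title' attribute are
# assumptions made up for illustration.
class BlogTC(CubicWebTC):

    def setup_database(self):
        with self.admin_access.repo_cnx() as cnx:
            self.create_user(cnx, u'reader', groups=('guests',))
            cnx.create_entity('Blog', title=u'cubicweb news')
            cnx.commit()

    def test_blog_readable_by_guests(self):
        with self.new_access(u'reader').repo_cnx() as cnx:
            self.assertTrue(cnx.execute('Any B WHERE B is Blog'))
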
class AutoPopulateTest(CubicWebTC):
    """base class for test with auto-populating of the database"""
    __abstract__ = True

    test_db_id = 'autopopulate'

    tags = CubicWebTC.tags | Tags('autopopulated')

    pdbclass = CubicWebDebugger
    # this is a hook to be able to define a list of rql queries
    # that are application dependent and cannot be guessed automatically
    application_rql = []

    no_auto_populate = ()
    ignored_relations = set()

    def to_test_etypes(self):
        return unprotected_entities(self.schema, strict=True)

    def custom_populate(self, how_many, cnx):
        pass

    def post_populate(self, cnx):
        pass

    @nocoverage
    def auto_populate(self, how_many):
        """this method populates the database with `how_many` entities
        of each possible type. It also inserts random relations between them
        """
        with self.admin_access.cnx() as cnx:
            with cnx.security_enabled(read=False, write=False):
                self._auto_populate(cnx, how_many)
                cnx.commit()

    def _auto_populate(self, cnx, how_many):
        self.custom_populate(how_many, cnx)
        vreg = self.vreg
        howmanydict = how_many_dict(self.schema, cnx, how_many, self.no_auto_populate)
        for etype in unprotected_entities(self.schema):
            if etype in self.no_auto_populate:
                continue
            nb = howmanydict.get(etype, how_many)
            for rql, args in insert_entity_queries(etype, self.schema, vreg, nb):
                cnx.execute(rql, args)
        edict = {}
        for etype in unprotected_entities(self.schema, strict=True):
            rset = cnx.execute('%s X' % etype)
            edict[str(etype)] = set(row[0] for row in rset.rows)
        existingrels = {}
        ignored_relations = SYSTEM_RELATIONS | self.ignored_relations
        for rschema in self.schema.relations():
            if rschema.final or rschema in ignored_relations or rschema.rule:
                continue
            rset = cnx.execute('DISTINCT Any X,Y WHERE X %s Y' % rschema)
            existingrels.setdefault(rschema.type, set()).update((x, y) for x, y in rset)
        q = make_relations_queries(self.schema, edict, cnx, ignored_relations,
                                   existingrels=existingrels)
        for rql, args in q:
            try:
                cnx.execute(rql, args)
            except ValidationError as ex:
                # failed to satisfy some constraint
                print('error in automatic db population', ex)
                cnx.commit_state = None  # reset uncommitable flag
        self.post_populate(cnx)

    def iter_individual_rsets(self, etypes=None, limit=None):
        etypes = etypes or self.to_test_etypes()
        with self.admin_access.web_request() as req:
            for etype in etypes:
                if limit:
                    rql = 'Any X LIMIT %s WHERE X is %s' % (limit, etype)
                else:
                    rql = 'Any X WHERE X is %s' % etype
                rset = req.execute(rql)
                for row in range(len(rset)):
                    if limit and row > limit:
                        break
                    # XXX iirk
                    rset2 = rset.limit(limit=1, offset=row)
                    yield rset2

    def iter_automatic_rsets(self, limit=10):
        """generates basic resultsets for each entity type"""
        etypes = self.to_test_etypes()
        if not etypes:
            return
        with self.admin_access.web_request() as req:
            for etype in etypes:
                yield req.execute('Any X LIMIT %s WHERE X is %s' % (limit, etype))
            etype1 = etypes.pop()
            try:
                etype2 = etypes.pop()
            except KeyError:
                etype2 = etype1
            # test a mixed query (DISTINCT/GROUP to avoid getting duplicate X,
            # which makes the muledit view fail for instance -- html validation
            # fails because of some duplicate "id" attributes)
            yield req.execute('DISTINCT Any X, MAX(Y) GROUPBY X WHERE X is %s, Y is %s'
                              % (etype1, etype2))
            # test some application-specific queries if defined
            for rql in self.application_rql:
                yield req.execute(rql)

    def _test_everything_for(self, rset):
        """this method tries to find everything that can be tested for `rset`
        and yields a callable test (as needed in generative tests)
        """
        propdefs = self.vreg['propertydefs']
        # make all components visible
        for k, v in propdefs.items():
            if k.endswith('visible') and not v['default']:
                propdefs[k]['default'] = True
        for view in self.list_views_for(rset):
            backup_rset = rset.copy(rset.rows, rset.description)
            with self.subTest(name=self._testname(rset, view.__regid__, 'view')):
                self.view(view.__regid__, rset, rset.req.reset_headers(),
                          'main-template')
            # We have to do this because some views modify the
            # resultset's syntax tree
            rset = backup_rset
        for action in self.list_actions_for(rset):
            with self.subTest(name=self._testname(rset, action.__regid__, 'action')):
                self._test_action(action)
        for box in self.list_boxes_for(rset):
            w = [].append
            with self.subTest(name=self._testname(rset, box.__regid__, 'box')):
                box.render(w)

    @staticmethod
    def _testname(rset, objid, objtype):
        return '%s_%s_%s' % ('_'.join(rset.column_types(0)), objid, objtype)

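# Hedged sketch of the customisation knobs AutoPopulateTest exposes; the entity
# type, relation name and 'title' attribute below are assumptions made up for
# illustration.
class MyCubePopulateTC(AutoPopulateTest):
    # entity types the generic population pass must never create
    no_auto_populate = ('ExternalSource',)
    # relations that must never be randomly inserted
    ignored_relations = set(('generated_by',))

    def custom_populate(self, how_many, cnx):
        # hand-crafted entities created before the generic population pass
        for i in range(how_many):
            cnx.create_entity('Ticket', title=u'ticket #%d' % i)
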
class QUnitTestCase(cwwebtest.CubicWebTestTC):

    tags = cwwebtest.CubicWebTestTC.tags | Tags(('qunit',))

    # testfile, (dep_a, dep_b)
    all_js_tests = ()

    timeout_error = RuntimeError

    def setUp(self):
        super(QUnitTestCase, self).setUp()
        self.test_queue = Queue()

        class MyQUnitResultController(QUnitResultController):
            tc = self
            test_queue = self.test_queue

        self._qunit_controller = MyQUnitResultController
        self.webapp.app.appli.vreg.register(MyQUnitResultController)
        self.webapp.app.appli.vreg.register(QUnitView)
        self.webapp.app.appli.vreg.register(CWDevtoolsStaticController)
        self.server = webtest.http.StopableWSGIServer.create(self.webapp.app)
        self.config.global_set_option('base-url', self.server.application_url)

    def tearDown(self):
        self.server.shutdown()
        self.webapp.app.appli.vreg.unregister(self._qunit_controller)
        self.webapp.app.appli.vreg.unregister(QUnitView)
        self.webapp.app.appli.vreg.unregister(CWDevtoolsStaticController)
        super(QUnitTestCase, self).tearDown()

    def test_javascripts(self):
        for args in self.all_js_tests:
            self.assertIn(len(args), (1, 2))
            test_file = args[0]
            if len(args) > 1:
                depends = args[1]
            else:
                depends = ()
            with TemporaryDirectory():
                for name, func, args in self._test_qunit(test_file, depends):
                    with self.subTest(name=name):
                        func(*args)

    def _test_qunit(self, test_file, depends=(), timeout=10):
        QUnitView.test_file = test_file
        QUnitView.depends = depends

        while not self.test_queue.empty():
            self.test_queue.get(False)

        with FirefoxHelper(self.config['base-url'] + '?vid=qunit') as browser:
            test_count = 0
            error = False

            def runtime_error(*data):
                with open(browser.log_file) as logf:
                    print(logf.read())
                raise RuntimeError(*data)

            def timeout_failure(test_file, timeout, test_count):
                with open(browser.log_file) as logf:
                    print(logf.read())
                msg = '%s inactivity timeout (%is). %i test results received' % (
                    test_file, timeout, test_count)
                raise self.timeout_error(msg)

            while not error:
                try:
                    result, test_name, msg = self.test_queue.get(timeout=timeout)
                    test_name = '%s (%s)' % (test_name, test_file)
                    if result is None:
                        break
                    test_count += 1
                    if result:
                        yield test_name, lambda *args: 1, ()
                    else:
                        yield test_name, self.fail, (msg,)
                except Empty:
                    error = True
                    yield test_file, timeout_failure, (test_file, timeout, test_count)
            if test_count <= 0 and not error:
                yield test_name, runtime_error, (
                    'No test yielded by qunit for %s' % test_file,)

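# Hedged usage sketch: a cube's javascript test case only needs to list its
# qunit test files (plus their optional dependencies) in ``all_js_tests``,
# following the "testfile, (dep_a, dep_b)" layout documented above; the file
# names below are assumptions made up for illustration.
class MyCubeQUnitTC(QUnitTestCase):
    all_js_tests = (
        ('static/jstests/test_utils.js',),
        ('static/jstests/test_widgets.js', ('data/cubicweb.js',
                                            'data/cubicweb.widgets.js')),
    )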