def test_check_timestamp(self):
    'Test check timestamp'
    pool = Pool()
    ModelsqlTimestamp = pool.get('test.modelsql.timestamp')
    transaction = Transaction()
    # transaction must be committed between each changes otherwise NOW()
    # returns always the same timestamp.
    record, = ModelsqlTimestamp.create([{}])
    transaction.commit()

    # Remember the _timestamp of the first revision of the record.
    timestamp = ModelsqlTimestamp.read([record.id],
        ['_timestamp'])[0]['_timestamp']

    if backend.name() in ('sqlite', 'mysql'):
        # timestamp precision of sqlite is the second
        time.sleep(1)

    ModelsqlTimestamp.write([record], {})
    transaction.commit()

    # Simulate a client holding the outdated timestamp: both write and
    # delete must be rejected with ConcurrencyException.
    transaction.timestamp[str(record)] = timestamp
    self.assertRaises(ConcurrencyException,
        ModelsqlTimestamp.write, [record], {})

    transaction.timestamp[str(record)] = timestamp
    self.assertRaises(ConcurrencyException,
        ModelsqlTimestamp.delete, [record])

    # Once the stale timestamp is dropped, operations succeed again.
    transaction.timestamp.pop(str(record), None)
    ModelsqlTimestamp.write([record], {})
    transaction.commit()
    ModelsqlTimestamp.delete([record])
    transaction.commit()
class ModelSQLTestCase(unittest.TestCase):
    'Test ModelSQL'
    # Python 2 style test case (old transaction API: explicit
    # Transaction().start per test, models cached on the instance).

    def setUp(self):
        install_module('tests')
        self.modelsql = POOL.get('test.modelsql')
        self.modelsql_timestamp = POOL.get('test.modelsql.timestamp')

    @unittest.skipIf(
        backend.name() == 'sqlite',
        'SQLite not concerned because tryton don\'t set "NOT NULL"'
        'constraint: "ALTER TABLE" don\'t support NOT NULL constraint'
        'without default value')
    def test0010required_field_missing(self):
        'Test error message when a required field is missing'
        # Each entry is a falsy-but-valid value for a required field:
        # the create must fail on the *other*, missing required field.
        fields = {
            'desc': '',
            'integer': 0,
            }
        for key, value in fields.iteritems():
            # Fresh transaction per iteration: the failed create aborts it.
            with Transaction().start(DB_NAME, USER, context=CONTEXT):
                try:
                    self.modelsql.create([{key: value}])
                except UserError, err:
                    # message must not quote key
                    msg = "'%s' not missing but quoted in error: '%s'" % (
                        key, err.message)
                    self.assertTrue(key not in err.message, msg)
                    continue
                # Reached only when create did not raise.
                self.fail('UserError should be caught')
def test0020check_timestamp(self): 'Test check timestamp' # cursor must be committed between each changes otherwise NOW() returns # always the same timestamp. with Transaction().start(DB_NAME, USER, context=CONTEXT) as transaction: cursor = transaction.cursor record, = self.modelsql_timestamp.create([{}]) cursor.commit() timestamp = self.modelsql_timestamp.read([record.id], ['_timestamp'])[0]['_timestamp'] if backend.name() in ('sqlite', 'mysql'): # timestamp precision of sqlite is the second time.sleep(1) self.modelsql_timestamp.write([record], {}) cursor.commit() transaction.timestamp[str(record)] = timestamp self.assertRaises(ConcurrencyException, self.modelsql_timestamp.write, [record], {}) transaction.timestamp[str(record)] = timestamp self.assertRaises(ConcurrencyException, self.modelsql_timestamp.delete, [record]) transaction.timestamp.pop(str(record), None) self.modelsql_timestamp.write([record], {}) cursor.commit() self.modelsql_timestamp.delete([record]) cursor.commit()
class ModelSQLTestCase(unittest.TestCase):
    'Test ModelSQL'
    # Newer test-case style: module activated once per class, tests run
    # under the @with_transaction decorator.

    @classmethod
    def setUpClass(cls):
        activate_module('tests')

    @unittest.skipIf(
        backend.name() == 'sqlite',
        'SQLite not concerned because tryton don\'t set "NOT NULL"'
        'constraint: "ALTER TABLE" don\'t support NOT NULL constraint'
        'without default value')
    @with_transaction()
    def test_required_field_missing(self):
        'Test error message when a required field is missing'
        pool = Pool()
        Modelsql = pool.get('test.modelsql')
        transaction = Transaction()
        # Falsy-but-valid values: the failure must come from the other,
        # missing required field, and must not quote this key.
        fields = {
            'desc': '',
            'integer': 0,
            }
        for key, value in fields.iteritems():
            try:
                Modelsql.create([{key: value}])
            except UserError, err:
                # message must not quote key
                msg = "'%s' not missing but quoted in error: '%s'" % (
                    key, err.message)
                self.assertTrue(key not in err.message, msg)
            else:
                self.fail('UserError should be caught')
            # Reset the aborted transaction before the next iteration.
            transaction.rollback()
def test_check_timestamp(self):
    'Test check timestamp'
    pool = Pool()
    ModelsqlTimestamp = pool.get('test.modelsql.timestamp')
    transaction = Transaction()
    # transaction must be committed between each changes otherwise NOW()
    # returns always the same timestamp.
    record, = ModelsqlTimestamp.create([{}])
    transaction.commit()

    # Remember the _timestamp of the first revision of the record.
    timestamp = ModelsqlTimestamp.read([record.id],
        ['_timestamp'])[0]['_timestamp']

    # Consistency fix: other versions of this test also sleep on mysql,
    # whose timestamps (like sqlite's) only have second precision; the
    # next write must land in a later second to get a new timestamp.
    if backend.name() in ('sqlite', 'mysql'):
        time.sleep(1)

    ModelsqlTimestamp.write([record], {})
    transaction.commit()

    # A stale client-side timestamp must make write and delete fail.
    transaction.timestamp[str(record)] = timestamp
    self.assertRaises(ConcurrencyException,
        ModelsqlTimestamp.write, [record], {})

    transaction.timestamp[str(record)] = timestamp
    self.assertRaises(ConcurrencyException,
        ModelsqlTimestamp.delete, [record])

    # Once the stale timestamp is dropped, operations succeed again.
    transaction.timestamp.pop(str(record), None)
    ModelsqlTimestamp.write([record], {})
    transaction.commit()
    ModelsqlTimestamp.delete([record])
    transaction.commit()
def test0020check_timestamp(self):
    'Test check timestamp'
    # NOW() yields one value per transaction, so commit between
    # modifications to obtain distinct revision timestamps.
    with Transaction().start(DB_NAME, USER,
            context=CONTEXT) as transaction:
        cursor = transaction.cursor
        record, = self.modelsql_timestamp.create([{}])
        cursor.commit()

        # Timestamp of the first revision, reused below as a stale value.
        timestamp = self.modelsql_timestamp.read(
            [record.id], ['_timestamp'])[0]['_timestamp']

        if backend.name() in ('sqlite', 'mysql'):
            # Second-level precision: make the next write a second later.
            time.sleep(1)

        self.modelsql_timestamp.write([record], {})
        cursor.commit()

        # With an outdated timestamp, both write and delete must be
        # rejected by the optimistic concurrency check.
        stale_operations = (
            lambda: self.modelsql_timestamp.write([record], {}),
            lambda: self.modelsql_timestamp.delete([record]),
            )
        for operation in stale_operations:
            transaction.timestamp[str(record)] = timestamp
            self.assertRaises(ConcurrencyException, operation)

        # Dropping the stale timestamp restores normal behaviour.
        transaction.timestamp.pop(str(record), None)
        self.modelsql_timestamp.write([record], {})
        cursor.commit()
        self.modelsql_timestamp.delete([record])
        cursor.commit()
def _cast(column):
    "Wrap *column* in SQLite's DATETIME() function; no-op elsewhere."
    if backend.name() != 'sqlite':
        return column

    class SQLiteDateTime(Function):
        # python-sql Function rendering DATETIME(column)
        __slots__ = ()
        _function = 'DATETIME'

    return SQLiteDateTime(column)
def __register__(cls, module_name):
    """
    Init Method

    :param module_name: Name of the module
    """
    super(TimesheetEmployeeDay, cls).__register__(module_name)

    # Tail of the CREATE VIEW statement: view name plus its defining
    # SELECT (hours summed per employee per day). The dialect-specific
    # prefix is prepended below.
    query = '"timesheet_by_employee_by_day" AS ' \
        'SELECT timesheet_line.employee, timesheet_line.date, ' \
        'SUM(timesheet_line.hours) AS sum ' \
        'FROM "timesheet_line" ' \
        'GROUP BY timesheet_line.date, timesheet_line.employee;'
    if backend.name() == 'postgresql':
        Transaction().cursor.execute('CREATE OR REPLACE VIEW ' + query)
    elif backend.name() == 'sqlite':
        # SQLite has no CREATE OR REPLACE VIEW; use IF NOT EXISTS instead.
        Transaction().cursor.execute('CREATE VIEW IF NOT EXISTS ' + query)
def backup_db_cache(name):
    """Dump the current test database into the DB_CACHE directory.

    Does nothing when DB_CACHE is not configured. The dump format
    depends on the backend: a file copy for SQLite, a pg dump for
    PostgreSQL.

    :param name: base name used to build the cache file name
    """
    if DB_CACHE:
        # exist_ok replaces the racy exists()/makedirs() pair: another
        # test process may create the directory between the two calls.
        os.makedirs(DB_CACHE, exist_ok=True)
        backend_name = backend.name()
        cache_file = _db_cache_file(DB_CACHE, name, backend_name)
        if backend_name == 'sqlite':
            _sqlite_copy(cache_file)
        elif backend_name == 'postgresql':
            _pg_dump(cache_file)
def restore_db_cache(name):
    """Restore a cached database dump for *name* if one exists.

    Returns True when a dump was restored (and the pool re-initialized),
    False otherwise.
    """
    restored = False
    if not DB_CACHE:
        return restored
    backend_name = backend.name()
    cache_file = _db_cache_file(DB_CACHE, name, backend_name)
    if os.path.exists(cache_file):
        # Dispatch on backend: plain file copy vs pg restore.
        if backend_name == 'sqlite':
            restored = _sqlite_copy(cache_file, restore=True)
        elif backend_name == 'postgresql':
            restored = _pg_restore(cache_file)
    if restored:
        # Rebuild the pool against the freshly restored database.
        Pool(DB_NAME).init()
    return restored
def _domain_value(self, operator, value):
    "Serialize a domain value into its stored JSON form for comparison."
    # SQLite's json_extract returns 0/1 for JSON false/true, so booleans
    # are compared in their integer form there.
    if isinstance(value, bool) and backend.name() == 'sqlite':
        value = int(value)
    # Sub-queries are passed through untouched.
    if isinstance(value, (Select, CombiningQuery)):
        return value
    # 'in' / 'not in' take a list: serialize each member.
    if operator.endswith('in'):
        return [dumps(item) for item in value]
    serialized = dumps(value)
    if self.search_unaccented and operator.endswith('ilike'):
        # Case-insensitive matching ignores accents when enabled.
        serialized = Transaction().database.unaccent(serialized)
    return serialized
def __register__(cls, module_name):
    # Table initialization hook; migrates account_id from integer to
    # varchar before letting the standard registration run.
    TableHandler = backend.get('TableHandler')
    cursor = Transaction().cursor

    # Migration from 3.4.0.6 : Migrate account_id field to string
    if backend.name() == 'postgresql':
        # NOTE(review): this query runs before super().__register__(),
        # so it assumes the endicia_configuration table already exists;
        # confirm fresh installs are not affected.
        cursor.execute(
            'SELECT pg_typeof("account_id") '
            'FROM endicia_configuration '
            'LIMIT 1',
        )

        # Check if account_id is integer field
        is_integer = cursor.fetchone()[0] == 'integer'

        if is_integer:
            # Migrate integer field to string
            table = TableHandler(cursor, cls, module_name)
            table.alter_type('account_id', 'varchar')

    super(EndiciaConfiguration, cls).__register__(module_name)
def setup(cls):
    """Create the benchmark table (PostgreSQL only).

    Raises an Exception when the backend is not PostgreSQL or when a
    leftover benchmark table is found in the search path.
    """
    cursor = Transaction().connection.cursor()
    if backend.name() != 'postgresql':
        raise Exception('Database must be postgresql !')
    # Check for test table
    for schema in Transaction().database.search_path:
        # Parameterized query instead of building SQL with Python string
        # interpolation (avoids quoting/injection issues with the schema
        # name as a literal).
        cursor.execute(
            'SELECT 1 FROM information_schema.tables '
            'WHERE table_name = %s AND table_schema = %s',
            ('benchmark_table', schema))
        if cursor.rowcount:
            raise Exception('Benchmark table already in, run '
                'teardown and try again')
    # Create table
    cursor.execute('CREATE TABLE "benchmark_table" ('
        'id integer PRIMARY KEY,'
        'some_string varchar(100),'
        'some_date date)')
help="Don't run doctest") parser.add_argument("-v", action="count", default=0, dest="verbosity", help="Increase verbosity") parser.add_argument('tests', metavar='test', nargs='*') parser.epilog = ( 'The database name can be specified in the DB_NAME ' 'environment variable.\n' "A database dump cache directory can be specified in the DB_CACHE " "environment variable. Dumps will be used to speed up re-run of tests.") opt = parser.parse_args() config.update_etc(opt.config) if backend.name() == 'sqlite': database_name = ':memory:' else: database_name = 'test_' + str(int(time.time())) os.environ.setdefault('DB_NAME', database_name) from trytond.tests.test_tryton import all_suite, modules_suite if not opt.modules: suite = all_suite(opt.tests) else: suite = modules_suite(opt.tests, doc=opt.doctest) result = unittest.TextTestRunner(verbosity=opt.verbosity, failfast=opt.failfast).run(suite) sys.exit(not result.wasSuccessful())
class HistoryTestCase(unittest.TestCase):
    'Test History'
    # Tests for _history models: reading, restoring and searching records
    # at past datetimes. Revisions are keyed on create/write timestamps,
    # so tests commit between changes to obtain distinct timestamps.

    def setUp(self):
        install_module('tests')

    def tearDown(self):
        # Wipe both the live table and its _history companion so every
        # test starts with an empty revision log.
        History = POOL.get('test.history')
        with Transaction().start(DB_NAME, USER,
                context=CONTEXT) as transaction:
            cursor = transaction.cursor
            table = History.__table__()
            history_table = History.__table_history__()
            cursor.execute(*table.delete())
            cursor.execute(*history_table.delete())
            cursor.commit()

    def test0010read(self):
        'Test read history'
        History = POOL.get('test.history')

        # Create some history entry
        # It is needed to commit to have different timestamps
        with Transaction().start(DB_NAME, USER,
                context=CONTEXT) as transaction:
            history = History(value=1)
            history.save()
            history_id = history.id
            first = history.create_date
            transaction.cursor.commit()

        with Transaction().start(DB_NAME, USER,
                context=CONTEXT) as transaction:
            history = History(history_id)
            history.value = 2
            history.save()
            second = history.write_date
            transaction.cursor.commit()

        with Transaction().start(DB_NAME, USER,
                context=CONTEXT) as transaction:
            history = History(history_id)
            history.value = 3
            history.save()
            third = history.write_date
            transaction.cursor.commit()

        with Transaction().start(DB_NAME, USER, context=CONTEXT):
            # Each timestamp must resolve to the value recorded at that
            # revision; datetimes after the last revision see the latest.
            for timestamp, value in [
                    (first, 1),
                    (second, 2),
                    (third, 3),
                    (datetime.datetime.now(), 3),
                    (datetime.datetime.max, 3),
                    ]:
                with Transaction().set_context(_datetime=timestamp):
                    history = History(history_id)
                    self.assertEqual(history.value, value)

            # Before the first revision the record does not exist.
            with Transaction().set_context(_datetime=datetime.datetime.min):
                self.assertRaises(UserError, History.read, [history_id])

    @unittest.skipUnless(backend.name() == 'postgresql',
        'CURRENT_TIMESTAMP as transaction_timestamp is specific to postgresql')
    def test0020read_same_timestamp(self):
        'Test read history with same timestamp'
        History = POOL.get('test.history')

        with Transaction().start(DB_NAME, USER,
                context=CONTEXT) as transaction:
            # Two saves in one transaction share one transaction timestamp.
            history = History(value=1)
            history.save()
            history_id = history.id
            first = history.create_date
            history.value = 2
            history.save()
            second = history.write_date
            self.assertEqual(first, second)
            transaction.cursor.commit()

        with Transaction().start(DB_NAME, USER,
                context=CONTEXT) as transaction:
            history = History(history_id)
            history.value = 3
            history.save()
            third = history.write_date
            transaction.cursor.commit()

        with Transaction().start(DB_NAME, USER, context=CONTEXT):
            # At the shared timestamp the later of the two revisions wins.
            for timestamp, value in [
                    (first, 2),
                    (third, 3),
                    ]:
                with Transaction().set_context(_datetime=timestamp):
                    history = History(history_id)
                    self.assertEqual(history.value, value)

    def test0030history_revisions(self):
        'Test history revisions'
        History = POOL.get('test.history')

        with Transaction().start(DB_NAME, USER,
                context=CONTEXT) as transaction:
            history = History(value=1)
            history.save()
            history_id = history.id
            first = history.create_date
            transaction.cursor.commit()

        with Transaction().start(DB_NAME, USER,
                context=CONTEXT) as transaction:
            history = History(history_id)
            history.value = 2
            history.save()
            second = history.write_date
            transaction.cursor.commit()

        with Transaction().start(DB_NAME, USER,
                context=CONTEXT) as transaction:
            history = History(history_id)
            history.value = 3
            history.save()
            third = history.write_date
            transaction.cursor.commit()

        with Transaction().start(DB_NAME, USER, context=CONTEXT):
            # Revisions are listed most recent first.
            revisions = History.history_revisions([history_id])
            self.assertEqual(revisions, [
                    (third, history_id, u'Administrator'),
                    (second, history_id, u'Administrator'),
                    (first, history_id, u'Administrator'),
                    ])

    def test0040restore_history(self):
        'Test restore history'
        History = POOL.get('test.history')

        with Transaction().start(DB_NAME, USER,
                context=CONTEXT) as transaction:
            history = History(value=1)
            history.save()
            history_id = history.id
            first = history.create_date
            transaction.cursor.commit()

        with Transaction().start(DB_NAME, USER,
                context=CONTEXT) as transaction:
            history = History(history_id)
            history.value = 2
            history.save()
            transaction.cursor.commit()

        with Transaction().start(DB_NAME, USER, context=CONTEXT):
            # Restoring at the first timestamp brings back value 1.
            History.restore_history([history_id], first)
            history = History(history_id)
            self.assertEqual(history.value, 1)

        with Transaction().start(DB_NAME, USER, context=CONTEXT):
            # Restoring before any revision removes the record.
            History.restore_history([history_id], datetime.datetime.min)
            self.assertRaises(UserError, History.read, [history_id])

        with Transaction().start(DB_NAME, USER, context=CONTEXT):
            History.delete([History(history_id)])
            # NOTE(review): 'transaction' here is the variable leaked from
            # the earlier with-blocks (this one has no 'as') -- confirm
            # the commit targets the intended cursor.
            transaction.cursor.commit()

        with Transaction().start(DB_NAME, USER, context=CONTEXT):
            # Restoring after a delete still leaves the record deleted.
            History.restore_history([history_id], datetime.datetime.max)
            self.assertRaises(UserError, History.read, [history_id])

    def test0041restore_history_before(self):
        'Test restore history before'
        History = POOL.get('test.history')

        with Transaction().start(DB_NAME, USER,
                context=CONTEXT) as transaction:
            history = History(value=1)
            history.save()
            history_id = history.id
            transaction.cursor.commit()

        with Transaction().start(DB_NAME, USER,
                context=CONTEXT) as transaction:
            history = History(history_id)
            history.value = 2
            history.save()
            second = history.write_date
            transaction.cursor.commit()

        with Transaction().start(DB_NAME, USER,
                context=CONTEXT) as transaction:
            history = History(history_id)
            history.value = 3
            history.save()
            transaction.cursor.commit()

        with Transaction().start(DB_NAME, USER, context=CONTEXT):
            # restore_history_before restores the revision strictly
            # preceding the given timestamp.
            History.restore_history_before([history_id], second)
            history = History(history_id)
            self.assertEqual(history.value, 1)

    @unittest.skipUnless(backend.name() == 'postgresql',
        'CURRENT_TIMESTAMP as transaction_timestamp is specific to postgresql')
    def test0045restore_history_same_timestamp(self):
        'Test restore history with same timestamp'
        History = POOL.get('test.history')

        with Transaction().start(DB_NAME, USER,
                context=CONTEXT) as transaction:
            # Two revisions created within one transaction share the same
            # timestamp.
            history = History(value=1)
            history.save()
            history_id = history.id
            first = history.create_date
            history.value = 2
            history.save()
            second = history.create_date
            self.assertEqual(first, second)
            transaction.cursor.commit()

        with Transaction().start(DB_NAME, USER,
                context=CONTEXT) as transaction:
            history = History(history_id)
            history.value = 3
            history.save()
            transaction.cursor.commit()

        with Transaction().start(DB_NAME, USER, context=CONTEXT):
            # The later revision of the shared-timestamp pair (value 2)
            # is the one restored.
            History.restore_history([history_id], first)
            history = History(history_id)
            self.assertEqual(history.value, 2)

    def test0050ordered_search(self):
        'Test ordered search of history models'
        History = POOL.get('test.history')
        order = [('value', 'ASC')]

        with Transaction().start(DB_NAME, USER,
                context=CONTEXT) as transaction:
            history = History(value=1)
            history.save()
            first_id = history.id
            first_stamp = history.create_date
            transaction.cursor.commit()

        with Transaction().start(DB_NAME, USER,
                context=CONTEXT) as transaction:
            history = History(value=2)
            history.save()
            second_id = history.id
            second_stamp = history.create_date
            transaction.cursor.commit()

        with Transaction().start(DB_NAME, USER,
                context=CONTEXT) as transaction:
            first, second = History.search([], order=order)
            self.assertEqual(first.id, first_id)
            self.assertEqual(second.id, second_id)
            # Raising first's value above second's inverts the ASC order.
            first.value = 3
            first.save()
            third_stamp = first.write_date
            transaction.cursor.commit()

        results = [
            (first_stamp, [first]),
            (second_stamp, [first, second]),
            (third_stamp, [second, first]),
            (datetime.datetime.now(), [second, first]),
            (datetime.datetime.max, [second, first]),
            ]
        with Transaction().start(DB_NAME, USER, context=CONTEXT):
            for timestamp, instances in results:
                with Transaction().set_context(_datetime=timestamp):
                    records = History.search([], order=order)
                    self.assertEqual(records, instances)

        with Transaction().start(DB_NAME, USER,
                context=CONTEXT) as transaction:
            to_delete, _ = History.search([], order=order)
            self.assertEqual(to_delete.id, second.id)
            History.delete([to_delete])
            transaction.cursor.commit()

        # After the delete, historical datetimes still see the deleted
        # record; only current/future datetimes exclude it.
        results = [
            (first_stamp, [first]),
            (second_stamp, [first, second]),
            (third_stamp, [second, first]),
            (datetime.datetime.now(), [first]),
            (datetime.datetime.max, [first]),
            ]
        with Transaction().start(DB_NAME, USER, context=CONTEXT):
            for timestamp, instances in results:
                with Transaction().set_context(_datetime=timestamp,
                        from_test=True):
                    records = History.search([], order=order)
                    self.assertEqual(records, instances)

    @unittest.skipUnless(backend.name() == 'postgresql',
        'CURRENT_TIMESTAMP as transaction_timestamp is specific to postgresql')
    def test0060_ordered_search_same_timestamp(self):
        'Test ordered search with same timestamp'
        History = POOL.get('test.history')
        order = [('value', 'ASC')]

        with Transaction().start(DB_NAME, USER,
                context=CONTEXT) as transaction:
            # Both revisions share one transaction timestamp.
            history = History(value=1)
            history.save()
            first_stamp = history.create_date
            history.value = 4
            history.save()
            second_stamp = history.write_date
            self.assertEqual(first_stamp, second_stamp)
            transaction.cursor.commit()

        results = [
            (second_stamp, [history], [4]),
            (datetime.datetime.now(), [history], [4]),
            (datetime.datetime.max, [history], [4]),
            ]
        with Transaction().start(DB_NAME, USER, context=CONTEXT):
            for timestamp, instances, values in results:
                with Transaction().set_context(_datetime=timestamp,
                        last_test=True):
                    records = History.search([], order=order)
                    self.assertEqual(records, instances)
                    self.assertEqual([x.value for x in records], values)

    def test0070_browse(self):
        'Test browsing history'
        History = POOL.get('test.history')
        Line = POOL.get('test.history.line')

        with Transaction().start(DB_NAME, USER,
                context=CONTEXT) as transaction:
            history = History(value=1)
            history.save()
            history_id = history.id
            line_a = Line(name='a', history=history)
            line_a.save()
            line_a_id = line_a.id
            line_b = Line(name='b', history=history)
            line_b.save()
            line_b_id = line_b.id
            first_stamp = line_b.create_date
            transaction.cursor.commit()

        with Transaction().start(DB_NAME, USER,
                context=CONTEXT) as transaction:
            history = History(history_id)
            history.value = 2
            history.save()
            Line.delete([Line(line_b_id)])
            line_a = Line(line_a_id)
            line_a.name = 'c'
            line_a.save()
            second_stamp = line_a.write_date
            transaction.cursor.commit()

        with Transaction().start(DB_NAME, USER,
                context=CONTEXT) as transaction:
            # Browsing without _datetime sees the current state; with
            # _datetime it sees the related lines as of that revision.
            history = History(history_id)
            self.assertEqual(history.value, 2)
            self.assertEqual([l.name for l in history.lines], ['c'])

            with Transaction().set_context(_datetime=first_stamp):
                history = History(history_id)
                self.assertEqual(history.value, 1)
                self.assertEqual([l.name for l in history.lines],
                    ['a', 'b'])

            with Transaction().set_context(_datetime=second_stamp):
                history = History(history_id)
                self.assertEqual(history.value, 2)
                self.assertEqual([l.name for l in history.lines], ['c'])

    def test0080_search_cursor_max(self):
        'Test search with number of history entries at cursor.IN_MAX'
        History = POOL.get('test.history')

        with Transaction().start(DB_NAME, USER,
                context=CONTEXT) as transaction:
            cursor = transaction.cursor
            history = History(value=-1)
            history.save()
            # Produce IN_MAX + 1 revisions of the same record.
            for history.value in range(cursor.IN_MAX + 1):
                history.save()

            with transaction.set_context(_datetime=datetime.datetime.max):
                record, = History.search([])
                self.assertEqual(record.value, cursor.IN_MAX)

    def test0090_search_cursor_max_entries(self):
        'Test search for skipping first history entries at cursor.IN_MAX'
        History = POOL.get('test.history')

        with Transaction().start(DB_NAME, USER,
                context=CONTEXT) as transaction:
            cursor = transaction.cursor
            # Two records, each with IN_MAX + 1 revisions.
            for i in xrange(0, 2):
                history = History(value=-1)
                history.save()
                for history.value in range(cursor.IN_MAX + 1):
                    history.save()

            with transaction.set_context(_datetime=datetime.datetime.max):
                records = History.search([])
                self.assertEqual({r.value for r in records},
                    {cursor.IN_MAX})
                self.assertEqual(len(records), 2)

    def test0100_search_cursor_max_histories(self):
        'Test search with number of histories at cursor.IN_MAX'
        History = POOL.get('test.history')

        with Transaction().start(DB_NAME, USER,
                context=CONTEXT) as transaction:
            cursor = transaction.cursor
            # IN_MAX + 1 distinct records, one revision each.
            n = cursor.IN_MAX + 1
            History.create([{'value': 1}] * n)

            with transaction.set_context(_datetime=datetime.datetime.max):
                records = History.search([])
                self.assertEqual({r.value for r in records}, {1})
                self.assertEqual(len(records), n)
('selects', 'not in', ['baz']), ]) foo_baz = Selection.search([ ('selects', 'not in', ['foo', 'baz']), ]) empty = Selection.search([ ('selects', 'not in', []), ]) self.assertEqual(foo, []) self.assertEqual(baz, [selection]) self.assertEqual(foo_baz, []) self.assertEqual(empty, [selection]) @unittest.skipIf(backend.name() != 'postgresql', 'jsonb only supported by postgresql') class FieldMultiSelectionJSONBTestCase(FieldMultiSelectionTestCase): @classmethod def setUpClass(cls): super().setUpClass() cls.setup_model() @classmethod @with_transaction() def setup_model(cls): connection = Transaction().connection Database = backend.get('Database') if Database().get_version(connection) < (9, 2): return pool = Pool()
def setUpClass(cls):
    # Announce which database backend this test run targets.
    # NOTE(review): a @classmethod decorator is presumably applied outside
    # this view for unittest's setUpClass hook -- confirm.
    print "====== Tests are running on %s ========" % backend.name()
# New transaction has still empty cache transaction3 = transaction1.new_transaction() self.addCleanup(transaction3.stop) self.assertEqual(cache.get('foo'), None) @with_transaction() def test_memory_cache_expire(self): "Test expired cache" cache_expire.set('foo', "bar") time.sleep(cache_expire.duration.total_seconds()) self.assertEqual(cache_expire.get('foo'), None) @unittest.skipIf(backend.name() == 'sqlite', "SQLite has not channel") class MemoryCacheChannelTestCase(MemoryCacheTestCase): "Test Cache with channel" def setUp(self): super().setUp() clear_timeout = cache_mod._clear_timeout cache_mod._clear_timeout = 0 self.addCleanup(setattr, cache_mod, '_clear_timeout', clear_timeout) def wait_cache_sync(self): time.sleep(1) @unittest.skip("No cache sync on transaction start with channel") def test_memory_cache_sync(self): super().test_memory_cache_sync()
self.create_schema() dict_, = Dict.create([{ 'dico': { 'a': 1, 'type': 'arabic' }, }]) self.assertDictEqual(dict_.dico_string_keys, { 'a': 'A', 'type': "Type" }) @unittest.skipUnless(backend.name() == 'postgresql', "unaccent works only on postgresql") class FieldDictUnaccentedTestCase(UnaccentedTestCase): "Test Field Dict with unaccented searched" @classmethod def setUpClass(cls): activate_module('tests') super().setUpClass() @with_transaction() def test_search_unaccented_off(self): "Test searches without the unaccented feature" pool = Pool() Dict = pool.get('test.dict_unaccented_off') dict_, = Dict.create([{
class FieldDictTestCase(unittest.TestCase): "Test Field Dict" @classmethod def setUpClass(cls): activate_module('tests') def create_schema(self): DictSchema = Pool().get('test.dict.schema') DictSchema.create([{ 'name': 'a', 'string': 'A', 'type_': 'integer', }, { 'name': 'b', 'string': 'B', 'type_': 'integer', }, { 'name': 'type', 'string': 'Type', 'type_': 'selection', 'selection': ('arabic: Arabic\n' 'hexa: Hexadecimal'), }]) def set_jsonb(self, table): cursor = Transaction().connection.cursor() cursor.execute('ALTER TABLE "%s" ' 'ALTER COLUMN dico TYPE json USING dico::json' % table) @with_transaction() def test_create(self): "Test create dict" Dict = Pool().get('test.dict') self.create_schema() dict_, = Dict.create([{ 'dico': { 'a': 1, 'b': 2 }, }]) self.assertDictEqual(dict_.dico, {'a': 1, 'b': 2}) @with_transaction() def test_create_without_schema(self): "Test create dict without schema" Dict = Pool().get('test.dict') dict_, = Dict.create([{ 'dico': { 'z': 26 }, }]) self.assertDictEqual(dict_.dico, {'z': 26}) @with_transaction() def test_create_without_default(self): "Test create dict without default" Dict = Pool().get('test.dict') self.create_schema() dict_, = Dict.create([{}]) self.assertEqual(dict_.dico, None) @with_transaction() def test_create_with_default(self): "Test create dict without default" Dict = Pool().get('test.dict_default') self.create_schema() dict_, = Dict.create([{}]) self.assertDictEqual(dict_.dico, {'a': 1}) @with_transaction() def test_create_required_with_value(self): "Test create dict required with value" Dict = Pool().get('test.dict_required') self.create_schema() dict_, = Dict.create([{ 'dico': { 'a': 1 }, }]) self.assertDictEqual(dict_.dico, {'a': 1}) @with_transaction() def test_create_required_without_value(self): "Test create dict required without value" Dict = Pool().get('test.dict_required') self.create_schema() with self.assertRaises(RequiredValidationError): Dict.create([{}]) @with_transaction() def 
test_create_required_with_empty(self): "Test create dict required without value" Dict = Pool().get('test.dict_required') self.create_schema() with self.assertRaises(RequiredValidationError): Dict.create([{ 'dico': {}, }]) @with_transaction() def test_create_selection(self): "Test create dict with selection" Dict = Pool().get('test.dict') self.create_schema() dict_, = Dict.create([{ 'dico': { 'type': 'arabic' }, }]) self.assertDictEqual(dict_.dico, {'type': 'arabic'}) @with_transaction() def test_invalid_selection_schema(self): "Test invalid selection schema" pool = Pool() DictSchema = pool.get('test.dict.schema') with self.assertRaises(SelectionError): DictSchema.create([{ 'name': 'selection', 'string': "Selection", 'type_': 'selection', 'selection': 'foo', }]) @with_transaction() @unittest.skipIf(backend.name() != 'postgresql', 'jsonb only supported by postgresql') def test_create_jsonb(self): "Test create dict as jsonb" connection = Transaction().connection Database = backend.get('Database') if Database().get_version(connection) < (9, 2): return Dict = Pool().get('test.dict_jsonb') self.set_jsonb(Dict._table) dict_, = Dict.create([{ 'dico': { 'a': 1, 'b': 2 }, }]) self.assertDictEqual(dict_.dico, {'a': 1, 'b': 2}) @with_transaction() def test_write(self): "Test write dict" Dict = Pool().get('test.dict') self.create_schema() dict_, = Dict.create([{ 'dico': { 'a': 1, 'b': 2 }, }]) Dict.write([dict_], { 'dico': { 'a': 2 }, }) self.assertDictEqual(dict_.dico, {'a': 2}) @with_transaction() def test_write_wthout_schema(self): "Test write dict without schema" Dict = Pool().get('test.dict') dict_, = Dict.create([{ 'dico': { 'z': 26 }, }]) Dict.write([dict_], { 'dico': { 'y': 1 }, }) self.assertDictEqual(dict_.dico, {'y': 1}) @with_transaction() @unittest.skipIf(backend.name() != 'postgresql', 'jsonb only supported by postgresql') def test_write_jsonb(self): "Test write dict as jsonb" connection = Transaction().connection Database = backend.get('Database') if 
Database().get_version(connection) < (9, 2): return Dict = Pool().get('test.dict_jsonb') self.set_jsonb(Dict._table) dict_, = Dict.create([{ 'dico': { 'a': 1, 'b': 2 }, }]) Dict.write([dict_], {'dico': {'z': 26}}) self.assertDictEqual(dict_.dico, {'z': 26}) @with_transaction() def test_search_element_equals(self): "Test search dict element equals" pool = Pool() Dict = pool.get('test.dict_noschema') dict_, = Dict.create([{ 'dico': { 'a': 'Foo' }, }]) dicts_foo = Dict.search([ ('dico.a', '=', "Foo"), ]) dicts_bar = Dict.search([ ('dico.a', '=', "Bar"), ]) dicts_foo_b = Dict.search([ ('dico.b', '=', "Foo"), ]) self.assertListEqual(dicts_foo, [dict_]) self.assertListEqual(dicts_bar, []) self.assertListEqual(dicts_foo_b, []) @with_transaction() def test_search_element_equals_none(self): "Test search dict element equals None" pool = Pool() Dict = pool.get('test.dict_noschema') dict_, = Dict.create([{ 'dico': { 'a': None }, }]) dicts = Dict.search([ ('dico.a', '=', None), ]) self.assertListEqual(dicts, [dict_]) @with_transaction() def test_search_non_element_equals_none(self): "Test search dict non element equals None" pool = Pool() Dict = pool.get('test.dict_noschema') dict_, = Dict.create([{ 'dico': { 'a': "Foo" }, }]) dicts = Dict.search([ ('dico.b', '=', None), ]) self.assertListEqual(dicts, [dict_]) @with_transaction() def test_search_element_non_equals(self): "Test search dict element non equals" pool = Pool() Dict = pool.get('test.dict_noschema') dict_, = Dict.create([{ 'dico': { 'a': "Foo" }, }]) dicts_foo = Dict.search([ ('dico.a', '!=', "Foo"), ]) dicts_bar = Dict.search([ ('dico.a', '!=', "Bar"), ]) dicts_foo_b = Dict.search([ ('dico.b', '!=', "Foo"), ]) self.assertListEqual(dicts_foo, []) self.assertListEqual(dicts_bar, [dict_]) self.assertListEqual(dicts_foo_b, []) @with_transaction() def test_search_element_non_equals_none(self): "Test search dict element non equals None" pool = Pool() Dict = pool.get('test.dict_noschema') dict_, = Dict.create([{ 'dico': { 
'a': None }, }]) dicts = Dict.search([ ('dico.a', '!=', None), ]) self.assertListEqual(dicts, []) @with_transaction() def test_search_non_element_non_equals_none(self): "Test search dict non element non equals None" pool = Pool() Dict = pool.get('test.dict_noschema') dict_, = Dict.create([{ 'dico': { 'a': "Foo" }, }]) dicts = Dict.search([ ('dico.b', '!=', None), ]) self.assertListEqual(dicts, []) @with_transaction() def test_search_element_equals_true(self): "Test search dict element equals True" pool = Pool() Dict = pool.get('test.dict_noschema') dict1, dict2 = Dict.create([{ 'dico': { 'a': True }, }, { 'dico': { 'a': False }, }]) dicts = Dict.search([ ('dico.a', '=', True), ]) self.assertListEqual(dicts, [dict1]) @with_transaction() def test_search_element_equals_false(self): "Test search dict element equals False" pool = Pool() Dict = pool.get('test.dict_noschema') dict1, dict2 = Dict.create([{ 'dico': { 'a': True }, }, { 'dico': { 'a': False }, }]) dicts = Dict.search([ ('dico.a', '=', False), ]) self.assertListEqual(dicts, [dict2]) @with_transaction() def test_search_element_non_equals_true(self): "Test search dict element non equals True" pool = Pool() Dict = pool.get('test.dict_noschema') dict_, = Dict.create([{ 'dico': { 'a': True }, }]) dicts = Dict.search([ ('dico.a', '!=', True), ]) self.assertListEqual(dicts, []) @with_transaction() def test_search_element_in(self): "Test search dict element in" pool = Pool() Dict = pool.get('test.dict_noschema') dict_, = Dict.create([{ 'dico': { 'a': "Foo" }, }]) dicts_foo = Dict.search([ ('dico.a', 'in', ["Foo"]), ]) dicts_bar = Dict.search([ ('dico.a', 'in', ["Bar"]), ]) dicts_empty = Dict.search([ ('dico.a', 'in', []), ]) dicts_foo_b = Dict.search([ ('dico.b', 'in', ["Foo"]), ]) self.assertListEqual(dicts_foo, [dict_]) self.assertListEqual(dicts_bar, []) self.assertListEqual(dicts_empty, []) self.assertListEqual(dicts_foo_b, []) @with_transaction() def test_search_element_in_none(self): "Test search dict element in 
[None]" pool = Pool() Dict = pool.get('test.dict_noschema') dict_, = Dict.create([{ 'dico': { 'a': None }, }]) dicts = Dict.search([ ('dico.a', 'in', [None]), ]) self.assertListEqual(dicts, [dict_]) @with_transaction() def test_search_element_not_in(self): "Test search dict element not in" pool = Pool() Dict = pool.get('test.dict_noschema') dict_, = Dict.create([{ 'dico': { 'a': "Foo" }, }]) dicts_foo = Dict.search([ ('dico.a', 'not in', ["Foo"]), ]) dicts_bar = Dict.search([ ('dico.a', 'not in', ["Bar"]), ]) dicts_empty = Dict.search([ ('dico.a', 'not in', []), ]) dicts_foo_b = Dict.search([ ('dico.b', 'not in', ["Foo"]), ]) self.assertListEqual(dicts_foo, []) self.assertListEqual(dicts_bar, [dict_]) self.assertListEqual(dicts_empty, [dict_]) self.assertListEqual(dicts_foo_b, []) @with_transaction() def test_search_element_not_in_none(self): "Test search dict element not in [None]" pool = Pool() Dict = pool.get('test.dict_noschema') dict_, = Dict.create([{ 'dico': { 'a': None }, }]) dicts = Dict.search([ ('dico.a', 'not in', [None]), ]) self.assertListEqual(dicts, []) @with_transaction() def test_search_element_less(self): "Test search dict element less than" pool = Pool() Dict = pool.get('test.dict_noschema') dict_, = Dict.create([{ 'dico': { 'a': 1.1 }, }]) dicts_5 = Dict.search([ ('dico.a', '<', 5), ]) dicts__5 = Dict.search([ ('dico.a', '<', -5), ]) dicts_1_1 = Dict.search([ ('dico.a', '<', 1.1), ]) self.assertListEqual(dicts_5, [dict_]) self.assertListEqual(dicts__5, []) self.assertListEqual(dicts_1_1, []) @with_transaction() def test_search_element_less_equals(self): "Test search dict element less than or equals" pool = Pool() Dict = pool.get('test.dict_noschema') dict_, = Dict.create([{ 'dico': { 'a': 1.1 }, }]) dicts_5 = Dict.search([ ('dico.a', '<=', 5), ]) dicts__5 = Dict.search([ ('dico.a', '<=', -5), ]) dicts_1_1 = Dict.search([ ('dico.a', '<=', 1.1), ]) self.assertListEqual(dicts_5, [dict_]) self.assertListEqual(dicts__5, []) 
self.assertListEqual(dicts_1_1, [dict_]) @with_transaction() def test_search_element_greater(self): "Test search dict element greater than" pool = Pool() Dict = pool.get('test.dict_noschema') dict_, = Dict.create([{ 'dico': { 'a': 1.1 }, }]) dicts_5 = Dict.search([ ('dico.a', '>', 5), ]) dicts__5 = Dict.search([ ('dico.a', '>', -5), ]) dicts_1_1 = Dict.search([ ('dico.a', '>', 1.1), ]) self.assertListEqual(dicts_5, []) self.assertListEqual(dicts__5, [dict_]) self.assertListEqual(dicts_1_1, []) @with_transaction() def test_search_element_greater_equals(self): "Test search dict element greater than or equals" pool = Pool() Dict = pool.get('test.dict_noschema') dict_, = Dict.create([{ 'dico': { 'a': 1.1 }, }]) dicts_5 = Dict.search([ ('dico.a', '>=', 5), ]) dicts__5 = Dict.search([ ('dico.a', '>=', -5), ]) dicts_1_1 = Dict.search([ ('dico.a', '>=', 1.1), ]) self.assertListEqual(dicts_5, []) self.assertListEqual(dicts__5, [dict_]) self.assertListEqual(dicts_1_1, [dict_]) @with_transaction() def test_search_element_like(self): "Test search dict element like" pool = Pool() Dict = pool.get('test.dict_noschema') dict_, = Dict.create([{ 'dico': { 'a': "Bar" }, }]) dicts_bar = Dict.search([ ('dico.a', 'like', "Bar"), ]) dicts_b = Dict.search([ ('dico.a', 'like', "B%"), ]) dicts_foo = Dict.search([ ('dico.a', 'like', "Foo"), ]) dicts_f = Dict.search([ ('dico.a', 'like', "F%"), ]) dicts_b_b = Dict.search([ ('dico.b', 'like', "B%"), ]) self.assertListEqual(dicts_bar, [dict_]) self.assertListEqual(dicts_b, [dict_]) self.assertListEqual(dicts_foo, []) self.assertListEqual(dicts_f, []) self.assertListEqual(dicts_b_b, []) @with_transaction() def test_search_element_ilike(self): "Test search dict element ilike" pool = Pool() Dict = pool.get('test.dict_noschema') dict_, = Dict.create([{ 'dico': { 'a': "Bar" }, }]) dicts_bar = Dict.search([ ('dico.a', 'ilike', "bar"), ]) dicts_b = Dict.search([ ('dico.a', 'ilike', "b%"), ]) dicts_foo = Dict.search([ ('dico.a', 'ilike', "foo"), ]) 
dicts_f = Dict.search([ ('dico.a', 'ilike', "f%"), ]) dicts_b_b = Dict.search([ ('dico.b', 'ilike', "b%"), ]) self.assertListEqual(dicts_bar, [dict_]) self.assertListEqual(dicts_b, [dict_]) self.assertListEqual(dicts_foo, []) self.assertListEqual(dicts_f, []) self.assertListEqual(dicts_b_b, []) @with_transaction() def test_search_element_not_like(self): "Test search dict element not like" pool = Pool() Dict = pool.get('test.dict_noschema') dict_, = Dict.create([{ 'dico': { 'a': "Bar" }, }]) dicts_bar = Dict.search([ ('dico.a', 'not like', "Bar"), ]) dicts_b = Dict.search([ ('dico.a', 'not like', "B%"), ]) dicts_foo = Dict.search([ ('dico.a', 'not like', "Foo"), ]) dicts_f = Dict.search([ ('dico.a', 'not like', "F%"), ]) dicts_b_b = Dict.search([ ('dico.b', 'not like', "B%"), ]) self.assertListEqual(dicts_bar, []) self.assertListEqual(dicts_b, []) self.assertListEqual(dicts_foo, [dict_]) self.assertListEqual(dicts_f, [dict_]) self.assertListEqual(dicts_b_b, []) @with_transaction() def test_search_element_not_ilike(self): "Test search dict element not ilike" pool = Pool() Dict = pool.get('test.dict_noschema') dict_, = Dict.create([{ 'dico': { 'a': "Bar" }, }]) dicts_bar = Dict.search([ ('dico.a', 'not ilike', "bar"), ]) dicts_b = Dict.search([ ('dico.a', 'not ilike', "b%"), ]) dicts_foo = Dict.search([ ('dico.a', 'not ilike', "foo"), ]) dicts_f = Dict.search([ ('dico.a', 'not ilike', "f%"), ]) dicts_b_b = Dict.search([ ('dico.b', 'not ilike', "b%"), ]) self.assertListEqual(dicts_bar, []) self.assertListEqual(dicts_b, []) self.assertListEqual(dicts_foo, [dict_]) self.assertListEqual(dicts_f, [dict_]) self.assertListEqual(dicts_b_b, []) @with_transaction() @unittest.skipIf(backend.name() != 'postgresql', 'jsonb only supported by postgresql') def test_search_element_jsonb(self): "Test search dict element on jsonb" connection = Transaction().connection Database = backend.get('Database') if Database().get_version(connection) < (9, 2): return pool = Pool() Dict = 
pool.get('test.dict_noschema') self.set_jsonb(Dict._table) dict_, = Dict.create([{ 'dico': { 'a': 'Foo' }, }]) dicts = Dict.search([ ('dico.a', '=', "Foo"), ]) self.assertListEqual(dicts, [dict_]) @with_transaction() def test_search_order_element(self): "Test search order by dict element" pool = Pool() Dict = pool.get('test.dict_noschema') for value in [5, 3, 2]: Dict.create([{'dico': {'a': 5 - value, 'b': value}}]) records = Dict.search([], order=[('dico.b', 'ASC')]) values = [r.dico['b'] for r in records] self.assertListEqual(values, [2, 3, 5]) @with_transaction() def test_string(self): "Test string dict" Dict = Pool().get('test.dict') self.create_schema() dict_, = Dict.create([{ 'dico': { 'a': 1, 'type': 'arabic' }, }]) self.assertDictEqual(dict_.dico_string, {'a': 1, 'type': "Arabic"}) @with_transaction() def test_string_keys(self): "Test string keys dict" Dict = Pool().get('test.dict') self.create_schema() dict_, = Dict.create([{ 'dico': { 'a': 1, 'type': 'arabic' }, }]) self.assertDictEqual(dict_.dico_string_keys, { 'a': 'A', 'type': "Type" })
class HistoryTestCase(unittest.TestCase):
    'Test History'
    # Exercises ModelSQL ``_history`` support: every create/write/delete is
    # recorded in a companion history table, and records can be read,
    # searched, or restored as of any timestamp via the ``_datetime``
    # context key.

    @classmethod
    def setUpClass(cls):
        # Activate the module that defines the test.history* models.
        activate_module('tests')

    @with_transaction()
    def tearDown(self):
        # Several tests commit; wipe both the live tables and their history
        # tables so committed rows do not leak into the next test.
        pool = Pool()
        History = pool.get('test.history')
        HistoryLine = pool.get('test.history.line')
        transaction = Transaction()
        cursor = transaction.connection.cursor()
        for Model in [History, HistoryLine]:
            table = Model.__table__()
            history_table = Model.__table_history__()
            cursor.execute(*table.delete())
            cursor.execute(*history_table.delete())
        transaction.commit()

    @with_transaction()
    def test_read(self):
        'Test read history'
        pool = Pool()
        History = pool.get('test.history')
        transaction = Transaction()
        # Create some history entry
        # It is needed to commit to have different timestamps
        history = History(value=1)
        history.save()
        history_id = history.id
        first = history.create_date
        transaction.commit()

        history = History(history_id)
        history.value = 2
        history.save()
        second = history.write_date
        transaction.commit()

        history = History(history_id)
        history.value = 3
        history.save()
        third = history.write_date
        transaction.commit()

        # Reading at each recorded timestamp must return the value that was
        # current at that time; later timestamps see the latest value.
        for timestamp, value in [
                (first, 1),
                (second, 2),
                (third, 3),
                (datetime.datetime.now(), 3),
                (datetime.datetime.max, 3),
                ]:
            with Transaction().set_context(_datetime=timestamp):
                history = History(history_id)
                self.assertEqual(history.value, value)

        # Before the record ever existed, reading it must fail.
        with Transaction().set_context(_datetime=datetime.datetime.min):
            self.assertRaises(UserError, History.read, [history_id])

    @unittest.skipUnless(
        backend.name() == 'postgresql',
        'CURRENT_TIMESTAMP as transaction_timestamp is specific to '
        'postgresql')
    @with_transaction()
    def test_read_same_timestamp(self):
        'Test read history with same timestamp'
        pool = Pool()
        History = pool.get('test.history')
        transaction = Transaction()
        # Two writes inside the same transaction share one timestamp on
        # postgresql; the read must then return the latest of the two.
        history = History(value=1)
        history.save()
        history_id = history.id
        first = history.create_date
        history.value = 2
        history.save()
        second = history.write_date
        self.assertEqual(first, second)
        transaction.commit()

        history = History(history_id)
        history.value = 3
        history.save()
        third = history.write_date
        transaction.commit()

        for timestamp, value in [
                (first, 2),
                (third, 3),
                ]:
            with Transaction().set_context(_datetime=timestamp):
                history = History(history_id)
                self.assertEqual(history.value, value)

    @with_transaction()
    def test_history_revisions(self):
        'Test history revisions'
        pool = Pool()
        History = pool.get('test.history')
        transaction = Transaction()
        history = History(value=1)
        history.save()
        history_id = history.id
        first = history.create_date
        transaction.commit()

        history = History(history_id)
        history.value = 2
        history.save()
        second = history.write_date
        transaction.commit()

        history = History(history_id)
        history.value = 3
        history.save()
        third = history.write_date
        transaction.commit()

        # Revisions are returned newest first, with the user's name.
        revisions = History.history_revisions([history_id])
        self.assertEqual(revisions, [
                (third, history_id, 'Administrator'),
                (second, history_id, 'Administrator'),
                (first, history_id, 'Administrator'),
                ])

    @with_transaction()
    def test_restore_history(self):
        'Test restore history'
        pool = Pool()
        History = pool.get('test.history')
        transaction = Transaction()
        history = History(value=1)
        history.save()
        history_id = history.id
        first = history.create_date
        transaction.commit()

        history = History(history_id)
        history.value = 2
        history.save()
        transaction.commit()

        # Restoring to the first timestamp brings the old value back.
        History.restore_history([history_id], first)
        history = History(history_id)
        self.assertEqual(history.value, 1)
        transaction.rollback()

        # Restoring to before creation deletes the record.
        History.restore_history([history_id], datetime.datetime.min)
        self.assertRaises(UserError, History.read, [history_id])
        transaction.rollback()

        # A deleted record stays deleted when restored to a later time.
        History.delete([History(history_id)])
        transaction.commit()
        History.restore_history([history_id], datetime.datetime.max)
        self.assertRaises(UserError, History.read, [history_id])

    @with_transaction()
    def test_restore_history_before(self):
        'Test restore history before'
        pool = Pool()
        History = pool.get('test.history')
        transaction = Transaction()
        history = History(value=1)
        history.save()
        history_id = history.id
        transaction.commit()

        history = History(history_id)
        history.value = 2
        history.save()
        second = history.write_date
        transaction.commit()

        history = History(history_id)
        history.value = 3
        history.save()
        transaction.commit()

        # restore_history_before(second) restores the revision strictly
        # preceding ``second``, i.e. the original value.
        History.restore_history_before([history_id], second)
        history = History(history_id)
        self.assertEqual(history.value, 1)

    @unittest.skipUnless(
        backend.name() == 'postgresql',
        'CURRENT_TIMESTAMP as transaction_timestamp is specific to '
        'postgresql')
    @with_transaction()
    def test_restore_history_same_timestamp(self):
        'Test restore history with same timestamp'
        pool = Pool()
        History = pool.get('test.history')
        transaction = Transaction()
        history = History(value=1)
        history.save()
        history_id = history.id
        first = history.create_date
        history.value = 2
        history.save()
        second = history.create_date
        self.assertEqual(first, second)
        transaction.commit()

        history = History(history_id)
        history.value = 3
        history.save()
        transaction.commit()

        # Restoring to the shared timestamp must pick the later of the two
        # revisions written at that instant.
        History.restore_history([history_id], first)
        history = History(history_id)
        self.assertEqual(history.value, 2)

    @with_transaction()
    def test_ordered_search(self):
        'Test ordered search of history models'
        pool = Pool()
        History = pool.get('test.history')
        transaction = Transaction()
        order = [('value', 'ASC')]
        history = History(value=1)
        history.save()
        first_id = history.id
        first_stamp = history.create_date
        transaction.commit()

        history = History(value=2)
        history.save()
        second_id = history.id
        second_stamp = history.create_date
        transaction.commit()

        first, second = History.search([], order=order)
        self.assertEqual(first.id, first_id)
        self.assertEqual(second.id, second_id)

        first.value = 3
        first.save()
        third_stamp = first.write_date
        transaction.commit()

        # Ordering is evaluated against the values as of each timestamp.
        results = [
            (first_stamp, [first]),
            (second_stamp, [first, second]),
            (third_stamp, [second, first]),
            (datetime.datetime.now(), [second, first]),
            (datetime.datetime.max, [second, first]),
            ]
        for timestamp, instances in results:
            with Transaction().set_context(_datetime=timestamp):
                records = History.search([], order=order)
                self.assertEqual(records, instances)
            # NOTE(review): rollback placed after each context block, per
            # the token order of the source — confirm against upstream.
            transaction.rollback()

        to_delete, _ = History.search([], order=order)
        self.assertEqual(to_delete.id, second.id)
        History.delete([to_delete])
        transaction.commit()

        # Past timestamps still see the deleted record; current and future
        # timestamps do not.
        results = [
            (first_stamp, [first]),
            (second_stamp, [first, second]),
            (third_stamp, [second, first]),
            (datetime.datetime.now(), [first]),
            (datetime.datetime.max, [first]),
            ]
        for timestamp, instances in results:
            # NOTE(review): the extra ``from_test=True`` context key is kept
            # as found — its purpose is not visible from this file.
            with Transaction().set_context(_datetime=timestamp,
                    from_test=True):
                records = History.search([], order=order)
                self.assertEqual(records, instances)
            transaction.rollback()

    @unittest.skipUnless(
        backend.name() == 'postgresql',
        'CURRENT_TIMESTAMP as transaction_timestamp is specific to '
        'postgresql')
    @with_transaction()
    def test_ordered_search_same_timestamp(self):
        'Test ordered search with same timestamp'
        pool = Pool()
        History = pool.get('test.history')
        transaction = Transaction()
        order = [('value', 'ASC')]
        history = History(value=1)
        history.save()
        first_stamp = history.create_date
        history.value = 4
        history.save()
        second_stamp = history.write_date
        self.assertEqual(first_stamp, second_stamp)
        transaction.commit()

        results = [
            (second_stamp, [history], [4]),
            (datetime.datetime.now(), [history], [4]),
            (datetime.datetime.max, [history], [4]),
            ]
        for timestamp, instances, values in results:
            # NOTE(review): the ``last_test=True`` context key is kept as
            # found — its purpose is not visible from this file.
            with Transaction().set_context(_datetime=timestamp,
                    last_test=True):
                records = History.search([], order=order)
                self.assertEqual(records, instances)
                self.assertEqual([x.value for x in records], values)
            transaction.rollback()

    @with_transaction()
    def test_ordered_search_nested(self):
        "Test ordered search nested"
        pool = Pool()
        History = pool.get('test.history')
        HistoryLine = pool.get('test.history.line')
        transaction = Transaction()
        # Order lines by a field of their (historized) parent record.
        order = [('history.value', 'ASC')]
        history = History(value=1)
        history.save()
        history2 = History(value=2)
        history2.save()
        line = HistoryLine(history=history)
        line.save()
        line2 = HistoryLine(history=history2)
        line2.save()
        first_stamp = line2.create_date
        transaction.commit()

        history.value = 3
        history.save()
        second_stamp = history.write_date
        transaction.commit()

        # The parent's historical value at each timestamp drives the order.
        results = [
            (first_stamp, [line, line2]),
            (second_stamp, [line2, line]),
            ]
        for timestamp, instances in results:
            with Transaction().set_context(_datetime=timestamp):
                records = HistoryLine.search([], order=order)
                self.assertListEqual(records, instances)

    @with_transaction()
    def test_browse(self):
        'Test browsing history'
        pool = Pool()
        History = pool.get('test.history')
        Line = pool.get('test.history.line')
        transaction = Transaction()
        history = History(value=1)
        history.save()
        history_id = history.id
        line_a = Line(name='a', history=history)
        line_a.save()
        line_a_id = line_a.id
        line_b = Line(name='b', history=history)
        line_b.save()
        line_b_id = line_b.id
        first_stamp = line_b.create_date
        # ``stamp`` is stored on the record itself so lines_at_stamp can be
        # resolved later relative to that frozen point in time.
        history.stamp = first_stamp
        history.save()
        transaction.commit()

        history = History(history_id)
        history.value = 2
        history.save()
        Line.delete([Line(line_b_id)])
        line_a = Line(line_a_id)
        line_a.name = 'c'
        line_a.save()
        second_stamp = line_a.write_date
        transaction.commit()

        # Current state: value 2, only the renamed line remains, but the
        # stamp-based relation still resolves to the original lines.
        history = History(history_id)
        self.assertEqual(history.value, 2)
        self.assertEqual([l.name for l in history.lines], ['c'])
        self.assertEqual(history.stamp, first_stamp)
        self.assertEqual(
            [l.name for l in history.lines_at_stamp], ['a', 'b'])

        with Transaction().set_context(_datetime=first_stamp):
            history = History(history_id)
            self.assertEqual(history.value, 1)
            self.assertEqual([l.name for l in history.lines], ['a', 'b'])

        with Transaction().set_context(_datetime=second_stamp):
            history = History(history_id)
            self.assertEqual(history.value, 2)
            self.assertEqual([l.name for l in history.lines], ['c'])
            self.assertEqual(history.stamp, first_stamp)
            self.assertEqual(
                [l.name for l in history.lines_at_stamp], ['a', 'b'])

    @with_transaction()
    def test_search_cursor_max(self):
        'Test search with number of history entries at database.IN_MAX'
        pool = Pool()
        History = pool.get('test.history')
        transaction = Transaction()
        database = transaction.database
        history = History(value=-1)
        history.save()
        # ``for history.value in range(...)`` rebinds the attribute each
        # iteration, creating IN_MAX + 1 history entries for one record.
        for history.value in range(database.IN_MAX + 1):
            history.save()

        with transaction.set_context(_datetime=datetime.datetime.max):
            record, = History.search([])
            self.assertEqual(record.value, database.IN_MAX)

    @with_transaction()
    def test_search_cursor_max_entries(self):
        'Test search for skipping first history entries at database.IN_MAX'
        pool = Pool()
        History = pool.get('test.history')
        transaction = Transaction()
        database = transaction.database
        # Two records, each with IN_MAX + 1 history entries.
        for i in range(0, 2):
            history = History(value=-1)
            history.save()
            for history.value in range(database.IN_MAX + 1):
                history.save()

        with transaction.set_context(_datetime=datetime.datetime.max):
            records = History.search([])
            self.assertEqual(
                {r.value for r in records}, {database.IN_MAX})
            self.assertEqual(len(records), 2)

    @with_transaction()
    def test_search_cursor_max_histories(self):
        'Test search with number of histories at database.IN_MAX'
        pool = Pool()
        History = pool.get('test.history')
        transaction = Transaction()
        database = transaction.database
        # More distinct records than fit in one IN clause.
        n = database.IN_MAX + 1
        History.create([{'value': 1}] * n)

        with transaction.set_context(_datetime=datetime.datetime.max):
            records = History.search([])
            self.assertEqual({r.value for r in records}, {1})
            self.assertEqual(len(records), n)
# Configure the test database name (exported as DB_NAME) before the test
# suite starts.
import os
import time

from trytond import backend

# ``backend.name`` changed from a callable to a plain attribute between
# trytond versions; support both forms.
try:
    backend_name = backend.name()
except TypeError:
    backend_name = backend.name

if backend_name == 'sqlite':
    # SQLite tests run against an in-memory database.
    database_name = ':memory:'
else:
    # Unique, timestamp-based name so repeated runs do not collide.
    database_name = 'test_' + str(int(time.time()))
# setdefault: an explicitly provided DB_NAME wins over the computed one.
os.environ.setdefault('DB_NAME', database_name)
class ModelSQLTestCase(unittest.TestCase):
    'Test ModelSQL'
    # Covers ModelSQL behavior: read (plain, contextual, related fields),
    # required-field validation, optimistic-concurrency timestamps, field
    # setters, ordering of NULLs, translation cleanup, SQL constraints and
    # record locking.

    @classmethod
    def setUpClass(cls):
        # Activate the module that defines the test.modelsql* models.
        activate_module('tests')

    @with_transaction()
    def test_read(self):
        "Test simple read"
        pool = Pool()
        Model = pool.get('test.modelsql.read')
        foo, bar = Model.create([{'name': "Foo"}, {'name': "Bar"}])
        values = Model.read([foo.id, bar.id], ['name'])
        # read() does not guarantee ordering; sort by id before comparing.
        self.assertEqual(sorted(values, key=lambda v: v['id']), [{
                    'id': foo.id,
                    'name': "Foo",
                    }, {
                    'id': bar.id,
                    'name': "Bar",
                    }])

    @with_transaction()
    def test_read_context_id(self):
        "Test read with ID in context of field"
        pool = Pool()
        Model = pool.get('test.modelsql.read.context_id')
        record, = Model.create([{'name': "Record"}])
        values = Model.read([record.id], ['name'])
        self.assertEqual(values, [{'id': record.id, 'name': "Record"}])

    @with_transaction()
    def test_read_related_2one(self):
        "Test read with related Many2One"
        pool = Pool()
        Model = pool.get('test.modelsql.read')
        Target = pool.get('test.modelsql.read.target')
        target, = Target.create([{'name': "Target"}])
        record, = Model.create([{'target': target.id}])
        # 'target.name' asks read() to dereference the relation; the result
        # nests the related values under the 'target.' key.
        values = Model.read([record.id], ['target.name'])
        self.assertEqual(values, [{
                    'id': record.id,
                    'target.': {
                        'id': target.id,
                        'name': "Target",
                        },
                    }])

    @with_transaction()
    def test_read_related_2one_empty(self):
        "Test read with empty related Many2One"
        pool = Pool()
        Model = pool.get('test.modelsql.read')
        record, = Model.create([{'target': None}])
        values = Model.read([record.id], ['target.name'])
        # An empty relation yields None rather than a nested dict.
        self.assertEqual(values, [{
                    'id': record.id,
                    'target.': None,
                    }])

    @with_transaction()
    def test_read_related_reference(self):
        "Test read with related Reference"
        pool = Pool()
        Model = pool.get('test.modelsql.read')
        Target = pool.get('test.modelsql.read.target')
        target, = Target.create([{'name': "Target"}])
        # Reference fields store 'model,id' strings; str(target) builds one.
        record, = Model.create([{'reference': str(target)}])
        values = Model.read([record.id], ['reference.name'])
        self.assertEqual(values, [{
                    'id': record.id,
                    'reference.': {
                        'id': target.id,
                        'name': "Target",
                        },
                    }])

    @with_transaction()
    def test_read_related_reference_empty(self):
        "Test read with empty related Reference"
        pool = Pool()
        Model = pool.get('test.modelsql.read')
        record, = Model.create([{'name': "Foo", 'reference': None}])
        values = Model.read([record.id], ['reference.name'])
        self.assertEqual(values, [{
                    'id': record.id,
                    'reference.': None,
                    }])

    @with_transaction()
    def test_read_related_2many(self):
        "Test read with related One2Many"
        pool = Pool()
        Model = pool.get('test.modelsql.read')
        Target = pool.get('test.modelsql.read.target')
        target, = Target.create([{'name': "Target"}])
        record, = Model.create([{'targets': [('add', [target.id])]}])
        values = Model.read([record.id], ['targets.name'])
        # xxx2many related reads return a list of dicts.
        self.assertEqual(values, [{
                    'id': record.id,
                    'targets.': [{
                            'id': target.id,
                            'name': "Target",
                            }],
                    }])

    @with_transaction()
    def test_read_related_2many_empty(self):
        "Test read with empty related One2Many"
        pool = Pool()
        Model = pool.get('test.modelsql.read')
        record, = Model.create([{'targets': None}])
        values = Model.read([record.id], ['targets.name'])
        # Empty xxx2many yields an empty list, not None.
        self.assertEqual(values, [{
                    'id': record.id,
                    'targets.': [],
                    }])

    @with_transaction()
    def test_read_related_2many_multiple(self):
        "Test read with multiple related One2Many"
        pool = Pool()
        Model = pool.get('test.modelsql.read')
        Target = pool.get('test.modelsql.read.target')
        target1, target2 = Target.create([{
                    'name': "Target 1",
                    }, {
                    'name': "Target 2",
                    }])
        record, = Model.create([{
                    'targets': [('add', [target1.id, target2.id])],
                    }])
        values = Model.read([record.id], ['targets.name'])
        self.assertEqual(values, [{
                    'id': record.id,
                    'targets.': [{
                            'id': target1.id,
                            'name': "Target 1",
                            }, {
                            'id': target2.id,
                            'name': "Target 2",
                            }],
                    }])

    @with_transaction()
    def test_read_related_mixed(self):
        "Test read mixed related"
        pool = Pool()
        Model = pool.get('test.modelsql.read')
        Target = pool.get('test.modelsql.read.target')
        target1, target2, target3 = Target.create([{
                    'name': "Target 1",
                    }, {
                    'name': "Target 2",
                    }, {
                    'name': "Target 3",
                    }])
        record1, record2 = Model.create([{
                    'name': "Foo",
                    'target': target1.id,
                    'targets': [('add', [target1.id, target2.id])],
                    }, {
                    'name': "Bar",
                    'reference': str(target2),
                    'targets': [('add', [target3.id])],
                    }])
        # Mixing plain fields, the raw relation field and its dotted
        # variant in a single read call.
        values = Model.read(
            [record1.id, record2.id],
            ['name', 'target', 'target.name', 'targets', 'targets.name'])
        self.assertEqual(sorted(values, key=lambda v: v['id']), [{
                    'id': record1.id,
                    'name': "Foo",
                    'target': target1.id,
                    'target.': {
                        'id': target1.id,
                        'name': "Target 1",
                        },
                    # Raw xxx2many values come back as a tuple of ids.
                    'targets': (target1.id, target2.id),
                    'targets.': [{
                            'id': target1.id,
                            'name': "Target 1",
                            }, {
                            'id': target2.id,
                            'name': "Target 2",
                            }],
                    }, {
                    'id': record2.id,
                    'name': "Bar",
                    'target': None,
                    'target.': None,
                    'targets': (target3.id, ),
                    'targets.': [{
                            'id': target3.id,
                            'name': "Target 3",
                            }],
                    }])

    @with_transaction()
    def test_read_related_nested(self):
        "Test read with nested related"
        pool = Pool()
        Model = pool.get('test.modelsql.read')
        Target = pool.get('test.modelsql.read.target')
        target, = Target.create([{'name': "Target"}])
        record, = Model.create([{
                    'name': "Record",
                    'targets': [('add', [target.id])],
                    }])
        # Two levels of dotted traversal: record -> targets -> parent.
        values = Model.read([record.id], ['targets.parent.name'])
        self.assertEqual(values, [{
                    'id': record.id,
                    'targets.': [{
                            'id': target.id,
                            'parent.': {
                                'id': record.id,
                                'name': "Record",
                                },
                            }],
                    }])

    # NOTE(review): the three reason strings below concatenate without
    # separating spaces ('"NOT NULL"constraint', 'constraintwithout') —
    # cosmetic only, as it is just the skip message.
    @unittest.skipIf(
        backend.name() == 'sqlite',
        'SQLite not concerned because tryton don\'t set "NOT NULL"'
        'constraint: "ALTER TABLE" don\'t support NOT NULL constraint'
        'without default value')
    @with_transaction()
    def test_required_field_missing(self):
        'Test error message when a required field is missing'
        pool = Pool()
        Modelsql = pool.get('test.modelsql')
        transaction = Transaction()
        fields = {
            'desc': '',
            'integer': 0,
            }
        for key, value in fields.items():
            try:
                Modelsql.create([{key: value}])
            except RequiredValidationError as err:
                # message must not quote key
                msg = "'%s' not missing but quoted in error: '%s'" % (
                    key, err.message)
                self.assertTrue(key not in err.message, msg)
            else:
                self.fail('RequiredValidationError should be caught')
            transaction.rollback()

    @with_transaction()
    def test_check_timestamp(self):
        'Test check timestamp'
        pool = Pool()
        ModelsqlTimestamp = pool.get('test.modelsql.timestamp')
        transaction = Transaction()
        # transaction must be committed between each changes otherwise NOW()
        # returns always the same timestamp.
        record, = ModelsqlTimestamp.create([{}])
        transaction.commit()
        timestamp = ModelsqlTimestamp.read([record.id],
            ['_timestamp'])[0]['_timestamp']
        if backend.name() == 'sqlite':
            # timestamp precision of sqlite is the second
            time.sleep(1)
        ModelsqlTimestamp.write([record], {})
        transaction.commit()
        # Replaying the stale timestamp must trigger the optimistic
        # concurrency check on both write and delete.
        transaction.timestamp[str(record)] = timestamp
        self.assertRaises(ConcurrencyException,
            ModelsqlTimestamp.write, [record], {})
        transaction.timestamp[str(record)] = timestamp
        self.assertRaises(ConcurrencyException,
            ModelsqlTimestamp.delete, [record])
        # Without a stale timestamp, write and delete succeed.
        transaction.timestamp.pop(str(record), None)
        ModelsqlTimestamp.write([record], {})
        transaction.commit()
        ModelsqlTimestamp.delete([record])
        transaction.commit()

    @with_transaction()
    def test_create_field_set(self):
        'Test field.set in create'
        pool = Pool()
        Model = pool.get('test.modelsql.field_set')
        with patch.object(Model, 'set_field') as setter:
            records = Model.create([{'field': 1}])
            setter.assert_called_with(records, 'field', 1)

        # Different values are not grouped
        with patch.object(Model, 'set_field') as setter:
            records = Model.create([{'field': 1}, {'field': 2}])
            setter.assert_has_calls([
                    call([records[0]], 'field', 1),
                    call([records[1]], 'field', 2),
                    ])

        # Same values are grouped in one call
        with patch.object(Model, 'set_field') as setter:
            records = Model.create([{'field': 1}, {'field': 1}])
            setter.assert_called_with(records, 'field', 1)

        # Mixed values are grouped per value
        with patch.object(Model, 'set_field') as setter:
            records = Model.create(
                [{'field': 1}, {'field': 2}, {'field': 1}])
            setter.assert_has_calls([
                    call([records[0], records[2]], 'field', 1),
                    call([records[1]], 'field', 2),
                    ])

    @with_transaction()
    def test_integrity_error_with_created_record(self):
        "Test integrity error with created record"
        pool = Pool()
        ParentModel = pool.get('test.modelsql.one2many')
        TargetModel = pool.get('test.modelsql.one2many.target')
        # Create target record without required name
        # to ensure create_records is filled to prevent raising
        # foreign_model_missing
        record = ParentModel(name="test")
        record.targets = [TargetModel()]
        with self.assertRaises(RequiredValidationError) as cm:
            record.save()
        err = cm.exception
        # The error must name both the missing field and its model.
        self.assertIn(TargetModel.name.string, err.message)
        self.assertIn(TargetModel.__doc__, err.message)

    @with_transaction()
    def test_null_ordering(self):
        'Test NULL ordering'
        pool = Pool()
        NullOrder = pool.get('test.modelsql.null_order')
        NullOrder.create([{
                    'integer': 1,
                    }, {
                    'integer': 3,
                    }, {
                    'integer': None,
                    }])
        # All four NULLS FIRST/LAST x ASC/DESC combinations.
        integers = NullOrder.search([],
            order=[('integer', 'ASC NULLS FIRST')])
        self.assertListEqual([i.integer for i in integers], [None, 1, 3])

        integers = NullOrder.search([],
            order=[('integer', 'DESC NULLS FIRST')])
        self.assertListEqual([i.integer for i in integers], [None, 3, 1])

        integers = NullOrder.search([],
            order=[('integer', 'ASC NULLS LAST')])
        self.assertListEqual([i.integer for i in integers], [1, 3, None])

        integers = NullOrder.search([],
            order=[('integer', 'DESC NULLS LAST')])
        self.assertListEqual([i.integer for i in integers], [3, 1, None])

    @with_transaction()
    def test_delete_translations(self):
        "Test delete record trigger delete of translations"
        pool = Pool()
        Model = pool.get('test.modelsql.translation')
        Translation = pool.get('ir.translation')
        record, = Model.create([{'name': "Translation"}])
        with patch.object(Translation, 'delete_ids') as delete_ids:
            Model.delete([record])
            delete_ids.assert_called_with(
                'test.modelsql.translation', 'model', [record.id])

    @with_transaction()
    def test_constraint_check(self):
        "Test check constraint"
        pool = Pool()
        Model = pool.get('test.modelsql.check')
        record, = Model.create([{'value': 50}])
        self.assertTrue(record.id)

    @with_transaction()
    def test_constraint_check_null(self):
        "Test check constraint with null"
        pool = Pool()
        Model = pool.get('test.modelsql.check')
        # NULL passes a SQL CHECK constraint.
        record, = Model.create([{'value': None}])
        self.assertTrue(record.id)

    @with_transaction()
    def test_constraint_check_invalid(self):
        "Test invalid check constraint"
        pool = Pool()
        Model = pool.get('test.modelsql.check')
        with self.assertRaises(SQLConstraintError):
            Model.create([{'value': 10}])

    @with_transaction()
    def test_constraint_unique(self):
        "Test unique constraint"
        pool = Pool()
        Model = pool.get('test.modelsql.unique')
        records = Model.create([{'value': 1}, {'value': 2}])
        self.assertEqual(len(records), 2)

    @with_transaction()
    def test_constraint_unique_null(self):
        "Test unique constraint with null"
        pool = Pool()
        Model = pool.get('test.modelsql.unique')
        # Multiple NULLs do not violate a UNIQUE constraint.
        records = Model.create([{'value': None}, {'value': None}])
        self.assertEqual(len(records), 2)

    @with_transaction()
    def test_constraint_unique_invalid(self):
        "Test invalid unique constraint"
        pool = Pool()
        Model = pool.get('test.modelsql.unique')
        with self.assertRaises(SQLConstraintError):
            Model.create([{'value': 42}, {'value': 42}])

    @with_transaction()
    def test_constraint_exclude(self):
        "Test exclude constraint"
        pool = Pool()
        Model = pool.get('test.modelsql.exclude')
        records = Model.create([{'value': 1}, {'value': 2}])
        self.assertEqual(len(records), 2)

    @with_transaction()
    def test_constraint_exclude_exclusion(self):
        "Test exclude constraint exclusion"
        pool = Pool()
        Model = pool.get('test.modelsql.exclude')
        # Values matched by the constraint's WHERE exclusion may repeat.
        records = Model.create([{'value': -1}, {'value': -1}])
        self.assertEqual(len(records), 2)

    @with_transaction()
    def test_constraint_exclude_invalid(self):
        "Test invalid exclude constraint"
        pool = Pool()
        Model = pool.get('test.modelsql.exclude')
        with self.assertRaises(SQLConstraintError):
            Model.create([{'value': 42}, {'value': 42}])

    @unittest.skipIf(backend.name() == 'sqlite',
        'SQLite does not have lock at table level but on file')
    @with_transaction()
    def test_lock(self):
        "Test lock"
        pool = Pool()
        Model = pool.get('test.modelsql.lock')
        DatabaseOperationalError = backend.get('DatabaseOperationalError')
        transaction = Transaction()
        record_id = Model.create([{}])[0].id
        transaction.commit()

        # A second transaction must fail to lock a record already locked
        # by the first one.
        with transaction.new_transaction():
            record = Model(record_id)
            record.lock()
            with transaction.new_transaction():
                record = Model(record_id)
                with self.assertRaises(DatabaseOperationalError):
                    record.lock()
class FieldDictTestCase(unittest.TestCase):
    "Test Field Dict"
    # Covers the Dict field: create/write with and without a schema,
    # required handling, selection keys, jsonb storage on postgresql, and
    # the translated string representations.

    @classmethod
    def setUpClass(cls):
        # Activate the module that defines the test.dict* models.
        activate_module('tests')

    def create_schema(self):
        # Helper (not a test): declares the keys 'a', 'b' (integers) and
        # 'type' (selection) usable in the dict field.
        DictSchema = Pool().get('test.dict.schema')
        DictSchema.create([{
                    'name': 'a',
                    'string': 'A',
                    'type_': 'integer',
                    }, {
                    'name': 'b',
                    'string': 'B',
                    'type_': 'integer',
                    }, {
                    'name': 'type',
                    'string': 'Type',
                    'type_': 'selection',
                    'selection': ('arabic: Arabic\n'
                        'hexa: Hexadecimal'),
                    }])

    def set_jsonb(self, table):
        # Helper: switch the dico column to native json storage.
        # Table name comes from trusted model metadata, not user input.
        cursor = Transaction().connection.cursor()
        cursor.execute('ALTER TABLE "%s" '
            'ALTER COLUMN dico TYPE json USING dico::json' % table)

    @with_transaction()
    def test_create(self):
        "Test create dict"
        Dict = Pool().get('test.dict')
        self.create_schema()
        dict_, = Dict.create([{
                    'dico': {'a': 1, 'b': 2},
                    }])
        self.assertDictEqual(dict_.dico, {'a': 1, 'b': 2})

    @with_transaction()
    def test_create_without_schema(self):
        "Test create dict without schema"
        Dict = Pool().get('test.dict')
        # Keys not declared in any schema are stored as-is.
        dict_, = Dict.create([{
                    'dico': {'z': 26},
                    }])
        self.assertDictEqual(dict_.dico, {'z': 26})

    @with_transaction()
    def test_create_without_default(self):
        "Test create dict without default"
        Dict = Pool().get('test.dict')
        self.create_schema()
        dict_, = Dict.create([{}])
        self.assertEqual(dict_.dico, None)

    @with_transaction()
    def test_create_with_default(self):
        "Test create dict with default"
        Dict = Pool().get('test.dict_default')
        self.create_schema()
        dict_, = Dict.create([{}])
        self.assertDictEqual(dict_.dico, {'a': 1})

    @with_transaction()
    def test_create_required_with_value(self):
        "Test create dict required with value"
        Dict = Pool().get('test.dict_required')
        self.create_schema()
        dict_, = Dict.create([{
                    'dico': {'a': 1},
                    }])
        self.assertDictEqual(dict_.dico, {'a': 1})

    @with_transaction()
    def test_create_required_without_value(self):
        "Test create dict required without value"
        Dict = Pool().get('test.dict_required')
        self.create_schema()
        with self.assertRaises(UserError):
            Dict.create([{}])

    @with_transaction()
    def test_create_required_with_empty(self):
        "Test create dict required with empty value"
        Dict = Pool().get('test.dict_required')
        self.create_schema()
        # An empty dict does not satisfy a required Dict field.
        with self.assertRaises(UserError):
            Dict.create([{
                        'dico': {},
                        }])

    @with_transaction()
    def test_create_selection(self):
        "Test create dict with selection"
        Dict = Pool().get('test.dict')
        self.create_schema()
        dict_, = Dict.create([{
                    'dico': {'type': 'arabic'},
                    }])
        self.assertDictEqual(dict_.dico, {'type': 'arabic'})

    @with_transaction()
    @unittest.skipIf(backend.name() != 'postgresql',
        'jsonb only suported by postgresql')
    def test_create_jsonb(self):
        "Test create dict as jsonb"
        connection = Transaction().connection
        Database = backend.get('Database')
        # json column type requires PostgreSQL >= 9.2.
        if Database().get_version(connection) < (9, 2):
            return
        Dict = Pool().get('test.dict_jsonb')
        self.set_jsonb(Dict._table)
        dict_, = Dict.create([{
                    'dico': {'a': 1, 'b': 2},
                    }])
        self.assertDictEqual(dict_.dico, {'a': 1, 'b': 2})

    @with_transaction()
    def test_write(self):
        "Test write dict"
        Dict = Pool().get('test.dict')
        self.create_schema()
        dict_, = Dict.create([{
                    'dico': {'a': 1, 'b': 2},
                    }])
        # write replaces the whole dict value, it does not merge keys.
        Dict.write([dict_], {
                'dico': {'a': 2},
                })
        self.assertDictEqual(dict_.dico, {'a': 2})

    # NOTE(review): method name has a typo ("wthout"); renaming would
    # change the test id, so it is kept as-is.
    @with_transaction()
    def test_write_wthout_schema(self):
        "Test write dict without schema"
        Dict = Pool().get('test.dict')
        dict_, = Dict.create([{
                    'dico': {'z': 26},
                    }])
        Dict.write([dict_], {
                'dico': {'y': 1},
                })
        self.assertDictEqual(dict_.dico, {'y': 1})

    @with_transaction()
    @unittest.skipIf(backend.name() != 'postgresql',
        'jsonb only suported by postgresql')
    def test_write_jsonb(self):
        "Test write dict as jsonb"
        connection = Transaction().connection
        Database = backend.get('Database')
        # json column type requires PostgreSQL >= 9.2.
        if Database().get_version(connection) < (9, 2):
            return
        Dict = Pool().get('test.dict_jsonb')
        self.set_jsonb(Dict._table)
        dict_, = Dict.create([{
                    'dico': {'a': 1, 'b': 2},
                    }])
        Dict.write([dict_], {'dico': {'z': 26}})
        self.assertDictEqual(dict_.dico, {'z': 26})

    @with_transaction()
    def test_string(self):
        "Test string dict"
        Dict = Pool().get('test.dict')
        self.create_schema()
        dict_, = Dict.create([{
                    'dico': {'a': 1, 'type': 'arabic'},
                    }])
        # Selection values are rendered with their display string.
        self.assertDictEqual(
            dict_.dico_string, {'a': 1, 'type': "Arabic"})

    @with_transaction()
    def test_string_keys(self):
        "Test string keys dict"
        Dict = Pool().get('test.dict')
        self.create_schema()
        dict_, = Dict.create([{
                    'dico': {'a': 1, 'type': 'arabic'},
                    }])
        # Maps each stored key to the schema's display string.
        self.assertDictEqual(dict_.dico_string_keys, {
                'a': 'A',
                'type': "Type",
                })
class TestEmailQueue(unittest.TestCase):
    """Test the email queue: queuing, delivery, retry limit and
    transaction safety of the sender.
    """

    def setUp(self):
        """Install the email_queue module before each test runs."""
        trytond.tests.test_tryton.install_module('email_queue')

    @patch("smtplib.SMTP")
    def test_0010_send_mails(self, mock_smtp):
        """Every queued mail is delivered by send_all.

        smtplib.SMTP is patched so no real SMTP traffic happens.
        """
        EmailQueue = POOL.get('email.queue')

        with Transaction().start(DB_NAME, USER, CONTEXT) as transaction:
            # Put some emails in the queue with fake addresses/content
            f = Faker()
            for _ in xrange(10):
                EmailQueue.queue_mail(f.email(), f.email(), f.text())
            # Commit so the records are visible to later transactions
            transaction.cursor.commit()

            self.assertEqual(EmailQueue.search([], count=True), 10)
            self.assertEqual(
                EmailQueue.search([('state', '=', 'outbox')], count=True), 10
            )

        with Transaction().start(DB_NAME, USER, CONTEXT) as transaction:
            # Run the cron method that delivers everything in the outbox
            EmailQueue.send_all()

        with Transaction().start(DB_NAME, USER, CONTEXT) as transaction:
            self.assertEqual(
                EmailQueue.search([('state', '=', 'sent')], count=True), 10
            )

    @patch("smtplib.SMTP")
    @clear_email_queue
    def test_0015_max_attempts(self, mock_smtp):
        """An email that keeps failing moves to the 'failed' state after
        the maximum number of delivery attempts, while the remaining
        emails stay deliverable.
        """
        EmailQueue = POOL.get('email.queue')
        # Make every sendmail call raise an SMTP server error
        mock_smtp.return_value.sendmail.side_effect = BadSMTPServerException()

        with Transaction().start(DB_NAME, USER, CONTEXT) as transaction:
            # Put some emails in queue
            f = Faker()
            for _ in xrange(10):
                EmailQueue.queue_mail(f.email(), f.email(), f.text())
            transaction.cursor.commit()

            self.assertEqual(
                EmailQueue.search([('state', '=', 'outbox')], count=True), 10
            )

        # Try sending the emails.  Each round fails; after these rounds
        # the repeatedly failing email must be in the 'failed' state.
        for _ in xrange(3):
            with Transaction().start(DB_NAME, USER, CONTEXT) as transaction:
                # Run cron method to send mails
                with self.assertRaises(BadSMTPServerException):
                    EmailQueue.send_all()

        with Transaction().start(DB_NAME, USER, CONTEXT) as transaction:
            self.assertEqual(
                EmailQueue.search([('state', '=', 'failed')], count=True), 1
            )
            self.assertEqual(
                EmailQueue.search([('state', '=', 'outbox')], count=True), 9
            )

        # Let the smtp server work again: the remaining emails should be
        # sent while the failed one remains failed
        mock_smtp.return_value.sendmail.side_effect = None
        with Transaction().start(DB_NAME, USER, CONTEXT) as transaction:
            EmailQueue.send_all()

        with Transaction().start(DB_NAME, USER, CONTEXT) as transaction:
            self.assertEqual(
                EmailQueue.search([('state', '=', 'failed')], count=True), 1
            )
            self.assertEqual(
                EmailQueue.search([('state', '=', 'outbox')], count=True), 0
            )
            self.assertEqual(
                EmailQueue.search([('state', '=', 'sent')], count=True), 9
            )

    @unittest.skipIf(
        backend.name() == 'sqlite',
        "Skip txn safety test on SQlite"
    )
    @clear_email_queue
    def test_9999_transaction_safety(self):
        """
        Test the transaction safety of email sender.

        * This test is expected to work only on postgres
        * This should be the last test since this breaks the rule to
          commit within the test creating records
        """
        EmailQueue = POOL.get('email.queue')

        with Transaction().start(DB_NAME, USER, CONTEXT) as transaction:
            # Put some emails in queue
            f = Faker()
            for _ in xrange(10):
                EmailQueue.queue_mail(f.email(), f.email(), f.text())
            transaction.cursor.commit()

        # Collects the ids of emails whose send hit a serialization
        # error.  (Renamed from the misspelled 'searialization_error_q'.)
        serialization_error_q = Queue.Queue(3)

        # A fake smtp server which just sleeps for 5 seconds when
        # sendmail is called, keeping the sending transaction open long
        # enough for the threads to overlap.
        smtp_server = stub(sendmail=lambda *args: time.sleep(5))

        def threaded_send_email(email, smtp_server):
            """Send ``email`` within its own fresh transaction.

            Required because the spawned thread has no transaction and
            send tries to create a new cursor from an existing one.  So
            start the transaction here, refresh the active record and
            call send like the cron would have.
            """
            with Transaction().start(DB_NAME, USER, context=CONTEXT):
                # email active record is from the old transaction, so
                # refresh it in this one.
                email = EmailQueue(email.id)
                database = backend.get('database')
                try:
                    # Now send the email
                    email.send(smtp_server)
                except database.DatabaseOperationalError:
                    # This specific email could not be sent because of a
                    # transaction serialization error
                    serialization_error_q.put(email.id)

        with Transaction().start(DB_NAME, USER, CONTEXT) as transaction:
            email1, email2 = EmailQueue.search(
                [('state', '=', 'outbox')], limit=2
            )
            t1 = threading.Thread(
                target=threaded_send_email, args=(email1, smtp_server)
            )
            t2 = threading.Thread(
                target=threaded_send_email, args=(email2, smtp_server)
            )
            # create another thread with **email1** again. This is
            # expected to fail, though even t1 might fail and this would
            # succeed. Either way we dont care because we only make sure
            # that there is 1 failure and that both email1 and 2 are sent.
            t3 = threading.Thread(
                target=threaded_send_email, args=(email1, smtp_server)
            )

            # start all the threads. The 5 second sleep in the sendmail
            # call simulates delayed execution: t3 is guaranteed to start
            # within 5 seconds of t1, and the assertion below looks for a
            # concurrency-triggered transaction serialisation exception.
            t1.start()
            t2.start()
            t3.start()

            # Blockingly wait till the threads complete
            t1.join()
            t2.join()
            t3.join()

        # 1: Assert that exactly one send hit a serialization error
        self.assertEqual(serialization_error_q.qsize(), 1)
        # 1B: Ensure that the ID is of email1 which was the one sent twice
        self.assertEqual(serialization_error_q.get(), email1.id)

        # 2: Assert that both email 1 and 2 have the sent state
        with Transaction().start(DB_NAME, USER, CONTEXT) as transaction:
            self.assertEqual(EmailQueue(email1.id).state, 'sent')
            self.assertEqual(EmailQueue(email2.id).state, 'sent')

        # 3: Assert that there are 8 emails left in outbox
        with Transaction().start(DB_NAME, USER, CONTEXT) as transaction:
            self.assertEqual(
                EmailQueue.search([('state', '=', 'outbox')], count=True), 8
            )
help="specify config file") parser.add_argument("-f", "--failfast", action="store_true", dest="failfast", help="Stop the test run on the first error or failure") parser.add_argument("-m", "--modules", action="store_true", dest="modules", default=False, help="Run also modules tests") parser.add_argument("--no-doctest", action="store_false", dest="doctest", default=True, help="Don't run doctest") parser.add_argument("-v", action="count", default=0, dest="verbosity", help="Increase verbosity") parser.add_argument('tests', metavar='test', nargs='*') parser.epilog = ('The database name can be specified in the DB_NAME ' 'environment variable.') opt = parser.parse_args() config.update_etc(opt.config) if backend.name() == 'sqlite': database_name = ':memory:' else: database_name = 'test_' + str(int(time.time())) os.environ.setdefault('DB_NAME', database_name) from trytond.tests.test_tryton import all_suite, modules_suite if not opt.modules: suite = all_suite(opt.tests) else: suite = modules_suite(opt.tests, doc=opt.doctest) result = unittest.TextTestRunner( verbosity=opt.verbosity, failfast=opt.failfast).run(suite) sys.exit(not result.wasSuccessful())
class ModelSQLTestCase(unittest.TestCase):
    'Test ModelSQL'

    @classmethod
    def setUpClass(cls):
        # Activate the 'tests' module so its test models are registered.
        activate_module('tests')

    @unittest.skipIf(
        backend.name() == 'sqlite',
        'SQLite not concerned because tryton don\'t set "NOT NULL"'
        'constraint: "ALTER TABLE" don\'t support NOT NULL constraint'
        'without default value')
    @with_transaction()
    def test_required_field_missing(self):
        'Test error message when a required field is missing'
        pool = Pool()
        Modelsql = pool.get('test.modelsql')
        transaction = Transaction()
        # One falsy-but-set value per required field
        fields = {
            'desc': '',
            'integer': 0,
            }
        for key, value in fields.items():
            try:
                Modelsql.create([{key: value}])
            except UserError as err:
                # message must not quote key
                msg = "'%s' not missing but quoted in error: '%s'" % (
                    key, err.message)
                self.assertTrue(key not in err.message, msg)
            else:
                self.fail('UserError should be caught')
            # Clear the failed create before trying the next field
            transaction.rollback()

    @with_transaction()
    def test_check_timestamp(self):
        'Test check timestamp'
        pool = Pool()
        ModelsqlTimestamp = pool.get('test.modelsql.timestamp')
        transaction = Transaction()
        # transaction must be committed between each changes otherwise NOW()
        # returns always the same timestamp.
        record, = ModelsqlTimestamp.create([{}])
        transaction.commit()

        timestamp = ModelsqlTimestamp.read([record.id],
            ['_timestamp'])[0]['_timestamp']

        if backend.name() == 'sqlite':
            # timestamp precision of sqlite is the second
            time.sleep(1)

        # Writing with the freshly-read timestamp is expected to succeed
        transaction.timestamp[str(record)] = timestamp
        ModelsqlTimestamp.write([record], {})
        transaction.commit()

        # That write advanced the stored timestamp, so the old value is
        # now stale: write and delete must raise ConcurrencyException
        transaction.timestamp[str(record)] = timestamp
        self.assertRaises(ConcurrencyException,
            ModelsqlTimestamp.write, [record], {})

        transaction.timestamp[str(record)] = timestamp
        self.assertRaises(ConcurrencyException,
            ModelsqlTimestamp.delete, [record])

        # With a None timestamp the write is expected to go through
        transaction.timestamp[str(record)] = None
        ModelsqlTimestamp.write([record], {})
        transaction.commit()

        # Likewise when the entry is removed entirely
        transaction.timestamp.pop(str(record), None)
        ModelsqlTimestamp.write([record], {})
        transaction.commit()
        ModelsqlTimestamp.delete([record])
        transaction.commit()

    @with_transaction()
    def test_create_field_set(self):
        'Test field.set in create'
        pool = Pool()
        Model = pool.get('test.modelsql.field_set')

        with patch.object(Model, 'set_field') as setter:
            records = Model.create([{'field': 1}])
            setter.assert_called_with(records, 'field', 1)

        # Different values are not grouped
        with patch.object(Model, 'set_field') as setter:
            records = Model.create([{'field': 1}, {'field': 2}])
            setter.assert_has_calls([
                    call([records[0]], 'field', 1),
                    call([records[1]], 'field', 2),
                    ])

        # Same values are grouped in one call
        with patch.object(Model, 'set_field') as setter:
            records = Model.create([{'field': 1}, {'field': 1}])
            setter.assert_called_with(records, 'field', 1)

        # Mixed values are grouped per value
        with patch.object(Model, 'set_field') as setter:
            records = Model.create(
                [{'field': 1}, {'field': 2}, {'field': 1}])
            setter.assert_has_calls([
                    call([records[0], records[2]], 'field', 1),
                    call([records[1]], 'field', 2),
                    ])

    @with_transaction()
    def test_integrity_error_with_created_record(self):
        "Test integrity error with created record"
        pool = Pool()
        ParentModel = pool.get('test.modelsql.one2many')
        TargetModel = pool.get('test.modelsql.one2many.target')
        # Create target record without required name
        # to ensure create_records is filled to prevent raising
        # foreign_model_missing
        record = ParentModel(name="test")
        record.targets = [TargetModel()]
        with self.assertRaises(UserError) as cm:
            record.save()
        err = cm.exception
        msg = 'The field "%s" on "%s" is required.' % (
            TargetModel.name.string, TargetModel.__doc__)
        self.assertEqual(err.message, msg)

    @with_transaction()
    def test_null_ordering(self):
        'Test NULL ordering'
        pool = Pool()
        NullOrder = pool.get('test.modelsql.null_order')
        NullOrder.create([{
                    'integer': 1,
                    }, {
                    'integer': 3,
                    }, {
                    'integer': None,
                    }])
        # Check all four NULLS FIRST/LAST x ASC/DESC combinations
        integers = NullOrder.search([],
            order=[('integer', 'ASC NULLS FIRST')])
        self.assertListEqual([i.integer for i in integers], [None, 1, 3])

        integers = NullOrder.search([],
            order=[('integer', 'DESC NULLS FIRST')])
        self.assertListEqual([i.integer for i in integers], [None, 3, 1])

        integers = NullOrder.search([],
            order=[('integer', 'ASC NULLS LAST')])
        self.assertListEqual([i.integer for i in integers], [1, 3, None])

        integers = NullOrder.search([],
            order=[('integer', 'DESC NULLS LAST')])
        self.assertListEqual([i.integer for i in integers], [3, 1, None])

    @unittest.skip("Translation is disabled")
    @with_transaction()
    def test_delete_translations(self):
        "Test delete record trigger delete of translations"
        pool = Pool()
        Model = pool.get('test.modelsql.translation')
        Translation = pool.get('ir.translation')
        record, = Model.create([{'name': "Translation"}])

        with patch.object(Translation, 'delete_ids') as delete_ids:
            Model.delete([record])
            delete_ids.assert_called_with('test.modelsql.translation',
                'model', [record.id])

    @with_transaction()
    def test_constraint_check(self):
        "Test check constraint"
        pool = Pool()
        Model = pool.get('test.modelsql.check')

        # 50 satisfies the check constraint
        record, = Model.create([{'value': 50}])

        self.assertTrue(record.id)

    @with_transaction()
    def test_constraint_check_null(self):
        "Test check constraint with null"
        pool = Pool()
        Model = pool.get('test.modelsql.check')

        # NULL is accepted by the check constraint
        record, = Model.create([{'value': None}])

        self.assertTrue(record.id)

    @with_transaction()
    def test_constraint_check_invalid(self):
        "Test invalid check constraint"
        pool = Pool()
        Model = pool.get('test.modelsql.check')

        # 10 violates the check constraint
        with self.assertRaises(UserError):
            Model.create([{'value': 10}])

    @with_transaction()
    def test_constraint_unique(self):
        "Test unique constraint"
        pool = Pool()
        Model = pool.get('test.modelsql.unique')

        records = Model.create([{'value': 1}, {'value': 2}])

        self.assertEqual(len(records), 2)

    @with_transaction()
    def test_constraint_unique_null(self):
        "Test unique constraint with null"
        pool = Pool()
        Model = pool.get('test.modelsql.unique')

        # Two NULLs do not conflict on a unique constraint
        records = Model.create([{'value': None}, {'value': None}])

        self.assertEqual(len(records), 2)

    @with_transaction()
    def test_constraint_unique_invalid(self):
        "Test invalid unique constraint"
        pool = Pool()
        Model = pool.get('test.modelsql.unique')

        with self.assertRaises(UserError):
            Model.create([{'value': 42}, {'value': 42}])

    @with_transaction()
    def test_constraint_exclude(self):
        "Test exclude constraint"
        pool = Pool()
        Model = pool.get('test.modelsql.exclude')

        records = Model.create([{'value': 1}, {'value': 2}])

        self.assertEqual(len(records), 2)

    @with_transaction()
    def test_constraint_exclude_exclusion(self):
        "Test exclude constraint exclusion"
        pool = Pool()
        Model = pool.get('test.modelsql.exclude')

        # Rows with condition False are excluded from the constraint,
        # so duplicates are allowed
        records = Model.create([{'value': 1, 'condition': False}] * 2)

        self.assertEqual(len(records), 2)

    @with_transaction()
    def test_constraint_exclude_exclusion_mixed(self):
        "Test exclude constraint exclusion mixed"
        pool = Pool()
        Model = pool.get('test.modelsql.exclude')

        # Only one of the duplicates has condition True, so no conflict
        records = Model.create([{
                    'value': 1,
                    'condition': False,
                    }, {
                    'value': 1,
                    'condition': True,
                    },
                ])

        self.assertEqual(len(records), 2)

    @with_transaction()
    def test_constraint_exclude_invalid(self):
        "Test invalid exclude constraint"
        pool = Pool()
        Model = pool.get('test.modelsql.exclude')

        with self.assertRaises(UserError):
            Model.create([{'value': 42}, {'value': 42}])
class BusTestCase(unittest.TestCase): "Test Bus" @classmethod def setUpClass(cls): activate_module('ir') super().setUpClass() def setUp(self): super().setUp() reset_polling_timeout = bus._long_polling_timeout bus._long_polling_timeout = 1 self.addCleanup(setattr, bus, '_long_polling_timeout', reset_polling_timeout) reset_select_timeout = bus._select_timeout bus._select_timeout = 1 self.addCleanup(setattr, bus, '_select_timeout', reset_select_timeout) def tearDown(self): if DB_NAME in Bus._queues: with Bus._queues_lock: Bus._queues[DB_NAME]['timeout'] = 0 listener = Bus._queues[DB_NAME]['listener'] listener.join() Bus._messages.clear() @with_transaction() def test_notify(self): "Test notify" notify("Test", "Message", user=1) @unittest.skipIf(backend.name() == 'sqlite', 'SQLite has not channel') def test_subscribe_nothing(self): "Test subscribe with nothing" response = Bus.subscribe(DB_NAME, ['user:1']) self.assertEqual(response, {'message': None, 'channel': None}) @unittest.skipIf(backend.name() == 'sqlite', 'SQLite has not channel') def test_subscribe_message(self): "Test subscribe with message" Bus.subscribe(DB_NAME, ['user:1']) transaction = Transaction() with transaction.start(DB_NAME, 1): notify("Test", "Message", user=1) transaction.commit() # Let the listen thread registers the message time.sleep(1) response = Bus.subscribe(DB_NAME, ['user:1']) self.assertTrue(response['message'].pop('message_id')) self.assertEqual( response, { 'message': { 'type': 'notification', 'title': "Test", 'body': "Message", 'priority': 1, }, 'channel': 'user:1', })