def test_traces_async_memory_optimisation(self):
    """ Identical frames should be saved only once.
        We should only have a few entries on a 1 second sleep.
    """
    with Profiler(collectors=['traces_async'], db=None) as res:
        time.sleep(1)
    entry_count = len(res.collectors[0].entries)
    self.assertLess(entry_count, 5)  # ~3

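# A minimal sketch of the idea exercised above, assuming nothing about the real
# collector internals: the memory optimisation amounts to keeping a sample only
# when its stack differs from the previous one.  `dedupe_samples` and the
# list-of-stacks input format are hypothetical, for illustration only.
def dedupe_samples(samples):
    """Collapse consecutive identical stack samples into a single entry."""
    kept = []
    previous = None
    for stack in samples:
        if stack != previous:
            kept.append(stack)
            previous = stack
    return kept

# During a 1 second sleep every sample sees the same frame, so only one entry
# would be kept: dedupe_samples([('time.sleep',)] * 1000) == [('time.sleep',)]
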
def test_execution_context_save(self):
    with Profiler(db=None, collectors=['sql']) as p:
        for letter in ('a', 'b'):
            stack_level = profiler.stack_size()
            with ExecutionContext(letter=letter):
                self.env.cr.execute('SELECT 1')
            stack_level = profiler.stack_size()
    entries = p.collectors[0].entries
    self.assertEqual(entries[0]['exec_context'][stack_level], {'letter': 'a'})
    self.assertEqual(entries[1]['exec_context'][stack_level], {'letter': 'b'})

def test_sync_recorder(self):
    def a():
        b()
        c()

    def b():
        pass

    def c():
        d()
        d()

    def d():
        pass

    with Profiler(description='test', collectors=['traces_sync'], db=None) as p:
        a()

    stacks = [r['stack'] for r in p.collectors[0].entries]

    # map stack frames to their function name, and check
    stacks_methods = [[frame[2] for frame in stack] for stack in stacks]
    self.assertEqual(stacks_methods, [
        ['a'],
        ['a', 'b'],
        ['a'],
        ['a', 'c'],
        ['a', 'c', 'd'],
        ['a', 'c'],
        ['a', 'c', 'd'],
        ['a', 'c'],
        ['a'],
        [],
        ['__exit__'],
        ['__exit__', 'stop'],  # could be removed by cleaning two last frames, or removing last frames only contained in profiler.py
    ])

    # map stack frames to their line number, and check
    stacks_lines = [[frame[1] for frame in stack] for stack in stacks]
    self.assertEqual(
        stacks_lines[1][0] + 1, stacks_lines[3][0],
        "Call of b() in a() should be one line before call of c()")

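# Reading sketch for the entries checked above: each frame in a 'stack' is a
# tuple whose index 1 is the line number and index 2 is the function name.
# `format_stack` is a hypothetical helper, not part of the profiler API.
def format_stack(stack):
    """Render a recorded stack as 'func:line > func:line > ...'."""
    return " > ".join("%s:%s" % (frame[2], frame[1]) for frame in stack)

# e.g. the fifth entry above would render as "a:<line> > c:<line> > d:<line>",
# with the line numbers depending on where the nested functions are defined.
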
def test_execution_context_nested(self):
    """ This test checks that an execution context can be nested at the same
        level of the stack.
    """
    with Profiler(db=None, collectors=['sql']) as p:
        stack_level = profiler.stack_size()
        with ExecutionContext(letter='a'):
            self.env.cr.execute('SELECT 1')
            with ExecutionContext(letter='b'):
                self.env.cr.execute('SELECT 1')
            with ExecutionContext(letter='c'):
                self.env.cr.execute('SELECT 1')
            self.env.cr.execute('SELECT 1')
    entries = p.collectors[0].entries
    self.assertEqual(entries.pop(0)['exec_context'], ((stack_level, {'letter': 'a'}),))
    self.assertEqual(entries.pop(0)['exec_context'], ((stack_level, {'letter': 'a'}), (stack_level, {'letter': 'b'})))
    self.assertEqual(entries.pop(0)['exec_context'], ((stack_level, {'letter': 'a'}), (stack_level, {'letter': 'c'})))
    self.assertEqual(entries.pop(0)['exec_context'], ((stack_level, {'letter': 'a'}),))

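# Usage sketch: the same pattern can tag profiled queries with business context
# in application code.  `process_letters` is hypothetical; Profiler,
# ExecutionContext and the entry layout are the ones exercised in the two tests
# above (assuming the imports used by this test module, e.g.
# `from odoo.tools.profiler import ExecutionContext, Profiler`).
def process_letters(env, letters=('a', 'b', 'c')):
    with Profiler(db=None, collectors=['sql']) as p:
        for letter in letters:
            with ExecutionContext(letter=letter):
                env.cr.execute('SELECT 1')
    # each sql entry now carries the context that was active when it ran
    return [entry['exec_context'] for entry in p.collectors[0].entries]
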
def test_frequencies_1ms_sleep(self):
    """ Check the number of entries generated in 1s at 1kHz.
        We need to artificially change the frame as often as possible to
        avoid triggering the memory optimisation skipping identical frames.
    """
    def sleep_1():
        time.sleep(0.0001)

    def sleep_2():
        time.sleep(0.0001)

    with Profiler(collectors=['traces_async'], db=None) as res:
        start = time.time()
        while start + 1 > time.time():
            sleep_1()
            sleep_2()
    entry_count = len(res.collectors[0].entries)
    self.assertGreater(entry_count, 700)  # ~920

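# Back-of-envelope check for the threshold above, assuming the async collector
# samples at roughly 1kHz as the docstring says: alternating sleep_1/sleep_2
# keeps consecutive frames different, so close to one entry per sample survives
# the deduplication, i.e. about 1s / 1ms = 1000 entries; asserting > 700 leaves
# headroom for scheduling jitter (around 920 is observed in practice).
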
def test_default_recorders(self):
    with Profiler(db=None) as p:
        queries_start = self.env.cr.sql_log_count
        for i in range(10):
            self.env['res.partner'].create({'name': 'snail%s' % i})
        self.env['res.partner'].flush()
        total_queries = self.env.cr.sql_log_count - queries_start

    rq = next(r for r in p.collectors if r.name == "sql").entries

    self.assertEqual(p.init_stack_trace[-1][2], 'test_default_recorders')
    self.assertEqual(p.init_stack_trace[-1][0].split('/')[-1], 'test_profiler.py')
    self.assertEqual(len(rq), total_queries)
    first_query = rq[0]
    self.assertEqual(first_query['stack'][0][2], 'create')
    #self.assertIn("self.env['res.partner'].create({", first_query['stack'][0][3])
    self.assertGreater(first_query['time'], 0)
    self.assertEqual(first_query['stack'][-1][2], 'execute')
    self.assertEqual(first_query['stack'][-1][0].split('/')[-1], 'sql_db.py')

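# Reading sketch for the sql entries checked above: each entry has a 'time'
# (a positive duration) and a 'stack' whose frames look like
# (file, line, function, ...).  `summarize_query` is a hypothetical helper
# shown only to make that shape explicit.
def summarize_query(entry):
    top_frame = entry['stack'][0]
    return "%.6fs starting from %s() in %s" % (
        entry['time'], top_frame[2], top_frame[0].split('/')[-1])
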
def test_qweb_recorder(self):
    template = self.env['ir.ui.view'].create({
        'name': 'test',
        'type': 'qweb',
        'key': 'root',
        'arch_db': '''<t t-name="root">
            <t t-foreach="{'a': 3, 'b': 2, 'c': 1}" t-as="item">
                [<t t-out="item_index"/>: <t t-set="record" t-value="item"/><t t-call="base.dummy"/> <t t-out="item_value"/>] <b t-out="add_one_query()"/></t>
        </t>'''
    })
    child_template = self.env['ir.ui.view'].create({
        'name': 'test',
        'type': 'qweb',
        'key': 'dummy',
        'arch_db': '<t t-name="dummy"><span t-attf-class="myclass"><t t-out="record"/> <t t-out="add_one_query()"/></span></t>'
    })
    self.env.cr.execute(
        "INSERT INTO ir_model_data(name, model, res_id, module)"
        "VALUES ('dummy', 'ir.ui.view', %s, 'base')", [child_template.id])

    values = {
        'add_one_query': lambda: self.env.cr.execute('SELECT id FROM ir_ui_view LIMIT 1') or 'query'
    }

    result = u"""
                [0: <span class="myclass">a query</span> 3] <b>query</b>
                [1: <span class="myclass">b query</span> 2] <b>query</b>
                [2: <span class="myclass">c query</span> 1] <b>query</b>
        """

    # test rendering without profiling
    rendered = self.env['ir.qweb']._render(template.id, values)
    self.assertEqual(rendered.strip(), result.strip(), 'Without profiling')

    # This rendering is used to cache the compiled template method so as
    # not to have a number of requests that vary according to the modules
    # installed.
    with Profiler(description='test', collectors=['qweb'], db=None):
        self.env['ir.qweb']._render(template.id, values)

    with Profiler(description='test', collectors=['qweb'], db=None) as p:
        rendered = self.env['ir.qweb']._render(template.id, values)

    # check if qweb is ok
    self.assertEqual(rendered.strip(), result.strip())

    # check that the arch of all used templates is included in the result
    self.assertEqual(
        p.collectors[0].entries[0]['results']['archs'],
        {
            template.id: template.arch_db,
            child_template.id: child_template.arch_db,
        })

    # check all directives without duration information
    for data in p.collectors[0].entries[0]['results']['data']:
        data.pop('delay')
    data = p.collectors[0].entries[0]['results']['data']
    expected = [  # pylint: disable=bad-whitespace
        # first template and first directive
        {'view_id': template.id, 'xpath': '/t/t', 'directive': """t-foreach="{'a': 3, 'b': 2, 'c': 1}" t-as='item'""", 'query': 0},
        # first pass in the loop
        {'view_id': template.id, 'xpath': '/t/t/t[1]', 'directive': "t-out='item_index'", 'query': 0},
        {'view_id': template.id, 'xpath': '/t/t/t[2]', 'directive': "t-set='record' t-value='item'", 'query': 0},
        {'view_id': template.id, 'xpath': '/t/t/t[3]', 'directive': "t-call='base.dummy'", 'query': 0},  # 0 because the template is in ir.ui.view cache
        # first pass in the loop: content of the child template
        {'view_id': child_template.id, 'xpath': '/t/span', 'directive': "t-attf-class='myclass'", 'query': 0},
        {'view_id': child_template.id, 'xpath': '/t/span/t[1]', 'directive': "t-out='record'", 'query': 0},
        {'view_id': child_template.id, 'xpath': '/t/span/t[2]', 'directive': "t-out='add_one_query()'", 'query': 1},
        {'view_id': template.id, 'xpath': '/t/t/t[4]', 'directive': "t-out='item_value'", 'query': 0},
        {'view_id': template.id, 'xpath': '/t/t/b', 'directive': "t-out='add_one_query()'", 'query': 1},
        # second pass in the loop
        {'view_id': template.id, 'xpath': '/t/t/t[1]', 'directive': "t-out='item_index'", 'query': 0},
        {'view_id': template.id, 'xpath': '/t/t/t[2]', 'directive': "t-set='record' t-value='item'", 'query': 0},
        {'view_id': template.id, 'xpath': '/t/t/t[3]', 'directive': "t-call='base.dummy'", 'query': 0},
        {'view_id': child_template.id, 'xpath': '/t/span', 'directive': "t-attf-class='myclass'", 'query': 0},
        {'view_id': child_template.id, 'xpath': '/t/span/t[1]', 'directive': "t-out='record'", 'query': 0},
        {'view_id': child_template.id, 'xpath': '/t/span/t[2]', 'directive': "t-out='add_one_query()'", 'query': 1},
        {'view_id': template.id, 'xpath': '/t/t/t[4]', 'directive': "t-out='item_value'", 'query': 0},
        {'view_id': template.id, 'xpath': '/t/t/b', 'directive': "t-out='add_one_query()'", 'query': 1},
        # third pass in the loop
        {'view_id': template.id, 'xpath': '/t/t/t[1]', 'directive': "t-out='item_index'", 'query': 0},
        {'view_id': template.id, 'xpath': '/t/t/t[2]', 'directive': "t-set='record' t-value='item'", 'query': 0},
        {'view_id': template.id, 'xpath': '/t/t/t[3]', 'directive': "t-call='base.dummy'", 'query': 0},
        {'view_id': child_template.id, 'xpath': '/t/span', 'directive': "t-attf-class='myclass'", 'query': 0},
        {'view_id': child_template.id, 'xpath': '/t/span/t[1]', 'directive': "t-out='record'", 'query': 0},
        {'view_id': child_template.id, 'xpath': '/t/span/t[2]', 'directive': "t-out='add_one_query()'", 'query': 1},
        {'view_id': template.id, 'xpath': '/t/t/t[4]', 'directive': "t-out='item_value'", 'query': 0},
        {'view_id': template.id, 'xpath': '/t/t/b', 'directive': "t-out='add_one_query()'", 'query': 1},
    ]
    self.assertEqual(data, expected)

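# Aggregation sketch: each row of 'data' above exposes 'view_id', 'xpath',
# 'directive' and 'query' (plus the 'delay' popped earlier).  A hypothetical
# report summing queries per rendered view could look like this;
# `queries_per_view` is not part of the profiler API.
def queries_per_view(data):
    totals = {}
    for row in data:
        totals[row['view_id']] = totals.get(row['view_id'], 0) + row['query']
    return totals

# With the expected data above this gives {template.id: 3, child_template.id: 3}.
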
def test_env_profiler_description(self):
    with Profiler(collectors=[], db=None) as p:
        self.assertIn('test_env_profiler_description', p.description)

def test_env_profiler_database(self):
    p = Profiler(collectors=[])
    self.assertEqual(p.db, self.env.cr.dbname)

def test_default_values(self):
    p = Profiler()
    self.assertEqual(p.db, self.env.cr.dbname)