def test_60_prefetch_model(self):
    """ Check the prefetching model. """
    partners = self.env['res.partner'].search(
        [('id', 'in', self.partners.ids)], limit=models.PREFETCH_MAX)
    self.assertTrue(partners)

    def assert_same_prefetch(left, right):
        # both recordsets must share a single prefetch set
        self.assertEqual(set(left._prefetch_ids), set(right._prefetch_ids))

    def assert_diff_prefetch(left, right):
        # the recordsets must have distinct prefetch sets
        self.assertNotEqual(set(left._prefetch_ids), set(right._prefetch_ids))

    # the recordset operations below use different prefetch sets
    assert_diff_prefetch(partners, partners.browse())
    assert_diff_prefetch(partners, partners[0])
    assert_diff_prefetch(partners, partners[:5])

    # the recordset operations below share the prefetch set
    assert_same_prefetch(partners, partners.browse(partners.ids))
    assert_same_prefetch(partners, partners.with_user(self.user_demo))
    assert_same_prefetch(partners, partners.with_context(active_test=False))
    assert_same_prefetch(partners, partners[:10].with_prefetch(partners._prefetch_ids))

    # iteration and relational fields should use the same prefetch set
    self.assertEqual(type(partners).country_id.type, 'many2one')
    self.assertEqual(type(partners).bank_ids.type, 'one2many')
    self.assertEqual(type(partners).category_id.type, 'many2many')

    empty_vals = {
        'name': 'Empty relational fields',
        'country_id': False,
        'bank_ids': [],
        'category_id': [],
    }
    filled_vals = {
        'name': 'Non-empty relational fields',
        'country_id': self.ref('base.be'),
        'bank_ids': [Command.create({'acc_number': 'FOO42'})],
        'category_id': [Command.link(self.partner_category.id)],
    }
    partners = partners.create(empty_vals) + partners.create(filled_vals)
    for partner in partners:
        assert_same_prefetch(partner, partners)
        assert_same_prefetch(partner.country_id, partners.country_id)
        assert_same_prefetch(partner.bank_ids, partners.bank_ids)
        assert_same_prefetch(partner.category_id, partners.category_id)
def test_channel_unsubscribe_auto(self):
    """ Archiving / deleting a user should automatically unsubscribe related partner from private channels """
    test_user = self.env['res.users'].create({
        "login": "******",
        "name": "Jonas",
    })
    test_partner = test_user.partner_id

    def member_commands():
        # subscribe both the employee's partner and the test partner
        return [
            Command.link(self.user_employee.partner_id.id),
            Command.link(test_partner.id),
        ]

    test_channel_private = self.env['mail.channel'].with_context(self._test_context).create({
        'name': 'Winden caves',
        'description': 'Channel to travel through time',
        'public': 'private',
        'channel_partner_ids': member_commands(),
    })
    test_channel_group = self.env['mail.channel'].with_context(self._test_context).create({
        'name': 'Sic Mundus',
        'public': 'groups',
        'group_public_id': self.env.ref('base.group_user').id,
        'channel_partner_ids': member_commands(),
    })
    self.test_channel.with_context(self._test_context).write({
        'channel_partner_ids': member_commands(),
    })
    test_chat = self.env['mail.channel'].with_context(self._test_context).create({
        'name': 'test',
        'channel_type': 'chat',
        'public': 'private',
        'channel_partner_ids': member_commands(),
    })

    # Unsubscribe archived user from the private channels, but not from public channels and not from chat
    self.user_employee.active = False
    (test_chat | self.test_channel).invalidate_cache(fnames=['channel_partner_ids'])
    self.assertEqual(test_channel_private.channel_partner_ids, test_partner)
    self.assertEqual(test_channel_group.channel_partner_ids, test_partner)
    self.assertEqual(self.test_channel.channel_partner_ids,
                     self.user_employee.partner_id | test_partner)
    self.assertEqual(test_chat.channel_partner_ids,
                     self.user_employee.partner_id | test_partner)

    # Unsubscribe deleted user from the private channels, but not from public channels and not from chat
    test_user.unlink()
    self.assertEqual(test_channel_private.channel_partner_ids, self.env['res.partner'])
    self.assertEqual(test_channel_group.channel_partner_ids, self.env['res.partner'])
    self.assertEqual(self.test_channel.channel_partner_ids,
                     self.user_employee.partner_id | test_partner)
    self.assertEqual(test_chat.channel_partner_ids,
                     self.user_employee.partner_id | test_partner)
def test_write(self):
    """Writing to group-protected fields without membership raises an
    AccessError that lists each forbidden field and its allowed groups."""
    # put the test user in 'base.group_no_one' so the group situation in the
    # asserted message below is reproducible
    self.env.ref('base.group_no_one').write(
        {'users': [Command.link(self.user.id)]})
    with self.assertRaises(AccessError) as ctx:
        self.record.write({'forbidden': 1, 'forbidden2': 2})
    # the exact wording is asserted: this message format is part of the
    # access-control error contract shown to users
    self.assertEqual(
        ctx.exception.args[0],
        """The requested operation can not be completed due to security restrictions.

Document type: Object For Test Access Right (test_access_right.some_obj)
Operation: write
User: %s
Fields:
- forbidden (allowed for groups 'User types / Internal User', 'Test Group'; forbidden for groups 'Extra Rights / Technical Features', 'User types / Public')
- forbidden2 (allowed for groups 'Test Group')""" % self.user.id)
def test_project_share_wizard(self):
    """ Test Project Share Wizard

    Test Cases:
    ==========
    1) Create the wizard record
    2) Check if no access rights are given to a portal user
    3) Add access rights to a portal user
    """
    wizard = self.env['project.share.wizard'].create({
        'res_model': 'project.project',
        'res_id': self.project_portal.id,
        'access_mode': 'edit',
    })
    self.assertFalse(wizard.partner_ids, 'No collaborator should be in the wizard.')
    portal_project = self.project_portal.with_user(self.user_portal)
    self.assertFalse(
        portal_project._check_project_sharing_access(),
        'The portal user should not have accessed in project sharing views.')

    wizard.write({'partner_ids': [Command.link(self.user_portal.partner_id.id)]})
    wizard.action_send_mail()

    collaborators = self.project_portal.collaborator_ids
    self.assertEqual(
        len(collaborators), 1,
        'The access right added in project share wizard should be added in the project when the user confirm the access in the wizard.')
    self.assertDictEqual(
        {
            'partner_id': collaborators.partner_id,
            'project_id': collaborators.project_id,
        },
        {
            'partner_id': self.user_portal.partner_id,
            'project_id': self.project_portal,
        },
        'The access rights added should be the read access for the portal project for Chell Gladys.')
    self.assertTrue(
        portal_project._check_project_sharing_access(),
        'The portal user should have read access to the portal project with project sharing feature.')
def _run_action_object_create(self, eval_context=None):
    """Create specified model object with specified values.

    If applicable, link active_id.<self.link_field_id> to the new record.
    """
    line_values = self.fields_lines.eval_value(eval_context=eval_context)
    create_vals = {line.col1.name: line_values[line.id] for line in self.fields_lines}
    new_record = self.env[self.crud_model_id.model].create(create_vals)

    if self.link_field_id:
        # attach the new record to the active record through the link field
        active_record = self.env[self.model_id.model].browse(self._context.get('active_id'))
        if self.link_field_id.ttype in ('one2many', 'many2many'):
            active_record.write({self.link_field_id.name: [Command.link(new_record.id)]})
        else:
            active_record.write({self.link_field_id.name: new_record.id})
def copy(self, default=None):
    """Duplicate the BoM and remap operation references on the copy."""
    res = super().copy(default)
    if self.operation_ids:
        # map each original operation to its counterpart on the copy
        operations_mapping = dict(zip(self.operation_ids, res.operation_ids.sorted()))
        # re-point copied BoM lines at the copied operations
        for bom_line in res.bom_line_ids:
            if bom_line.operation_id:
                bom_line.operation_id = operations_mapping[bom_line.operation_id]
        # rebuild the blocked-by dependencies between copied operations
        for operation in self.operation_ids:
            if operation.blocked_by_operation_ids:
                operations_mapping[operation].blocked_by_operation_ids = [
                    Command.link(operations_mapping[dependency].id)
                    for dependency in operation.blocked_by_operation_ids
                ]
    return res
def create(self, vals_list):
    """Batch-create companies, creating a matching partner when needed."""
    # add default favicon
    for vals in vals_list:
        if not vals.get('favicon'):
            vals['favicon'] = self._get_default_favicon()

    # create missing partners for companies that have a name but no partner
    missing_partner_vals = [
        vals for vals in vals_list
        if vals.get('name') and not vals.get('partner_id')
    ]
    if missing_partner_vals:
        partners = self.env['res.partner'].create([
            {
                'name': vals['name'],
                'is_company': True,
                'image_1920': vals.get('logo'),
                'email': vals.get('email'),
                'phone': vals.get('phone'),
                'website': vals.get('website'),
                'vat': vals.get('vat'),
                'country_id': vals.get('country_id'),
            }
            for vals in missing_partner_vals
        ])
        # compute stored fields, for example address dependent fields
        partners.flush()
        for vals, partner in zip(missing_partner_vals, partners):
            vals['partner_id'] = partner.id

    self.clear_caches()
    companies = super().create(vals_list)
    # The write is made on the user to set it automatically in the multi company group.
    if companies:
        self.env.user.write({
            'company_ids': [Command.link(company.id) for company in companies],
        })
    # Make sure that the selected currencies are enabled
    companies.currency_id.sudo().filtered(lambda c: not c.active).active = True
    return companies
def test_mrp_report_bom_structure_subcontracting(self):
    """BoM structure report with a subcontracted component BoM."""
    self.comp2_bom.write({
        'type': 'subcontract',
        'subcontractor_ids': [Command.link(self.subcontractor_partner1.id)],
    })
    SupplierInfo = self.env['product.supplierinfo']
    SupplierInfo.create({
        'product_tmpl_id': self.finished.product_tmpl_id.id,
        'partner_id': self.subcontractor_partner1.id,
        'price': 10,
    })
    supplier = SupplierInfo.create({
        'product_tmpl_id': self.comp2.product_tmpl_id.id,
        'partner_id': self.subcontractor_partner1.id,
        'price': 5,
    })
    self.assertTrue(supplier.is_subcontractor)
    self.comp1.standard_price = 5

    report_values = self.env['report.mrp.report_bom_structure']._get_report_data(
        self.bom.id, searchQty=1, searchVariant=False)
    subcontracting_values = report_values['lines']['subcontracting']
    self.assertEqual(subcontracting_values['name'], self.subcontractor_partner1.display_name)
    self.assertEqual(subcontracting_values['bom_cost'], 10)
    # 10 For subcontracting + 5 for comp1 + 5 for subcontracting of comp2_bom
    self.assertEqual(report_values['lines']['total'], 20)
def setUp(self):
    super().setUp()
    # one fully-configured internal user plus one minimal user
    employee_vals = {
        'email': '*****@*****.**',
        'groups_id': [Command.link(self.env.ref('base.group_user').id)],
        'login': '******',
        'name': 'Ernest Employee',
        'notification_type': 'inbox',
        'odoobot_state': 'disabled',
        'signature': '--\nErnest',
    }
    minimal_vals = {
        'name': 'test1',
        'login': '******',
        'email': '*****@*****.**',
    }
    self.users = self.env['res.users'].create([employee_vals, minimal_vals])
def setUp(self):
    super().setUp()
    # one fully-configured employee user, two users with emails, then a
    # batch of minimal users (name + login only)
    user_vals = [
        {
            'email': '*****@*****.**',
            'groups_id': [Command.link(self.env.ref('base.group_user').id)],
            'login': '******',
            'name': 'Ernest Employee',
            'notification_type': 'inbox',
            'signature': '--\nErnest',
        },
        {'name': 'test1', 'login': '******', 'email': '*****@*****.**'},
        {'name': 'test2', 'login': '******', 'email': '*****@*****.**'},
    ]
    user_vals += [{'name': 'test%d' % idx, 'login': '******'} for idx in range(3, 16)]
    self.users = self.env['res.users'].create(user_vals)
    self.employees = self.env['hr.employee'].create([
        {'user_id': user.id} for user in self.users
    ])
    self.leave_type = self.env['hr.leave.type'].create({
        'allocation_type': 'no',
        'name': 'Legal Leaves',
        'time_type': 'leave',
        'validity_start': False,
    })
    # one leave per employee, spanning two days before to two days after today
    self.leaves = self.env['hr.leave'].create([{
        'date_from': date.today() + relativedelta(days=-2),
        'date_to': date.today() + relativedelta(days=2),
        'employee_id': employee.id,
        'holiday_status_id': self.leave_type.id,
    } for employee in self.employees])
def test_activity_calendar_event_id(self):
    """Test the computed field "activity_calendar_event_id" which is the event of the
    next activity. It must evaluate to False if the next activity is not related to an event"""

    def create_event(name, event_date):
        # fixed 12:00-14:00 slot on the given day
        return self.env['calendar.event'].create({
            'name': name,
            'start': datetime.combine(event_date, time(12, 0, 0)),
            'stop': datetime.combine(event_date, time(14, 0, 0)),
        })

    def schedule_meeting_activity(record, date_deadline, calendar_event=False):
        meeting = record.activity_schedule(
            'calendar.calendar_activity_test_default', date_deadline=date_deadline)
        meeting.calendar_event_id = calendar_event
        return meeting

    partner_manager_group = self.env['ir.model.data'].xmlid_to_res_id('base.group_partner_manager')
    self.user_employee.write({
        'tz': self.user_admin.tz,
        'groups_id': [Command.link(partner_manager_group)],
    })
    with self.with_user('employee'):
        test_record = self.env['res.partner'].browse(self.test_record.id)
        self.assertEqual(test_record.activity_ids, self.env['mail.activity'])

        # compute "tomorrow" and "the day after" in the user's timezone
        now_utc = datetime.now(pytz.UTC)
        now_user = now_utc.astimezone(pytz.timezone(self.env.user.tz or 'UTC'))
        today_user = now_user.date()
        date1 = today_user + relativedelta(days=1)
        date2 = today_user + relativedelta(days=2)

        ev1 = create_event('ev1', date1)
        ev2 = create_event('ev2', date2)
        act1 = schedule_meeting_activity(test_record, date1)
        schedule_meeting_activity(test_record, date2, ev2)
        self.assertFalse(test_record.activity_calendar_event_id,
                         "The next activity does not have a calendar event")
        act1.calendar_event_id = ev1
        self.assertEqual(test_record.activity_calendar_event_id.name, ev1.name,
                         "This should be the calendar event of the next activity")
def setUpClass(cls):
    """Set up the portal partner, two portal-visible projects with project
    sharing stages, and one task in each project."""
    super().setUpClass()
    # Use Command.create instead of raw (0, 0, vals) tuples, for consistency
    # with the Command-based x2many values used elsewhere in this setup.
    project_sharing_stages_vals_list = [
        Command.create({'name': 'To Do', 'sequence': 1}),
        Command.create({'name': 'Done', 'sequence': 10}),
    ]
    cls.partner_portal = cls.env['res.partner'].create({
        'name': 'Chell Gladys',
        'email': '*****@*****.**',
        'company_id': False,
        'user_ids': [Command.link(cls.user_portal.id)]})
    cls.project_cows = cls.env['project.project'].with_context({'mail_create_nolog': True}).create({
        'name': 'Cows',
        'privacy_visibility': 'portal',
        'alias_name': 'project+cows',
        'type_ids': project_sharing_stages_vals_list,
    })
    cls.project_portal = cls.env['project.project'].with_context({'mail_create_nolog': True}).create({
        'name': 'Portal',
        'privacy_visibility': 'portal',
        'alias_name': 'project+portal',
        'partner_id': cls.user_portal.partner_id.id,
        'type_ids': project_sharing_stages_vals_list,
    })
    cls.project_portal.message_subscribe(partner_ids=[cls.partner_portal.id])
    cls.task_cow = cls.env['project.task'].with_context({'mail_create_nolog': True}).create({
        'name': 'Cow UserTask',
        'user_ids': cls.user_projectuser,
        'project_id': cls.project_cows.id,
    })
    cls.task_portal = cls.env['project.task'].with_context({'mail_create_nolog': True}).create({
        'name': 'Portal UserTask',
        'user_ids': cls.user_projectuser,
        'project_id': cls.project_portal.id,
    })
    cls.project_sharing_form_view_xml_id = 'project.project_sharing_project_task_view_form'
def _make_rule(self, name, domain, global_=False, attr='write'):
    """Create an ir.rule on self.model granting only the ``attr`` permission
    (global when ``global_`` is set, otherwise restricted to group2)."""
    perms = {
        'perm_read': False,
        'perm_write': False,
        'perm_create': False,
        'perm_unlink': False,
    }
    perms['perm_' + attr] = True
    return self.env['ir.rule'].create({
        'name': name,
        'model_id': self.model.id,
        'groups': [] if global_ else [Command.link(self.group2.id)],
        'domain_force': domain,
        **perms,
    })
def _get_repartition_lines_oss(self):
    """Return the (invoice, refund) repartition line commands with the OSS
    account (when available) and the OSS tag applied."""
    self.ensure_one()
    defaults = self.env['account.tax'].with_company(self).default_get(
        ['invoice_repartition_line_ids', 'refund_repartition_line_ids'])
    invoice_lines = defaults['invoice_repartition_line_ids']
    refund_lines = defaults['refund_repartition_line_ids']

    oss_account = self._get_oss_account()
    if oss_account:
        # override the account on the second repartition command of each list
        # (command[2] holds the line's values dict)
        invoice_lines[1][2]['account_id'] = oss_account.id
        refund_lines[1][2]['account_id'] = oss_account.id

    # append the OSS tag to every repartition line, keeping existing tags
    oss_tag = self.env.ref('l10n_eu_oss.tag_oss')
    for orm_command in itertools.chain(invoice_lines, refund_lines):
        rep_line_vals = orm_command[2]
        rep_line_vals['tag_ids'] = rep_line_vals.get('tag_ids', []) + [Command.link(oss_tag.id)]

    return invoice_lines, refund_lines
def test_two_user_types_implied_groups(self):
    """Contrarily to test_two_user_types, we simply add an implied_id to a group.
    This will trigger the addition of the relevant users to the relevant
    groups; if, say, this was done in SQL and thus bypassing the ORM, it
    would bypass the constraints and thus give us a case uncovered by the
    aforementioned test.
    """
    group = self.env["res.groups"].create({
        "name": "test",
        "implied_ids": [Command.set([self.grp_internal.id])],
    })
    # a user holding only this group (hence, transitively, the internal type)
    self.env['res.users'].create({
        'login': '******',
        'name': "Test User with one user types",
        'groups_id': [Command.set([group.id])],
    })
    # implying a second user type must now be rejected
    with self.assertRaises(ValidationError):
        group.write({'implied_ids': [Command.link(self.grp_portal.id)]})
def test_copy(self):
    """copy() must duplicate editable computed fields and relational values."""
    Model = self.env['test_new_api.compute.onchange']

    # create tags
    tag_foo, tag_bar = self.env['test_new_api.multi.tag'].create([
        {'name': 'foo1'},
        {'name': 'bar1'},
    ])

    # compute 'bar' (readonly), 'baz', 'line_ids' and 'tag_ids' (editable)
    record = Model.create({'active': True, 'foo': "foo1"})
    self.assertEqual(record.bar, "foo1r")
    self.assertEqual(record.baz, "foo1z")
    self.assertEqual(record.line_ids.mapped('foo'), ['foo1'])
    self.assertEqual(record.tag_ids, tag_foo)

    # manually update 'baz' and 'lines' to test copy attribute
    record.write({
        'baz': "baz1",
        'line_ids': [Command.create({'foo': 'bar'})],
        'tag_ids': [Command.link(tag_bar.id)],
    })
    self.assertEqual(record.bar, "foo1r")
    self.assertEqual(record.baz, "baz1")
    self.assertEqual(record.line_ids.mapped('foo'), ['foo1', 'bar'])
    self.assertEqual(record.tag_ids, tag_foo + tag_bar)

    # copy the record, and check results
    copied = record.copy()
    self.assertEqual(copied.foo, "foo1 (copy)")   # copied and modified
    self.assertEqual(copied.bar, "foo1 (copy)r")  # computed
    self.assertEqual(copied.baz, "baz1")          # copied
    self.assertEqual(record.line_ids.mapped('foo'), ['foo1', 'bar'])  # copied
    self.assertEqual(record.tag_ids, tag_foo + tag_bar)  # copied
def test_create_base_with_tags(self):
    """ Create records with many2many tags. """
    # baseline: creation without tags
    with self.assertQueryCount(2):
        self.env['test_performance.base'].create({'name': 'X'})

    # create N tags: add O(N) queries
    with self.assertQueryCount(13):
        self.env['test_performance.base'].create({
            'name': 'X',
            'tag_ids': [Command.create({'name': val}) for val in range(10)],
        })

    # link N tags: add O(1) queries
    tags = self.env['test_performance.tag'].create([{
        'name': val
    } for val in range(10)])

    with self.assertQueryCount(3):
        self.env['test_performance.base'].create({
            'name': 'X',
            'tag_ids': [Command.link(tag.id) for tag in tags],
        })

    # setting an empty tag list adds no query over the baseline
    with self.assertQueryCount(2):
        self.env['test_performance.base'].create({
            'name': 'X',
            'tag_ids': [Command.set([])],
        })

    # set N existing tags: add O(1) queries
    with self.assertQueryCount(3):
        self.env['test_performance.base'].create({
            'name': 'X',
            'tag_ids': [Command.set(tags.ids)],
        })
def create(self, vals):
    """Create a company, creating its partner first when none is given."""
    if not vals.get('favicon'):
        vals['favicon'] = self._get_default_favicon()
    # without a name, or with an explicit partner, no partner must be created
    if not vals.get('name') or vals.get('partner_id'):
        self.clear_caches()
        return super().create(vals)

    partner = self.env['res.partner'].create({
        'name': vals['name'],
        'is_company': True,
        'image_1920': vals.get('logo'),
        'email': vals.get('email'),
        'phone': vals.get('phone'),
        'website': vals.get('website'),
        'vat': vals.get('vat'),
        'country_id': vals.get('country_id'),
    })
    # compute stored fields, for example address dependent fields
    partner.flush()
    vals['partner_id'] = partner.id
    self.clear_caches()
    company = super().create(vals)
    # The write is made on the user to set it automatically in the multi company group.
    self.env.user.write({'company_ids': [Command.link(company.id)]})

    # Make sure that the selected currency is enabled
    if vals.get('currency_id'):
        currency = self.env['res.currency'].browse(vals['currency_id'])
        if not currency.active:
            currency.write({'active': True})
    return company
def _str_to_many2many(self, model, field, value):
    """Convert a many2many import cell into ORM commands.

    ``value`` holds a single import record whose referencing subfield
    contains a comma-separated list of references.

    :returns: ``(commands, warnings)``; ``commands`` is ``None`` when the
        record must be skipped (``import_skip_records`` context key with an
        unresolved reference).
    """
    [record] = value

    subfield, warnings = self._referencing_subfield(record)

    # resolve each reference to a database id (None when not found);
    # use `db_id`, not `id`, to avoid shadowing the builtin
    ids = []
    for reference in record[subfield].split(','):
        db_id, _, ws = self.db_id_for(model, field, subfield, reference)
        ids.append(db_id)
        warnings.extend(ws)

    if field.name in self._context.get('import_set_empty_fields', []) and any(
            db_id is None for db_id in ids):
        # drop unresolved references, keep the rest
        ids = [db_id for db_id in ids if db_id]
    elif field.name in self._context.get('import_skip_records', []) and any(
            db_id is None for db_id in ids):
        # a single unresolved reference invalidates the whole record
        return None, warnings

    if self._context.get('update_many2many'):
        return [Command.link(db_id) for db_id in ids], warnings
    else:
        return [Command.set(ids)], warnings
def task_create(self, email_subject, email_body, project_id, partner_id):
    """Create a task in ``project_id`` for ``partner_id``; returns an error
    dict when either record does not exist."""
    partner = request.env['res.partner'].browse(partner_id).exists()
    if not partner:
        return {'error': 'partner_not_found'}
    if not request.env['project.project'].browse(project_id).exists():
        return {'error': 'project_not_found'}

    task = request.env['project.task'].create({
        # fall back on a default subject when none was provided
        'name': email_subject or _('Task for %s', partner.name),
        'partner_id': partner_id,
        'description': email_body,
        'project_id': project_id,
        'user_ids': [Command.link(request.env.uid)],
    })
    return {'task_id': task.id, 'name': task.name}
def setUpClass(cls):
    super().setUpClass()
    # two synced users, each with its own google calendar credentials
    cls.users = []
    for login in ('user1', 'user2'):
        credentials = cls.env['google.calendar.credentials'].create({
            'calendar_rtoken': f'{login}_rtoken',
            'calendar_token': f'{login}_token',
            'calendar_token_validity': fields.Datetime.today(),
            'calendar_sync_token': f'{login}_sync_token',
        })
        cls.users.append(cls.env['res.users'].create({
            'name': f'{login}',
            'login': f'{login}',
            'email': f'{login}@odoo.com',
            'google_cal_account_id': credentials.id,
        }))
    # plus one user in the Settings group
    cls.system_user = cls.env['res.users'].create({
        'name': 'system_user',
        'login': '******',
        'email': '*****@*****.**',
        'groups_id': [Command.link(cls.env.ref('base.group_system').id)],
    })
def setUpClass(cls):
    super().setUpClass()
    # Test group: members, moderation
    cls.test_group_2 = cls.env['mail.group'].create({
        'access_mode': 'members',
        'alias_name': 'test.mail.group.2',
        'moderation': True,
        'moderator_ids': [Command.link(cls.user_employee.id)],
        'name': 'Test group 2',
    })
    cls.test_group_2_member_emp = cls.env['mail.group.member'].create({
        'partner_id': cls.user_employee_2.partner_id.id,
        'email': cls.user_employee_2.email,
        'mail_group_id': cls.test_group_2.id,
    })

    # Existing messages on group 2
    cls.test_group_2_msg_1_pending = cls.env['mail.group.message'].create({
        'email_from': cls.email_from_unknown,
        'subject': 'Group 2 Pending',
        'mail_group_id': cls.test_group_2.id,
        'moderation_status': 'pending_moderation',
    })
def _populate(self, size):
    """Populate companies, then grant the admin user access to all of them."""
    records = super()._populate(size)
    # add all created companies on user admin
    self.env.ref('base.user_admin').write({
        'company_ids': [Command.link(company.id) for company in records],
    })
    return records
def _timesheet_preprocess(self, vals):
    """ Deduce other field values from the one given.
    Override this to compute on the fly some field that can not be computed fields.

    :param vals: dict values for `create` or `write`.
    """
    # task implies analytic account and tags
    if vals.get('task_id') and not vals.get('account_id'):
        task = self.env['project.task'].browse(vals.get('task_id'))
        task_analytic_account_id = task._get_task_analytic_account_id()
        vals['account_id'] = task_analytic_account_id.id
        vals['company_id'] = task_analytic_account_id.company_id.id or task.company_id.id
        if vals.get('tag_ids'):
            # keep caller-provided tags, append the task's analytic tags
            vals['tag_ids'] += [
                Command.link(tag_id.id)
                for tag_id in task.analytic_tag_ids
            ]
        else:
            vals['tag_ids'] = [Command.set(task.analytic_tag_ids.ids)]
        if not task_analytic_account_id.active:
            raise UserError(
                _('You cannot add timesheets to a project or a task linked to an inactive analytic account.'))
    # project implies analytic account
    if vals.get('project_id') and not vals.get('account_id'):
        project = self.env['project.project'].browse(vals.get('project_id'))
        vals['account_id'] = project.analytic_account_id.id
        vals['company_id'] = project.analytic_account_id.company_id.id or project.company_id.id
        if vals.get('tag_ids'):
            # keep caller-provided tags, append the project's analytic tags
            vals['tag_ids'] += [
                Command.link(tag_id.id)
                for tag_id in project.analytic_tag_ids
            ]
        else:
            vals['tag_ids'] = [Command.set(project.analytic_tag_ids.ids)]
        if not project.analytic_account_id.active:
            raise UserError(
                _('You cannot add timesheets to a project linked to an inactive analytic account.'))
    # employee implies user
    if vals.get('employee_id') and not vals.get('user_id'):
        employee = self.env['hr.employee'].browse(vals['employee_id'])
        vals['user_id'] = employee.user_id.id
    # force customer partner, from the task or the project
    if (vals.get('project_id') or vals.get('task_id')) and not vals.get('partner_id'):
        partner_id = False
        if vals.get('task_id'):
            partner_id = self.env['project.task'].browse(vals['task_id']).partner_id.id
        else:
            partner_id = self.env['project.project'].browse(vals['project_id']).partner_id.id
        if partner_id:
            vals['partner_id'] = partner_id
    # set timesheet UoM from the AA company (AA implies uom)
    if 'product_uom_id' not in vals and all(
            v in vals for v in ['account_id', 'project_id']
    ):  # project_id required to check this is timesheet flow
        analytic_account = self.env['account.analytic.account'].sudo().browse(vals['account_id'])
        vals['product_uom_id'] = analytic_account.company_id.project_time_mode_id.id
    return vals
def send_mail(self, res_id, force_send=False, raise_exception=False, email_values=None, notif_layout=False):
    """ Generates a new mail.mail. Template is rendered on record given by
    res_id and model coming from template.

    :param int res_id: id of the record to render the template
    :param bool force_send: send email immediately; otherwise use the
        mail queue (recommended);
    :param bool raise_exception: forwarded to mail.send() when force_send
        is set: raise delivery errors instead of only logging them;
    :param dict email_values: update generated mail with those values to
        further customize the mail;
    :param str notif_layout: optional notification layout to encapsulate the
        generated email;
    :returns: id of the mail.mail that was created """

    # Grant access to send_mail only if access to related document
    self.ensure_one()
    self._send_check_access([res_id])

    Attachment = self.env['ir.attachment']  # TDE FIXME: should remove default_type from context

    # create a mail_mail based on values, without attachments
    values = self.generate_email(res_id, ['subject', 'body_html', 'email_from', 'email_to', 'partner_to', 'email_cc', 'reply_to', 'scheduled_date'])
    values['recipient_ids'] = [Command.link(pid) for pid in values.get('partner_ids', list())]
    values['attachment_ids'] = [Command.link(aid) for aid in values.get('attachment_ids', list())]
    values.update(email_values or {})
    attachment_ids = values.pop('attachment_ids', [])
    attachments = values.pop('attachments', [])
    # add a protection against void email_from
    if 'email_from' in values and not values.get('email_from'):
        values.pop('email_from')
    # encapsulate body
    if notif_layout and values['body_html']:
        try:
            template = self.env.ref(notif_layout, raise_if_not_found=True)
        except ValueError:
            # lazy %-style logging args: only formatted when actually emitted
            _logger.warning('QWeb template %s not found when sending template %s. Sending without layouting.', notif_layout, self.name)
        else:
            record = self.env[self.model].browse(res_id)
            template_ctx = {
                'message': self.env['mail.message'].sudo().new(dict(body=values['body_html'], record_name=record.display_name)),
                'model_description': self.env['ir.model']._get(record._name).display_name,
                'company': 'company_id' in record and record['company_id'] or self.env.company,
                'record': record,
            }
            body = template._render(template_ctx, engine='ir.qweb', minimal_qcontext=True)
            values['body_html'] = self.env['mail.render.mixin']._replace_local_links(body)
    mail = self.env['mail.mail'].sudo().create(values)

    # manage attachments
    for attachment in attachments:
        attachment_data = {
            'name': attachment[0],
            'datas': attachment[1],
            'type': 'binary',
            'res_model': 'mail.message',
            'res_id': mail.mail_message_id.id,
        }
        # use Command.link for consistency with the commands built above
        # (the raw (4, id) tuple is the legacy equivalent)
        attachment_ids.append(Command.link(Attachment.create(attachment_data).id))
    if attachment_ids:
        mail.write({'attachment_ids': attachment_ids})

    if force_send:
        mail.send(raise_exception=raise_exception)
    return mail.id  # TDE CLEANME: return mail + api.returns ?
def test_write_base_many2many(self):
    """ Write on many2many field.

    Asserts the exact query counts of each kind of x2many write command
    (create/update/delete/unlink/link/clear/set) — do not reorder.
    """
    rec1 = self.env['test_performance.base'].create({'name': 'X'})

    # create N tags on rec1: O(N) queries
    with self.assertQueryCount(4):
        rec1.invalidate_cache()
        rec1.write({'tag_ids': [Command.create({'name': 0})]})
    self.assertEqual(len(rec1.tag_ids), 1)

    with self.assertQueryCount(14):
        rec1.invalidate_cache()
        rec1.write({
            'tag_ids': [Command.create({'name': val}) for val in range(1, 12)]
        })
    self.assertEqual(len(rec1.tag_ids), 12)

    tags = rec1.tag_ids

    # update N tags: O(N) queries
    with self.assertQueryCount(3):
        rec1.invalidate_cache()
        rec1.write({
            'tag_ids': [Command.update(tag.id, {'name': 'X'}) for tag in tags[0]]
        })
    self.assertEqual(rec1.tag_ids, tags)

    with self.assertQueryCount(3):
        rec1.invalidate_cache()
        rec1.write({
            'tag_ids': [Command.update(tag.id, {'name': 'X'}) for tag in tags[1:]]
        })
    self.assertEqual(rec1.tag_ids, tags)

    # delete N tags: O(1) queries — the tag records themselves are removed
    with self.assertQueryCount(__system__=8, demo=8):
        rec1.invalidate_cache()
        rec1.write(
            {'tag_ids': [Command.delete(tag.id) for tag in tags[0]]})
    self.assertEqual(rec1.tag_ids, tags[1:])

    with self.assertQueryCount(__system__=8, demo=8):
        rec1.invalidate_cache()
        rec1.write(
            {'tag_ids': [Command.delete(tag.id) for tag in tags[1:]]})
    self.assertFalse(rec1.tag_ids)
    self.assertFalse(tags.exists())

    rec1.write(
        {'tag_ids': [Command.create({'name': val}) for val in range(12)]})
    tags = rec1.tag_ids

    # unlink N tags: O(1) queries — the tag records survive
    with self.assertQueryCount(3):
        rec1.invalidate_cache()
        rec1.write(
            {'tag_ids': [Command.unlink(tag.id) for tag in tags[0]]})
    self.assertEqual(rec1.tag_ids, tags[1:])

    with self.assertQueryCount(3):
        rec1.invalidate_cache()
        rec1.write(
            {'tag_ids': [Command.unlink(tag.id) for tag in tags[1:]]})
    self.assertFalse(rec1.tag_ids)
    self.assertTrue(tags.exists())

    rec2 = self.env['test_performance.base'].create({'name': 'X'})

    # link N tags from rec1 to rec2: O(1) queries
    with self.assertQueryCount(3):
        rec1.invalidate_cache()
        rec2.write({'tag_ids': [Command.link(tag.id) for tag in tags[0]]})
    self.assertEqual(rec2.tag_ids, tags[0])

    with self.assertQueryCount(3):
        rec1.invalidate_cache()
        rec2.write({'tag_ids': [Command.link(tag.id) for tag in tags[1:]]})
    self.assertEqual(rec2.tag_ids, tags)

    # re-linking already-linked tags costs one query less
    with self.assertQueryCount(2):
        rec1.invalidate_cache()
        rec2.write({'tag_ids': [Command.link(tag.id) for tag in tags[1:]]})
    self.assertEqual(rec2.tag_ids, tags)

    # empty N tags in rec2: O(1) queries
    with self.assertQueryCount(3):
        rec1.invalidate_cache()
        rec2.write({'tag_ids': [Command.clear()]})
    self.assertFalse(rec2.tag_ids)
    self.assertTrue(tags.exists())

    # clearing an already-empty field costs one query less
    with self.assertQueryCount(2):
        rec1.invalidate_cache()
        rec2.write({'tag_ids': [Command.clear()]})
    self.assertFalse(rec2.tag_ids)

    # set N tags in rec2: O(1) queries
    with self.assertQueryCount(3):
        rec1.invalidate_cache()
        rec2.write({'tag_ids': [Command.set(tags.ids)]})
    self.assertEqual(rec2.tag_ids, tags)

    with self.assertQueryCount(3):
        rec1.invalidate_cache()
        rec2.write({'tag_ids': [Command.set(tags[:8].ids)]})
    self.assertEqual(rec2.tag_ids, tags[:8])

    with self.assertQueryCount(4):
        rec1.invalidate_cache()
        rec2.write({'tag_ids': [Command.set(tags[4:].ids)]})
    self.assertEqual(rec2.tag_ids, tags[4:])

    with self.assertQueryCount(3):
        rec1.invalidate_cache()
        rec2.write({'tag_ids': [Command.set(tags.ids)]})
    self.assertEqual(rec2.tag_ids, tags)

    # setting the same tags again costs one query less
    with self.assertQueryCount(2):
        rec1.invalidate_cache()
        rec2.write({'tag_ids': [Command.set(tags.ids)]})
    self.assertEqual(rec2.tag_ids, tags)
def test_write_base_one2many(self):
    """ Write on one2many field.

    Mirror of the many2many test for ``line_ids``: exercises create /
    update / delete / unlink / link / clear / set commands and pins the
    SQL query count of each via ``assertQueryCount``.  Because a one2many
    is backed by an inverse many2one on the lines, moving lines between
    records (link/set) also empties the source record's field — asserted
    explicitly below.

    NOTE(review): ``invalidate_cache()`` before each measured write
    presumably forces a cold ORM cache so the counts are deterministic.
    """
    rec1 = self.env['test_performance.base'].create({'name': 'X'})

    # create N lines on rec1: O(N) queries
    with self.assertQueryCount(3):
        rec1.invalidate_cache()
        rec1.write({'line_ids': [Command.create({'value': 0})]})
    self.assertEqual(len(rec1.line_ids), 1)

    with self.assertQueryCount(15):
        rec1.invalidate_cache()
        rec1.write({
            'line_ids': [Command.create({'value': val}) for val in range(1, 12)]
        })
    self.assertEqual(len(rec1.line_ids), 12)

    lines = rec1.line_ids

    # update N lines: O(N) queries
    # lines[0] is a one-record recordset: single-record case
    with self.assertQueryCount(6):
        rec1.invalidate_cache()
        rec1.write({
            'line_ids': [Command.update(line.id, {'value': 42}) for line in lines[0]]
        })
    self.assertEqual(rec1.line_ids, lines)

    # distinct values per line prevent batching the UPDATEs, hence the
    # larger count for the remaining 11 lines
    with self.assertQueryCount(26):
        rec1.invalidate_cache()
        rec1.write({
            'line_ids': [
                Command.update(line.id, {'value': 42 + line.id})
                for line in lines[1:]
            ]
        })
    self.assertEqual(rec1.line_ids, lines)

    # delete N lines: O(1) queries
    # Command.delete removes the line records themselves (lines.exists() below)
    with self.assertQueryCount(14):
        rec1.invalidate_cache()
        rec1.write(
            {'line_ids': [Command.delete(line.id) for line in lines[0]]})
    self.assertEqual(rec1.line_ids, lines[1:])

    with self.assertQueryCount(12):
        rec1.invalidate_cache()
        rec1.write(
            {'line_ids': [Command.delete(line.id) for line in lines[1:]]})
    self.assertFalse(rec1.line_ids)
    self.assertFalse(lines.exists())

    rec1.write({
        'line_ids': [Command.create({'value': val}) for val in range(12)]
    })
    lines = rec1.line_ids

    # unlink N lines: O(1) queries
    # NOTE: on this one2many, unlinked lines are also deleted
    # (asserted by assertFalse(lines.exists()) below), unlike the
    # many2many case where unlink only severs the relation.
    with self.assertQueryCount(14):
        rec1.invalidate_cache()
        rec1.write(
            {'line_ids': [Command.unlink(line.id) for line in lines[0]]})
    self.assertEqual(rec1.line_ids, lines[1:])

    with self.assertQueryCount(12):
        rec1.invalidate_cache()
        rec1.write(
            {'line_ids': [Command.unlink(line.id) for line in lines[1:]]})
    self.assertFalse(rec1.line_ids)
    self.assertFalse(lines.exists())

    rec1.write({
        'line_ids': [Command.create({'value': val}) for val in range(12)]
    })
    lines = rec1.line_ids
    rec2 = self.env['test_performance.base'].create({'name': 'X'})

    # link N lines from rec1 to rec2: O(1) queries
    # linking re-parents the lines, so rec1 progressively loses them
    with self.assertQueryCount(8):
        rec1.invalidate_cache()
        rec2.write(
            {'line_ids': [Command.link(line.id) for line in lines[0]]})
    self.assertEqual(rec1.line_ids, lines[1:])
    self.assertEqual(rec2.line_ids, lines[0])

    with self.assertQueryCount(8):
        rec1.invalidate_cache()
        rec2.write(
            {'line_ids': [Command.link(line.id) for line in lines[1:]]})
    self.assertFalse(rec1.line_ids)
    self.assertEqual(rec2.line_ids, lines)

    # re-linking already-linked lines is cheaper (nothing to re-parent)
    with self.assertQueryCount(4):
        rec1.invalidate_cache()
        rec2.write(
            {'line_ids': [Command.link(line.id) for line in lines[0]]})
    self.assertEqual(rec2.line_ids, lines)

    with self.assertQueryCount(4):
        rec1.invalidate_cache()
        rec2.write(
            {'line_ids': [Command.link(line.id) for line in lines[1:]]})
    self.assertEqual(rec2.line_ids, lines)

    # empty N lines in rec2: O(1) queries
    with self.assertQueryCount(13):
        rec1.invalidate_cache()
        rec2.write({'line_ids': [Command.clear()]})
    self.assertFalse(rec2.line_ids)

    # clearing an already-empty field is much cheaper
    with self.assertQueryCount(3):
        rec1.invalidate_cache()
        rec2.write({'line_ids': [Command.clear()]})
    self.assertFalse(rec2.line_ids)

    rec1.write({
        'line_ids': [Command.create({'value': val}) for val in range(12)]
    })
    lines = rec1.line_ids

    # set N lines in rec2: O(1) queries
    # set() also re-parents the listed lines away from rec1
    with self.assertQueryCount(8):
        rec1.invalidate_cache()
        rec2.write({'line_ids': [Command.set(lines[0].ids)]})
    self.assertEqual(rec1.line_ids, lines[1:])
    self.assertEqual(rec2.line_ids, lines[0])

    with self.assertQueryCount(6):
        rec1.invalidate_cache()
        rec2.write({'line_ids': [Command.set(lines.ids)]})
    self.assertFalse(rec1.line_ids)
    self.assertEqual(rec2.line_ids, lines)

    # setting to the identical value is the cheapest case
    with self.assertQueryCount(4):
        rec1.invalidate_cache()
        rec2.write({'line_ids': [Command.set(lines.ids)]})
    self.assertEqual(rec2.line_ids, lines)
def _add_followers(self, res_model, res_ids, partner_ids, subtypes,
                   check_existing=False, existing_policy='skip'):
    """ Internal method that generates values to insert or update followers.
    Callers have to handle the result, for example by making a valid ORM
    command, inserting or updating directly follower records, ...

    This method returns two main data

     * first one is a dict which keys are res_ids. Value is a list of dict of
       values valid for creating new followers for the related res_id;
     * second one is a dict which keys are follower ids. Value is a dict of
       values valid for updating the related follower record;

    :param res_model: model of the followed documents;
    :param res_ids: IDs of the followed documents (may be falsy: a single
        ``0`` placeholder key is used so the loops below still run);
    :param partner_ids: IDs of partners to subscribe;
    :param subtypes: optional subtypes for new partner followers. This
      is a dict whose keys are partner IDs and value subtype IDs for that
      partner.
    :param check_existing: if True, check for existing followers for given
      documents and handle them according to existing_policy parameter;
      setting to False allows to save some computation if caller is sure
      there are no conflict for followers;
    :param existing_policy: if check_existing, tells what to do with already
      existing followers:

      * skip: simply skip existing followers, do not touch them;
      * force: update existing with given subtypes only;
      * replace: replace existing with new subtypes (like force without old
        / new follower);
      * update: gives an update dict allowing to add missing subtypes
        (no subtype removal);

    NOTE(review): the original docstring also documented a
    ``channel_subtypes`` parameter ("dict whose keys are channel IDs and
    value subtype IDs") which does not exist in this signature — stale
    documentation, kept out of the parameter list above.
    """
    # Use a sentinel key when no res_ids given so the per-document loop runs once.
    _res_ids = res_ids or [0]
    # data_fols: follower id -> (res_id, partner_id, subtype ids) for existing followers
    # doc_pids: res_id -> set of partner ids already following that document
    data_fols, doc_pids = dict(), dict((i, set()) for i in _res_ids)

    if check_existing and res_ids:
        for fid, rid, pid, sids in self._get_subscription_data(
                [(res_model, res_ids)], partner_ids or None):
            if existing_policy != 'force':
                if pid:
                    doc_pids[rid].add(pid)
            data_fols[fid] = (rid, pid, sids)

        # 'force': wipe all existing followers; they will be recreated below
        # with exactly the given subtypes (doc_pids stays empty on purpose).
        if existing_policy == 'force':
            self.sudo().browse(data_fols.keys()).unlink()

    new, update = dict(), dict()
    for res_id in _res_ids:
        for partner_id in set(partner_ids or []):
            if partner_id not in doc_pids[res_id]:
                # not yet following (or 'force' removed them): create values
                new.setdefault(res_id, list()).append({
                    'res_model': res_model,
                    'partner_id': partner_id,
                    'subtype_ids': [Command.set(subtypes[partner_id])],
                })
            elif existing_policy in ('replace', 'update'):
                # already following: compute the subtype delta for an update
                fol_id, sids = next(
                    ((key, val[2]) for key, val in data_fols.items()
                     if val[0] == res_id and val[1] == partner_id),
                    (False, []))
                new_sids = set(subtypes[partner_id]) - set(sids)
                old_sids = set(sids) - set(subtypes[partner_id])
                update_cmd = []
                if fol_id and new_sids:
                    update_cmd += [Command.link(sid) for sid in new_sids]
                # only 'replace' removes subtypes; 'update' is additive only
                if fol_id and old_sids and existing_policy == 'replace':
                    update_cmd += [Command.unlink(sid) for sid in old_sids]
                if update_cmd:
                    update[fol_id] = {'subtype_ids': update_cmd}

    return new, update
def get_mail_values(self, res_ids):
    """Generate the values that will be used by send_mail to create
    mail_messages or mail_mails.

    :param res_ids: IDs of the records (of model ``self.model``) to
        generate mail values for.
    :return: dict mapping each res_id to a dict of mail values, after
        post-processing by ``_process_state``.
    """
    self.ensure_one()
    results = dict.fromkeys(res_ids, False)
    rendered_values = {}
    mass_mail_mode = self.composition_mode == 'mass_mail'

    # render all template-based value at once
    if mass_mail_mode and self.model:
        rendered_values = self.render_message(res_ids)
    # compute alias-based reply-to in batch
    reply_to_value = dict.fromkeys(res_ids, None)
    if mass_mail_mode and not self.reply_to_force_new:
        records = self.env[self.model].browse(res_ids)
        reply_to_value = records._notify_get_reply_to(
            default=self.email_from)

    for res_id in res_ids:
        # static wizard (mail.message) values
        mail_values = {
            'subject': self.subject,
            'body': self.body or '',
            'parent_id': self.parent_id and self.parent_id.id,
            'partner_ids': [partner.id for partner in self.partner_ids],
            'attachment_ids': [attach.id for attach in self.attachment_ids],
            'author_id': self.author_id.id,
            'email_from': self.email_from,
            'record_name': self.record_name,
            'reply_to_force_new': self.reply_to_force_new,
            'mail_server_id': self.mail_server_id.id,
            'mail_activity_type_id': self.mail_activity_type_id.id,
        }

        # mass mailing: rendering override wizard static values
        if mass_mail_mode and self.model:
            record = self.env[self.model].browse(res_id)
            mail_values['headers'] = record._notify_email_headers()
            # keep a copy unless specifically requested, reset record name
            # (avoid browsing records)
            mail_values.update(
                is_notification=not self.auto_delete_message,
                model=self.model,
                res_id=res_id,
                record_name=False)
            # auto deletion of mail_mail
            if self.auto_delete or self.template_id.auto_delete:
                mail_values['auto_delete'] = True
            # rendered values using template
            email_dict = rendered_values[res_id]
            mail_values['partner_ids'] += email_dict.pop('partner_ids', [])
            mail_values.update(email_dict)
            # reply-to: alias-based value wins unless forcing a new thread,
            # in which case fall back to the sender address
            if not self.reply_to_force_new:
                mail_values.pop('reply_to')
                if reply_to_value.get(res_id):
                    mail_values['reply_to'] = reply_to_value[res_id]
            if self.reply_to_force_new and not mail_values.get('reply_to'):
                mail_values['reply_to'] = mail_values['email_from']
            # mail_mail values: body -> body_html, partner_ids -> recipient_ids
            mail_values['body_html'] = mail_values.get('body', '')
            mail_values['recipient_ids'] = [
                Command.link(id) for id in mail_values.pop('partner_ids', [])
            ]

            # process attachments: should not be encoded before being processed
            # by message_post / mail_mail create
            mail_values['attachments'] = [
                (name, base64.b64decode(enc_cont))
                for name, enc_cont in email_dict.pop(
                    'attachments', list())
            ]
            # copy template attachments onto the wizard record before handing
            # them to the generic attachment post-processing
            attachment_ids = []
            for attach_id in mail_values.pop('attachment_ids'):
                new_attach_id = self.env['ir.attachment'].browse(
                    attach_id).copy({
                        'res_model': self._name,
                        'res_id': self.id
                    })
                attachment_ids.append(new_attach_id.id)
            attachment_ids.reverse()
            mail_values['attachment_ids'] = self.env[
                'mail.thread'].with_context(
                    attached_to=record)._message_post_process_attachments(
                        mail_values.pop('attachments', []), attachment_ids, {
                            'model': 'mail.message',
                            'res_id': 0
                        })['attachment_ids']

        results[res_id] = mail_values

    results = self._process_state(results)
    return results
def _str_to_one2many(self, model, field, records):
    """Convert imported rows for a one2many field into x2many commands.

    :param model: the importing model (owner of ``field``);
    :param field: the one2many field being imported;
    :param records: list of row dicts for the sub-records;
    :return: ``(commands, warnings)`` where ``commands`` is a list of
        ``Command`` tuples (link/update for existing sub-records, create
        for new ones) and ``warnings`` collects non-fatal issues.
    """
    # name_create options scoped to this field: strip the "<field>/" prefix
    # so sub-conversion sees them relative to the comodel.
    name_create_enabled_fields = self._context.get(
        'name_create_enabled_fields') or {}
    prefix = field.name + '/'
    relative_name_create_enabled_fields = {
        k[len(prefix):]: v
        for k, v in name_create_enabled_fields.items() if k.startswith(prefix)
    }

    commands = []
    warnings = []

    if len(records) == 1 and exclude_ref_fields(records[0]) == {}:
        # only one row with only ref field, field=ref1,ref2,ref3 as in
        # m2o/m2m
        record = records[0]
        subfield, ws = self._referencing_subfield(record)
        warnings.extend(ws)
        # transform [{subfield:ref1,ref2,ref3}] into
        # [{subfield:ref1},{subfield:ref2},{subfield:ref3}]
        records = ({
            subfield: item
        } for item in record[subfield].split(','))

    def log(f, exception):
        # Error callback for sub-record conversion: real errors are
        # re-raised with the field path prefixed into the message;
        # Warning instances are merely collected.
        if not isinstance(exception, Warning):
            current_field_name = self.env[
                field.comodel_name]._fields[f].string
            # prefix the sub-field label into the message's %(field)s slot
            arg0 = exception.args[0] % {
                'field': '%(field)s/' + current_field_name
            }
            exception.args = (arg0, *exception.args[1:])
            raise exception
        warnings.append(exception)

    # Complete the field hierarchy path
    # E.g. For "parent/child/subchild", field hierarchy path for "subchild" is ['parent', 'child']
    parent_fields_hierarchy = self._context.get('parent_fields_hierarchy',
                                                []) + [field.name]

    convert = self.with_context(
        name_create_enabled_fields=relative_name_create_enabled_fields,
        parent_fields_hierarchy=parent_fields_hierarchy).for_model(
            self.env[field.comodel_name])

    for record in records:
        id = None
        refs = only_ref_fields(record)
        writable = convert(exclude_ref_fields(record), log)
        if refs:
            # row references an existing sub-record: resolve its db id
            subfield, w1 = self._referencing_subfield(refs)
            warnings.extend(w1)
            try:
                id, _, w2 = self.db_id_for(model, field, subfield,
                                           record[subfield])
                warnings.extend(w2)
            except ValueError:
                # unresolvable 'id' refs are kept as raw values for the
                # caller; any other subfield failure is fatal
                if subfield != 'id':
                    raise
                writable['id'] = record['id']
        if id:
            # existing sub-record: link it, then apply the row's values
            commands.append(Command.link(id))
            commands.append(Command.update(id, writable))
        else:
            commands.append(Command.create(writable))

    return commands, warnings