class MaintenanceRequest(models.Model):
    """Maintenance request on a piece of equipment.

    A request moves through configurable kanban stages, carries mail-thread
    chatter (via ``mail.thread.cc``) and scheduled next activities (via
    ``mail.activity.mixin``), and is company-checked against its equipment
    and team.
    """
    _name = 'maintenance.request'
    _inherit = ['mail.thread.cc', 'mail.activity.mixin']
    _description = 'Maintenance Request'
    _order = "id desc"
    _check_company_auto = True

    @api.returns('self')
    def _default_stage(self):
        """Return the first maintenance stage found as the default stage."""
        return self.env['maintenance.stage'].search([], limit=1)

    def _creation_subtype(self):
        """Subtype used for the automatic chatter message logged on creation."""
        return self.env.ref('maintenance.mt_req_created')

    def _track_subtype(self, init_values):
        """Log stage changes with the dedicated 'request status' subtype."""
        self.ensure_one()
        if 'stage_id' in init_values:
            return self.env.ref('maintenance.mt_req_status')
        return super(MaintenanceRequest, self)._track_subtype(init_values)

    def _get_default_team_id(self):
        """Default team id: a team of the current company if any, else any team."""
        MT = self.env['maintenance.team']
        team = MT.search([('company_id', '=', self.env.company.id)], limit=1)
        if not team:
            team = MT.search([], limit=1)
        return team.id

    name = fields.Char('Subjects', required=True)
    company_id = fields.Many2one('res.company',
                                 string='Company',
                                 default=lambda self: self.env.company)
    description = fields.Text('Description')
    request_date = fields.Date(
        'Request Date',
        tracking=True,
        default=fields.Date.context_today,
        help="Date requested for the maintenance to happen")
    owner_user_id = fields.Many2one('res.users',
                                    string='Created by User',
                                    default=lambda s: s.env.uid)
    # Category is denormalized (store=True) from the equipment for grouping.
    category_id = fields.Many2one('maintenance.equipment.category',
                                  related='equipment_id.category_id',
                                  string='Category',
                                  store=True,
                                  readonly=True)
    equipment_id = fields.Many2one('maintenance.equipment',
                                   string='Equipment',
                                   ondelete='restrict',
                                   index=True,
                                   check_company=True)
    user_id = fields.Many2one('res.users', string='Technician', tracking=True)
    stage_id = fields.Many2one('maintenance.stage',
                               string='Stage',
                               ondelete='restrict',
                               tracking=True,
                               group_expand='_read_group_stage_ids',
                               default=_default_stage,
                               copy=False)
    priority = fields.Selection([('0', 'Very Low'), ('1', 'Low'),
                                 ('2', 'Normal'), ('3', 'High')],
                                string='Priority')
    color = fields.Integer('Color Index')
    close_date = fields.Date('Close Date',
                             help="Date the maintenance was finished. ")
    kanban_state = fields.Selection([('normal', 'In Progress'),
                                     ('blocked', 'Blocked'),
                                     ('done', 'Ready for next stage')],
                                    string='Kanban State',
                                    required=True,
                                    default='normal',
                                    tracking=True)
    # active = fields.Boolean(default=True, help="Set active to false to hide the maintenance request without deleting it.")
    # NOTE(review): 'archive' replaces the standard 'active' flag commented out
    # above; its meaning is inverted (archive=True hides the record), so the
    # ORM's built-in active_test filtering does NOT apply to this model.
    archive = fields.Boolean(
        default=False,
        help=
        "Set archive to true to hide the maintenance request without deleting it."
    )
    maintenance_type = fields.Selection([('corrective', 'Corrective'),
                                         ('preventive', 'Preventive')],
                                        string='Maintenance Type',
                                        default="corrective")
    schedule_date = fields.Datetime(
        'Scheduled Date',
        help=
        "Date the maintenance team plans the maintenance. It should not differ much from the Request Date. "
    )
    maintenance_team_id = fields.Many2one('maintenance.team',
                                          string='Team',
                                          required=True,
                                          default=_get_default_team_id,
                                          check_company=True)
    duration = fields.Float(help="Duration in hours.")
    done = fields.Boolean(related='stage_id.done')

    def archive_equipment_request(self):
        """Hide the requests from the pipeline by flagging them as archived."""
        self.write({'archive': True})

    def reset_equipment_request(self):
        """ Reinsert the maintenance request into the maintenance pipe in the first stage"""
        first_stage_obj = self.env['maintenance.stage'].search(
            [], order="sequence asc", limit=1)
        # self.write({'active': True, 'stage_id': first_stage_obj.id})
        self.write({'archive': False, 'stage_id': first_stage_obj.id})

    @api.onchange('company_id')
    def _onchange_company_id(self):
        """Clear the team when it belongs to a different company than selected."""
        if self.company_id and self.maintenance_team_id:
            if self.maintenance_team_id.company_id and not self.maintenance_team_id.company_id.id == self.company_id.id:
                self.maintenance_team_id = False

    @api.onchange('equipment_id')
    def onchange_equipment_id(self):
        """Propagate technician, category and team from the chosen equipment."""
        if self.equipment_id:
            # Equipment technician wins; otherwise fall back to the category's.
            self.user_id = self.equipment_id.technician_user_id if self.equipment_id.technician_user_id else self.equipment_id.category_id.technician_user_id
            self.category_id = self.equipment_id.category_id
            if self.equipment_id.maintenance_team_id:
                self.maintenance_team_id = self.equipment_id.maintenance_team_id.id

    @api.onchange('category_id')
    def onchange_category_id(self):
        """Fall back on the category's technician when none is set explicitly."""
        if not self.user_id or not self.equipment_id or (
                self.user_id and not self.equipment_id.technician_user_id):
            self.user_id = self.category_id.technician_user_id

    @api.model
    def create(self, vals):
        """Create a request, subscribe owner/technician and schedule activity."""
        # context: no_log, because subtype already handle this
        request = super(MaintenanceRequest, self).create(vals)
        if request.owner_user_id or request.user_id:
            request._add_followers()
        if request.equipment_id and not request.maintenance_team_id:
            request.maintenance_team_id = request.equipment_id.maintenance_team_id
        request.activity_update()
        return request

    def write(self, vals):
        # Overridden to reset the kanban_state to normal whenever
        # the stage (stage_id) of the Maintenance Request changes.
        if vals and 'kanban_state' not in vals and 'stage_id' in vals:
            vals['kanban_state'] = 'normal'
        res = super(MaintenanceRequest, self).write(vals)
        if vals.get('owner_user_id') or vals.get('user_id'):
            self._add_followers()
        if 'stage_id' in vals:
            # Requests reaching a "done" stage get today as close date and
            # their pending maintenance activities are marked as done.
            self.filtered(lambda m: m.stage_id.done).write(
                {'close_date': fields.Date.today()})
            self.activity_feedback(
                ['maintenance.mail_act_maintenance_request'])
        if vals.get('user_id') or vals.get('schedule_date'):
            self.activity_update()
        if vals.get('equipment_id'):
            # need to change description of activity also so unlink old and create new activity
            self.activity_unlink(['maintenance.mail_act_maintenance_request'])
            self.activity_update()
        return res

    def activity_update(self):
        """ Update maintenance activities based on current record set state.
        It reschedule, unlink or create maintenance request activities. """
        # No scheduled date -> no planned activity: drop any existing one.
        self.filtered(
            lambda request: not request.schedule_date).activity_unlink(
                ['maintenance.mail_act_maintenance_request'])
        for request in self.filtered(lambda request: request.schedule_date):
            date_dl = fields.Datetime.from_string(request.schedule_date).date()
            # Try to reschedule an existing activity first; create one only
            # if none was found for this record.
            updated = request.activity_reschedule(
                ['maintenance.mail_act_maintenance_request'],
                date_deadline=date_dl,
                new_user_id=request.user_id.id or request.owner_user_id.id
                or self.env.uid)
            if not updated:
                if request.equipment_id:
                    note = _(
                        'Request planned for <a href="#" data-oe-model="%s" data-oe-id="%s">%s</a>'
                    ) % (request.equipment_id._name, request.equipment_id.id,
                         request.equipment_id.display_name)
                else:
                    note = False
                request.activity_schedule(
                    'maintenance.mail_act_maintenance_request',
                    fields.Datetime.from_string(request.schedule_date).date(),
                    note=note,
                    user_id=request.user_id.id or request.owner_user_id.id
                    or self.env.uid)

    def _add_followers(self):
        """Subscribe the request owner and the technician to the chatter."""
        for request in self:
            partner_ids = (request.owner_user_id.partner_id +
                           request.user_id.partner_id).ids
            request.message_subscribe(partner_ids=partner_ids)

    @api.model
    def _read_group_stage_ids(self, stages, domain, order):
        """ Read group customization in order to display all the stages in the
        kanban view, even if they are empty """
        stage_ids = stages._search([],
                                   order=order,
                                   access_rights_uid=SUPERUSER_ID)
        return stages.browse(stage_ids)
class UtmCampaign(models.Model):
    """Add sale-related KPIs to UTM campaigns.

    Provides the number of quotations and the total invoiced revenue
    generated by each campaign, plus actions to open the related records.
    """
    _inherit = 'utm.campaign'
    _description = 'UTM Campaign'

    quotation_count = fields.Integer('Quotation Count',
                                     groups='sales_team.group_sale_salesman',
                                     compute="_compute_quotation_count")
    invoiced_amount = fields.Integer(
        default=0,
        compute="_compute_sale_invoiced_amount",
        string="Revenues generated by the campaign")
    # NOTE(review): 'states' refers to 'draft'/'refused' but no 'state' field
    # is visible on this model here — confirm a 'state' field exists elsewhere,
    # otherwise these modifiers are dead configuration.
    company_id = fields.Many2one('res.company',
                                 string='Company',
                                 readonly=True,
                                 states={
                                     'draft': [('readonly', False)],
                                     'refused': [('readonly', False)]
                                 },
                                 default=lambda self: self.env.company)
    currency_id = fields.Many2one('res.currency',
                                  related='company_id.currency_id',
                                  string='Currency')

    def _compute_quotation_count(self):
        """Count sale orders per campaign with a single read_group call."""
        quotation_data = self.env['sale.order'].read_group(
            [('campaign_id', 'in', self.ids)], ['campaign_id'],
            ['campaign_id'])
        data_map = {
            datum['campaign_id'][0]: datum['campaign_id_count']
            for datum in quotation_data
        }
        for campaign in self:
            campaign.quotation_count = data_map.get(campaign.id, 0)

    def _compute_sale_invoiced_amount(self):
        """Sum the invoiced revenue of each campaign.

        Aggregates -SUM(balance) of posted journal items belonging to
        customer/vendor documents tagged with the campaign, in one SQL query.
        """
        # Guard the empty recordset: "campaign_id IN ()" is invalid SQL and
        # the raw query below would raise a ProgrammingError.
        if not self.ids:
            self.invoiced_amount = 0
            return
        # Flush pending ORM writes so the SQL query sees up-to-date data.
        self.env['account.move.line'].flush(
            ['balance', 'move_id', 'account_id', 'exclude_from_invoice_tab'])
        self.env['account.move'].flush(['state', 'campaign_id', 'type'])
        query = """SELECT move.campaign_id, -SUM(line.balance) as price_subtotal
                    FROM account_move_line line
                    INNER JOIN account_move move ON line.move_id = move.id
                    WHERE move.state not in ('draft', 'cancel')
                    AND move.campaign_id IN %s
                    AND move.type IN ('out_invoice', 'out_refund', 'in_invoice', 'in_refund', 'out_receipt', 'in_receipt')
                    AND line.account_id IS NOT NULL
                    AND NOT line.exclude_from_invoice_tab
                    GROUP BY move.campaign_id
                """
        self._cr.execute(query, [tuple(self.ids)])
        query_res = self._cr.dictfetchall()
        # Campaigns without any posted move never show up in the query result:
        # track the assigned ones and zero out the rest afterwards.
        campaigns = self.browse()
        for datum in query_res:
            campaign = self.browse(datum['campaign_id'])
            campaign.invoiced_amount = datum['price_subtotal']
            campaigns |= campaign
        for campaign in (self - campaigns):
            campaign.invoiced_amount = 0

    def action_redirect_to_quotations(self):
        """Open the quotation list action filtered on this campaign."""
        action = self.env.ref(
            'sale.action_quotations_with_onboarding').read()[0]
        action['domain'] = [('campaign_id', '=', self.id)]
        action['context'] = {'default_campaign_id': self.id}
        return action

    def action_redirect_to_invoiced(self):
        """Open a read-only journal-items view of this campaign's invoices."""
        action = self.env.ref('account.action_move_journal_line').read()[0]
        invoices = self.env['account.move'].search([('campaign_id', '=',
                                                     self.id)])
        action['context'] = {
            'create': False,
            'edit': False,
            'view_no_maturity': True
        }
        action['domain'] = [('id', 'in', invoices.ids),
                            ('type', 'in',
                             ('out_invoice', 'out_refund', 'in_invoice',
                              'in_refund', 'out_receipt', 'in_receipt')),
                            ('state', 'not in', ['draft', 'cancel'])]
        return action
class UtmCampaign(models.Model):
    """Add mass-mailing statistics to UTM campaigns.

    Exposes per-campaign counters (sent, delivered, opened, replied,
    bounced, ...) and click ratios computed from ``mailing.trace`` records.
    """
    _inherit = 'utm.campaign'

    mailing_mail_ids = fields.One2many('mailing.mailing',
                                       'campaign_id',
                                       domain=[('mailing_type', '=', 'mail')],
                                       string='Mass Mailings')
    mailing_mail_count = fields.Integer('Number of Mass Mailing',
                                        compute="_compute_mailing_mail_count")
    mailing_clicks_ratio = fields.Integer(
        compute="_compute_mailing_clicks_ratio", string="Number of clicks")
    mailing_items = fields.Integer(compute="_compute_mailing_items",
                                   string='Mailings')
    mailing_clicked = fields.Integer(compute="_compute_mailing_items",
                                     string='Mailings Clicked')
    # stat fields
    total = fields.Integer(compute="_compute_statistics")
    scheduled = fields.Integer(compute="_compute_statistics")
    failed = fields.Integer(compute="_compute_statistics")
    ignored = fields.Integer(compute="_compute_statistics")
    sent = fields.Integer(compute="_compute_statistics", string="Sent Emails")
    delivered = fields.Integer(compute="_compute_statistics")
    opened = fields.Integer(compute="_compute_statistics")
    replied = fields.Integer(compute="_compute_statistics")
    bounced = fields.Integer(compute="_compute_statistics")
    received_ratio = fields.Integer(compute="_compute_statistics",
                                    string='Received Ratio')
    opened_ratio = fields.Integer(compute="_compute_statistics",
                                  string='Opened Ratio')
    replied_ratio = fields.Integer(compute="_compute_statistics",
                                   string='Replied Ratio')
    bounced_ratio = fields.Integer(compute="_compute_statistics",
                                   string='Bounced Ratio')

    @api.depends('mailing_mail_ids')
    def _compute_mailing_mail_count(self):
        """Number of mass mailings attached to each campaign."""
        for campaign in self:
            campaign.mailing_mail_count = len(campaign.mailing_mail_ids)

    def _compute_mailing_items(self):
        """Count total traces and clicked traces per campaign in one query."""
        mapped_data = {}
        # Guard the empty recordset: "campaign_id IN ()" is invalid SQL and
        # would raise a ProgrammingError.
        if self.ids:
            query = """SELECT trace.campaign_id AS campaign_id,
                              COUNT(DISTINCT(trace.id)) AS items_total,
                              COUNT(DISTINCT(click.mailing_trace_id)) AS clicked_total
                       FROM mailing_trace AS trace
                       LEFT OUTER JOIN link_tracker_click as click
                           ON click.mailing_trace_id = trace.id
                       WHERE trace.campaign_id IN %s
                       GROUP BY trace.campaign_id
                    """
            params = [tuple(self.ids)]
            self.env.cr.execute(query, params)
            clicked_data = self.env.cr.dictfetchall()
            mapped_data = {
                datum['campaign_id']: {
                    'clicked_total': datum['clicked_total'],
                    'items_total': datum['items_total']
                }
                for datum in clicked_data
            }
        for campaign in self:
            # Campaigns without traces default to 0.
            campaign_items_values = mapped_data.get(campaign.id, {})
            campaign.mailing_items = campaign_items_values.get(
                'items_total', 0)
            campaign.mailing_clicked = campaign_items_values.get(
                'clicked_total', 0)

    @api.depends('mailing_items', 'mailing_clicked')
    def _compute_mailing_clicks_ratio(self):
        """Percentage of traces that received at least one click."""
        for campaign in self:
            campaign.mailing_clicks_ratio = campaign.mailing_clicked / campaign.mailing_items * 100 if campaign.mailing_items > 0 else 0

    def _compute_statistics(self):
        """ Compute statistics of the mass mailing campaign """
        # Guard the empty recordset: "c.id IN ()" is invalid SQL and would
        # raise a ProgrammingError; with no records there is nothing to assign.
        if not self.ids:
            return
        self.env.cr.execute(
            """
            SELECT
                c.id as campaign_id,
                COUNT(s.id) AS total,
                COUNT(CASE WHEN s.sent is not null THEN 1 ELSE null END) AS sent,
                COUNT(CASE WHEN s.scheduled is not null AND s.sent is null AND s.exception is null AND s.ignored is null THEN 1 ELSE null END) AS scheduled,
                COUNT(CASE WHEN s.scheduled is not null AND s.sent is null AND s.exception is not null THEN 1 ELSE null END) AS failed,
                COUNT(CASE WHEN s.scheduled is not null AND s.sent is null AND s.exception is null AND s.ignored is not null THEN 1 ELSE null END) AS ignored,
                COUNT(CASE WHEN s.id is not null AND s.bounced is null THEN 1 ELSE null END) AS delivered,
                COUNT(CASE WHEN s.opened is not null THEN 1 ELSE null END) AS opened,
                COUNT(CASE WHEN s.replied is not null THEN 1 ELSE null END) AS replied,
                COUNT(CASE WHEN s.bounced is not null THEN 1 ELSE null END) AS bounced
            FROM
                mailing_trace s
            RIGHT JOIN
                utm_campaign c
                ON (c.id = s.campaign_id)
            WHERE
                c.id IN %s
            GROUP BY
                c.id
        """, (tuple(self.ids), ))
        for row in self.env.cr.dictfetchall():
            # 'ignored' traces are excluded from the ratio denominator; 'or 1'
            # avoids a division by zero for campaigns without traces.
            total = (row['total'] - row['ignored']) or 1
            # The SQL 'delivered' column is deliberately overridden here:
            # delivered = sent - bounced.
            row['delivered'] = row['sent'] - row['bounced']
            row['received_ratio'] = 100.0 * row['delivered'] / total
            row['opened_ratio'] = 100.0 * row['opened'] / total
            row['replied_ratio'] = 100.0 * row['replied'] / total
            row['bounced_ratio'] = 100.0 * row['bounced'] / total
            self.browse(row.pop('campaign_id')).update(row)

    def _get_mailing_recipients(self, model=None):
        """Return the recipients of a mailing campaign. This is based on the statistics
        build for each mailing.

        :param model: optional model name to restrict the traces considered
        :return: dict mapping campaign id to a set of recipient res_ids
        """
        # One independent container per key: the previous
        # dict.fromkeys(self.ids, {}) shared a SINGLE mutable dict between
        # all keys, and mixed types (dict placeholder vs set values).
        res = {campaign_id: set() for campaign_id in self.ids}
        for campaign in self:
            domain = [('campaign_id', '=', campaign.id)]
            if model:
                domain += [('model', '=', model)]
            res[campaign.id] = set(
                self.env['mailing.trace'].search(domain).mapped('res_id'))
        return res
class AnotherUnit(models.Model):
    """Minimal model holding a single required integer value.

    NOTE(review): the 'test.' model prefix suggests this is a test fixture
    model — confirm against the module's test suite.
    """
    _name = 'test.another_unit'
    _description = 'Another Test Unit'

    val1 = fields.Integer('Value 1', required=True)
class MailComposer(models.TransientModel): """ Generic message composition wizard. You may inherit from this wizard at model and view levels to provide specific features. The behavior of the wizard depends on the composition_mode field: - 'comment': post on a record. The wizard is pre-populated via ``get_record_data`` - 'mass_mail': wizard in mass mailing mode where the mail details can contain template placeholders that will be merged with actual data before being sent to each recipient. """ _name = 'mail.compose.message' _description = 'Email composition wizard' _log_access = True _batch_size = 500 @api.model def default_get(self, fields): """ Handle composition mode. Some details about context keys: - comment: default mode, model and ID of a record the user comments - default_model or active_model - default_res_id or active_id - reply: active_id of a message the user replies to - default_parent_id or message_id or active_id: ID of the mail.message we reply to - message.res_model or default_model - message.res_id or default_res_id - mass_mail: model and IDs of records the user mass-mails - active_ids: record IDs - default_model or active_model """ result = super(MailComposer, self).default_get(fields) # author if 'author_id' not in result: result['author_id'] = self.env.user.partner_id.id if 'email_from' not in result and self.env.user.email: result['email_from'] = self.env.user.email_formatted elif 'email_from' not in result: author = self.env['res.partner'].browse(result['author_id']) if author.email: result['email_from'] = tools.formataddr( (author.name, author.email)) # v6.1 compatibility mode result['composition_mode'] = result.get( 'composition_mode', self._context.get('mail.compose.message.mode', 'comment')) result['model'] = result.get('model', self._context.get('active_model')) result['res_id'] = result.get('res_id', self._context.get('active_id')) result['parent_id'] = result.get('parent_id', self._context.get('message_id')) if 'no_auto_thread' not in 
result and ( result['model'] not in self.env or not hasattr(self.env[result['model']], 'message_post')): result['no_auto_thread'] = True # default values according to composition mode - NOTE: reply is deprecated, fall back on comment if result['composition_mode'] == 'reply': result['composition_mode'] = 'comment' vals = {} if 'active_domain' in self._context: # not context.get() because we want to keep global [] domains vals['active_domain'] = '%s' % self._context.get('active_domain') if result['composition_mode'] == 'comment': vals.update(self.get_record_data(result)) for field in vals: if field in fields: result[field] = vals[field] # TDE HACK: as mailboxes used default_model='res.users' and default_res_id=uid # (because of lack of an accessible pid), creating a message on its own # profile may crash (res_users does not allow writing on it) # Posting on its own profile works (res_users redirect to res_partner) # but when creating the mail.message to create the mail.compose.message # access rights issues may rise # We therefore directly change the model and res_id if result['model'] == 'res.users' and result['res_id'] == self._uid: result['model'] = 'res.partner' result['res_id'] = self.env.user.partner_id.id if fields is not None: [ result.pop(field, None) for field in list(result) if field not in fields ] return result @api.model def _get_composition_mode_selection(self): return [('comment', 'Post on a document'), ('mass_mail', 'Email Mass Mailing'), ('mass_post', 'Post on Multiple Documents')] # content subject = fields.Char('Subject') body = fields.Html('Contents', default='', sanitize_style=True) parent_id = fields.Many2one('mail.message', 'Parent Message', index=True, ondelete='set null', help="Initial thread message.") attachment_ids = fields.Many2many( 'ir.attachment', 'mail_compose_message_ir_attachments_rel', 'wizard_id', 'attachment_id', 'Attachments') # origin email_from = fields.Char( 'From', help= "Email address of the sender. 
This field is set when no matching partner is found and replaces the author_id field in the chatter." ) author_id = fields.Many2one( 'res.partner', 'Author', index=True, help= "Author of the message. If not set, email_from may hold an email address that did not match any partner." ) # related document model = fields.Char('Related Document Model', index=True) res_id = fields.Integer('Related Document ID', index=True) record_name = fields.Char('Message Record Name', help="Name get of the related document.") # characteristics message_type = fields.Selection( [('comment', 'Comment'), ('notification', 'System notification')], 'Type', required=True, default='comment', help="Message type: email for email message, notification for system " "message, comment for other messages such as user replies") subtype_id = fields.Many2one('mail.message.subtype', 'Subtype', ondelete='set null', index=True, default=lambda self: self.env['ir.model.data'] .xmlid_to_res_id('mail.mt_comment')) mail_activity_type_id = fields.Many2one('mail.activity.type', 'Mail Activity Type', index=True, ondelete='set null') # destination composition_mode = fields.Selection( selection=_get_composition_mode_selection, string='Composition mode', default='comment') reply_to = fields.Char( 'Reply-To', help= 'Reply email address. Setting the reply_to bypasses the automatic thread creation.' ) no_auto_thread = fields.Boolean( 'No threading for answers', help= 'Answers do not go in the original document discussion thread. This has an impact on the generated message-id.' 
) is_log = fields.Boolean( 'Log an Internal Note', help='Whether the message is an internal note (comment mode only)') partner_ids = fields.Many2many('res.partner', 'mail_compose_message_res_partner_rel', 'wizard_id', 'partner_id', 'Additional Contacts') use_active_domain = fields.Boolean('Use active domain') active_domain = fields.Text('Active domain', readonly=True) # mass mode options notify = fields.Boolean( 'Notify followers', help='Notify followers of the document (mass post only)') auto_delete = fields.Boolean('Delete Emails', help='Delete sent emails (mass mailing only)') auto_delete_message = fields.Boolean( 'Delete Message Copy', help= 'Do not keep a copy of the email in the document communication history (mass mailing only)' ) template_id = fields.Many2one('mail.template', 'Use template', index=True, domain="[('model', '=', model)]") # technical stuff mail_server_id = fields.Many2one('ir.mail_server', 'Outgoing mail server') layout = fields.Char('Layout', copy=False) # xml id of layout add_sign = fields.Boolean(default=True) @api.model def get_record_data(self, values): """ Returns a defaults-like dict with initial values for the composition wizard when sending an email related a previous email (parent_id) or a document (model, res_id). This is based on previously computed default values. 
""" result, subject = {}, False if values.get('parent_id'): parent = self.env['mail.message'].browse(values.get('parent_id')) result['record_name'] = parent.record_name, subject = tools.ustr(parent.subject or parent.record_name or '') if not values.get('model'): result['model'] = parent.model if not values.get('res_id'): result['res_id'] = parent.res_id partner_ids = values.get('partner_ids', list()) + parent.partner_ids.ids result['partner_ids'] = partner_ids elif values.get('model') and values.get('res_id'): doc_name_get = self.env[values.get('model')].browse( values.get('res_id')).name_get() result['record_name'] = doc_name_get and doc_name_get[0][1] or '' subject = tools.ustr(result['record_name']) re_prefix = _('Re:') if subject and not (subject.startswith('Re:') or subject.startswith(re_prefix)): subject = "%s %s" % (re_prefix, subject) result['subject'] = subject return result #------------------------------------------------------ # Wizard validation and send #------------------------------------------------------ # action buttons call with positionnal arguments only, so we need an intermediary function # to ensure the context is passed correctly def action_send_mail(self): self.send_mail() return {'type': 'ir.actions.act_window_close', 'infos': 'mail_sent'} def send_mail(self, auto_commit=False): """ Process the wizard content and proceed with sending the related email(s), rendering any template patterns on the fly if needed. """ notif_layout = self._context.get('custom_layout') # Several custom layouts make use of the model description at rendering, e.g. in the # 'View <document>' button. Some models are used for different business concepts, such as # 'purchase.order' which is used for a RFQ and and PO. To avoid confusion, we must use a # different wording depending on the state of the object. 
# Therefore, we can set the description in the context from the beginning to avoid falling # back on the regular display_name retrieved in '_notify_prepare_template_context'. model_description = self._context.get('model_description') for wizard in self: # Duplicate attachments linked to the email.template. # Indeed, basic mail.compose.message wizard duplicates attachments in mass # mailing mode. But in 'single post' mode, attachments of an email template # also have to be duplicated to avoid changing their ownership. if wizard.attachment_ids and wizard.composition_mode != 'mass_mail' and wizard.template_id: new_attachment_ids = [] for attachment in wizard.attachment_ids: if attachment in wizard.template_id.attachment_ids: new_attachment_ids.append( attachment.copy({ 'res_model': 'mail.compose.message', 'res_id': wizard.id }).id) else: new_attachment_ids.append(attachment.id) new_attachment_ids.reverse() wizard.write({'attachment_ids': [(6, 0, new_attachment_ids)]}) # Mass Mailing mass_mode = wizard.composition_mode in ('mass_mail', 'mass_post') Mail = self.env['mail.mail'] ActiveModel = self.env[wizard.model] if wizard.model and hasattr( self.env[wizard.model], 'message_post') else self.env['mail.thread'] if wizard.composition_mode == 'mass_post': # do not send emails directly but use the queue instead # add context key to avoid subscribing the author ActiveModel = ActiveModel.with_context( mail_notify_force_send=False, mail_create_nosubscribe=True) # wizard works in batch mode: [res_id] or active_ids or active_domain if mass_mode and wizard.use_active_domain and wizard.model: res_ids = self.env[wizard.model].search( safe_eval(wizard.active_domain)).ids elif mass_mode and wizard.model and self._context.get( 'active_ids'): res_ids = self._context['active_ids'] else: res_ids = [wizard.res_id] batch_size = int(self.env['ir.config_parameter'].sudo().get_param( 'mail.batch_size')) or self._batch_size sliced_res_ids = [ res_ids[i:i + batch_size] for i in range(0, 
len(res_ids), batch_size) ] if wizard.composition_mode == 'mass_mail' or wizard.is_log or ( wizard.composition_mode == 'mass_post' and not wizard.notify): # log a note: subtype is False subtype_id = False elif wizard.subtype_id: subtype_id = wizard.subtype_id.id else: subtype_id = self.env['ir.model.data'].xmlid_to_res_id( 'mail.mt_comment') for res_ids in sliced_res_ids: batch_mails = Mail all_mail_values = wizard.get_mail_values(res_ids) for res_id, mail_values in all_mail_values.items(): if wizard.composition_mode == 'mass_mail': batch_mails |= Mail.create(mail_values) else: post_params = dict( message_type=wizard.message_type, subtype_id=subtype_id, email_layout_xmlid=notif_layout, add_sign=not bool(wizard.template_id), mail_auto_delete=wizard.template_id.auto_delete if wizard.template_id else False, model_description=model_description) post_params.update(mail_values) if ActiveModel._name == 'mail.thread': if wizard.model: post_params['model'] = wizard.model post_params['res_id'] = res_id if not ActiveModel.message_notify(**post_params): # if message_notify returns an empty record set, no recipients where found. raise UserError(_("No recipient found.")) else: ActiveModel.browse(res_id).message_post( **post_params) if wizard.composition_mode == 'mass_mail': batch_mails.send(auto_commit=auto_commit) def get_mail_values(self, res_ids): """Generate the values that will be used by send_mail to create mail_messages or mail_mails. 
""" self.ensure_one() results = dict.fromkeys(res_ids, False) rendered_values = {} mass_mail_mode = self.composition_mode == 'mass_mail' # render all template-based value at once if mass_mail_mode and self.model: rendered_values = self.render_message(res_ids) # compute alias-based reply-to in batch reply_to_value = dict.fromkeys(res_ids, None) if mass_mail_mode and not self.no_auto_thread: records = self.env[self.model].browse(res_ids) reply_to_value = self.env[ 'mail.thread']._notify_get_reply_to_on_records( default=self.email_from, records=records) blacklisted_rec_ids = [] if mass_mail_mode and issubclass(type(self.env[self.model]), self.pool['mail.thread.blacklist']): BL_sudo = self.env['mail.blacklist'].sudo() blacklist = set(BL_sudo.search([]).mapped('email')) if blacklist: targets = self.env[self.model].browse(res_ids).read( ['email_normalized']) # First extract email from recipient before comparing with blacklist blacklisted_rec_ids.extend([ target['id'] for target in targets if target['email_normalized'] and target['email_normalized'] in blacklist ]) for res_id in res_ids: # static wizard (mail.message) values mail_values = { 'subject': self.subject, 'body': self.body or '', 'parent_id': self.parent_id and self.parent_id.id, 'partner_ids': [partner.id for partner in self.partner_ids], 'attachment_ids': [attach.id for attach in self.attachment_ids], 'author_id': self.author_id.id, 'email_from': self.email_from, 'record_name': self.record_name, 'no_auto_thread': self.no_auto_thread, 'mail_server_id': self.mail_server_id.id, 'mail_activity_type_id': self.mail_activity_type_id.id, } # mass mailing: rendering override wizard static values if mass_mail_mode and self.model: record = self.env[self.model].browse(res_id) mail_values['headers'] = record._notify_email_headers() # keep a copy unless specifically requested, reset record name (avoid browsing records) mail_values.update(notification=not self.auto_delete_message, model=self.model, res_id=res_id, 
record_name=False) # auto deletion of mail_mail if self.auto_delete or self.template_id.auto_delete: mail_values['auto_delete'] = True # rendered values using template email_dict = rendered_values[res_id] mail_values['partner_ids'] += email_dict.pop('partner_ids', []) mail_values.update(email_dict) if not self.no_auto_thread: mail_values.pop('reply_to') if reply_to_value.get(res_id): mail_values['reply_to'] = reply_to_value[res_id] if self.no_auto_thread and not mail_values.get('reply_to'): mail_values['reply_to'] = mail_values['email_from'] # mail_mail values: body -> body_html, partner_ids -> recipient_ids mail_values['body_html'] = mail_values.get('body', '') mail_values['recipient_ids'] = [ (4, id) for id in mail_values.pop('partner_ids', []) ] # process attachments: should not be encoded before being processed by message_post / mail_mail create mail_values['attachments'] = [ (name, base64.b64decode(enc_cont)) for name, enc_cont in email_dict.pop( 'attachments', list()) ] attachment_ids = [] for attach_id in mail_values.pop('attachment_ids'): new_attach_id = self.env['ir.attachment'].browse( attach_id).copy({ 'res_model': self._name, 'res_id': self.id }) attachment_ids.append(new_attach_id.id) attachment_ids.reverse() mail_values['attachment_ids'] = self.env[ 'mail.thread']._message_post_process_attachments( mail_values.pop('attachments', []), attachment_ids, { 'model': 'mail.message', 'res_id': 0 })['attachment_ids'] # Filter out the blacklisted records by setting the mail state to cancel -> Used for Mass Mailing stats if res_id in blacklisted_rec_ids: mail_values['state'] = 'cancel' # Do not post the mail into the recipient's chatter mail_values['notification'] = False results[res_id] = mail_values return results #------------------------------------------------------ # Template methods #------------------------------------------------------ @api.onchange('template_id') def onchange_template_id_wrapper(self): self.ensure_one() values = 
self.onchange_template_id(self.template_id.id, self.composition_mode, self.model, self.res_id)['value'] for fname, value in values.items(): setattr(self, fname, value) def onchange_template_id(self, template_id, composition_mode, model, res_id): """ - mass_mailing: we cannot render, so return the template values - normal mode: return rendered values /!\ for x2many field, this onchange return command instead of ids """ if template_id and composition_mode == 'mass_mail': template = self.env['mail.template'].browse(template_id) fields = [ 'subject', 'body_html', 'email_from', 'reply_to', 'mail_server_id' ] values = dict((field, getattr(template, field)) for field in fields if getattr(template, field)) if template.attachment_ids: values['attachment_ids'] = [ att.id for att in template.attachment_ids ] if template.mail_server_id: values['mail_server_id'] = template.mail_server_id.id if template.user_signature and 'body_html' in values: signature = self.env.user.signature values['body_html'] = tools.append_content_to_html( values['body_html'], signature, plaintext=False) elif template_id: values = self.generate_email_for_composer(template_id, [res_id])[res_id] # transform attachments into attachment_ids; not attached to the document because this will # be done further in the posting process, allowing to clean database if email not send attachment_ids = [] Attachment = self.env['ir.attachment'] for attach_fname, attach_datas in values.pop('attachments', []): data_attach = { 'name': attach_fname, 'datas': attach_datas, 'res_model': 'mail.compose.message', 'res_id': 0, 'type': 'binary', # override default_type from context, possibly meant for another model! 
} attachment_ids.append(Attachment.create(data_attach).id) if values.get('attachment_ids', []) or attachment_ids: values['attachment_ids'] = [ (6, 0, values.get('attachment_ids', []) + attachment_ids) ] else: default_values = self.with_context( default_composition_mode=composition_mode, default_model=model, default_res_id=res_id).default_get([ 'composition_mode', 'model', 'res_id', 'parent_id', 'partner_ids', 'subject', 'body', 'email_from', 'reply_to', 'attachment_ids', 'mail_server_id' ]) values = dict((key, default_values[key]) for key in [ 'subject', 'body', 'partner_ids', 'email_from', 'reply_to', 'attachment_ids', 'mail_server_id' ] if key in default_values) if values.get('body_html'): values['body'] = values.pop('body_html') # This onchange should return command instead of ids for x2many field. values = self._convert_to_write(values) return {'value': values} def save_as_template(self): """ hit save as template button: current form value will be a new template attached to the current document. """ for record in self: model = self.env['ir.model']._get(record.model or 'mail.message') model_name = model.name or '' template_name = "%s: %s" % (model_name, tools.ustr(record.subject)) values = { 'name': template_name, 'subject': record.subject or False, 'body_html': record.body or False, 'model_id': model.id or False, 'attachment_ids': [(6, 0, [att.id for att in record.attachment_ids])], } template = self.env['mail.template'].create(values) # generate the saved template record.write({'template_id': template.id}) record.onchange_template_id_wrapper() return _reopen(self, record.id, record.model, context=self._context) #------------------------------------------------------ # Template rendering #------------------------------------------------------ def render_message(self, res_ids): """Generate template-based values of wizard, for the document records given by res_ids. 
This method is meant to be inherited by email_template that will produce a more complete dictionary, using Jinja2 templates. Each template is generated for all res_ids, allowing to parse the template once, and render it multiple times. This is useful for mass mailing where template rendering represent a significant part of the process. Default recipients are also computed, based on mail_thread method _message_get_default_recipients. This allows to ensure a mass mailing has always some recipients specified. :param browse wizard: current mail.compose.message browse record :param list res_ids: list of record ids :return dict results: for each res_id, the generated template values for subject, body, email_from and reply_to """ self.ensure_one() multi_mode = True if isinstance(res_ids, int): multi_mode = False res_ids = [res_ids] subjects = self.env['mail.template']._render_template( self.subject, self.model, res_ids) bodies = self.env['mail.template']._render_template(self.body, self.model, res_ids, post_process=True) emails_from = self.env['mail.template']._render_template( self.email_from, self.model, res_ids) replies_to = self.env['mail.template']._render_template( self.reply_to, self.model, res_ids) default_recipients = {} if not self.partner_ids: records = self.env[self.model].browse(res_ids).sudo() default_recipients = self.env[ 'mail.thread']._message_get_default_recipients_on_records( records) results = dict.fromkeys(res_ids, False) for res_id in res_ids: results[res_id] = { 'subject': subjects[res_id], 'body': bodies[res_id], 'email_from': emails_from[res_id], 'reply_to': replies_to[res_id], } results[res_id].update(default_recipients.get(res_id, dict())) # generate template-based values if self.template_id: template_values = self.generate_email_for_composer( self.template_id.id, res_ids, fields=[ 'email_to', 'partner_to', 'email_cc', 'attachment_ids', 'mail_server_id' ]) else: template_values = {} for res_id in res_ids: if template_values.get(res_id): # 
recipients are managed by the template results[res_id].pop('partner_ids', None) results[res_id].pop('email_to', None) results[res_id].pop('email_cc', None) # remove attachments from template values as they should not be rendered template_values[res_id].pop('attachment_ids', None) else: template_values[res_id] = dict() # update template values by composer values template_values[res_id].update(results[res_id]) return multi_mode and template_values or template_values[res_ids[0]] @api.model def generate_email_for_composer(self, template_id, res_ids, fields=None): """ Call email_template.generate_email(), get fields relevant for mail.compose.message, transform email_cc and email_to into partner_ids """ multi_mode = True if isinstance(res_ids, int): multi_mode = False res_ids = [res_ids] if fields is None: fields = [ 'subject', 'body_html', 'email_from', 'email_to', 'partner_to', 'email_cc', 'reply_to', 'attachment_ids', 'mail_server_id' ] returned_fields = fields + ['partner_ids', 'attachments'] values = dict.fromkeys(res_ids, False) template_values = self.env['mail.template'].with_context( tpl_partners_only=True).browse(template_id).generate_email( res_ids, fields=fields) for res_id in res_ids: res_id_values = dict((field, template_values[res_id][field]) for field in returned_fields if template_values[res_id].get(field)) res_id_values['body'] = res_id_values.pop('body_html', '') values[res_id] = res_id_values return multi_mode and values or values[res_ids[0]]
class SaleOrder(models.Model):
    """Extend sale.order with links to the purchase orders generated from
    its lines (see ``_purchase_service_generation`` on the lines)."""
    _inherit = 'sale.order'

    # Number of distinct POs reachable through order_line.purchase_line_ids;
    # restricted to purchase users via `groups`.
    purchase_order_count = fields.Integer(
        "Number of Purchase Order",
        compute='_compute_purchase_order_count',
        groups='purchase.group_purchase_user')

    @api.depends('order_line.purchase_line_ids')
    def _compute_purchase_order_count(self):
        """Count, per sale order, the distinct purchase orders generated by
        its lines — one read_group query for the whole recordset."""
        purchase_line_data = self.env['purchase.order.line'].read_group(
            [('sale_order_id', 'in', self.ids)],
            ['sale_order_id', 'purchase_order_count:count_distinct(order_id)'],
            ['sale_order_id'])
        # read_group returns the many2one as an (id, name) pair, hence [0]
        purchase_count_map = {
            item['sale_order_id'][0]: item['purchase_order_count']
            for item in purchase_line_data
        }
        for order in self:
            order.purchase_order_count = purchase_count_map.get(order.id, 0)

    def _action_confirm(self):
        """On confirmation, trigger purchase generation for the order lines.
        sudo: the salesperson may not have purchase access rights."""
        result = super(SaleOrder, self)._action_confirm()
        for order in self:
            order.order_line.sudo()._purchase_service_generation()
        return result

    def action_cancel(self):
        result = super(SaleOrder, self).action_cancel()
        # When a sale person cancels a SO, he might not have the rights to write
        # on PO. But we need the system to create an activity on the PO (so 'write'
        # access), hence the `sudo`.
        self.sudo()._activity_cancel_on_purchase()
        return result

    def action_view_purchase(self):
        """Return the RFQ list action, filtered on the purchase orders
        generated from this sale order's lines."""
        action = self.env.ref('purchase.purchase_rfq').read()[0]
        action['domain'] = [
            ('id', 'in',
             self.mapped('order_line.purchase_line_ids.order_id').ids)
        ]
        return action

    def _activity_cancel_on_purchase(self):
        """ If some SO are cancelled, we need to put an activity on their
        generated purchase. If sale lines of different sale orders impact
        different purchase, we only want one activity to be attached. """
        # map PO -> recordset of SOL, as {purchase.order: sale.order.line}
        purchase_to_notify_map = {}
        purchase_order_lines = self.env['purchase.order.line'].search([
            ('sale_line_id', 'in', self.mapped('order_line').ids),
            ('state', '!=', 'cancel')
        ])
        for purchase_line in purchase_order_lines:
            # seed with an empty sale.order.line recordset, then accumulate
            purchase_to_notify_map.setdefault(purchase_line.order_id,
                                              self.env['sale.order.line'])
            purchase_to_notify_map[
                purchase_line.order_id] |= purchase_line.sale_line_id
        # one warning activity per impacted PO, whatever the number of SOs
        for purchase_order, sale_order_lines in purchase_to_notify_map.items():
            purchase_order.activity_schedule_with_view(
                'mail.mail_activity_data_warning',
                user_id=purchase_order.user_id.id or self.env.uid,
                views_or_xmlid=
                'sale_purchase.exception_purchase_on_sale_cancellation',
                render_context={
                    'sale_orders': sale_order_lines.mapped('order_id'),
                    'sale_order_lines': sale_order_lines,
                })
class RatingMixin(models.AbstractModel):
    """Mixin adding customer-rating support to any model: links to
    rating.rating records, last/aggregate rating fields, and helpers to
    request and apply ratings. Ratings range over 1..10 (see the
    ``range(1, 11)`` repartition below)."""
    _name = 'rating.mixin'
    _description = "Rating Mixin"

    # ratings attached to this record (matched by res_model/res_id)
    rating_ids = fields.One2many('rating.rating', 'res_id', string='Rating',
                                 domain=lambda self: [('res_model', '=', self._name)],
                                 auto_join=True)
    rating_last_value = fields.Float('Rating Last Value',
                                     compute='_compute_rating_last_value',
                                     compute_sudo=True, store=True)
    rating_last_feedback = fields.Text('Rating Last Feedback',
                                       related='rating_ids.feedback')
    rating_last_image = fields.Binary('Rating Last Image',
                                      related='rating_ids.rating_image')
    rating_count = fields.Integer('Rating count',
                                  compute="_compute_rating_stats")
    rating_avg = fields.Float("Rating Average", compute='_compute_rating_stats')

    @api.depends('rating_ids.rating')
    def _compute_rating_last_value(self):
        """Store the value of the most recent rating (default search order),
        0 when the record has no rating."""
        for record in self:
            ratings = self.env['rating.rating'].search(
                [('res_model', '=', self._name),
                 ('res_id', '=', record.id)], limit=1)
            record.rating_last_value = ratings and ratings.rating or 0

    @api.depends('rating_ids')
    def _compute_rating_stats(self):
        """ Compute avg and count in one query, as those fields will be used
        together most of the time. """
        domain = expression.AND([self._rating_domain(),
                                 [('rating', '>=', RATING_LIMIT_MIN)]])
        # force average on rating column
        read_group_res = self.env['rating.rating'].read_group(
            domain, ['rating:avg'], groupby=['res_id'], lazy=False)
        mapping = {item['res_id']: {'rating_count': item['__count'],
                                    'rating_avg': item['rating']}
                   for item in read_group_res}
        for record in self:
            record.rating_count = mapping.get(record.id, {}).get('rating_count', 0)
            record.rating_avg = mapping.get(record.id, {}).get('rating_avg', 0)

    def write(self, values):
        """ If the rated resource name is modified, we should update the
        rating res_name too. If the rated resource parent is changed we
        should update the parent_res_id too. """
        with self.env.norecompute():
            result = super(RatingMixin, self).write(values)
            for record in self:
                if record._rec_name in values:  # set the res_name of ratings to be recomputed
                    res_name_field = self.env['rating.rating']._fields['res_name']
                    self.env.add_to_compute(res_name_field, record.rating_ids)
                if record._rating_get_parent_field_name() in values:
                    record.rating_ids.write({'parent_res_id': record[record._rating_get_parent_field_name()].id})
        return result

    def unlink(self):
        """ When removing a record, its rating should be deleted too. """
        record_ids = self.ids
        result = super(RatingMixin, self).unlink()
        # sudo: ratings may be hidden from the current user by access rules
        self.env['rating.rating'].sudo().search(
            [('res_model', '=', self._name),
             ('res_id', 'in', record_ids)]).unlink()
        return result

    def _rating_get_parent_field_name(self):
        """Return the parent relation field name.
        Should return a Many2One. Models override this hook."""
        return None

    def _rating_domain(self):
        """ Returns a normalized domain on rating.rating to select the
        records to include in count, avg, ... computation of current model.
        """
        return ['&', '&', ('res_model', '=', self._name), ('res_id', 'in', self.ids), ('consumed', '=', True)]

    def rating_get_partner_id(self):
        """Partner that gives the rating; empty partner when the model has
        no usable partner_id field."""
        if hasattr(self, 'partner_id') and self.partner_id:
            return self.partner_id
        return self.env['res.partner']

    def rating_get_rated_partner_id(self):
        """Partner being rated (the assigned user's partner); empty partner
        as fallback."""
        if hasattr(self, 'user_id') and self.user_id.partner_id:
            return self.user_id.partner_id
        return self.env['res.partner']

    def rating_get_access_token(self, partner=None):
        """Return the access token of a pending (not consumed) rating for
        ``partner``, creating the rating.rating record if none exists.
        NOTE(review): uses ``self.id`` — appears to expect a single record;
        no ensure_one() here, confirm with callers."""
        if not partner:
            partner = self.rating_get_partner_id()
        rated_partner = self.rating_get_rated_partner_id()
        ratings = self.rating_ids.filtered(
            lambda x: x.partner_id.id == partner.id and not x.consumed)
        if not ratings:
            record_model_id = self.env['ir.model'].sudo().search(
                [('model', '=', self._name)], limit=1).id
            rating = self.env['rating.rating'].create({
                'partner_id': partner.id,
                'rated_partner_id': rated_partner.id,
                'res_model_id': record_model_id,
                'res_id': self.id
            })
        else:
            rating = ratings[0]
        return rating.access_token

    def rating_send_request(self, template, lang=False, subtype_id=False,
                            force_send=True, composition_mode='comment',
                            notif_layout=None):
        """ This method sends a rating request by email, using a template
        given in parameter.

        :param template: a mail.template record used to compute the message body;
        :param lang: optional lang; it can also be specified directly on the
            template itself in the lang field;
        :param subtype_id: optional subtype to use when creating the message;
            is a note by default to avoid spamming followers;
        :param force_send: whether to send the request directly or use the
            mail queue cron (preferred option);
        :param composition_mode: comment (message_post) or mass_mail
            (template.send_mail);
        :param notif_layout: layout used to encapsulate the content when
            sending email;
        """
        if lang:
            template = template.with_context(lang=lang)
        if subtype_id is False:
            subtype_id = self.env['ir.model.data'].xmlid_to_res_id('mail.mt_note')
        if force_send:
            # default value is True, should be set to false if not?
            self = self.with_context(mail_notify_force_send=True)
        for record in self:
            record.message_post_with_template(
                template.id,
                composition_mode=composition_mode,
                email_layout_xmlid=notif_layout if notif_layout is not None else 'mail.mail_notification_light',
                subtype_id=subtype_id
            )

    def rating_apply(self, rate, token=None, feedback=None, subtype=None):
        """ Apply a rating given a token. If the current model inherits from
        mail.thread mixin, a message is posted on its chatter.

        :param rate : the rating value to apply
        :type rate : float
        :param token : access token
        :param feedback : additional feedback
        :type feedback : string
        :param subtype : subtype for mail
        :type subtype : string
        :returns rating.rating record
        """
        Rating, rating = self.env['rating.rating'], None
        if token:
            rating = self.env['rating.rating'].search(
                [('access_token', '=', token)], limit=1)
        else:
            rating = Rating.search(
                [('res_model', '=', self._name),
                 ('res_id', '=', self.ids[0])], limit=1)
        if rating:
            rating.write({'rating': rate, 'feedback': feedback, 'consumed': True})
            if hasattr(self, 'message_post'):
                feedback = tools.plaintext2html(feedback or '')
                self.message_post(
                    body="<img src='/rating/static/src/img/rating_%s.png' alt=':%s/10' style='width:18px;height:18px;float:left;margin-right: 5px;'/>%s"
                    % (rate, rate, feedback),
                    subtype=subtype or "mail.mt_comment",
                    author_id=rating.partner_id and rating.partner_id.id or None  # None will set the default author in mail_thread.py
                )
            # auto-validate kanban state on stages configured for it;
            # NOTE(review): a rating of exactly 5 changes nothing — confirm intended
            if hasattr(self, 'stage_id') and self.stage_id and hasattr(self.stage_id, 'auto_validation_kanban_state') and self.stage_id.auto_validation_kanban_state:
                if rating.rating > 5:
                    self.write({'kanban_state': 'done'})
                if rating.rating < 5:
                    self.write({'kanban_state': 'blocked'})
        return rating

    def rating_get_repartition(self, add_stats=False, domain=None):
        """ Get the repartition of rating grades for the given res_ids.

        :param add_stats : flag to add stats to the result
        :type add_stats : boolean
        :param domain : optional extra domain of the rating to
            include/exclude in repartition
        :return dictionary:
            if not add_stats, the dict is like
                - key is the rating value (integer)
                - value is the number of object (res_model, res_id) having the value
            otherwise, key is the value of the information (string): either
            stat name (avg, total, ...) or 'repartition' containing the same
            dict as if add_stats was False.
        """
        base_domain = expression.AND([self._rating_domain(), [('rating', '>=', 1)]])
        if domain:
            base_domain += domain
        data = self.env['rating.rating'].read_group(
            base_domain, ['rating'], ['rating', 'res_id'])
        # init dict with all possible rate values, except 0 (no value for the rating)
        values = dict.fromkeys(range(1, 11), 0)
        values.update((d['rating'], d['rating_count']) for d in data)
        # add other stats
        if add_stats:
            rating_number = sum(values.values())
            result = {
                'repartition': values,
                'avg': sum(float(key * values[key]) for key in values) / rating_number if rating_number > 0 else 0,
                'total': sum(it['rating_count'] for it in data),
            }
            return result
        return values

    def rating_get_grades(self, domain=None):
        """ Get the repartition of rating grades for the given res_ids.

        :param domain : optional domain of the rating to include/exclude in
            grades computation
        :return dictionary where the key is the grade (great, okay, bad), and
            the value, the number of objects (res_model, res_id) having the
            grade. The grades are computed as:
                0-30% : Bad
                31-69%: Okay
                70-100%: Great
        """
        data = self.rating_get_repartition(domain=domain)
        res = dict.fromkeys(['great', 'okay', 'bad'], 0)
        for key in data:
            if key >= RATING_LIMIT_SATISFIED:
                res['great'] += data[key]
            elif key > RATING_LIMIT_OK:
                res['okay'] += data[key]
            else:
                res['bad'] += data[key]
        return res

    def rating_get_stats(self, domain=None):
        """ Get the statistics of the rating repartition.

        :param domain : optional domain of the rating to include/exclude in
            statistic computation
        :return dictionary where
            - key is the name of the information (stat name)
            - value is the statistic value: 'percent' contains the
              repartition in percentage, 'avg' is the average rate and
              'total' is the number of ratings
        """
        data = self.rating_get_repartition(domain=domain, add_stats=True)
        result = {
            'avg': data['avg'],
            'total': data['total'],
            'percent': dict.fromkeys(range(1, 11), 0),
        }
        for rate in data['repartition']:
            result['percent'][rate] = (data['repartition'][rate] * 100) / data['total'] if data['total'] > 0 else 0
        return result
class ProductProduct(models.Model):
    """Extend product.product with Bill-of-Materials counters and kit-aware
    quantity computation."""
    _inherit = "product.product"

    variant_bom_ids = fields.One2many('mrp.bom', 'product_id',
                                      'BOM Product Variants')
    bom_line_ids = fields.One2many('mrp.bom.line', 'product_id',
                                   'BoM Components')
    bom_count = fields.Integer('# Bill of Material',
                               compute='_compute_bom_count',
                               compute_sudo=False)
    used_in_bom_count = fields.Integer('# BoM Where Used',
                                       compute='_compute_used_in_bom_count',
                                       compute_sudo=False)
    mrp_product_qty = fields.Float('Manufactured',
                                   compute='_compute_mrp_product_qty',
                                   compute_sudo=False)

    def _compute_bom_count(self):
        """Count BoMs matching this exact variant, plus template-level BoMs
        (product_id unset) of its template."""
        for product in self:
            product.bom_count = self.env['mrp.bom'].search_count([
                '|', ('product_id', '=', product.id), '&',
                ('product_id', '=', False),
                ('product_tmpl_id', '=', product.product_tmpl_id.id)
            ])

    def _compute_used_in_bom_count(self):
        """Count BoMs in which this variant appears as a component."""
        for product in self:
            product.used_in_bom_count = self.env['mrp.bom'].search_count([
                ('bom_line_ids.product_id', '=', product.id)
            ])

    def get_components(self):
        """ Return the components list ids in case of kit product.
        Return the product itself otherwise"""
        self.ensure_one()
        bom_kit = self.env['mrp.bom']._bom_find(product=self,
                                                bom_type='phantom')
        if bom_kit:
            boms, bom_sub_lines = bom_kit.explode(self, 1)
            # only storable components are part of the kit content
            return [
                bom_line.product_id.id for bom_line, data in bom_sub_lines
                if bom_line.product_id.type == 'product'
            ]
        else:
            return super(ProductProduct, self).get_components()

    def action_used_in_bom(self):
        """Open the BoM list filtered on BoMs using this variant."""
        self.ensure_one()
        action = self.env.ref('mrp.mrp_bom_form_action').read()[0]
        action['domain'] = [('bom_line_ids.product_id', '=', self.id)]
        return action

    def _compute_mrp_product_qty(self):
        """Sum quantities manufactured over the last 365 days, per product,
        from done manufacturing orders."""
        date_from = fields.Datetime.to_string(fields.datetime.now() -
                                              timedelta(days=365))
        #TODO: state = done?
        domain = [('state', '=', 'done'), ('product_id', 'in', self.ids),
                  ('date_planned_start', '>', date_from)]
        read_group_res = self.env['mrp.production'].read_group(
            domain, ['product_id', 'product_uom_qty'], ['product_id'])
        # map product id -> produced quantity (many2one comes back as a pair)
        mapped_data = {data['product_id'][0]: data['product_uom_qty']
                       for data in read_group_res}
        for product in self:
            if not product.id:
                # new (unsaved) record: nothing manufactured yet
                product.mrp_product_qty = 0.0
                continue
            product.mrp_product_qty = float_round(
                mapped_data.get(product.id, 0),
                precision_rounding=product.uom_id.rounding)

    def _compute_quantities(self):
        """ When the product is a kit, this override computes the fields :
         - 'virtual_available'
         - 'qty_available'
         - 'incoming_qty'
         - 'outgoing_qty'
         - 'free_qty'
        A kit quantity is the minimum, over its storable components, of the
        component quantity divided by the quantity needed per kit.
        """
        self.virtual_available = 0
        self.qty_available = 0
        self.incoming_qty = 0
        self.outgoing_qty = 0
        self.free_qty = 0
        # products having a phantom (kit) BoM
        bom_kits = {}
        for product in self:
            bom = self.env['mrp.bom']._bom_find(product=product,
                                                bom_type='phantom')
            if bom:
                bom_kits[product] = bom
        # non-kit products get the standard computation
        super(
            ProductProduct,
            self.filtered(lambda p: p not in bom_kits))._compute_quantities()
        for product in bom_kits:
            boms, bom_sub_lines = bom_kits[product].explode(product, 1)
            ratios_virtual_available = []
            ratios_qty_available = []
            ratios_incoming_qty = []
            ratios_outgoing_qty = []
            ratios_free_qty = []
            for bom_line, bom_line_data in bom_sub_lines:
                component = bom_line.product_id
                if component.type != 'product' or float_is_zero(
                        bom_line_data['qty'],
                        precision_rounding=bom_line.product_uom_id.rounding):
                    # As BoMs allow components with 0 qty, a.k.a. optional components, we simply skip those
                    # to avoid a division by zero. The same logic is applied to non-storable products as those
                    # products have 0 qty available.
                    continue
                uom_qty_per_kit = bom_line_data['qty'] / bom_line_data[
                    'original_qty']
                qty_per_kit = bom_line.product_uom_id._compute_quantity(
                    uom_qty_per_kit, bom_line.product_id.uom_id)
                ratios_virtual_available.append(component.virtual_available /
                                                qty_per_kit)
                ratios_qty_available.append(component.qty_available /
                                            qty_per_kit)
                ratios_incoming_qty.append(component.incoming_qty /
                                           qty_per_kit)
                ratios_outgoing_qty.append(component.outgoing_qty /
                                           qty_per_kit)
                ratios_free_qty.append(component.free_qty / qty_per_kit)
            if bom_sub_lines and ratios_virtual_available:
                # Guard against all-consumable bom: at least one ratio should be present.
                # `// 1` floors to a whole number of kits.
                product.virtual_available = min(ratios_virtual_available) // 1
                product.qty_available = min(ratios_qty_available) // 1
                product.incoming_qty = min(ratios_incoming_qty) // 1
                product.outgoing_qty = min(ratios_outgoing_qty) // 1
                product.free_qty = min(ratios_free_qty) // 1

    def action_view_bom(self):
        """Open the BoM list for this variant: BoMs specific to the variant
        or global to its template."""
        action = self.env.ref('mrp.product_open_bom').read()[0]
        template_ids = self.mapped('product_tmpl_id').ids
        # bom specific to this variant or global to template
        action['context'] = {
            'default_product_tmpl_id': template_ids[0],
            'default_product_id': self.ids[0],
        }
        action['domain'] = [
            '|', ('product_id', 'in', self.ids), '&',
            ('product_id', '=', False), ('product_tmpl_id', 'in', template_ids)
        ]
        return action

    def action_view_mos(self):
        """Open done manufacturing orders for these variants, reusing the
        template-level action."""
        action = self.product_tmpl_id.action_view_mos()
        action['domain'] = [('state', '=', 'done'),
                            ('product_id', 'in', self.ids)]
        return action
class ProductTemplate(models.Model):
    """Extend product.template with BoM counters, manufactured quantity and
    a manufacturing lead time."""
    _inherit = "product.template"

    bom_line_ids = fields.One2many('mrp.bom.line', 'product_tmpl_id',
                                   'BoM Components')
    bom_ids = fields.One2many('mrp.bom', 'product_tmpl_id',
                              'Bill of Materials')
    bom_count = fields.Integer('# Bill of Material',
                               compute='_compute_bom_count',
                               compute_sudo=False)
    used_in_bom_count = fields.Integer('# of BoM Where is Used',
                                       compute='_compute_used_in_bom_count',
                                       compute_sudo=False)
    mrp_product_qty = fields.Float('Manufactured',
                                   compute='_compute_mrp_product_qty',
                                   compute_sudo=False)
    produce_delay = fields.Float(
        'Manufacturing Lead Time',
        default=0.0,
        help=
        "Average lead time in days to manufacture this product. In the case of multi-level BOM, the manufacturing lead times of the components will be added."
    )

    def _compute_bom_count(self):
        """Count the BoMs defined on each template."""
        MrpBom = self.env['mrp.bom']
        for template in self:
            template.bom_count = MrpBom.search_count(
                [('product_tmpl_id', '=', template.id)])

    def _compute_used_in_bom_count(self):
        """Count the BoMs using any variant of each template as component."""
        MrpBom = self.env['mrp.bom']
        for template in self:
            variant_ids = template.product_variant_ids.ids
            template.used_in_bom_count = MrpBom.search_count(
                [('bom_line_ids.product_id', 'in', variant_ids)])

    def action_used_in_bom(self):
        """Open the BoM list filtered on BoMs using this template's
        variants."""
        self.ensure_one()
        action = self.env.ref('mrp.mrp_bom_form_action').read()[0]
        variant_ids = self.product_variant_ids.ids
        action['domain'] = [('bom_line_ids.product_id', 'in', variant_ids)]
        return action

    def _compute_mrp_product_qty(self):
        """Aggregate the manufactured quantity of all variants, rounded to
        the template UoM precision."""
        for template in self:
            total = sum(template.product_variant_ids.mapped('mrp_product_qty'))
            template.mrp_product_qty = float_round(
                total, precision_rounding=template.uom_id.rounding)

    def action_view_mos(self):
        """Open the report of done manufacturing orders for these
        templates."""
        action = self.env.ref('mrp.mrp_production_report').read()[0]
        action['domain'] = [('state', '=', 'done'),
                            ('product_tmpl_id', 'in', self.ids)]
        action['context'] = {
            'graph_measure': 'product_uom_qty',
            'time_ranges': {
                'field': 'date_planned_start',
                'range': 'last_365_days'
            }
        }
        return action
class SurveyUserInputLine(models.Model):
    """One answer (or skip) of one user to one survey question."""
    _name = 'survey.user_input_line'
    _description = 'Survey User Input Line'
    _rec_name = 'user_input_id'
    _order = 'question_sequence,id'

    # survey data
    user_input_id = fields.Many2one('survey.user_input', string='User Input',
                                    ondelete='cascade', required=True)
    survey_id = fields.Many2one(related='user_input_id.survey_id',
                                string='Survey', store=True, readonly=False)
    question_id = fields.Many2one('survey.question', string='Question',
                                  ondelete='cascade', required=True)
    page_id = fields.Many2one(related='question_id.page_id', string="Section",
                              readonly=False)
    question_sequence = fields.Integer('Sequence',
                                       related='question_id.sequence',
                                       store=True)
    # answer
    skipped = fields.Boolean('Skipped')
    answer_type = fields.Selection([('text', 'Text'), ('number', 'Number'),
                                    ('date', 'Date'), ('datetime', 'Datetime'),
                                    ('free_text', 'Free Text'),
                                    ('suggestion', 'Suggestion')],
                                   string='Answer Type')
    value_text = fields.Char('Text answer')
    value_number = fields.Float('Numerical answer')
    value_date = fields.Date('Date answer')
    value_datetime = fields.Datetime('Datetime answer')
    value_free_text = fields.Text('Free Text answer')
    value_suggested = fields.Many2one('survey.label',
                                      string="Suggested answer")
    value_suggested_row = fields.Many2one('survey.label', string="Row answer")
    answer_score = fields.Float('Score')
    answer_is_correct = fields.Boolean('Correct',
                                       compute='_compute_answer_is_correct')

    @api.depends('value_suggested', 'question_id')
    def _compute_answer_is_correct(self):
        """An answer is correct when the selected suggestion is flagged
        is_correct on a (simple/multiple) choice question."""
        for answer in self:
            if answer.value_suggested and answer.question_id.question_type in [
                    'simple_choice', 'multiple_choice'
            ]:
                answer.answer_is_correct = answer.value_suggested.is_correct
            else:
                answer.answer_is_correct = False

    @api.constrains('skipped', 'answer_type')
    def _answered_or_skipped(self):
        """A line is either skipped (no answer_type) or answered (an
        answer_type set) — never both, never neither."""
        for uil in self:
            # equivalent to the double negative `not skipped != bool(answer_type)`
            if uil.skipped == bool(uil.answer_type):
                raise ValidationError(
                    _('This question cannot be unanswered or skipped.'))

    @api.constrains('answer_type')
    def _check_answer_type(self):
        """Ensure the value field matching answer_type is actually set
        (0 is a valid numerical answer)."""
        for uil in self:
            fields_type = {
                'text': bool(uil.value_text),
                'number': (bool(uil.value_number) or uil.value_number == 0),
                'date': bool(uil.value_date),
                'free_text': bool(uil.value_free_text),
                'suggestion': bool(uil.value_suggested)
            }
            if not fields_type.get(uil.answer_type, True):
                raise ValidationError(
                    _('The answer must be in the right type'))

    @api.model_create_multi
    def create(self, vals_list):
        """Propagate the score of the chosen suggestion to the line."""
        for vals in vals_list:
            value_suggested = vals.get('value_suggested')
            if value_suggested:
                vals.update({
                    'answer_score': self.env['survey.label'].browse(
                        int(value_suggested)).answer_score
                })
        return super(SurveyUserInputLine, self).create(vals_list)

    def write(self, vals):
        """Keep answer_score in sync when the chosen suggestion changes."""
        value_suggested = vals.get('value_suggested')
        if value_suggested:
            vals.update({
                'answer_score': self.env['survey.label'].browse(
                    int(value_suggested)).answer_score
            })
        return super(SurveyUserInputLine, self).write(vals)

    @api.model
    def save_lines(self, user_input_id, question, post, answer_tag):
        """ Save answers to questions, depending on question type.

        Dispatches to ``save_line_<question_type>``. If an answer already
        exists for question and user_input_id, it will be overwritten (in
        order to maintain data consistency).
        """
        try:
            saver = getattr(self, 'save_line_' + question.question_type)
        except AttributeError:
            _logger.error(question.question_type +
                          ": This type of question has no saving function")
            return False
        else:
            saver(user_input_id, question, post, answer_tag)

    @api.model
    def _save_simple_answer(self, user_input_id, question, post, answer_tag,
                            answer_type, value_field, cast=None):
        """Shared write-or-create logic for single-value question types.

        :param answer_type: value stored in ``answer_type``;
        :param value_field: field name receiving the posted value;
        :param cast: optional callable applied to the raw posted string
            (e.g. ``float`` for numerical answers);
        :return: True
        """
        vals = {
            'user_input_id': user_input_id,
            'question_id': question.id,
            'survey_id': question.survey_id.id,
            'skipped': False,
        }
        if answer_tag in post and post[answer_tag].strip():
            value = post[answer_tag]
            if cast is not None:
                value = cast(value)
            vals.update({'answer_type': answer_type, value_field: value})
        else:
            # empty submission: record the question as skipped
            vals.update({'answer_type': None, 'skipped': True})
        old_uil = self.search([('user_input_id', '=', user_input_id),
                               ('survey_id', '=', question.survey_id.id),
                               ('question_id', '=', question.id)])
        if old_uil:
            old_uil.write(vals)
        else:
            old_uil.create(vals)
        return True

    @api.model
    def save_line_free_text(self, user_input_id, question, post, answer_tag):
        return self._save_simple_answer(user_input_id, question, post,
                                        answer_tag, 'free_text',
                                        'value_free_text')

    @api.model
    def save_line_textbox(self, user_input_id, question, post, answer_tag):
        return self._save_simple_answer(user_input_id, question, post,
                                        answer_tag, 'text', 'value_text')

    @api.model
    def save_line_numerical_box(self, user_input_id, question, post,
                                answer_tag):
        return self._save_simple_answer(user_input_id, question, post,
                                        answer_tag, 'number', 'value_number',
                                        cast=float)

    @api.model
    def save_line_date(self, user_input_id, question, post, answer_tag):
        return self._save_simple_answer(user_input_id, question, post,
                                        answer_tag, 'date', 'value_date')

    @api.model
    def save_line_datetime(self, user_input_id, question, post, answer_tag):
        return self._save_simple_answer(user_input_id, question, post,
                                        answer_tag, 'datetime',
                                        'value_datetime')

    @api.model
    def save_line_simple_choice(self, user_input_id, question, post,
                                answer_tag):
        """Replace any previous answer by the selected suggestion, plus an
        optional comment line."""
        vals = {
            'user_input_id': user_input_id,
            'question_id': question.id,
            'survey_id': question.survey_id.id,
            'skipped': False
        }
        old_uil = self.search([('user_input_id', '=', user_input_id),
                               ('survey_id', '=', question.survey_id.id),
                               ('question_id', '=', question.id)])
        old_uil.sudo().unlink()
        if answer_tag in post and post[answer_tag].strip():
            vals.update({
                'answer_type': 'suggestion',
                'value_suggested': int(post[answer_tag])
            })
        else:
            vals.update({'answer_type': None, 'skipped': True})
        # '-1' indicates 'comment count as an answer' so do not need to record it
        if post.get(answer_tag) and post.get(answer_tag) != '-1':
            self.create(vals)
        comment_answer = post.pop(("%s_%s" % (answer_tag, 'comment')),
                                  '').strip()
        if comment_answer:
            vals.update({
                'answer_type': 'text',
                'value_text': comment_answer,
                'skipped': False,
                'value_suggested': False
            })
            self.create(vals)
        return True

    @api.model
    def save_line_multiple_choice(self, user_input_id, question, post,
                                  answer_tag):
        """Replace any previous answers by one line per selected suggestion,
        plus an optional comment line; record a skip when nothing is
        selected."""
        vals = {
            'user_input_id': user_input_id,
            'question_id': question.id,
            'survey_id': question.survey_id.id,
            'skipped': False
        }
        old_uil = self.search([('user_input_id', '=', user_input_id),
                               ('survey_id', '=', question.survey_id.id),
                               ('question_id', '=', question.id)])
        old_uil.sudo().unlink()
        ca_dict = dict_keys_startswith(post, answer_tag + '_')
        comment_answer = ca_dict.pop(("%s_%s" % (answer_tag, 'comment')),
                                     '').strip()
        if len(ca_dict) > 0:
            for key in ca_dict:
                # '-1' indicates 'comment count as an answer' so do not need to record it
                if key != ('%s_%s' % (answer_tag, '-1')):
                    val = ca_dict[key]
                    vals.update({
                        'answer_type': 'suggestion',
                        'value_suggested': bool(val) and int(val)
                    })
                    self.create(vals)
        if comment_answer:
            vals.update({
                'answer_type': 'text',
                'value_text': comment_answer,
                'value_suggested': False
            })
            self.create(vals)
        if not ca_dict and not comment_answer:
            vals.update({'answer_type': None, 'skipped': True})
            self.create(vals)
        return True

    @api.model
    def save_line_matrix(self, user_input_id, question, post, answer_tag):
        """Replace any previous answers by one line per (row, suggestion)
        pair, plus an optional comment line; record a skip when nothing is
        answered."""
        vals = {
            'user_input_id': user_input_id,
            'question_id': question.id,
            'survey_id': question.survey_id.id,
            'skipped': False
        }
        old_uil = self.search([('user_input_id', '=', user_input_id),
                               ('survey_id', '=', question.survey_id.id),
                               ('question_id', '=', question.id)])
        old_uil.sudo().unlink()
        no_answers = True
        ca_dict = dict_keys_startswith(post, answer_tag + '_')
        comment_answer = ca_dict.pop(("%s_%s" % (answer_tag, 'comment')),
                                     '').strip()
        if comment_answer:
            vals.update({'answer_type': 'text', 'value_text': comment_answer})
            self.create(vals)
            no_answers = False
        if question.matrix_subtype == 'simple':
            # one suggestion per row
            for row in question.labels_ids_2:
                a_tag = "%s_%s" % (answer_tag, row.id)
                if a_tag in ca_dict:
                    no_answers = False
                    vals.update({
                        'answer_type': 'suggestion',
                        'value_suggested': ca_dict[a_tag],
                        'value_suggested_row': row.id
                    })
                    self.create(vals)
        elif question.matrix_subtype == 'multiple':
            # any number of (row, column) cells may be checked
            for col in question.labels_ids:
                for row in question.labels_ids_2:
                    a_tag = "%s_%s_%s" % (answer_tag, row.id, col.id)
                    if a_tag in ca_dict:
                        no_answers = False
                        vals.update({
                            'answer_type': 'suggestion',
                            'value_suggested': col.id,
                            'value_suggested_row': row.id
                        })
                        self.create(vals)
        if no_answers:
            vals.update({'answer_type': None, 'skipped': True})
            self.create(vals)
        return True
class SurveyUserInput(models.Model):
    """ Metadata for a set of one user's answers to a particular survey """
    _name = "survey.user_input"
    _rec_name = 'survey_id'
    _description = 'Survey User Input'

    # description
    survey_id = fields.Many2one('survey.survey', string='Survey', required=True, readonly=True, ondelete='cascade')
    scoring_type = fields.Selection(string="Scoring", related="survey_id.scoring_type")
    is_attempts_limited = fields.Boolean("Limited number of attempts", related='survey_id.is_attempts_limited')
    attempts_limit = fields.Integer("Number of attempts", related='survey_id.attempts_limit')
    start_datetime = fields.Datetime('Start date and time', readonly=True)
    is_time_limit_reached = fields.Boolean("Is time limit reached?", compute='_compute_is_time_limit_reached')
    input_type = fields.Selection([('manually', 'Manual'), ('link', 'Invitation')], string='Answer Type', default='manually', required=True, readonly=True)
    state = fields.Selection([('new', 'Not started yet'), ('skip', 'Partially completed'), ('done', 'Completed')], string='Status', default='new', readonly=True)
    test_entry = fields.Boolean(readonly=True)
    # identification and access
    token = fields.Char('Identification token', default=lambda self: str(uuid.uuid4()), readonly=True, required=True, copy=False)
    # no unique constraint, as it identifies a pool of attempts
    invite_token = fields.Char('Invite token', readonly=True, copy=False)
    partner_id = fields.Many2one('res.partner', string='Partner', readonly=True)
    email = fields.Char('E-mail', readonly=True)
    attempt_number = fields.Integer("Attempt n°", compute='_compute_attempt_number')
    # Displaying data
    last_displayed_page_id = fields.Many2one('survey.question', string='Last displayed question/page')
    # answers
    user_input_line_ids = fields.One2many('survey.user_input_line', 'user_input_id', string='Answers', copy=True)
    # Pre-defined questions
    question_ids = fields.Many2many('survey.question', string='Predefined Questions', readonly=True)
    deadline = fields.Datetime('Deadline', help="Datetime until customer can open the survey and submit answers")
    # Stored for performance reasons while displaying results page
    quizz_score = fields.Float("Score (%)", compute="_compute_quizz_score", store=True, compute_sudo=True)
    quizz_passed = fields.Boolean('Quizz Passed', compute='_compute_quizz_passed', store=True, compute_sudo=True)

    @api.depends('user_input_line_ids.answer_score', 'user_input_line_ids.question_id')
    def _compute_quizz_score(self):
        """Compute the score as a percentage of the maximum reachable score.

        The denominator sums every positive ``answer_score`` of the labels of
        the predefined questions; negative label scores are ignored so the
        maximum cannot be lowered by penalty answers. The result is clamped
        to 0 for negative totals.
        """
        for user_input in self:
            total_possible_score = sum([
                answer_score if answer_score > 0 else 0
                for answer_score in user_input.question_ids.mapped('labels_ids.answer_score')
            ])
            if total_possible_score == 0:
                # No scored labels: avoid a division by zero.
                user_input.quizz_score = 0
            else:
                score = (sum(
                    user_input.user_input_line_ids.mapped('answer_score')) /
                         total_possible_score) * 100
                user_input.quizz_score = round(score, 2) if score > 0 else 0

    @api.depends('quizz_score', 'survey_id.passing_score')
    def _compute_quizz_passed(self):
        # Passed when the percentage score reaches the survey's threshold.
        for user_input in self:
            user_input.quizz_passed = user_input.quizz_score >= user_input.survey_id.passing_score

    _sql_constraints = [
        ('unique_token', 'UNIQUE (token)', 'A token must be unique!'),
    ]

    @api.model
    def do_clean_emptys(self):
        """ Remove empty user inputs that have been created manually
            (used as a cronjob declared in data/survey_cron.xml) """
        an_hour_ago = fields.Datetime.to_string(datetime.datetime.now() -
                                                datetime.timedelta(hours=1))
        self.search([('input_type', '=', 'manually'), ('state', '=', 'new'),
                     ('create_date', '<', an_hour_ago)]).unlink()

    @api.model
    def _generate_invite_token(self):
        # Fresh random token identifying a pool of attempts (see invite_token).
        return str(uuid.uuid4())

    def action_resend(self):
        """Re-open the survey invitation wizard prefilled with the partners /
        emails of these answers, in 'resend' mode.

        NOTE(review): ``self.survey_id`` on a multi-record set expects all
        answers to belong to the same survey — confirm callers enforce this.
        """
        partners = self.env['res.partner']
        emails = []
        for user_answer in self:
            if user_answer.partner_id:
                partners |= user_answer.partner_id
            elif user_answer.email:
                emails.append(user_answer.email)
        return self.survey_id.with_context(
            default_existing_mode='resend',
            default_partner_ids=partners.ids,
            default_emails=','.join(emails)).action_send_survey()

    def action_print_answers(self):
        """ Open the website page with the survey form """
        self.ensure_one()
        return {
            'type': 'ir.actions.act_url',
            'name': "View Answers",
            'target': 'self',
            'url': '/survey/print/%s?answer_token=%s' % (self.survey_id.access_token, self.token)
        }

    @api.depends('start_datetime', 'survey_id.is_time_limited', 'survey_id.time_limit')
    def _compute_is_time_limit_reached(self):
        """ Checks that the user_input is not exceeding the survey's time limit. """
        for user_input in self:
            user_input.is_time_limit_reached = user_input.survey_id.is_time_limited and fields.Datetime.now() \
                > user_input.start_datetime + relativedelta(minutes=user_input.survey_id.time_limit)

    @api.depends('state', 'test_entry', 'survey_id.is_attempts_limited', 'partner_id', 'email', 'invite_token')
    def _compute_attempt_number(self):
        """Rank each completed answer among the earlier attempts of the same
        participant on the same survey.

        Only 'done', non-test answers of attempt-limited surveys need real
        computation; everything else defaults to 1. A single SQL query counts,
        per answer, the earlier completed attempts sharing the survey and the
        same invite token / partner / email.
        """
        attempts_to_compute = self.filtered(
            lambda user_input: user_input.state == 'done' and not user_input.
            test_entry and user_input.survey_id.is_attempts_limited)
        for user_input in (self - attempts_to_compute):
            user_input.attempt_number = 1
        if attempts_to_compute:
            # Count, for every answer, the previous 'done' non-test answers of
            # the same participant (matched by invite token, or partner/email).
            self.env.cr.execute(
                """SELECT user_input.id, (COUNT(previous_user_input.id) + 1) AS attempt_number
                FROM survey_user_input user_input
                LEFT OUTER JOIN survey_user_input previous_user_input
                ON user_input.survey_id = previous_user_input.survey_id
                AND previous_user_input.state = 'done'
                AND previous_user_input.test_entry IS NOT TRUE
                AND previous_user_input.id < user_input.id
                AND (user_input.invite_token IS NULL OR user_input.invite_token = previous_user_input.invite_token)
                AND (user_input.partner_id = previous_user_input.partner_id OR user_input.email = previous_user_input.email)
                WHERE user_input.id IN %s
                GROUP BY user_input.id;
                """, (tuple(attempts_to_compute.ids), ))
            attempts_count_results = self.env.cr.dictfetchall()
            for user_input in attempts_to_compute:
                attempt_number = 1
                for attempts_count_result in attempts_count_results:
                    if attempts_count_result['id'] == user_input.id:
                        attempt_number = attempts_count_result['attempt_number']
                        break
                user_input.attempt_number = attempt_number

    def _mark_done(self):
        """ This method will:
        1. mark the state as 'done'
        2. send the certification email with attached document if
        - The survey is a certification
        - It has a certification_mail_template_id set
        - The user succeeded the test
        Will also run challenge Cron to give the certification badge if any."""
        self.write({'state': 'done'})
        Challenge = self.env['gamification.challenge'].sudo()
        badge_ids = []
        for user_input in self:
            if user_input.survey_id.certificate and user_input.quizz_passed:
                if user_input.survey_id.certification_mail_template_id and not user_input.test_entry:
                    user_input.survey_id.certification_mail_template_id.send_mail(
                        user_input.id, notif_layout="mail.mail_notification_light")
                if user_input.survey_id.certification_give_badge:
                    badge_ids.append(
                        user_input.survey_id.certification_badge_id.id)
        if badge_ids:
            # Trigger badge granting immediately instead of waiting for the cron.
            challenges = Challenge.search([('reward_id', 'in', badge_ids)])
            if challenges:
                Challenge._cron_update(ids=challenges.ids, commit=False)

    def _get_survey_url(self):
        # Public URL to (re)start this answer, keyed by its access token.
        self.ensure_one()
        return '/survey/start/%s?answer_token=%s' % (
            self.survey_id.access_token, self.token)
class Channel(models.Model):
    """ A channel is a container of slides. """
    _name = 'slide.channel'
    _description = 'Slide Channel'
    _inherit = [
        'mail.thread', 'rating.mixin', 'image.mixin', 'website.seo.metadata',
        'website.published.multi.mixin'
    ]
    _order = 'sequence, id'

    def _default_access_token(self):
        # One random token per channel, used for restricted front-end access.
        return str(uuid.uuid4())

    # description
    name = fields.Char('Name', translate=True, required=True)
    active = fields.Boolean(default=True)
    description = fields.Text('Short Description', translate=True)
    description_html = fields.Html('Description', translate=tools.html_translate, sanitize_attributes=False)
    channel_type = fields.Selection([('documentation', 'Documentation'), ('training', 'Training')], string="Course type", default="documentation", required=True)
    sequence = fields.Integer(default=10, help='Display order')
    user_id = fields.Many2one('res.users', string='Responsible', default=lambda self: self.env.uid)
    color = fields.Integer('Color Index', default=0, help='Used to decorate kanban view')
    tag_ids = fields.Many2many(
        'slide.channel.tag', 'slide_channel_tag_rel', 'channel_id', 'tag_id',
        string='Tags',
        help='Used to categorize and filter displayed channels/courses')
    # slides: promote, statistics
    slide_ids = fields.One2many('slide.slide', 'channel_id', string="Slides and categories")
    slide_content_ids = fields.One2many('slide.slide', string='Slides', compute="_compute_category_and_slide_ids")
    slide_category_ids = fields.One2many('slide.slide', string='Categories', compute="_compute_category_and_slide_ids")
    slide_last_update = fields.Date('Last Update', compute='_compute_slide_last_update', store=True)
    slide_partner_ids = fields.One2many(
        'slide.slide.partner', 'channel_id', string="Slide User Data",
        copy=False, groups='website.group_website_publisher')
    promote_strategy = fields.Selection([('latest', 'Latest Published'),
                                         ('most_voted', 'Most Voted'),
                                         ('most_viewed', 'Most Viewed')],
                                        string="Featured Content",
                                        default='latest', required=True)
    access_token = fields.Char("Security Token", copy=False, default=_default_access_token)
    # Per-type content counters and aggregated statistics (stored computes).
    nbr_presentation = fields.Integer('Presentations', compute='_compute_slides_statistics', store=True)
    nbr_document = fields.Integer('Documents', compute='_compute_slides_statistics', store=True)
    nbr_video = fields.Integer('Videos', compute='_compute_slides_statistics', store=True)
    nbr_infographic = fields.Integer('Infographics', compute='_compute_slides_statistics', store=True)
    nbr_webpage = fields.Integer("Webpages", compute='_compute_slides_statistics', store=True)
    nbr_quiz = fields.Integer("Number of Quizs", compute='_compute_slides_statistics', store=True)
    total_slides = fields.Integer('Content', compute='_compute_slides_statistics', store=True)
    total_views = fields.Integer('Visits', compute='_compute_slides_statistics', store=True)
    total_votes = fields.Integer('Votes', compute='_compute_slides_statistics', store=True)
    total_time = fields.Float('Duration', compute='_compute_slides_statistics', digits=(10, 2), store=True)
    rating_avg_stars = fields.Float("Rating Average (Stars)", compute='_compute_rating_stats', digits=(16, 1))
    # configuration
    allow_comment = fields.Boolean(
        "Allow rating on Course", default=False,
        help="If checked it allows members to either:\n"
        " * like content and post comments on documentation course;\n"
        " * post comment and review on training course;")
    publish_template_id = fields.Many2one(
        'mail.template', string='New Content Email',
        help="Email template to send slide publication through email",
        default=lambda self: self.env['ir.model.data'].xmlid_to_res_id(
            'website_slides.slide_template_published'))
    share_template_id = fields.Many2one(
        'mail.template', string='Share Template',
        help="Email template used when sharing a slide",
        default=lambda self: self.env['ir.model.data'].xmlid_to_res_id(
            'website_slides.slide_template_shared'))
    enroll = fields.Selection(
        [('public', 'Public'), ('invite', 'On Invitation')],
        default='public', string='Enroll Policy', required=True,
        help='Condition to enroll: everyone, on invite, on payment (sale bridge).')
    enroll_msg = fields.Html(
        'Enroll Message', help="Message explaining the enroll process",
        default=False, translate=tools.html_translate, sanitize_attributes=False)
    enroll_group_ids = fields.Many2many(
        'res.groups', string='Auto Enroll Groups',
        help="Members of those groups are automatically added as members of the channel.")
    visibility = fields.Selection(
        [('public', 'Public'), ('members', 'Members Only')],
        default='public', string='Visibility', required=True,
        help='Applied directly as ACLs. Allow to hide channels and their content for non members.')
    partner_ids = fields.Many2many('res.partner', 'slide_channel_partner',
                                   'channel_id', 'partner_id',
                                   string='Members',
                                   help="All members of the channel.",
                                   context={'active_test': False},
                                   copy=False,
                                   depends=['channel_partner_ids'])
    members_count = fields.Integer('Attendees count', compute='_compute_members_count')
    members_done_count = fields.Integer('Attendees Done Count', compute='_compute_members_done_count')
    is_member = fields.Boolean(string='Is Member', compute='_compute_is_member')
    channel_partner_ids = fields.One2many(
        'slide.channel.partner', 'channel_id', string='Members Information',
        groups='website.group_website_publisher', depends=['partner_ids'])
    upload_group_ids = fields.Many2many(
        'res.groups', 'rel_upload_groups', 'channel_id', 'group_id',
        string='Upload Groups',
        help="Group of users allowed to publish contents on a documentation course.")
    # not stored access fields, depending on each user
    completed = fields.Boolean('Done', compute='_compute_user_statistics', compute_sudo=False)
    completion = fields.Integer('Completion', compute='_compute_user_statistics', compute_sudo=False)
    can_upload = fields.Boolean('Can Upload', compute='_compute_can_upload', compute_sudo=False)
    # karma generation
    karma_gen_slide_vote = fields.Integer(string='Lesson voted', default=1)
    karma_gen_channel_rank = fields.Integer(string='Course ranked', default=5)
    karma_gen_channel_finish = fields.Integer(string='Course finished', default=10)
    # Karma based actions
    karma_review = fields.Integer('Add Review', default=10, help="Karma needed to add a review on the course")
    karma_slide_comment = fields.Integer('Add Comment', default=3, help="Karma needed to add a comment on a slide of this course")
    karma_slide_vote = fields.Integer('Vote', default=3, help="Karma needed to like/dislike a slide of this course.")
    can_review = fields.Boolean('Can Review', compute='_compute_action_rights', compute_sudo=False)
    can_comment = fields.Boolean('Can Comment', compute='_compute_action_rights', compute_sudo=False)
    can_vote = fields.Boolean('Can Vote', compute='_compute_action_rights', compute_sudo=False)

    @api.depends('slide_ids.is_published')
    def _compute_slide_last_update(self):
        # Any publication change refreshes the "last update" date to today.
        for record in self:
            record.slide_last_update = fields.Date.today()

    @api.depends('channel_partner_ids.channel_id')
    def _compute_members_count(self):
        """Count attendees per channel with a single read_group."""
        read_group_res = self.env['slide.channel.partner'].sudo().read_group(
            [('channel_id', 'in', self.ids)], ['channel_id'], 'channel_id')
        data = dict((res['channel_id'][0], res['channel_id_count'])
                    for res in read_group_res)
        for channel in self:
            channel.members_count = data.get(channel.id, 0)

    @api.depends('channel_partner_ids.channel_id', 'channel_partner_ids.completed')
    def _compute_members_done_count(self):
        """Count attendees that completed each channel with one read_group."""
        read_group_res = self.env['slide.channel.partner'].sudo().read_group(
            ['&', ('channel_id', 'in', self.ids), ('completed', '=', True)],
            ['channel_id'], 'channel_id')
        data = dict((res['channel_id'][0], res['channel_id_count'])
                    for res in read_group_res)
        for channel in self:
            channel.members_done_count = data.get(channel.id, 0)

    @api.depends('channel_partner_ids.partner_id')
    def _compute_is_member(self):
        """Whether the current user's partner is enrolled in each channel.

        FIX: removed a spurious ``@api.model`` decorator (a compute method
        iterates its recordset and must not be model-scoped) and collapsed the
        duplicated ``channel.is_member = channel.is_member = ...`` assignment.
        """
        channel_partners = self.env['slide.channel.partner'].sudo().search([
            ('channel_id', 'in', self.ids),
        ])
        result = dict()
        for cp in channel_partners:
            result.setdefault(cp.channel_id.id, []).append(cp.partner_id.id)
        for channel in self:
            channel.is_member = self.env.user.partner_id.id in result.get(
                channel.id, [])

    @api.depends('slide_ids.is_category')
    def _compute_category_and_slide_ids(self):
        # Split the channel content between category headers and real slides.
        for channel in self:
            channel.slide_category_ids = channel.slide_ids.filtered(
                lambda slide: slide.is_category)
            channel.slide_content_ids = channel.slide_ids - channel.slide_category_ids

    @api.depends('slide_ids.slide_type', 'slide_ids.is_published',
                 'slide_ids.completion_time', 'slide_ids.likes',
                 'slide_ids.dislikes', 'slide_ids.total_views',
                 'slide_ids.is_category', 'slide_ids.active')
    def _compute_slides_statistics(self):
        """Aggregate views, votes, duration and per-type counters over the
        published, active, non-category slides of each channel."""
        default_vals = dict(total_views=0, total_votes=0, total_time=0,
                            total_slides=0)
        keys = [
            'nbr_%s' % slide_type for slide_type in
            self.env['slide.slide']._fields['slide_type'].get_values(self.env)
        ]
        default_vals.update(dict((key, 0) for key in keys))
        result = dict((cid, dict(default_vals)) for cid in self.ids)
        read_group_res = self.env['slide.slide'].read_group(
            [('active', '=', True), ('is_published', '=', True),
             ('channel_id', 'in', self.ids), ('is_category', '=', False)],
            [
                'channel_id', 'slide_type', 'likes', 'dislikes', 'total_views',
                'completion_time'
            ],
            groupby=['channel_id', 'slide_type'],
            lazy=False)
        for res_group in read_group_res:
            cid = res_group['channel_id'][0]
            result[cid]['total_views'] += res_group.get('total_views', 0)
            # Votes are the net balance of likes over dislikes.
            result[cid]['total_votes'] += res_group.get('likes', 0)
            result[cid]['total_votes'] -= res_group.get('dislikes', 0)
            result[cid]['total_time'] += res_group.get('completion_time', 0)
        type_stats = self._compute_slides_statistics_type(read_group_res)
        for cid, cdata in type_stats.items():
            result[cid].update(cdata)
        for record in self:
            record.update(result.get(record.id, default_vals))

    def _compute_slides_statistics_type(self, read_group_res):
        """ Compute statistics based on all existing slide types """
        slide_types = self.env['slide.slide']._fields['slide_type'].get_values(
            self.env)
        keys = ['nbr_%s' % slide_type for slide_type in slide_types]
        result = dict((cid, dict((key, 0) for key in keys + ['total_slides']))
                      for cid in self.ids)
        for res_group in read_group_res:
            cid = res_group['channel_id'][0]
            slide_type = res_group.get('slide_type')
            if slide_type:
                slide_type_count = res_group.get('__count', 0)
                result[cid]['nbr_%s' % slide_type] = slide_type_count
                result[cid]['total_slides'] += slide_type_count
        return result

    def _compute_rating_stats(self):
        # Ratings are stored on a 0-10 scale; expose a 0-5 star average.
        super(Channel, self)._compute_rating_stats()
        for record in self:
            record.rating_avg_stars = record.rating_avg / 2

    @api.depends('slide_partner_ids', 'total_slides')
    @api.depends_context('uid')
    def _compute_user_statistics(self):
        """Per-current-user completion flag and completion percentage."""
        current_user_info = self.env['slide.channel.partner'].sudo().search([
            ('channel_id', 'in', self.ids),
            ('partner_id', '=', self.env.user.partner_id.id)
        ])
        mapped_data = dict(
            (info.channel_id.id, (info.completed, info.completion))
            for info in current_user_info)
        for record in self:
            completed, completion = mapped_data.get(record.id, (False, 0))
            record.completed = completed
            # 'or 1' guards against division by zero on empty channels.
            record.completion = round(100.0 * completion /
                                      (record.total_slides or 1))

    @api.depends('upload_group_ids', 'user_id')
    @api.depends_context('uid')
    def _compute_can_upload(self):
        """Upload right: responsible user, member of an upload group, or
        website publisher when no upload group is configured."""
        for record in self:
            if record.user_id == self.env.user:
                record.can_upload = True
            elif record.upload_group_ids:
                record.can_upload = bool(record.upload_group_ids
                                         & self.env.user.groups_id)
            else:
                record.can_upload = self.env.user.has_group(
                    'website.group_website_publisher')
@api.depends('channel_type', 'user_id', 'can_upload') def _compute_can_publish(self): """ For channels of type 'training', only the responsible (see user_id field) can publish slides. The 'sudo' user needs to be handled because he's the one used for uploads done on the front-end when the logged in user is not publisher but fulfills the upload_group_ids condition. """ for record in self: if not record.can_upload: record.can_publish = False elif record.user_id == self.env.user or self.env.is_superuser(): record.can_publish = True else: record.can_publish = self.env.user.has_group( 'website.group_website_publisher') @api.model def _get_can_publish_error_message(self): return _( "Publishing is restricted to the responsible of training courses or members of the publisher group for documentation courses" ) @api.depends('name', 'website_id.domain') def _compute_website_url(self): super(Channel, self)._compute_website_url() for channel in self: if channel.id: # avoid to perform a slug on a not yet saved record in case of an onchange. base_url = channel.get_base_url() channel.website_url = '%s/slides/%s' % (base_url, slug(channel)) def get_backend_menu_id(self): return self.env.ref('website_slides.website_slides_menu_root').id def _compute_action_rights(self): user_karma = self.env.user.karma for channel in self: if channel.can_publish: channel.can_vote = channel.can_comment = channel.can_review = True elif not channel.is_member: channel.can_vote = channel.can_comment = channel.can_review = False else: channel.can_review = user_karma >= channel.karma_review channel.can_comment = user_karma >= channel.karma_slide_comment channel.can_vote = user_karma >= channel.karma_slide_vote # --------------------------------------------------------- # ORM Overrides # --------------------------------------------------------- def _init_column(self, column_name): """ Initialize the value of the given column for existing rows. 
Overridden here because we need to generate different access tokens and by default _init_column calls the default method once and applies it for every record. """ if column_name != 'access_token': super(Channel, self)._init_column(column_name) else: query = """ UPDATE %(table_name)s SET %(column_name)s = md5(md5(random()::varchar || id::varchar) || clock_timestamp()::varchar)::uuid::varchar WHERE %(column_name)s IS NULL """ % { 'table_name': self._table, 'column_name': column_name } self.env.cr.execute(query) @api.model def create(self, vals): # Ensure creator is member of its channel it is easier for him to manage it (unless it is harpiyabot) if not vals.get('channel_partner_ids') and not self.env.is_superuser(): vals['channel_partner_ids'] = [(0, 0, { 'partner_id': self.env.user.partner_id.id })] channel = super( Channel, self.with_context(mail_create_nosubscribe=True)).create(vals) if channel.user_id: channel._action_add_members(channel.user_id.partner_id) if 'enroll_group_ids' in vals: channel._add_groups_members() return channel def write(self, vals): res = super(Channel, self).write(vals) if vals.get('user_id'): self._action_add_members(self.env['res.users'].sudo().browse( vals['user_id']).partner_id) if 'active' in vals: # archiving/unarchiving a channel does it on its slides, too self.with_context(active_test=False).mapped('slide_ids').write( {'active': vals['active']}) if 'enroll_group_ids' in vals: self._add_groups_members() return res @api.returns('mail.message', lambda value: value.id) def message_post(self, *, parent_id=False, subtype=None, **kwargs): """ Temporary workaround to avoid spam. If someone replies on a channel through the 'Presentation Published' email, it should be considered as a note as we don't want all channel followers to be notified of this answer. 
""" self.ensure_one() if kwargs.get('message_type') == 'comment' and not self.can_review: raise AccessError(_('Not enough karma to review')) if parent_id: parent_message = self.env['mail.message'].sudo().browse(parent_id) if parent_message.subtype_id and parent_message.subtype_id == self.env.ref( 'website_slides.mt_channel_slide_published'): if kwargs.get('subtype_id'): kwargs['subtype_id'] = False subtype = 'mail.mt_note' return super(Channel, self).message_post(parent_id=parent_id, subtype=subtype, **kwargs) # --------------------------------------------------------- # Business / Actions # --------------------------------------------------------- def action_redirect_to_members(self, state=None): action = self.env.ref( 'website_slides.slide_channel_partner_action').read()[0] action['domain'] = [('channel_id', 'in', self.ids)] if len(self) == 1: action['display_name'] = _('Attendees of %s') % self.name action['context'] = { 'active_test': False, 'default_channel_id': self.id } if state: action['domain'] += [('completed', '=', state == 'completed')] return action def action_redirect_to_running_members(self): return self.action_redirect_to_members('running') def action_redirect_to_done_members(self): return self.action_redirect_to_members('completed') def action_channel_invite(self): self.ensure_one() template = self.env.ref( 'website_slides.mail_template_slide_channel_invite', raise_if_not_found=False) local_context = dict( self.env.context, default_channel_id=self.id, default_use_template=bool(template), default_template_id=template and template.id or False, notif_layout='mail.mail_notification_light', ) return { 'type': 'ir.actions.act_window', 'view_mode': 'form', 'res_model': 'slide.channel.invite', 'target': 'new', 'context': local_context, } def action_add_member(self, **member_values): """ Adds the logged in user in the channel members. 
(see '_action_add_members' for more info) Returns True if added successfully, False otherwise.""" return bool( self._action_add_members(self.env.user.partner_id, **member_values)) def _action_add_members(self, target_partners, **member_values): """ Add the target_partner as a member of the channel (to its slide.channel.partner). This will make the content (slides) of the channel available to that partner. Returns the added 'slide.channel.partner's (! as sudo !) """ to_join = self._filter_add_members(target_partners, **member_values) if to_join: existing = self.env['slide.channel.partner'].sudo().search([ ('channel_id', 'in', self.ids), ('partner_id', 'in', target_partners.ids) ]) existing_map = dict((cid, list()) for cid in self.ids) for item in existing: existing_map[item.channel_id.id].append(item.partner_id.id) to_create_values = [ dict(channel_id=channel.id, partner_id=partner.id, **member_values) for channel in to_join for partner in target_partners if partner.id not in existing_map[channel.id] ] slide_partners_sudo = self.env['slide.channel.partner'].sudo( ).create(to_create_values) to_join.message_subscribe( partner_ids=target_partners.ids, subtype_ids=[ self.env.ref( 'website_slides.mt_channel_slide_published').id ]) return slide_partners_sudo return self.env['slide.channel.partner'].sudo() def _filter_add_members(self, target_partners, **member_values): allowed = self.filtered(lambda channel: channel.enroll == 'public') on_invite = self.filtered(lambda channel: channel.enroll == 'invite') if on_invite: try: on_invite.check_access_rights('write') on_invite.check_access_rule('write') except: pass else: allowed |= on_invite return allowed def _add_groups_members(self): for channel in self: channel._action_add_members( channel.mapped('enroll_group_ids.users.partner_id')) def _get_earned_karma(self, partner_ids): """ Compute the number of karma earned by partners on a channel Warning: this count will not be accurate if the configuration has been modified after 
the completion of a course! """ total_karma = defaultdict(int) slide_completed = self.env['slide.slide.partner'].sudo().search([ ('partner_id', 'in', partner_ids), ('channel_id', 'in', self.ids), ('completed', '=', True), ('quiz_attempts_count', '>', 0) ]) for partner_slide in slide_completed: slide = partner_slide.slide_id if not slide.question_ids: continue gains = [ slide.quiz_first_attempt_reward, slide.quiz_second_attempt_reward, slide.quiz_third_attempt_reward, slide.quiz_fourth_attempt_reward ] attempts = min(partner_slide.quiz_attempts_count - 1, 3) total_karma[partner_slide.partner_id.id] += gains[attempts] channel_completed = self.env['slide.channel.partner'].sudo().search([ ('partner_id', 'in', partner_ids), ('channel_id', 'in', self.ids), ('completed', '=', True) ]) for partner_channel in channel_completed: channel = partner_channel.channel_id total_karma[partner_channel.partner_id. id] += channel.karma_gen_channel_finish return total_karma def _remove_membership(self, partner_ids): """ Unlink (!!!) the relationships between the passed partner_ids and the channels and their slides (done in the unlink of slide.channel.partner model). 
Remove earned karma when completed quizz """ if not partner_ids: raise ValueError( "Do not use this method with an empty partner_id recordset") earned_karma = self._get_earned_karma(partner_ids) users = self.env['res.users'].sudo().search([ ('partner_id', 'in', list(earned_karma)), ]) for user in users: if earned_karma[user.partner_id.id]: user.add_karma(-1 * earned_karma[user.partner_id.id]) removed_channel_partner_domain = [] for channel in self: removed_channel_partner_domain = expression.OR([ removed_channel_partner_domain, [('partner_id', 'in', partner_ids), ('channel_id', '=', channel.id)] ]) self.message_unsubscribe(partner_ids=partner_ids) if removed_channel_partner_domain: self.env['slide.channel.partner'].sudo().search( removed_channel_partner_domain).unlink() def action_view_slides(self): action = self.env.ref('website_slides.slide_slide_action').read()[0] action['context'] = { 'search_default_published': 1, 'default_channel_id': self.id } action['domain'] = [('channel_id', "=", self.id), ('is_category', '=', False)] return action def action_view_ratings(self): action = self.env.ref( 'website_slides.rating_rating_action_slide_channel').read()[0] action['name'] = _('Rating of %s') % (self.name) action['domain'] = [('res_id', 'in', self.ids)] return action # --------------------------------------------------------- # Rating Mixin API # --------------------------------------------------------- def _rating_domain(self): """ Only take the published rating into account to compute avg and count """ domain = super(Channel, self)._rating_domain() return expression.AND([domain, [('website_published', '=', True)]]) # --------------------------------------------------------- # Data / Misc # --------------------------------------------------------- def _get_categorized_slides(self, base_domain, order, force_void=True, limit=False, offset=False): """ Return an ordered structure of slides by categories within a given base_domain that must fulfill slides. 
As a course structure is based on its slides sequences, uncategorized slides must have the lowest sequences. Example * category 1 (sequence 1), category 2 (sequence 3) * slide 1 (sequence 0), slide 2 (sequence 2) * course structure is: slide 1, category 1, slide 2, category 2 * slide 1 is uncategorized, * category 1 has one slide : Slide 2 * category 2 is empty. Backend and frontend ordering is the same, uncategorized first. It eases resequencing based on DOM / displayed order, notably when drag n drop is involved. """ self.ensure_one() all_categories = self.env['slide.slide'].sudo().search([ ('channel_id', '=', self.id), ('is_category', '=', True) ]) all_slides = self.env['slide.slide'].sudo().search(base_domain, order=order) category_data = [] # Prepare all categories by natural order for category in all_categories: category_slides = all_slides.filtered( lambda slide: slide.category_id == category) if not category_slides and not force_void: continue category_data.append({ 'category': category, 'id': category.id, 'name': category.name, 'slug_name': slug(category), 'total_slides': len(category_slides), 'slides': category_slides[(offset or 0):( limit + offset or len(category_slides))], }) # Add uncategorized slides in first position uncategorized_slides = all_slides.filtered( lambda slide: not slide.category_id) if uncategorized_slides or force_void: category_data.insert( 0, { 'category': False, 'id': False, 'name': _('Uncategorized'), 'slug_name': _('Uncategorized'), 'total_slides': len(uncategorized_slides), 'slides': uncategorized_slides[(offset or 0):( offset + limit or len(uncategorized_slides))], }) return category_data def _resequence_slides(self, slide, force_category=False): ids_to_resequence = self.slide_ids.ids index_of_added_slide = ids_to_resequence.index(slide.id) next_category_id = None if self.slide_category_ids: force_category_id = force_category.id if force_category else slide.category_id.id index_of_category = self.slide_category_ids.ids.index( 
force_category_id) if force_category_id else None if index_of_category is None: next_category_id = self.slide_category_ids.ids[0] elif index_of_category < len(self.slide_category_ids.ids) - 1: next_category_id = self.slide_category_ids.ids[ index_of_category + 1] if next_category_id: added_slide_id = ids_to_resequence.pop(index_of_added_slide) index_of_next_category = ids_to_resequence.index(next_category_id) ids_to_resequence.insert(index_of_next_category, added_slide_id) for i, record in enumerate( self.env['slide.slide'].browse(ids_to_resequence)): record.write({'sequence': i + 1}) # start at 1 to make people scream else: slide.write({ 'sequence': self.env['slide.slide'].browse(ids_to_resequence[-1]).sequence + 1 })
class ChannelUsersRelation(models.Model):
    """Membership link between a course channel and an attendee partner.

    Holds the per-attendee completion counter (# of completed published
    slides) and the overall 'completed' flag for the channel.
    """
    _name = 'slide.channel.partner'
    _description = 'Channel / Partners (Members)'
    _table = 'slide_channel_partner'

    channel_id = fields.Many2one('slide.channel', index=True, required=True, ondelete='cascade')
    completed = fields.Boolean(
        'Is Completed',
        help='Channel validated, even if slides / lessons are added once done.')
    # TODO master: rename this field to avoid confusion between completion (%) and completed count (#)
    completion = fields.Integer('# Completed Slides')
    partner_id = fields.Many2one('res.partner', index=True, required=True, ondelete='cascade')
    partner_email = fields.Char(related='partner_id.email', readonly=True)

    def _recompute_completion(self):
        """Recompute the completed-slide count for each membership.

        Counts completed, published, active slides per (channel, partner)
        in a single read_group. When an attendee reaches the channel's
        total slide count for the first time, flag the membership as
        completed and grant the channel's completion karma to the
        attendee's user(s).
        """
        read_group_res = self.env['slide.slide.partner'].sudo().read_group(
            ['&', '&',
             ('channel_id', 'in', self.mapped('channel_id').ids),
             ('partner_id', 'in', self.mapped('partner_id').ids),
             ('completed', '=', True),
             ('slide_id.is_published', '=', True),
             ('slide_id.active', '=', True)],
            ['channel_id', 'partner_id'],
            groupby=['channel_id', 'partner_id'], lazy=False)
        # {channel_id: {partner_id: completed_count}}
        mapped_data = dict()
        for item in read_group_res:
            mapped_data.setdefault(item['channel_id'][0], dict())
            mapped_data[item['channel_id'][0]][item['partner_id'][0]] = item['__count']

        partner_karma = dict.fromkeys(self.mapped('partner_id').ids, 0)
        for record in self:
            slide_done = mapped_data.get(record.channel_id.id, dict()).get(record.partner_id.id, 0)
            record.completion = slide_done
            # only ever flip completed False -> True; it is never reset here
            if not record.completed and record.completion >= record.channel_id.total_slides:
                record.completed = True
                partner_karma[record.partner_id.id] += record.channel_id.karma_gen_channel_finish

        partner_karma = {partner_id: karma_to_add
                         for partner_id, karma_to_add in partner_karma.items()
                         if karma_to_add > 0}
        if partner_karma:
            users = self.env['res.users'].sudo().search([
                ('partner_id', 'in', list(partner_karma.keys()))])
            for user in users:
                # BUGFIX: was `users.add_karma(...)` — that granted each
                # partner's karma to EVERY user in the recordset, once per
                # loop iteration. Grant it to the current user only.
                user.add_karma(partner_karma[user.partner_id.id])

    def unlink(self):
        """Override unlink: when removing an attendee from a channel, also
        remove the related slide.slide.partner progress records."""
        removed_slide_partner_domain = []
        for channel_partner in self:
            # find all slide links for this channel and this partner
            removed_slide_partner_domain = expression.OR([
                removed_slide_partner_domain,
                [('partner_id', '=', channel_partner.partner_id.id),
                 ('slide_id', 'in', channel_partner.channel_id.slide_ids.ids)]
            ])
        if removed_slide_partner_domain:
            self.env['slide.slide.partner'].search(removed_slide_partner_domain).unlink()
        return super(ChannelUsersRelation, self).unlink()
class MaintenanceTeam(models.Model):
    """A team of technicians in charge of maintenance requests/equipment."""
    _name = 'maintenance.team'
    _description = 'Maintenance Teams'

    name = fields.Char('Team Name', required=True, translate=True)
    active = fields.Boolean(default=True)
    company_id = fields.Many2one('res.company', string='Company',
                                 default=lambda self: self.env.company)
    member_ids = fields.Many2many(
        'res.users', 'maintenance_team_users_rel', string="Team Members",
        domain="[('company_ids', 'in', company_id)]")
    color = fields.Integer("Color Index", default=0)
    request_ids = fields.One2many('maintenance.request', 'maintenance_team_id', copy=False)
    equipment_ids = fields.One2many('maintenance.equipment', 'maintenance_team_id', copy=False)

    # For the dashboard only
    todo_request_ids = fields.One2many('maintenance.request', string="Requests",
                                       copy=False, compute='_compute_todo_requests')
    todo_request_count = fields.Integer(string="Number of Requests",
                                        compute='_compute_todo_requests')
    todo_request_count_date = fields.Integer(string="Number of Requests Scheduled",
                                             compute='_compute_todo_requests')
    todo_request_count_high_priority = fields.Integer(string="Number of Requests in High Priority",
                                                      compute='_compute_todo_requests')
    todo_request_count_block = fields.Integer(string="Number of Requests Blocked",
                                              compute='_compute_todo_requests')
    todo_request_count_unscheduled = fields.Integer(string="Number of Requests Unscheduled",
                                                    compute='_compute_todo_requests')

    @api.depends('request_ids.stage_id.done')
    def _compute_todo_requests(self):
        """Compute the dashboard counters over the team's open requests.

        A request is "to do" while its stage is not flagged done; the other
        counters are sub-counts of that set (scheduled / high priority /
        blocked / unscheduled).
        """
        for team in self:
            # idiomatic truthiness instead of the original `== False` /
            # `!= False` comparisons; boolean/datetime fields are falsy
            # exactly when unset, so behavior is unchanged
            team.todo_request_ids = team.request_ids.filtered(
                lambda e: not e.stage_id.done)
            team.todo_request_count = len(team.todo_request_ids)
            team.todo_request_count_date = len(
                team.todo_request_ids.filtered(lambda e: e.schedule_date))
            team.todo_request_count_high_priority = len(
                team.todo_request_ids.filtered(lambda e: e.priority == '3'))
            team.todo_request_count_block = len(
                team.todo_request_ids.filtered(lambda e: e.kanban_state == 'blocked'))
            team.todo_request_count_unscheduled = len(
                team.todo_request_ids.filtered(lambda e: not e.schedule_date))

    @api.depends('equipment_ids')
    def _compute_equipment(self):
        # NOTE(review): 'equipment_count' is not declared on this class in
        # this file — presumably added by another module/view; confirm the
        # field definition exists before relying on this compute.
        for team in self:
            team.equipment_count = len(team.equipment_ids)
class DeliveryCarrier(models.Model):
    _name = 'delivery.carrier'
    _description = "Shipping Methods"
    _order = 'sequence, id'

    ''' A Shipping Provider

    In order to add your own external provider, follow these steps:

    1. Create your model MyProvider that _inherit 'delivery.carrier'
    2. Extend the selection of the field "delivery_type" with a pair
       ('<my_provider>', 'My Provider')
    3. Add your methods:
       <my_provider>_rate_shipment
       <my_provider>_send_shipping
       <my_provider>_get_tracking_link
       <my_provider>_cancel_shipment
       _<my_provider>_get_default_custom_package_code
       (they are documented hereunder)
    '''

    # -------------------------------- #
    # Internals for shipping providers #
    # -------------------------------- #

    name = fields.Char('Delivery Method', required=True, translate=True)
    active = fields.Boolean(default=True)
    sequence = fields.Integer(help="Determine the display order", default=10)
    # This field will be overwritten by internal shipping providers by adding their own type (ex: 'fedex')
    delivery_type = fields.Selection([('fixed', 'Fixed Price')], string='Provider',
                                     default='fixed', required=True)
    integration_level = fields.Selection(
        [('rate', 'Get Rate'), ('rate_and_ship', 'Get Rate and Create Shipment')],
        string="Integration Level", default='rate_and_ship',
        help="Action while validating Delivery Orders")
    prod_environment = fields.Boolean(
        "Environment",
        help="Set to True if your credentials are certified for production.")
    debug_logging = fields.Boolean(
        'Debug logging', help="Log requests in order to ease debugging")
    company_id = fields.Many2one('res.company', string='Company',
                                 related='product_id.company_id', store=True, readonly=False)
    product_id = fields.Many2one('product.product', string='Delivery Product',
                                 required=True, ondelete='restrict')
    invoice_policy = fields.Selection(
        [('estimated', 'Estimated cost'), ('real', 'Real cost')],
        string='Invoicing Policy', default='estimated', required=True,
        help="Estimated Cost: the customer will be invoiced the estimated cost of the shipping.\nReal Cost: the customer will be invoiced the real cost of the shipping, the cost of the shipping will be updated on the SO after the delivery.")
    country_ids = fields.Many2many('res.country', 'delivery_carrier_country_rel',
                                   'carrier_id', 'country_id', 'Countries')
    state_ids = fields.Many2many('res.country.state', 'delivery_carrier_state_rel',
                                 'carrier_id', 'state_id', 'States')
    zip_from = fields.Char('Zip From')
    zip_to = fields.Char('Zip To')
    margin = fields.Float(
        help='This percentage will be added to the shipping price.')
    free_over = fields.Boolean(
        'Free if order amount is above',
        help="If the order total amount (shipping excluded) is above or equal to this value, the customer benefits from a free shipping",
        default=False)
    amount = fields.Float(
        string='Amount',
        help="Amount of the order to benefit from a free shipping, expressed in the company currency")
    can_generate_return = fields.Boolean(compute="_compute_can_generate_return")
    return_label_on_delivery = fields.Boolean(
        string="Generate Return Label",
        help="The return label is automatically generated at the delivery.")
    get_return_label_from_portal = fields.Boolean(
        string="Return Label Accessible from Customer Portal",
        help="The return label can be downloaded by the customer from the customer portal.")

    _sql_constraints = [
        ('margin_not_under_100_percent', 'CHECK (margin >= -100)',
         'Margin cannot be lower than -100%'),
    ]

    @api.depends('delivery_type')
    def _compute_can_generate_return(self):
        # Base implementation: no provider supports return labels.
        # Provider modules extending 'delivery_type' override this compute.
        for carrier in self:
            carrier.can_generate_return = False

    def toggle_prod_environment(self):
        """Flip each carrier between test and production environment."""
        for c in self:
            c.prod_environment = not c.prod_environment

    def toggle_debug(self):
        """Flip request logging on/off for each carrier."""
        for c in self:
            c.debug_logging = not c.debug_logging

    def install_more_provider(self):
        """Return an act_window listing installable delivery_* modules."""
        return {
            'name': 'New Providers',
            'view_mode': 'kanban,form',
            'res_model': 'ir.module.module',
            'domain': [['name', '=like', 'delivery_%'], ['name', '!=', 'delivery_barcode']],
            'type': 'ir.actions.act_window',
            'help': _('''<p class="o_view_nocontent">
                    Buy Harpiya Enterprise now to get more providers.
                </p>'''),
        }

    def available_carriers(self, partner):
        """Filter self down to the carriers that can deliver to ``partner``."""
        return self.filtered(lambda c: c._match_address(partner))

    def _match_address(self, partner):
        """Return True if the partner's address fits this carrier's
        country/state/zip-range restrictions (empty restriction = no limit)."""
        self.ensure_one()
        if self.country_ids and partner.country_id not in self.country_ids:
            return False
        if self.state_ids and partner.state_id not in self.state_ids:
            return False
        # zip comparison is lexicographic on the uppercased strings;
        # a missing partner zip compares as the empty string
        if self.zip_from and (partner.zip or '').upper() < self.zip_from.upper():
            return False
        if self.zip_to and (partner.zip or '').upper() > self.zip_to.upper():
            return False
        return True

    @api.onchange('integration_level')
    def _onchange_integration_level(self):
        # rate-only carriers cannot invoice the real cost
        if self.integration_level == 'rate':
            self.invoice_policy = 'estimated'

    @api.onchange('can_generate_return')
    def _onchange_can_generate_return(self):
        if not self.can_generate_return:
            self.return_label_on_delivery = False

    @api.onchange('return_label_on_delivery')
    def _onchange_return_label_on_delivery(self):
        if not self.return_label_on_delivery:
            self.get_return_label_from_portal = False

    @api.onchange('state_ids')
    def onchange_states(self):
        # keep country list consistent with every selected state's country
        self.country_ids = [(6, 0, self.country_ids.ids + self.state_ids.mapped('country_id.id'))]

    @api.onchange('country_ids')
    def onchange_countries(self):
        # drop states whose country is no longer selected
        self.state_ids = [(6, 0, self.state_ids.filtered(lambda state: state.id in self.country_ids.mapped('state_ids').ids).ids)]

    # -------------------------- #
    # API for external providers #
    # -------------------------- #

    def rate_shipment(self, order):
        ''' Compute the price of the order shipment

        :param order: record of sale.order
        :return dict: {'success': boolean,
                       'price': a float,
                       'error_message': a string containing an error message,
                       'warning_message': a string containing a warning message}
                       # TODO maybe the currency code?
        '''
        self.ensure_one()
        # dispatch to <delivery_type>_rate_shipment; implicitly returns None
        # if the provider method does not exist
        if hasattr(self, '%s_rate_shipment' % self.delivery_type):
            res = getattr(self, '%s_rate_shipment' % self.delivery_type)(order)
            # apply margin on computed price
            res['price'] = float(res['price']) * (1.0 + (self.margin / 100.0))
            # save the real price in case a free_over rule overide it to 0
            res['carrier_price'] = res['price']
            # free when order is large enough
            if res['success'] and self.free_over and order._compute_amount_total_without_delivery() >= self.amount:
                res['warning_message'] = _('The shipping is free since the order amount exceeds %.2f.') % (self.amount)
                res['price'] = 0.0
            return res

    def send_shipping(self, pickings):
        ''' Send the package to the service provider

        :param pickings: A recordset of pickings
        :return list: A list of dictionaries (one per picking) containing of the form::
                         { 'exact_price': price,
                           'tracking_number': number }
                         # TODO missing labels per package
                         # TODO missing currency
                         # TODO missing success, error, warnings
        '''
        self.ensure_one()
        if hasattr(self, '%s_send_shipping' % self.delivery_type):
            return getattr(self, '%s_send_shipping' % self.delivery_type)(pickings)

    def get_return_label(self, pickings, tracking_number=None, origin_date=None):
        # NOTE(review): unlike the other dispatchers this one has no hasattr
        # guard — it relies on can_generate_return being True only when the
        # provider implements <delivery_type>_get_return_label.
        self.ensure_one()
        if self.can_generate_return:
            return getattr(self, '%s_get_return_label' % self.delivery_type)(pickings, tracking_number, origin_date)

    def get_return_label_prefix(self):
        """Prefix used to name generated return label attachments."""
        return 'ReturnLabel-%s' % self.delivery_type

    def get_tracking_link(self, picking):
        ''' Ask the tracking link to the service provider

        :param picking: record of stock.picking
        :return str: an URL containing the tracking link or False
        '''
        self.ensure_one()
        if hasattr(self, '%s_get_tracking_link' % self.delivery_type):
            return getattr(self, '%s_get_tracking_link' % self.delivery_type)(picking)

    def cancel_shipment(self, pickings):
        ''' Cancel a shipment

        :param pickings: A recordset of pickings
        '''
        self.ensure_one()
        if hasattr(self, '%s_cancel_shipment' % self.delivery_type):
            return getattr(self, '%s_cancel_shipment' % self.delivery_type)(pickings)

    def log_xml(self, xml_string, func):
        """Persist a provider request/response in ir.logging when debug
        logging is enabled; best-effort, database errors are swallowed."""
        self.ensure_one()

        if self.debug_logging:
            self.flush()
            db_name = self._cr.dbname

            # Use a new cursor to avoid rollback that could be caused by an upper method
            try:
                db_registry = registry(db_name)
                with db_registry.cursor() as cr:
                    env = api.Environment(cr, SUPERUSER_ID, {})
                    IrLogging = env['ir.logging']
                    IrLogging.sudo().create({'name': 'delivery.carrier',
                                             'type': 'server',
                                             'dbname': db_name,
                                             'level': 'DEBUG',
                                             'message': xml_string,
                                             'path': self.delivery_type,
                                             'func': func,
                                             'line': 1})
            except psycopg2.Error:
                pass

    def _get_default_custom_package_code(self):
        """ Some delivery carriers require a prefix to be sent in order to use custom
            packages (ie not official ones). This optional method will return it as a string.
        """
        self.ensure_one()
        if hasattr(self, '_%s_get_default_custom_package_code' % self.delivery_type):
            return getattr(self, '_%s_get_default_custom_package_code' % self.delivery_type)()
        else:
            return False

    # ------------------------------------------------ #
    # Fixed price shipping, aka a very simple provider #
    # ------------------------------------------------ #

    fixed_price = fields.Float(compute='_compute_fixed_price',
                               inverse='_set_product_fixed_price', store=True,
                               string='Fixed Price')

    @api.depends('product_id.list_price', 'product_id.product_tmpl_id.list_price')
    def _compute_fixed_price(self):
        # the fixed price mirrors the delivery product's sale price
        for carrier in self:
            carrier.fixed_price = carrier.product_id.list_price

    def _set_product_fixed_price(self):
        for carrier in self:
            carrier.product_id.list_price = carrier.fixed_price

    def fixed_rate_shipment(self, order):
        """'fixed' provider implementation of rate_shipment."""
        # NOTE: _match_address returns a boolean; the local name 'carrier'
        # is historical, not a record
        carrier = self._match_address(order.partner_shipping_id)
        if not carrier:
            return {'success': False,
                    'price': 0.0,
                    'error_message': _('Error: this delivery method is not available for this address.'),
                    'warning_message': False}
        price = self.fixed_price
        company = self.company_id or order.company_id or self.env.company
        # convert to the order currency if the carrier company uses another one
        if company.currency_id and company.currency_id != order.currency_id:
            price = company.currency_id._convert(price, order.currency_id, company, fields.Date.today())
        return {'success': True,
                'price': price,
                'error_message': False,
                'warning_message': False}

    def fixed_send_shipping(self, pickings):
        """'fixed' provider implementation of send_shipping (no real call)."""
        res = []
        for p in pickings:
            res = res + [{'exact_price': p.carrier_id.fixed_price,
                          'tracking_number': False}]
        return res

    def fixed_get_tracking_link(self, picking):
        # no external service, hence no tracking link
        return False

    def fixed_cancel_shipment(self, pickings):
        raise NotImplementedError()
class StockRule(models.Model):
    """ A rule describe what a procurement should do; produce, buy, move, ... """
    _name = 'stock.rule'
    _description = "Stock Rule"
    _order = "sequence, id"
    _check_company_auto = True

    name = fields.Char(
        'Name', required=True, translate=True,
        help="This field will fill the packing origin and the name of its moves")
    active = fields.Boolean(
        'Active', default=True,
        help="If unchecked, it will allow you to hide the rule without removing it.")
    group_propagation_option = fields.Selection([
        ('none', 'Leave Empty'),
        ('propagate', 'Propagate'),
        ('fixed', 'Fixed')], string="Propagation of Procurement Group", default='propagate')
    group_id = fields.Many2one('procurement.group', 'Fixed Procurement Group')
    action = fields.Selection(
        selection=[('pull', 'Pull From'), ('push', 'Push To'), ('pull_push', 'Pull & Push')],
        string='Action', required=True)
    sequence = fields.Integer('Sequence', default=20)
    company_id = fields.Many2one('res.company', 'Company',
                                 default=lambda self: self.env.company)
    location_id = fields.Many2one('stock.location', 'Destination Location',
                                  required=True, check_company=True)
    location_src_id = fields.Many2one('stock.location', 'Source Location', check_company=True)
    route_id = fields.Many2one('stock.location.route', 'Route',
                               required=True, ondelete='cascade')
    procure_method = fields.Selection([
        ('make_to_stock', 'Take From Stock'),
        ('make_to_order', 'Trigger Another Rule'),
        ('mts_else_mto', 'Take From Stock, if unavailable, Trigger Another Rule')],
        string='Supply Method', default='make_to_stock', required=True,
        help="Take From Stock: the products will be taken from the available stock of the source location.\n"
             "Trigger Another Rule: the system will try to find a stock rule to bring the products in the source location. The available stock will be ignored.\n"
             "Take From Stock, if Unavailable, Trigger Another Rule: the products will be taken from the available stock of the source location."
             "If there is no stock available, the system will try to find a rule to bring the products in the source location.")
    route_sequence = fields.Integer('Route Sequence', related='route_id.sequence',
                                    store=True, readonly=False, compute_sudo=True)
    picking_type_id = fields.Many2one(
        'stock.picking.type', 'Operation Type',
        required=True, check_company=True)
    delay = fields.Integer('Delay', default=0,
                           help="The expected date of the created transfer will be computed based on this delay.")
    partner_address_id = fields.Many2one(
        'res.partner', 'Partner Address', check_company=True,
        help="Address where goods should be delivered. Optional.")
    propagate_cancel = fields.Boolean(
        'Cancel Next Move', default=False,
        help="When ticked, if the move created by this rule is cancelled, the next move will be cancelled too.")
    warehouse_id = fields.Many2one('stock.warehouse', 'Warehouse', check_company=True)
    propagate_warehouse_id = fields.Many2one(
        'stock.warehouse', 'Warehouse to Propagate',
        help="The warehouse to propagate on the created move/procurement, which can be different of the warehouse this rule is for (e.g for resupplying rules from another warehouse)")
    auto = fields.Selection([
        ('manual', 'Manual Operation'),
        ('transparent', 'Automatic No Step Added')],
        string='Automatic Move', default='manual', index=True, required=True,
        help="The 'Manual Operation' value will create a stock move after the current one. "
             "With 'Automatic No Step Added', the location is replaced in the original move.")
    rule_message = fields.Html(compute='_compute_action_message')
    propagate_date = fields.Boolean(
        string="Propagate Rescheduling", default=True,
        help='The rescheduling is propagated to the next move.')
    propagate_date_minimum_delta = fields.Integer(
        string='Reschedule if Higher Than',
        help='The change must be higher than this value to be propagated', default=1)
    delay_alert = fields.Boolean(
        'Alert if Delay',
        help='Log an exception on the picking if this move has to be delayed (due to a change in the previous move scheduled date).',
    )

    @api.onchange('picking_type_id')
    def _onchange_picking_type(self):
        """ Modify locations to the default picking type's locations source and
        destination.
        Enable the delay alert if the picking type is a delivery
        """
        self.location_src_id = self.picking_type_id.default_location_src_id.id
        self.location_id = self.picking_type_id.default_location_dest_id.id
        if self.picking_type_id.code == 'outgoing':
            self.delay_alert = True

    @api.onchange('route_id', 'company_id')
    def _onchange_route(self):
        """ Ensure that the rule's company is the same than the route's company. """
        if self.route_id.company_id:
            self.company_id = self.route_id.company_id
            # reset the picking type if it belongs to another company's warehouse
            if self.picking_type_id.warehouse_id.company_id != self.route_id.company_id:
                self.picking_type_id = False
        domain = {'company_id': self.route_id.company_id and [('id', '=', self.route_id.company_id.id)] or []}
        return {'domain': domain}

    def _get_message_values(self):
        """ Return the source, destination and picking_type applied on a stock
        rule. The purpose of this function is to avoid code duplication in
        _get_message_dict functions since it often requires those data.
        """
        source = self.location_src_id and self.location_src_id.display_name or _('Source Location')
        destination = self.location_id and self.location_id.display_name or _('Destination Location')
        operation = self.picking_type_id and self.picking_type_id.name or _('Operation Type')
        return source, destination, operation

    def _get_message_dict(self):
        """ Return a dict with the different possible message used for the
        rule message. It should return one message for each stock.rule action
        (except push and pull). This function is override in mrp and
        purchase_stock in order to complete the dictionary.
        """
        message_dict = {}
        source, destination, operation = self._get_message_values()
        if self.action in ('push', 'pull', 'pull_push'):
            suffix = ""
            if self.procure_method == 'make_to_order' and self.location_src_id:
                suffix = _("<br>A need is created in <b>%s</b> and a rule will be triggered to fulfill it.") % (source)
            if self.procure_method == 'mts_else_mto' and self.location_src_id:
                suffix = _("<br>If the products are not available in <b>%s</b>, a rule will be triggered to bring products in this location.") % source
            message_dict = {
                'pull': _('When products are needed in <b>%s</b>, <br/> <b>%s</b> are created from <b>%s</b> to fulfill the need.') % (destination, operation, source) + suffix,
                'push': _('When products arrive in <b>%s</b>, <br/> <b>%s</b> are created to send them in <b>%s</b>.') % (source, operation, destination)
            }
        return message_dict

    @api.depends('action', 'location_id', 'location_src_id', 'picking_type_id', 'procure_method')
    def _compute_action_message(self):
        """ Generate dynamicaly a message that describe the rule purpose to the
        end user.
        """
        action_rules = self.filtered(lambda rule: rule.action)
        for rule in action_rules:
            message_dict = rule._get_message_dict()
            message = message_dict.get(rule.action) and message_dict[rule.action] or ""
            if rule.action == 'pull_push':
                message = message_dict['pull'] + "<br/><br/>" + message_dict['push']
            rule.rule_message = message
        (self - action_rules).rule_message = None

    def _run_push(self, move):
        """ Apply a push rule on a move.
        If the rule is 'no step added' it will modify the destination location
        on the move.
        If the rule is 'manual operation' it will generate a new move in order
        to complete the section define by the rule.
        Care this function is not call by method run. It is called explicitely
        in stock_move.py inside the method _push_apply
        """
        new_date = fields.Datetime.to_string(move.date_expected + relativedelta(days=self.delay))
        if self.auto == 'transparent':
            move.write({
                'date': new_date,
                'date_expected': new_date,
                'location_dest_id': self.location_id.id})
            # avoid looping if a push rule is not well configured; otherwise call again push_apply to see if a next step is defined
            if self.location_id != move.location_dest_id:
                # TDE FIXME: should probably be done in the move model IMO
                move._push_apply()
        else:
            new_move_vals = self._push_prepare_move_copy_values(move, new_date)
            new_move = move.sudo().copy(new_move_vals)
            move.write({'move_dest_ids': [(4, new_move.id)]})
            new_move._action_confirm()

    def _push_prepare_move_copy_values(self, move_to_copy, new_date):
        """Build the values for the chained move created by a push rule."""
        company_id = self.company_id.id
        if not company_id:
            # fall back on the rule's warehouse company, then the picking type's
            company_id = self.sudo().warehouse_id and self.sudo().warehouse_id.company_id.id or self.sudo().picking_type_id.warehouse_id.company_id.id
        new_move_vals = {
            'origin': move_to_copy.origin or move_to_copy.picking_id.name or "/",
            'location_id': move_to_copy.location_dest_id.id,
            'location_dest_id': self.location_id.id,
            'date': new_date,
            'date_expected': new_date,
            'company_id': company_id,
            'picking_id': False,
            'picking_type_id': self.picking_type_id.id,
            'propagate_cancel': self.propagate_cancel,
            'propagate_date': self.propagate_date,
            'propagate_date_minimum_delta': self.propagate_date_minimum_delta,
            'warehouse_id': self.warehouse_id.id,
            'delay_alert': self.delay_alert,
        }
        return new_move_vals

    @api.model
    def _run_pull(self, procurements):
        """Create the stock moves fulfilling pull procurements.

        :param procurements: list of (procurement, rule) pairs
        :raises UserError: if a rule has no source location
        """
        moves_values_by_company = defaultdict(list)
        mtso_products_by_locations = defaultdict(list)

        # To handle the `mts_else_mto` procure method, we do a preliminary loop to
        # isolate the products we would need to read the forecasted quantity,
        # in order to batch the read. We also make a sanitary check on the
        # `location_src_id` field.
        for procurement, rule in procurements:
            if not rule.location_src_id:
                msg = _('No source location defined on stock rule: %s!') % (rule.name, )
                raise UserError(msg)

            if rule.procure_method == 'mts_else_mto':
                mtso_products_by_locations[rule.location_src_id].append(procurement.product_id.id)

        # Get the forecasted quantity for the `mts_else_mto` procurement.
        forecasted_qties_by_loc = {}
        for location, product_ids in mtso_products_by_locations.items():
            products = self.env['product.product'].browse(product_ids).with_context(location=location.id)
            forecasted_qties_by_loc[location] = {product.id: product.free_qty for product in products}

        # Prepare the move values, adapt the `procure_method` if needed.
        for procurement, rule in procurements:
            procure_method = rule.procure_method
            if rule.procure_method == 'mts_else_mto':
                qty_needed = procurement.product_uom._compute_quantity(procurement.product_qty, procurement.product_id.uom_id)
                qty_available = forecasted_qties_by_loc[rule.location_src_id][procurement.product_id.id]
                if float_compare(qty_needed, qty_available, precision_rounding=procurement.product_id.uom_id.rounding) <= 0:
                    procure_method = 'make_to_stock'
                    # reserve the forecasted quantity for later procurements of the batch
                    forecasted_qties_by_loc[rule.location_src_id][procurement.product_id.id] -= qty_needed
                else:
                    procure_method = 'make_to_order'

            move_values = rule._get_stock_move_values(*procurement)
            move_values['procure_method'] = procure_method
            moves_values_by_company[procurement.company_id.id].append(move_values)

        for company_id, moves_values in moves_values_by_company.items():
            # create the move as SUPERUSER because the current user may not have the rights to do it (mto product launched by a sale for example)
            moves = self.env['stock.move'].sudo().with_context(force_company=company_id).create(moves_values)
            # Since action_confirm launch following procurement_group we should activate it.
            moves._action_confirm()
        return True

    def _get_custom_move_fields(self):
        """ The purpose of this method is to be override in order to easily add
        fields from procurement 'values' argument to move data.
        """
        return []

    def _get_stock_move_values(self, product_id, product_qty, product_uom, location_id, name, origin, company_id, values):
        ''' Returns a dictionary of values that will be used to create a stock move from a procurement.
        This function assumes that the given procurement has a rule (action == 'pull' or 'pull_push') set on it.

        :param procurement: browse record
        :rtype: dictionary
        '''
        group_id = False
        if self.group_propagation_option == 'propagate':
            group_id = values.get('group_id', False) and values['group_id'].id
        elif self.group_propagation_option == 'fixed':
            group_id = self.group_id.id

        date_expected = fields.Datetime.to_string(
            fields.Datetime.from_string(values['date_planned']) - relativedelta(days=self.delay or 0)
        )
        partner = self.partner_address_id or (values.get('group_id', False) and values['group_id'].partner_id)
        if partner:
            product_id = product_id.with_context(lang=partner.lang or self.env.user.lang)
        # it is possible that we've already got some move done, so check for the done qty and create
        # a new move with the correct qty
        qty_left = product_qty
        move_values = {
            'name': name[:2000],
            'company_id': self.company_id.id or self.location_src_id.company_id.id or self.location_id.company_id.id or company_id.id,
            'product_id': product_id.id,
            'product_uom': product_uom.id,
            'product_uom_qty': qty_left,
            'partner_id': partner.id if partner else False,
            'location_id': self.location_src_id.id,
            'location_dest_id': location_id.id,
            'move_dest_ids': values.get('move_dest_ids', False) and [(4, x.id) for x in values['move_dest_ids']] or [],
            'rule_id': self.id,
            'procure_method': self.procure_method,
            'origin': origin,
            'picking_type_id': self.picking_type_id.id,
            'group_id': group_id,
            'route_ids': [(4, route.id) for route in values.get('route_ids', [])],
            'warehouse_id': self.propagate_warehouse_id.id or self.warehouse_id.id,
            'date': date_expected,
            'date_expected': date_expected,
            'propagate_cancel': self.propagate_cancel,
            'propagate_date': self.propagate_date,
            'propagate_date_minimum_delta': self.propagate_date_minimum_delta,
            'description_picking': product_id._get_description(self.picking_type_id),
            'priority': values.get('priority', "1"),
            'delay_alert': self.delay_alert,
        }
        for field in self._get_custom_move_fields():
            if field in values:
                move_values[field] = values.get(field)
        return move_values

    def _log_next_activity(self, product_id, note):
        """Post a warning activity on the product's template, unless an
        identical note is already pending there."""
        existing_activity = self.env['mail.activity'].search([
            ('res_id', '=', product_id.product_tmpl_id.id),
            ('res_model_id', '=', self.env.ref('product.model_product_template').id),
            ('note', '=', note)])
        if not existing_activity:
            # If the user deleted the warning activity type, fall back to no
            # activity type instead of crashing. BUGFIX: was a bare `except:`
            # which would also swallow KeyboardInterrupt/SystemExit and any
            # unrelated error; env.ref raises ValueError on a missing xmlid.
            try:
                activity_type_id = self.env.ref('mail.mail_activity_data_warning').id
            except ValueError:
                activity_type_id = False

            self.env['mail.activity'].create({
                'activity_type_id': activity_type_id,
                'note': note,
                'user_id': product_id.responsible_id.id or SUPERUSER_ID,
                'res_id': product_id.product_tmpl_id.id,
                'res_model_id': self.env.ref('product.model_product_template').id,
            })
class PortalShare(models.TransientModel):
    """Wizard sharing a record's portal access link with selected partners.

    Partners who are already users (or when the record carries an access
    token / signup is open) receive the common share link; the remaining
    partners receive an individual signup link.
    """
    _name = 'portal.share'
    _description = 'Portal Sharing'

    @api.model
    def default_get(self, fields):
        result = super(PortalShare, self).default_get(fields)
        result['res_model'] = self._context.get('active_model', False)
        result['res_id'] = self._context.get('active_id', False)
        if result['res_model'] and result['res_id']:
            record = self.env[result['res_model']].browse(result['res_id'])
            result['share_link'] = record.get_base_url() + record._get_share_url(redirect=True)
        return result

    res_model = fields.Char('Related Document Model', required=True)
    res_id = fields.Integer('Related Document ID', required=True)
    partner_ids = fields.Many2many('res.partner', string="Recipients", required=True)
    note = fields.Text(help="Add extra content to display in the email")
    share_link = fields.Char(string="Link", compute='_compute_share_link')
    access_warning = fields.Text("Access warning", compute="_compute_access_warning")

    @api.depends('res_model', 'res_id')
    def _compute_share_link(self):
        """Expose the record's portal share URL (portal.mixin records only)."""
        for rec in self:
            rec.share_link = False
            if rec.res_model:
                res_model = self.env[rec.res_model]
                if isinstance(res_model, self.pool['portal.mixin']) and rec.res_id:
                    record = res_model.browse(rec.res_id)
                    rec.share_link = record.get_base_url() + record._get_share_url(redirect=True)

    @api.depends('res_model', 'res_id')
    def _compute_access_warning(self):
        """Relay the record's own access warning (portal.mixin records only)."""
        for rec in self:
            rec.access_warning = False
            if rec.res_model:
                res_model = self.env[rec.res_model]
                if isinstance(res_model, self.pool['portal.mixin']) and rec.res_id:
                    record = res_model.browse(rec.res_id)
                    rec.access_warning = record.access_warning

    def action_send_mail(self):
        """Send the invitation e-mails and close the wizard."""
        active_record = self.env[self.res_model].browse(self.res_id)
        template = self.env.ref('portal.portal_share_template', False)
        note = self.env.ref('mail.mt_note')
        signup_enabled = self.env['ir.config_parameter'].sudo().get_param('auth_signup.invitation_scope') == 'b2c'

        if hasattr(active_record, 'access_token') and active_record.access_token or not signup_enabled:
            partner_ids = self.partner_ids
        else:
            partner_ids = self.partner_ids.filtered(lambda x: x.user_ids)
        # if partner already user or record has access token send common link in batch to all user
        # BUGFIX: iterate the filtered `partner_ids`, not `self.partner_ids` —
        # otherwise non-user partners got the common link here AND a signup
        # mail below (duplicate mail, and a link they cannot use).
        for partner in partner_ids:
            share_link = active_record.get_base_url() + active_record._get_share_url(redirect=True, pid=partner.id)
            active_record.with_context(mail_post_autofollow=True).message_post_with_view(
                template,
                values={'partner': partner, 'note': self.note, 'record': active_record, 'share_link': share_link},
                # interpolate AFTER translation lookup so the template string
                # can actually be translated
                subject=_("You are invited to access %s") % active_record.display_name,
                subtype_id=note.id,
                email_layout_xmlid='mail.mail_notification_light',
                partner_ids=[(6, 0, partner.ids)])
        # when partner not user send individual mail with signup token
        for partner in self.partner_ids - partner_ids:
            # prepare partner for signup and send signup url with redirect url
            partner.signup_get_auth_param()
            # BUGFIX: the wizard field is `res_model`; `self.model` does not
            # exist and raised AttributeError on this branch.
            share_link = partner._get_signup_url_for_action(action='/mail/view', res_id=self.res_id, model=self.res_model)[partner.id]
            active_record.with_context(mail_post_autofollow=True).message_post_with_view(
                template,
                values={'partner': partner, 'note': self.note, 'record': active_record, 'share_link': share_link},
                subject=_("You are invited to access %s") % active_record.display_name,
                subtype_id=note.id,
                email_layout_xmlid='mail.mail_notification_light',
                partner_ids=[(6, 0, partner.ids)])
        return {'type': 'ir.actions.act_window_close'}
class Property(models.Model):
    """Storage backend for company-dependent ("property") fields.

    Each record holds the value of one company-dependent field for one
    resource, where ``res_id`` is the textual reference ``"<model>,<id>"``.
    When ``res_id`` is not set, the record is the company-wide *default*
    value for that field. Values are stored in a type-specific column
    (``value_float``, ``value_text``, ...) selected through ``TYPE2FIELD``.
    """
    _name = 'ir.property'
    _description = 'Company Property'

    name = fields.Char(index=True)
    # Textual reference "<model>,<id>"; NULL/False means "default value".
    res_id = fields.Char(
        string='Resource',
        index=True,
        help="If not set, acts as a default value for new resources",
    )
    company_id = fields.Many2one('res.company', string='Company', index=True)
    fields_id = fields.Many2one('ir.model.fields', string='Field',
                                ondelete='cascade', required=True, index=True)
    value_float = fields.Float()
    value_integer = fields.Integer()
    value_text = fields.Text()  # will contain (char, text)
    value_binary = fields.Binary(attachment=False)
    # "<comodel>,<id>" for many2one values.
    value_reference = fields.Char()
    value_datetime = fields.Datetime()
    type = fields.Selection([
        ('char', 'Char'),
        ('float', 'Float'),
        ('boolean', 'Boolean'),
        ('integer', 'Integer'),
        ('text', 'Text'),
        ('binary', 'Binary'),
        ('many2one', 'Many2One'),
        ('date', 'Date'),
        ('datetime', 'DateTime'),
        ('selection', 'Selection'),
    ], required=True, default='many2one', index=True)

    def _update_values(self, values):
        """Rewrite a generic ``'value'`` key in ``values`` into the
        type-specific column (``value_float``, ``value_reference``, ...).

        The type is taken from ``values``, else from the first record of
        ``self``, else from the field's default. Many2one values are
        normalized to the ``"<model>,<id>"`` string form.

        :raises UserError: when the resolved type has no storage column.
        :raises ValueError: when an integer many2one value is given without
            a way to resolve its comodel (no ``fields_id`` and no record).
        """
        if 'value' not in values:
            return values
        value = values.pop('value')

        prop = None
        type_ = values.get('type')
        if not type_:
            if self:
                prop = self[0]
                type_ = prop.type
            else:
                type_ = self._fields['type'].default(self)

        field = TYPE2FIELD.get(type_)
        if not field:
            raise UserError(_('Invalid type'))

        if field == 'value_reference':
            if not value:
                value = False
            elif isinstance(value, models.BaseModel):
                value = '%s,%d' % (value._name, value.id)
            elif isinstance(value, int):
                # bare id: resolve the comodel through the field definition
                field_id = values.get('fields_id')
                if not field_id:
                    if not prop:
                        raise ValueError()
                    field_id = prop.fields_id
                else:
                    field_id = self.env['ir.model.fields'].browse(field_id)

                value = '%s,%d' % (field_id.sudo().relation, value)

        values[field] = value
        return values

    def write(self, values):
        """Write and, when a *default* property (res_id NULL) is touched,
        flush and clear the ormcache so `_get_default_property` stays fresh."""
        # if any of the records we're writing on has a res_id=False *or*
        # we're writing a res_id=False on any record
        default_set = False
        if self._ids:
            self.env.cr.execute(
                'SELECT EXISTS (SELECT 1 FROM ir_property WHERE id in %s AND res_id IS NULL)', [self._ids])
            # NOTE(review): SELECT EXISTS always returns exactly one row, so
            # rowcount == 1 looks always-true here (the intent is presumably
            # to test the boolean EXISTS result) — confirm against upstream.
            default_set = self.env.cr.rowcount == 1 or any(
                v.get('res_id') is False for v in values)
        r = super(Property, self).write(self._update_values(values))
        if default_set:
            # DLE P44: test `test_27_company_dependent`
            # Easy solution, need to flush write when changing a property.
            # Maybe it would be better to be able to compute all impacted cache value and update those instead
            # Then clear_caches must be removed as well.
            self.flush()
            self.clear_caches()
        return r

    @api.model_create_multi
    def create(self, vals_list):
        """Create properties; flush and clear caches when any created record
        is a default value (no res_id)."""
        vals_list = [self._update_values(vals) for vals in vals_list]
        created_default = any(not v.get('res_id') for v in vals_list)
        r = super(Property, self).create(vals_list)
        if created_default:
            # DLE P44: test `test_27_company_dependent`
            self.flush()
            self.clear_caches()
        return r

    def unlink(self):
        """Delete properties and clear caches when a default was deleted."""
        default_deleted = False
        if self._ids:
            # NOTE(review): unlike write(), this query has no
            # "AND res_id IS NULL" filter, so default_deleted is set for any
            # existing record, not only defaults — verify this asymmetry is
            # intentional (it only causes extra cache invalidation).
            self.env.cr.execute(
                'SELECT EXISTS (SELECT 1 FROM ir_property WHERE id in %s)',
                [self._ids]
            )
            default_deleted = self.env.cr.rowcount == 1
        r = super().unlink()
        if default_deleted:
            self.clear_caches()
        return r

    def get_by_record(self):
        """Return the Python value stored on this (single) property record,
        decoded according to ``self.type``.

        many2one returns a (possibly empty) recordset; date returns a string
        date; unknown types return False.
        """
        self.ensure_one()
        if self.type in ('char', 'text', 'selection'):
            return self.value_text
        elif self.type == 'float':
            return self.value_float
        elif self.type == 'boolean':
            return bool(self.value_integer)
        elif self.type == 'integer':
            return self.value_integer
        elif self.type == 'binary':
            return self.value_binary
        elif self.type == 'many2one':
            if not self.value_reference:
                return False
            model, resource_id = self.value_reference.split(',')
            return self.env[model].browse(int(resource_id)).exists()
        elif self.type == 'datetime':
            return self.value_datetime
        elif self.type == 'date':
            if not self.value_datetime:
                return False
            return fields.Date.to_string(
                fields.Datetime.from_string(self.value_datetime))
        return False

    @api.model
    def get(self, name, model, res_id=False):
        """Return the value of property ``name`` of ``model`` for ``res_id``,
        falling back to the (cached) default value when res_id is falsy."""
        if not res_id:
            t, v = self._get_default_property(name, model)
            if not v or t != 'many2one':
                return v
            return self.env[v[0]].browse(v[1])

        p = self._get_property(name, model, res_id=res_id)
        if p:
            return p.get_by_record()
        return False

    # only cache Property.get(res_id=False) as that's
    # sub-optimally.
    COMPANY_KEY = "self.env.context.get('force_company') or self.env.company.id"

    @ormcache(COMPANY_KEY, 'name', 'model')
    def _get_default_property(self, name, model):
        """Cached (per company) default value of property ``name``/``model``.

        :return: ``(type, value)``; many2one values are cached as the
            picklable pair ``(comodel_name, id)``.
        """
        prop = self._get_property(name, model, res_id=False)
        if not prop:
            return None, False
        v = prop.get_by_record()
        if prop.type != 'many2one':
            return prop.type, v
        return 'many2one', v and (v._name, v.id)

    def _get_property(self, name, model, res_id):
        """Return the best-matching ir.property record (or empty recordset).

        Company-specific properties win over company-less ones thanks to the
        ``order='company_id'`` search.
        """
        domain = self._get_domain(name, model)
        if domain is not None:
            domain = [('res_id', '=', res_id)] + domain
            #make the search with company_id asc to make sure that properties specific to a company are given first
            return self.search(domain, limit=1, order='company_id')
        return self.browse(())

    def _get_domain(self, prop_name, model):
        """Domain matching property ``prop_name`` of ``model`` for the current
        (or forced) company; None when the field does not exist."""
        field_id = self.env['ir.model.fields']._get_id(model, prop_name)
        if not field_id:
            return None
        company_id = self._context.get('force_company') or self.env.company.id
        return [('fields_id', '=', field_id), ('company_id', 'in', [company_id, False])]

    @api.model
    def get_multi(self, name, model, ids):
        """ Read the property field `name` for the records of model `model` with
            the given `ids`, and return a dictionary mapping `ids` to their
            corresponding value.
        """
        if not ids:
            return {}

        field = self.env[model]._fields[name]
        field_id = self.env['ir.model.fields']._get_id(model, name)
        company_id = (self._context.get('force_company') or self.env.company.id)

        if field.type == 'many2one':
            comodel = self.env[field.comodel_name]
            # offsets to strip the "<model>," prefix from res_id / value_reference
            model_pos = len(model) + 2
            value_pos = len(comodel._name) + 2
            # retrieve values: both p.res_id and p.value_reference are formatted
            # as "<rec._name>,<rec.id>"; the purpose of the LEFT JOIN is to
            # return the value id if it exists, NULL otherwise
            query = """
                SELECT substr(p.res_id, %s)::integer, r.id
                FROM ir_property p
                LEFT JOIN {} r ON substr(p.value_reference, %s)::integer=r.id
                WHERE p.fields_id=%s
                    AND (p.company_id=%s OR p.company_id IS NULL)
                    AND (p.res_id IN %s OR p.res_id IS NULL)
                ORDER BY p.company_id NULLS FIRST
            """.format(comodel._table)
            params = [model_pos, value_pos, field_id, company_id]
            clean = comodel.browse

        elif field.type in TYPE2FIELD:
            model_pos = len(model) + 2
            # retrieve values: p.res_id is formatted as "<rec._name>,<rec.id>"
            query = """
                SELECT substr(p.res_id, %s)::integer, p.{}
                FROM ir_property p
                WHERE p.fields_id=%s
                    AND (p.company_id=%s OR p.company_id IS NULL)
                    AND (p.res_id IN %s OR p.res_id IS NULL)
                ORDER BY p.company_id NULLS FIRST
            """.format(TYPE2FIELD[field.type])
            params = [model_pos, field_id, company_id]
            clean = TYPE2CLEAN[field.type]

        else:
            return dict.fromkeys(ids, False)

        # retrieve values
        cr = self.env.cr
        result = {}
        refs = {"%s,%s" % (model, id) for id in ids}
        for sub_refs in cr.split_for_in_conditions(refs):
            cr.execute(query, params + [sub_refs])
            # NULLS FIRST ordering makes company-specific rows overwrite
            # the company-less ones for the same key.
            result.update(cr.fetchall())

        # determine all values and format them
        # rows with res_id NULL produced key None: that is the default value
        default = result.get(None, None)
        return {id: clean(result.get(id, default)) for id in ids}

    @api.model
    def set_multi(self, name, model, values, default_value=None):
        """ Assign the property field `name` for the records of model `model`
            with `values` (dictionary mapping record ids to their value).
            If the value for a given record is the same as the default
            value, the property entry will not be stored, to avoid bloating
            the database.
            If `default_value` is provided, that value will be used instead
            of the computed default value, to determine whether the value
            for a record should be stored or not.
        """
        def clean(value):
            return value.id if isinstance(value, models.BaseModel) else value

        if not values:
            return

        if default_value is None:
            domain = self._get_domain(name, model)
            if domain is None:
                raise Exception()
            # retrieve the default value for the field
            default_value = clean(self.get(name, model))

        # retrieve the properties corresponding to the given record ids
        field_id = self.env['ir.model.fields']._get_id(model, name)
        company_id = self.env.context.get(
            'force_company') or self.env.company.id
        refs = {('%s,%s' % (model, id)): id for id in values}
        props = self.search([
            ('fields_id', '=', field_id),
            ('company_id', '=', company_id),
            ('res_id', 'in', list(refs)),
        ])

        # modify existing properties
        for prop in props:
            id = refs.pop(prop.res_id)
            value = clean(values[id])
            if value == default_value:
                # avoid prop.unlink(), as it clears the record cache that can
                # contain the value of other properties to set on record!
                prop.check_access_rights('unlink')
                prop.check_access_rule('unlink')
                self._cr.execute("DELETE FROM ir_property WHERE id=%s", [prop.id])
            elif value != clean(prop.get_by_record()):
                prop.write({'value': value})

        # create new properties for records that do not have one yet
        vals_list = []
        for ref, id in refs.items():
            value = clean(values[id])
            if value != default_value:
                vals_list.append({
                    'fields_id': field_id,
                    'company_id': company_id,
                    'res_id': ref,
                    'name': name,
                    'value': value,
                    'type': self.env[model]._fields[name].type,
                })
        self.create(vals_list)

    @api.model
    def search_multi(self, name, model, operator, value):
        """ Return a domain for the records that match the given condition.
        """
        default_matches = False
        include_zero = False

        field = self.env[model]._fields[name]
        if field.type == 'many2one':
            comodel = field.comodel_name

            def makeref(value):
                return value and '%s,%s' % (comodel, value)

            if operator == "=":
                value = makeref(value)
                # if searching properties not set, search those not in those set
                if value is False:
                    default_matches = True
            elif operator in ('!=', '<=', '<', '>', '>='):
                value = makeref(value)
            elif operator in ('in', 'not in'):
                value = [makeref(v) for v in value]
            elif operator in ('=like', '=ilike', 'like', 'not like', 'ilike', 'not ilike'):
                # most probably inefficient... but correct
                target = self.env[comodel]
                target_names = target.name_search(value, operator=operator, limit=None)
                target_ids = [n[0] for n in target_names]
                operator, value = 'in', [makeref(v) for v in target_ids]
        elif field.type in ('integer', 'float'):
            # No record is created in ir.property if the field's type is float or integer with a value
            # equal to 0. Then to match with the records that are linked to a property field equal to 0,
            # the negation of the operator must be taken to compute the goods and the domain returned
            # to match the searched records is just the opposite.
            if value == 0 and operator == '=':
                operator = '!='
                include_zero = True
            elif value <= 0 and operator == '>=':
                operator = '<'
                include_zero = True
            elif value < 0 and operator == '>':
                operator = '<='
                include_zero = True
            elif value >= 0 and operator == '<=':
                operator = '>'
                include_zero = True
            elif value > 0 and operator == '<':
                operator = '>='
                include_zero = True

        # retrieve the properties that match the condition
        domain = self._get_domain(name, model)
        if domain is None:
            raise Exception()
        props = self.search(domain + [(TYPE2FIELD[field.type], operator, value)])

        # retrieve the records corresponding to the properties that match
        good_ids = []
        for prop in props:
            if prop.res_id:
                res_model, res_id = prop.res_id.split(',')
                good_ids.append(int(res_id))
            else:
                default_matches = True

        if include_zero:
            return [('id', 'not in', good_ids)]
        elif default_matches:
            # exclude all records with a property that does not match
            all_ids = []
            props = self.search(domain + [('res_id', '!=', False)])
            for prop in props:
                res_model, res_id = prop.res_id.split(',')
                all_ids.append(int(res_id))
            bad_ids = list(set(all_ids) - set(good_ids))
            return [('id', 'not in', bad_ids)]
        else:
            return [('id', 'in', good_ids)]
class SaleOrderLine(models.Model):
    """sale_purchase bridge: sale order lines for 'service to purchase'
    products generate and maintain purchase order lines (subcontracting)."""
    _inherit = 'sale.order.line'

    purchase_line_ids = fields.One2many(
        'purchase.order.line',
        'sale_line_id',
        string="Generated Purchase Lines",
        readonly=True,
        help=
        "Purchase line generated by this Sales item on order confirmation, or when the quantity was increased."
    )
    purchase_line_count = fields.Integer("Number of generated purchase items",
                                         compute='_compute_purchase_count')

    @api.depends('purchase_line_ids')
    def _compute_purchase_count(self):
        """Count linked PO lines with a single grouped query (sudo: purchase
        access is not granted to all sale users)."""
        database_data = self.env['purchase.order.line'].sudo().read_group(
            [('sale_line_id', 'in', self.ids)], ['sale_line_id'],
            ['sale_line_id'])
        mapped_data = dict([(db['sale_line_id'][0], db['sale_line_id_count'])
                            for db in database_data])
        for line in self:
            line.purchase_line_count = mapped_data.get(line.id, 0)

    @api.onchange('product_uom_qty')
    def _onchange_service_product_uom_qty(self):
        """Warn the user when decreasing the ordered quantity of a confirmed
        'service to purchase' line: the PO must be adjusted manually."""
        if self.state == 'sale' and self.product_id.type == 'service' and self.product_id.service_to_purchase:
            if self.product_uom_qty < self._origin.product_uom_qty:
                if self.product_uom_qty < self.qty_delivered:
                    return {}
                warning_mess = {
                    'title': _('Ordered quantity decreased!'),
                    'message':
                    _('You are decreasing the ordered quantity! Do not forget to manually update the purchase order if needed.'
                      ),
                }
                return {'warning': warning_mess}
        return {}

    # --------------------------
    # CRUD
    # --------------------------

    @api.model
    def create(self, values):
        line = super(SaleOrderLine, self).create(values)
        # Do not generate purchase when expense SO line since the product is already delivered
        if line.state == 'sale' and not line.is_expense:
            line.sudo()._purchase_service_generation()
        return line

    def write(self, values):
        """On quantity change of lines already linked to POs, snapshot the
        old quantities, write, then propagate increases/decreases."""
        increased_lines = None
        decreased_lines = None
        increased_values = {}
        decreased_values = {}
        if 'product_uom_qty' in values:
            precision = self.env['decimal.precision'].precision_get(
                'Product Unit of Measure')
            # current qty < new qty --> increase
            increased_lines = self.sudo().filtered(
                lambda r: r.product_id.service_to_purchase and r.
                purchase_line_count and float_compare(
                    r.product_uom_qty,
                    values['product_uom_qty'],
                    precision_digits=precision) == -1)
            # current qty > new qty --> decrease
            decreased_lines = self.sudo().filtered(
                lambda r: r.product_id.service_to_purchase and r.
                purchase_line_count and float_compare(
                    r.product_uom_qty,
                    values['product_uom_qty'],
                    precision_digits=precision) == 1)
            increased_values = {
                line.id: line.product_uom_qty
                for line in increased_lines
            }
            decreased_values = {
                line.id: line.product_uom_qty
                for line in decreased_lines
            }
        result = super(SaleOrderLine, self).write(values)
        if increased_lines:
            increased_lines._purchase_increase_ordered_qty(
                values['product_uom_qty'], increased_values)
        if decreased_lines:
            decreased_lines._purchase_decrease_ordered_qty(
                values['product_uom_qty'], decreased_values)
        return result

    # --------------------------
    # Business Methods
    # --------------------------

    def _purchase_decrease_ordered_qty(self, new_qty, origin_values):
        """ Decrease the quantity from SO line will add a next acitivities on the related purchase order
            :param new_qty: new quantity (lower than the current one on SO line), expressed
                in UoM of SO line.
            :param origin_values: map from sale line id to old value for the
                ordered quantity (dict)
        """
        purchase_to_notify_map = {}  # map PO -> set(SOL)
        last_purchase_lines = self.env['purchase.order.line'].search([
            ('sale_line_id', 'in', self.ids)
        ])
        for purchase_line in last_purchase_lines:
            purchase_to_notify_map.setdefault(purchase_line.order_id,
                                              self.env['sale.order.line'])
            purchase_to_notify_map[
                purchase_line.order_id] |= purchase_line.sale_line_id

        # create next activity
        for purchase_order, sale_lines in purchase_to_notify_map.items():
            render_context = {
                'sale_lines': sale_lines,
                'sale_orders': sale_lines.mapped('order_id'),
                'origin_values': origin_values,
            }
            purchase_order.activity_schedule_with_view(
                'mail.mail_activity_data_warning',
                user_id=purchase_order.user_id.id or self.env.uid,
                views_or_xmlid=
                'sale_purchase.exception_purchase_on_sale_quantity_decreased',
                render_context=render_context)

    def _purchase_increase_ordered_qty(self, new_qty, origin_values):
        """ Increase the quantity on the related purchase lines
            :param new_qty: new quantity (higher than the current one on SO line), expressed
                in UoM of SO line.
            :param origin_values: map from sale line id to old value for the
                ordered quantity (dict)
        """
        for line in self:
            last_purchase_line = self.env['purchase.order.line'].search(
                [('sale_line_id', '=', line.id)],
                order='create_date DESC',
                limit=1)
            if last_purchase_line.state in ['draft', 'sent', 'to approve'
                                            ]:  # update qty for draft PO lines
                quantity = line.product_uom._compute_quantity(
                    new_qty, last_purchase_line.product_uom)
                last_purchase_line.write({'product_qty': quantity})
            elif last_purchase_line.state in [
                    'purchase', 'done', 'cancel'
            ]:  # create new PO, by forcing the quantity as the difference from SO line
                quantity = line.product_uom._compute_quantity(
                    new_qty - origin_values.get(line.id, 0.0),
                    last_purchase_line.product_uom)
                line._purchase_service_create(quantity=quantity)

    def _purchase_get_date_order(self, supplierinfo):
        """ return the ordered date for the purchase order, computed as : SO commitment date - supplier delay """
        commitment_date = fields.Datetime.from_string(
            self.order_id.commitment_date or fields.Datetime.now())
        return commitment_date - relativedelta(days=int(supplierinfo.delay))

    def _purchase_service_prepare_order_values(self, supplierinfo):
        """ Returns the values to create the purchase order from the current SO line.
            :param supplierinfo: record of product.supplierinfo
            :rtype: dict
        """
        self.ensure_one()
        partner_supplier = supplierinfo.name
        fiscal_position_id = self.env['account.fiscal.position'].sudo(
        ).get_fiscal_position(partner_supplier.id)
        date_order = self._purchase_get_date_order(supplierinfo)
        return {
            'partner_id': partner_supplier.id,
            'partner_ref': partner_supplier.ref,
            'company_id': self.company_id.id,
            'currency_id':
            partner_supplier.property_purchase_currency_id.id
            or self.env.company.currency_id.id,
            'dest_address_id': False,  # False since only supported in stock
            'origin': self.order_id.name,
            'payment_term_id':
            partner_supplier.property_supplier_payment_term_id.id,
            'date_order': date_order,
            'fiscal_position_id': fiscal_position_id,
        }

    def _purchase_service_prepare_line_values(self,
                                              purchase_order,
                                              quantity=False):
        """ Returns the values to create the purchase order line from the current SO line.
            :param purchase_order: record of purchase.order
            :rtype: dict
            :param quantity: the quantity to force on the PO line, expressed in SO line UoM
        """
        self.ensure_one()
        # compute quantity from SO line UoM
        product_quantity = self.product_uom_qty
        if quantity:
            product_quantity = quantity
        purchase_qty_uom = self.product_uom._compute_quantity(
            product_quantity, self.product_id.uom_po_id)
        # determine vendor (real supplier, sharing the same partner as the one from the PO, but with more accurate informations like validity, quantity, ...)
        # Note: one partner can have multiple supplier info for the same product
        supplierinfo = self.product_id._select_seller(
            partner_id=purchase_order.partner_id,
            quantity=purchase_qty_uom,
            date=purchase_order.date_order and purchase_order.date_order.date(
            ),  # and purchase_order.date_order[:10],
            uom_id=self.product_id.uom_po_id)
        fpos = purchase_order.fiscal_position_id
        taxes = fpos.map_tax(self.product_id.supplier_taxes_id
                             ) if fpos else self.product_id.supplier_taxes_id
        if taxes:
            taxes = taxes.filtered(
                lambda t: t.company_id.id == self.company_id.id)

        # compute unit price
        price_unit = 0.0
        if supplierinfo:
            price_unit = self.env['account.tax'].sudo(
            )._fix_tax_included_price_company(
                supplierinfo.price, self.product_id.supplier_taxes_id, taxes,
                self.company_id)
            if purchase_order.currency_id and supplierinfo.currency_id != purchase_order.currency_id:
                price_unit = supplierinfo.currency_id.compute(
                    price_unit, purchase_order.currency_id)

        # purchase line description in supplier lang
        product_in_supplier_lang = self.product_id.with_context(
            lang=supplierinfo.name.lang,
            partner_id=supplierinfo.name.id,
        )
        # FIX: the supplier-language name built here was previously computed
        # and then discarded (the returned dict rebuilt a name from self.name).
        # Use it, keeping the "[code] " prefix conditional on default_code.
        name = product_in_supplier_lang.display_name
        if self.product_id.default_code:
            name = '[%s] %s' % (self.product_id.default_code, name)
        if product_in_supplier_lang.description_purchase:
            name += '\n' + product_in_supplier_lang.description_purchase

        return {
            'name': name,
            'product_qty': purchase_qty_uom,
            'product_id': self.product_id.id,
            'product_uom': self.product_id.uom_po_id.id,
            'price_unit': price_unit,
            'date_planned':
            fields.Date.from_string(purchase_order.date_order) +
            relativedelta(days=int(supplierinfo.delay)),
            'taxes_id': [(6, 0, taxes.ids)],
            'order_id': purchase_order.id,
            'sale_line_id': self.id,
        }

    def _purchase_service_create(self, quantity=False):
        """ On Sales Order confirmation, some lines (services ones) can create a purchase order line and maybe a purchase order.
            If a line should create a RFQ, it will check for existing PO. If no one is find, the SO line will create one, then adds
            a new PO line. The created purchase order line will be linked to the SO line.
            :param quantity: the quantity to force on the PO line, expressed in SO line UoM
        """
        PurchaseOrder = self.env['purchase.order']
        supplier_po_map = {}
        sale_line_purchase_map = {}
        for line in self:
            line = line.with_context(force_company=line.company_id.id)
            # determine vendor of the order (take the first matching company and product)
            suppliers = line.product_id.with_context(
                force_company=line.company_id.id)._select_seller(
                    quantity=line.product_uom_qty, uom_id=line.product_uom)
            if not suppliers:
                raise UserError(
                    _("There is no vendor associated to the product %s. Please define a vendor for this product."
                      ) % (line.product_id.display_name, ))
            supplierinfo = suppliers[0]
            partner_supplier = supplierinfo.name  # yes, this field is not explicit .... it is a res.partner !

            # determine (or create) PO
            purchase_order = supplier_po_map.get(partner_supplier.id)
            if not purchase_order:
                purchase_order = PurchaseOrder.search([
                    ('partner_id', '=', partner_supplier.id),
                    ('state', '=', 'draft'),
                    ('company_id', '=', line.company_id.id),
                ], limit=1)
            if not purchase_order:
                values = line._purchase_service_prepare_order_values(
                    supplierinfo)
                purchase_order = PurchaseOrder.create(values)
            else:  # update origin of existing PO
                so_name = line.order_id.name
                origins = []
                if purchase_order.origin:
                    origins = purchase_order.origin.split(', ') + origins
                if so_name not in origins:
                    origins += [so_name]
                    purchase_order.write({'origin': ', '.join(origins)})
            supplier_po_map[partner_supplier.id] = purchase_order

            # add a PO line to the PO
            values = line._purchase_service_prepare_line_values(
                purchase_order, quantity=quantity)
            purchase_line = line.env['purchase.order.line'].create(values)

            # link the generated purchase to the SO line
            sale_line_purchase_map.setdefault(line,
                                              line.env['purchase.order.line'])
            sale_line_purchase_map[line] |= purchase_line
        return sale_line_purchase_map

    def _purchase_service_generation(self):
        """ Create a Purchase for the first time from the sale line. If the SO line already created a PO, it
            will not create a second one.
        """
        sale_line_purchase_map = {}
        for line in self:
            # Do not regenerate PO line if the SO line has already created one in the past (SO cancel/reconfirmation case)
            if line.product_id.service_to_purchase and not line.purchase_line_count:
                result = line._purchase_service_create()
                sale_line_purchase_map.update(result)
        return sale_line_purchase_map
class ImLivechatReportChannel(models.Model):
    """ Livechat Support Report on the Channels

    Read-only reporting model (``_auto = False``): all fields are filled by
    the SQL view created in :meth:`init`, one row per livechat conversation.
    """

    _name = "im_livechat.report.channel"
    _description = "Livechat Support Channel Report"
    _order = 'start_date, technical_name'
    _auto = False  # backed by the SQL view below, no table is created

    uuid = fields.Char('UUID', readonly=True)
    channel_id = fields.Many2one('mail.channel', 'Conversation', readonly=True)
    channel_name = fields.Char('Channel Name', readonly=True)
    # "<livechat channel name> / <conversation id>", built in the view
    technical_name = fields.Char('Code', readonly=True)
    livechat_channel_id = fields.Many2one('im_livechat.channel',
                                          'Channel',
                                          readonly=True)
    start_date = fields.Datetime('Start Date of session',
                                 readonly=True,
                                 help="Start date of the conversation")
    start_hour = fields.Char('Start Hour of session',
                             readonly=True,
                             help="Start hour of the conversation")
    day_number = fields.Char(
        'Day Number',
        readonly=True,
        help="Day number of the session (1 is Monday, 7 is Sunday)")
    time_to_answer = fields.Float(
        'Time to answer (sec)',
        digits=(16, 2),
        readonly=True,
        group_operator="avg",
        help="Average time in seconds to give the first answer to the visitor")
    # workaround: string column so read_group can group by hour (see init())
    start_date_hour = fields.Char('Hour of start Date of session',
                                  readonly=True)
    duration = fields.Float('Average duration',
                            digits=(16, 2),
                            readonly=True,
                            group_operator="avg",
                            help="Duration of the conversation (in seconds)")
    nbr_speaker = fields.Integer('# of speakers',
                                 readonly=True,
                                 group_operator="avg",
                                 help="Number of different speakers")
    nbr_message = fields.Integer('Average message',
                                 readonly=True,
                                 group_operator="avg",
                                 help="Number of message in the conversation")
    # 0/1 flag aggregated with sum to count unanswered sessions
    is_without_answer = fields.Integer(
        'Session(s) without answer',
        readonly=True,
        group_operator="sum",
        help="""A session is without answer if the operator did not answer.
                       If the visitor is also the operator, the session will always be answered."""
    )
    days_of_activity = fields.Integer(
        'Days of activity',
        group_operator="max",
        readonly=True,
        help="Number of days since the first session of the operator")
    is_anonymous = fields.Integer('Is visitor anonymous', readonly=True)
    country_id = fields.Many2one('res.country',
                                 'Country of the visitor',
                                 readonly=True)
    is_happy = fields.Integer('Visitor is Happy', readonly=True)
    rating = fields.Integer('Rating', group_operator="avg", readonly=True)
    # TODO DBE : Use Selection field - Need : Pie chart must show labels, not keys.
    rating_text = fields.Char('Satisfaction Rate', readonly=True)
    is_unrated = fields.Integer('Session not rated', readonly=True)
    partner_id = fields.Many2one('res.partner', 'Operator', readonly=True)

    def init(self):
        """(Re)create the SQL view backing this model.

        One row per livechat conversation (mail.channel with a
        livechat_operator_id), joined with its messages, its livechat channel
        and its optional rating.
        """
        # Note : start_date_hour must be remove when the read_group will allow grouping on the hour of a datetime. Don't forget to change the view !
        tools.drop_view_if_exists(self.env.cr, 'im_livechat_report_channel')
        self.env.cr.execute("""
            CREATE OR REPLACE VIEW im_livechat_report_channel AS (
                SELECT
                    C.id as id,
                    C.uuid as uuid,
                    C.id as channel_id,
                    C.name as channel_name,
                    CONCAT(L.name, ' / ', C.id) as technical_name,
                    C.livechat_channel_id as livechat_channel_id,
                    C.create_date as start_date,
                    to_char(date_trunc('hour', C.create_date), 'YYYY-MM-DD HH24:MI:SS') as start_date_hour,
                    to_char(date_trunc('hour', C.create_date), 'HH24') as start_hour,
                    extract(dow from  C.create_date) as day_number,
                    EXTRACT('epoch' FROM MAX(M.create_date) - MIN(M.create_date)) AS duration,
                    EXTRACT('epoch' FROM MIN(MO.create_date) - MIN(M.create_date)) AS time_to_answer,
                    count(distinct C.livechat_operator_id) as nbr_speaker,
                    count(distinct M.id) as nbr_message,
                    CASE
                        WHEN EXISTS (select distinct M.author_id FROM mail_message M, mail_message_mail_channel_rel R WHERE M.author_id=C.livechat_operator_id AND R.mail_channel_id = C.id AND R.mail_message_id = M.id and C.livechat_operator_id = M.author_id)
                        THEN 0
                        ELSE 1
                    END as is_without_answer,
                    (DATE_PART('day', date_trunc('day', now()) - date_trunc('day', C.create_date)) + 1) as days_of_activity,
                    CASE
                        WHEN C.anonymous_name IS NULL THEN 0
                        ELSE 1
                    END as is_anonymous,
                    C.country_id,
                    CASE
                        WHEN rate.rating = 10 THEN 1
                        ELSE 0
                    END as is_happy,
                    Rate.rating as rating,
                    CASE
                        WHEN Rate.rating = 1 THEN 'Unhappy'
                        WHEN Rate.rating = 10 THEN 'Happy'
                        WHEN Rate.rating = 5 THEN 'Neutral'
                        ELSE null
                    END as rating_text,
                    CASE
                        WHEN rate.rating > 0 THEN 0
                        ELSE 1
                    END as is_unrated,
                    C.livechat_operator_id as partner_id
                FROM mail_channel C
                    JOIN mail_message_mail_channel_rel R ON (C.id = R.mail_channel_id)
                    JOIN mail_message M ON (M.id = R.mail_message_id)
                    JOIN im_livechat_channel L ON (L.id = C.livechat_channel_id)
                    LEFT JOIN mail_message MO ON (R.mail_message_id = MO.id AND MO.author_id = C.livechat_operator_id)
                    LEFT JOIN rating_rating Rate ON (Rate.res_id = C.id and Rate.res_model = 'mail.channel' and Rate.parent_res_model = 'im_livechat.channel')
                WHERE C.livechat_operator_id is not null
                GROUP BY C.livechat_operator_id, C.id, C.name, C.livechat_channel_id, L.name, C.create_date, C.uuid, Rate.rating
            )
        """)
class BlogPost(models.Model):
    """Blog post: website-published, SEO-enabled document with mail thread."""
    _name = "blog.post"
    _description = "Blog Post"
    _inherit = ['mail.thread', 'website.seo.metadata', 'website.published.multi.mixin']
    _order = 'id DESC'
    _mail_post_access = 'read'

    def _compute_website_url(self):
        super(BlogPost, self)._compute_website_url()
        for blog_post in self:
            blog_post.website_url = "/blog/%s/post/%s" % (slug(blog_post.blog_id), slug(blog_post))

    def _default_content(self):
        return '''
            <p class="o_default_snippet_text">''' + _("Start writing here...") + '''</p>
        '''

    name = fields.Char('Title', required=True, translate=True, default='')
    subtitle = fields.Char('Sub Title', translate=True)
    author_id = fields.Many2one('res.partner', 'Author', default=lambda self: self.env.user.partner_id)
    active = fields.Boolean('Active', default=True)
    cover_properties = fields.Text(
        'Cover Properties',
        default='{"background-image": "none", "background-color": "oe_black", "opacity": "0.2", "resize_class": "cover_mid"}')
    blog_id = fields.Many2one('blog.blog', 'Blog', required=True, ondelete='cascade')
    tag_ids = fields.Many2many('blog.tag', string='Tags')
    content = fields.Html('Content', default=_default_content, translate=html_translate, sanitize=False)
    teaser = fields.Text('Teaser', compute='_compute_teaser', inverse='_set_teaser')
    teaser_manual = fields.Text(string='Teaser Content')

    website_message_ids = fields.One2many(domain=lambda self: [('model', '=', self._name), ('message_type', '=', 'comment')])

    # creation / update stuff
    create_date = fields.Datetime('Created on', index=True, readonly=True)
    published_date = fields.Datetime('Published Date')
    post_date = fields.Datetime('Publishing date', compute='_compute_post_date', inverse='_set_post_date', store=True,
                                help="The blog post will be visible for your visitors as of this date on the website if it is set as published.")
    create_uid = fields.Many2one('res.users', 'Created by', index=True, readonly=True)
    write_date = fields.Datetime('Last Updated on', index=True, readonly=True)
    write_uid = fields.Many2one('res.users', 'Last Contributor', index=True, readonly=True)
    author_avatar = fields.Binary(related='author_id.image_128', string="Avatar", readonly=False)
    visits = fields.Integer('No of Views', copy=False)
    website_id = fields.Many2one(related='blog_id.website_id', readonly=True)

    @api.depends('content', 'teaser_manual')
    def _compute_teaser(self):
        """Teaser is the manual teaser when set, else the first 200 plain-text
        characters of the content."""
        for blog_post in self:
            if blog_post.teaser_manual:
                blog_post.teaser = blog_post.teaser_manual
            else:
                content = html2plaintext(blog_post.content).replace('\n', ' ')
                blog_post.teaser = content[:200] + '...'

    def _set_teaser(self):
        for blog_post in self:
            blog_post.teaser_manual = blog_post.teaser

    @api.depends('create_date', 'published_date')
    def _compute_post_date(self):
        for blog_post in self:
            if blog_post.published_date:
                blog_post.post_date = blog_post.published_date
            else:
                blog_post.post_date = blog_post.create_date

    def _set_post_date(self):
        for blog_post in self:
            blog_post.published_date = blog_post.post_date
            if not blog_post.published_date:
                blog_post._write(dict(post_date=blog_post.create_date))  # dont trigger inverse function

    def _check_for_publication(self, vals):
        """Notify blog followers when posts in self are being published."""
        if vals.get('is_published'):
            for post in self:
                post.blog_id.message_post_with_view(
                    'website_blog.blog_post_template_new_post',
                    subject=post.name,
                    values={'post': post},
                    subtype_id=self.env['ir.model.data'].xmlid_to_res_id('website_blog.mt_blog_blog_published'))
            return True
        return False

    @api.model
    def create(self, vals):
        post_id = super(BlogPost, self.with_context(mail_create_nolog=True)).create(vals)
        post_id._check_for_publication(vals)
        return post_id

    def write(self, vals):
        """Write per post so each one gets its own published_date when the
        publication flag changes without an explicit date."""
        result = True
        for post in self:
            copy_vals = dict(vals)
            published_in_vals = set(vals.keys()) & {'is_published', 'website_published'}
            if (published_in_vals and 'published_date' not in vals and
                    (not post.published_date or post.published_date <= fields.Datetime.now())):
                copy_vals['published_date'] = vals[list(published_in_vals)[0]] and fields.Datetime.now() or False
            # FIX: write on the current post only; super(BlogPost, self) wrote
            # each post's values (incl. its published_date) to the whole
            # recordset on every loop iteration.
            result &= super(BlogPost, post).write(copy_vals)
        self._check_for_publication(vals)
        return result

    def get_access_action(self, access_uid=None):
        """ Instead of the classic form view, redirect to the post on website
        directly if user is an employee or if the post is published. """
        self.ensure_one()
        user = access_uid and self.env['res.users'].sudo().browse(access_uid) or self.env.user
        if user.share and not self.sudo().website_published:
            return super(BlogPost, self).get_access_action(access_uid)
        return {
            'type': 'ir.actions.act_url',
            'url': self.website_url,
            'target': 'self',
            'target_type': 'public',
            'res_id': self.id,
        }

    def _notify_get_groups(self):
        """ Add access button to everyone if the document is published. """
        groups = super(BlogPost, self)._notify_get_groups()
        if self.website_published:
            for group_name, group_method, group_data in groups:
                group_data['has_button_access'] = True
        return groups

    def _notify_record_by_inbox(self, message, recipients_data, msg_vals=False, **kwargs):
        """ Override to avoid keeping all notified recipients of a comment.
        We avoid tracking needaction on post comments. Only emails should be
        sufficient. """
        if msg_vals.get('message_type', message.message_type) == 'comment':
            return
        return super(BlogPost, self)._notify_record_by_inbox(message, recipients_data, msg_vals=msg_vals, **kwargs)

    def _default_website_meta(self):
        """Open Graph / Twitter card metadata for the post page."""
        res = super(BlogPost, self)._default_website_meta()
        res['default_opengraph']['og:description'] = res['default_twitter']['twitter:description'] = self.subtitle
        res['default_opengraph']['og:type'] = 'article'
        res['default_opengraph']['article:published_time'] = self.post_date
        res['default_opengraph']['article:modified_time'] = self.write_date
        res['default_opengraph']['article:tag'] = self.tag_ids.mapped('name')
        # background-image might contain single quotes eg `url('/my/url')`
        res['default_opengraph']['og:image'] = res['default_twitter']['twitter:image'] = json.loads(self.cover_properties).get('background-image', 'none')[4:-1].strip("'")
        res['default_opengraph']['og:title'] = res['default_twitter']['twitter:title'] = self.name
        res['default_meta_description'] = self.subtitle
        return res
class MailMessageSubtype(models.Model):
    """ Definition of message subtypes. A subtype gives a finer type to a
    message and drives follower subscriptions: followers can opt in to only
    some subtypes, so that only those messages are pushed on their Wall. """
    _name = 'mail.message.subtype'
    _description = 'Message subtypes'
    _order = 'sequence, id'

    name = fields.Char(
        'Message Type', required=True, translate=True,
        help='Message subtype gives a more precise type on the message, '
             'especially for system notifications. For example, it can be '
             'a notification related to a new record (New), or to a stage '
             'change in a process (Stage change). Message subtypes allow to '
             'precisely tune the notifications the user want to receive on its wall.')
    description = fields.Text(
        'Description', translate=True,
        help='Description that will be added in the message posted for this '
             'subtype. If void, the name will be added instead.')
    internal = fields.Boolean(
        'Internal Only',
        help='Messages with internal subtypes will be visible only by employees, aka members of base_user group')
    parent_id = fields.Many2one(
        'mail.message.subtype', string='Parent', ondelete='set null',
        help='Parent subtype, used for automatic subscription. This field is not '
             'correctly named. For example on a project, the parent_id of project '
             'subtypes refers to task-related subtypes.')
    relation_field = fields.Char(
        'Relation field',
        help='Field used to link the related model to the subtype model when '
             'using automatic subscription on a related document. The field '
             'is used to compute getattr(related_document.relation_field).')
    res_model = fields.Char(
        'Model',
        help="Model the subtype applies to. If False, this subtype applies to all models.")
    default = fields.Boolean('Default', default=True, help="Activated by default when subscribing.")
    sequence = fields.Integer('Sequence', default=1, help="Used to order subtypes.")
    hidden = fields.Boolean('Hidden', help="Hide the subtype in the follower options")

    @api.model
    def create(self, vals):
        # Subtype data lives in the ormcache entries below: invalidate them
        # whenever a subtype record changes.
        self.clear_caches()
        return super(MailMessageSubtype, self).create(vals)

    def write(self, vals):
        self.clear_caches()
        return super(MailMessageSubtype, self).write(vals)

    def unlink(self):
        self.clear_caches()
        return super(MailMessageSubtype, self).unlink()

    @tools.ormcache('model_name')
    def _get_auto_subscription_subtypes(self, model_name):
        """ Return auto-subscription data for ``model_name`` based on subtype
        matching.

        Example with tasks and project:

         * generic: discussion, res_model = False
         * task: new, res_model = project.task
         * project: task_new, parent_id = new, res_model = project.project,
           field = project_id

        :return: 5-tuple

         * all_ids: subtypes that are generic or attached to ``model_name``
         * def_ids: default subtypes of ``model_name``
         * int_ids: internal-only default subtypes of ``model_name``
         * parent: dict(parent subtype id -> child subtype id), e.g.
           {task_new.id: new.id}
         * relation: dict(parent_model -> relation field names), e.g.
           {'project.project': {'project_id'}}
        """
        all_ids = []
        def_ids = []
        int_ids = []
        parent = {}
        relation = {}
        candidates = self.sudo().search([
            '|', '|',
            ('res_model', '=', False),
            ('res_model', '=', model_name),
            ('parent_id.res_model', '=', model_name)])
        for candidate in candidates:
            if not candidate.res_model or candidate.res_model == model_name:
                # Generic subtype or one directly attached to the model.
                all_ids += candidate.ids
                if candidate.default:
                    def_ids += candidate.ids
            elif candidate.relation_field:
                # Parent-model subtype relayed through a relation field.
                parent[candidate.id] = candidate.parent_id.id
                relation.setdefault(candidate.res_model, set()).add(candidate.relation_field)
            if candidate.internal:
                int_ids += candidate.ids
        return all_ids, def_ids, int_ids, parent, relation

    @api.model
    def default_subtypes(self, model_name):
        """ Retrieve the default subtypes (all, internal, external) for the given model. """
        all_ids, internal_ids, external_ids = self._default_subtypes(model_name)
        return self.browse(all_ids), self.browse(internal_ids), self.browse(external_ids)

    @tools.ormcache('self.env.uid', 'self.env.su', 'model_name')
    def _default_subtypes(self, model_name):
        # Cached per user/superuser flag because search() applies access rules.
        matching = self.search([
            ('default', '=', True),
            '|', ('res_model', '=', model_name), ('res_model', '=', False)])
        internal = matching.filtered('internal')
        external = matching - internal
        return matching.ids, internal.ids, external.ids
class EventMailScheduler(models.Model):
    """ Event automated mailing. This model replaces all existing fields and
    configuration allowing to send emails on events since Harpiya 9. A cron
    exists that periodically checks for mailing to run. """
    _name = 'event.mail'
    _rec_name = 'event_id'
    _description = 'Event Automated Mailing'

    event_id = fields.Many2one('event.event', string='Event', required=True, ondelete='cascade')
    sequence = fields.Integer('Display order')
    notification_type = fields.Selection([('mail', 'Mail')], string='Send', default='mail', required=True)
    interval_nbr = fields.Integer('Interval', default=1)
    interval_unit = fields.Selection([
        ('now', 'Immediately'),
        ('hours', 'Hours'),
        ('days', 'Days'),
        ('weeks', 'Weeks'),
        ('months', 'Months')], string='Unit', default='hours', required=True)
    interval_type = fields.Selection([
        ('after_sub', 'After each registration'),
        ('before_event', 'Before the event'),
        ('after_event', 'After the event')], string='Trigger ', default="before_event", required=True)
    template_id = fields.Many2one(
        'mail.template', string='Email Template',
        domain=[('model', '=', 'event.registration')], ondelete='restrict',
        help='This field contains the template of the mail that will be automatically sent')
    scheduled_date = fields.Datetime('Scheduled Sent Mail', compute='_compute_scheduled_date', store=True)
    mail_registration_ids = fields.One2many('event.mail.registration', 'scheduler_id')
    mail_sent = fields.Boolean('Mail Sent on Event', copy=False)
    done = fields.Boolean('Sent', compute='_compute_done', store=True)

    @api.depends('mail_sent', 'interval_type', 'event_id.registration_ids', 'mail_registration_ids')
    def _compute_done(self):
        """ A global (before/after event) scheduler is done once its single mail
        went out; an after-each-registration scheduler is done once every
        registration has its own mail line and all of them were sent. """
        for mail in self:
            if mail.interval_type in ['before_event', 'after_event']:
                mail.done = mail.mail_sent
            else:
                # FIX: the inner generator variable used to shadow the outer
                # loop variable `mail`; use a distinct name (same behavior,
                # since the iterable was evaluated before the rebinding).
                mail.done = len(mail.mail_registration_ids) == len(mail.event_id.registration_ids) and all(
                    registration.mail_sent for registration in mail.mail_registration_ids)

    @api.depends('event_id.state', 'event_id.date_begin',
                 'interval_type', 'interval_unit', 'interval_nbr')
    def _compute_scheduled_date(self):
        """ Compute the planned send date, offset from the anchor date implied
        by interval_type (registration creation, event start or event end). """
        for mail in self:
            if mail.event_id.state not in ['confirm', 'done']:
                mail.scheduled_date = False
            else:
                if mail.interval_type == 'after_sub':
                    date, sign = mail.event_id.create_date, 1
                elif mail.interval_type == 'before_event':
                    date, sign = mail.event_id.date_begin, -1
                else:
                    date, sign = mail.event_id.date_end, 1
                mail.scheduled_date = date + _INTERVALS[mail.interval_unit](
                    sign * mail.interval_nbr)

    def execute(self):
        """ Send due mailings: per-registration schedulers delegate to their
        registration lines; global schedulers mail all attendees once. """
        for mail in self:
            now = fields.Datetime.now()
            if mail.interval_type == 'after_sub':
                # update registration lines: one line per registration not yet tracked
                lines = [(0, 0, {
                    'registration_id': registration.id
                }) for registration in (
                    mail.event_id.registration_ids -
                    mail.mapped('mail_registration_ids.registration_id'))]
                if lines:
                    mail.write({'mail_registration_ids': lines})
                # execute scheduler on registrations
                mail.mail_registration_ids.filtered(
                    lambda reg: reg.scheduled_date and reg.scheduled_date <= now).execute()
            else:
                # Do not send emails if the mailing was scheduled before the event but the event is over
                if not mail.mail_sent and (
                        mail.interval_type != 'before_event'
                        or mail.event_id.date_end > now
                ) and mail.notification_type == 'mail':
                    mail.event_id.mail_attendees(mail.template_id.id)
                    mail.write({'mail_sent': True})
        return True

    @api.model
    def _warn_template_error(self, scheduler, exception):
        """ Email the event organizer/responsible and the template's last
        writer when a scheduler raised, so the failure does not go unnoticed.

        :param scheduler: the event.mail record whose execution failed
        :param exception: the exception that was raised
        """
        # We warn ~ once by hour ~ instead of every 10 min if the interval unit is more than 'hours'.
        if random.random() < 0.1666 or scheduler.interval_unit in ('now', 'hours'):
            ex_s = exception_to_unicode(exception)
            try:
                event, template = scheduler.event_id, scheduler.template_id
                emails = list(set([event.organizer_id.email, event.user_id.email,
                                   template.write_uid.email]))
                # FIX: translate the literal first, then interpolate. The old
                # code passed the already-interpolated string to _(), so the
                # gettext lookup could never match a translation.
                subject = _("WARNING: Event Scheduler Error for event: %s") % event.name
                body = _("""Event Scheduler for:
  - Event: %s (%s)
  - Scheduled: %s
  - Template: %s (%s)

Failed with error:
  - %s

You receive this email because you are:
  - the organizer of the event,
  - or the responsible of the event,
  - or the last writer of the template.""") % (
                    event.name, event.id, scheduler.scheduled_date,
                    template.name, template.id, ex_s)
                email = self.env['ir.mail_server'].build_email(
                    email_from=self.env.user.email,
                    email_to=emails,
                    subject=subject,
                    body=body,
                )
                self.env['ir.mail_server'].send_email(email)
            except Exception as e:
                # Best effort: never let the warning email mask the original error.
                _logger.error(
                    "Exception while sending traceback by email: %s.\n Original Traceback:\n%s",
                    e, exception)
                pass

    @api.model
    def run(self, autocommit=False):
        """ Cron entry point: execute every due scheduler in its own savepoint
        so one failure does not roll back the others.

        :param autocommit: commit after each successful scheduler (cron mode)
        """
        # FIX: use the public fields.Datetime API instead of the accidental
        # `fields.datetime` module attribute the old code relied on.
        schedulers = self.search([
            ('done', '=', False),
            ('scheduled_date', '<=', fields.Datetime.to_string(fields.Datetime.now()))
        ])
        for scheduler in schedulers:
            try:
                with self.env.cr.savepoint():
                    scheduler.execute()
            except Exception as e:
                _logger.exception(e)
                self.invalidate_cache()
                self._warn_template_error(scheduler, e)
            else:
                if autocommit:
                    self.env.cr.commit()
        return True
class WebsiteVisitor(models.Model):
    """ Tracks anonymous and logged-in website visitors: identity (optional
    linked partner), localisation info, visited-page statistics and connection
    recency. Visitors are identified through the `visitor_uuid` cookie. """
    _name = 'website.visitor'
    _description = 'Website Visitor'
    _order = 'last_connection_datetime DESC'

    name = fields.Char('Name')
    # Random token stored in the `visitor_uuid` cookie; restricted group so it
    # is only readable by website publishers.
    access_token = fields.Char(required=True,
                               default=lambda x: uuid.uuid4().hex,
                               index=True, copy=False,
                               groups='base.group_website_publisher')
    active = fields.Boolean('Active', default=True)
    website_id = fields.Many2one('website', "Website", readonly=True)
    partner_id = fields.Many2one('res.partner', string="Linked Partner",
                                 help="Partner of the last logged in user.")
    partner_image = fields.Binary(related='partner_id.image_1920')
    # localisation and info
    country_id = fields.Many2one('res.country', 'Country', readonly=True)
    country_flag = fields.Binary(related="country_id.image",
                                 string="Country Flag")
    lang_id = fields.Many2one(
        'res.lang', string='Language',
        help="Language from the website when visitor has been created")
    timezone = fields.Selection(_tz_get, string='Timezone')
    email = fields.Char(string='Email', compute='_compute_email_phone')
    mobile = fields.Char(string='Mobile Phone', compute='_compute_email_phone')
    # Visit fields
    visit_count = fields.Integer(
        'Number of visits', default=1, readonly=True,
        help="A new visit is considered if last connection was more than 8 hours ago.")
    website_track_ids = fields.One2many('website.track', 'visitor_id',
                                        string='Visited Pages History',
                                        readonly=True)
    visitor_page_count = fields.Integer(
        'Page Views', compute="_compute_page_statistics",
        help="Total number of visits on tracked pages")
    page_ids = fields.Many2many('website.page', string="Visited Pages",
                                compute="_compute_page_statistics")
    page_count = fields.Integer('# Visited Pages',
                                compute="_compute_page_statistics",
                                help="Total number of tracked page visited")
    last_visited_page_id = fields.Many2one(
        'website.page', string="Last Visited Page",
        compute="_compute_last_visited_page_id")
    # Time fields
    create_date = fields.Datetime('First connection date', readonly=True)
    last_connection_datetime = fields.Datetime('Last Connection',
                                               default=fields.Datetime.now,
                                               help="Last page view date",
                                               readonly=True)
    time_since_last_action = fields.Char(
        'Last action', compute="_compute_time_statistics",
        help='Time since last page view. E.g.: 2 minutes ago')
    is_connected = fields.Boolean(
        'Is connected ?', compute='_compute_time_statistics',
        help='A visitor is considered as connected if his last page view was within the last 5 minutes.')

    _sql_constraints = [
        ('access_token_unique', 'unique(access_token)',
         'Access token should be unique.'),
        ('partner_uniq', 'unique(partner_id)',
         'A partner is linked to only one visitor.'),
    ]

    @api.depends('name')
    def name_get(self):
        # Fall back to a generic "Website Visitor #id" label for anonymous visitors.
        return [(record.id,
                 (record.name or _('Website Visitor #%s') % record.id))
                for record in self]

    @api.depends('partner_id.email_normalized', 'partner_id.mobile',
                 'partner_id.phone')
    def _compute_email_phone(self):
        # Batch-read the linked partners once instead of browsing per visitor;
        # mobile falls back to the fixed phone number when unset.
        results = self.env['res.partner'].search_read(
            [('id', 'in', self.partner_id.ids)],
            ['id', 'email_normalized', 'mobile', 'phone'],
        )
        mapped_data = {
            result['id']: {
                'email_normalized': result['email_normalized'],
                'mobile':
                result['mobile'] if result['mobile'] else result['phone']
            }
            for result in results
        }
        for visitor in self:
            visitor.email = mapped_data.get(visitor.partner_id.id,
                                            {}).get('email_normalized')
            visitor.mobile = mapped_data.get(visitor.partner_id.id,
                                             {}).get('mobile')

    @api.depends('website_track_ids')
    def _compute_page_statistics(self):
        # One read_group over website.track; __count holds the per-group row
        # count because lazy=False groups on all three fields at once.
        results = self.env['website.track'].read_group(
            [('visitor_id', 'in', self.ids), ('url', '!=', False)],
            ['visitor_id', 'page_id', 'url'], ['visitor_id', 'page_id', 'url'],
            lazy=False)
        mapped_data = {}
        for result in results:
            visitor_info = mapped_data.get(result['visitor_id'][0], {
                'page_count': 0,
                'visitor_page_count': 0,
                'page_ids': set()
            })
            visitor_info['visitor_page_count'] += result['__count']
            visitor_info['page_count'] += 1
            if result['page_id']:
                visitor_info['page_ids'].add(result['page_id'][0])
            mapped_data[result['visitor_id'][0]] = visitor_info

        for visitor in self:
            visitor_info = mapped_data.get(visitor.id, {
                'page_count': 0,
                'visitor_page_count': 0,
                'page_ids': set()
            })
            visitor.page_ids = [(6, 0, visitor_info['page_ids'])]
            visitor.visitor_page_count = visitor_info['visitor_page_count']
            visitor.page_count = visitor_info['page_count']

    @api.depends('website_track_ids.page_id')
    def _compute_last_visited_page_id(self):
        # read_group with visit_datetime:max picks the most recent track per
        # (visitor, page); tracks without a page_id are ignored.
        results = self.env['website.track'].read_group(
            [('visitor_id', 'in', self.ids)],
            ['visitor_id', 'page_id', 'visit_datetime:max'],
            ['visitor_id', 'page_id'],
            lazy=False)
        mapped_data = {
            result['visitor_id'][0]: result['page_id'][0]
            for result in results if result['page_id']
        }
        for visitor in self:
            visitor.last_visited_page_id = mapped_data.get(visitor.id, False)

    @api.depends('last_connection_datetime')
    def _compute_time_statistics(self):
        for visitor in self:
            visitor.time_since_last_action = _format_time_ago(
                self.env, (datetime.now() - visitor.last_connection_datetime))
            # Connected = last page view within the last 5 minutes.
            visitor.is_connected = (
                datetime.now() -
                visitor.last_connection_datetime) < timedelta(minutes=5)

    def _prepare_visitor_send_mail_values(self):
        """ Return default values for the mail composer targeting this
        visitor's partner, or {} when no partner email is known. """
        if self.partner_id.email:
            return {
                'res_model': 'res.partner',
                'res_id': self.partner_id.id,
                'partner_ids': [self.partner_id.id],
            }
        return {}

    def action_send_mail(self):
        """ Open the mail composer pre-filled for this visitor's partner.

        :raises UserError: when the visitor has no email to write to.
        """
        self.ensure_one()
        visitor_mail_values = self._prepare_visitor_send_mail_values()
        if not visitor_mail_values:
            raise UserError(_("There is no email linked this visitor."))
        compose_form = self.env.ref('mail.email_compose_message_wizard_form',
                                    False)
        ctx = dict(
            default_model=visitor_mail_values.get('res_model'),
            default_res_id=visitor_mail_values.get('res_id'),
            default_use_template=False,
            default_partner_ids=[(6, 0, visitor_mail_values.get('partner_ids'))
                                 ],
            default_composition_mode='comment',
            default_reply_to=self.env.user.partner_id.email,
        )
        return {
            'name': _('Compose Email'),
            'type': 'ir.actions.act_window',
            'view_mode': 'form',
            'res_model': 'mail.compose.message',
            'views': [(compose_form.id, 'form')],
            'view_id': compose_form.id,
            'target': 'new',
            'context': ctx,
        }

    def _get_visitor_from_request(self, force_create=False):
        """ Return the visitor as sudo from the request if there is a
        visitor_uuid cookie. It is possible that the partner has changed or
        has disconnected. In that case the cookie is still referencing the
        old visitor and need to be replaced with the one of the visitor
        returned !!!.

        :param force_create: create a fresh visitor when none matches.
        :return: a (possibly empty) website.visitor sudo recordset, or None
            outside an HTTP request.
        """
        # This function can be called in json with mobile app.
        # In case of mobile app, no uid is set on the jsonRequest env.
        # In case of multi db, _env is None on request, and request.env unbound.
        if not request:
            return None
        Visitor = self.env['website.visitor'].sudo()
        visitor = Visitor
        access_token = request.httprequest.cookies.get('visitor_uuid')
        if access_token:
            # active_test=False: archived visitors must still be matched.
            visitor = Visitor.with_context(active_test=False).search([
                ('access_token', '=', access_token)
            ])
            # Prefetch access_token and other fields. Since access_token has a restricted group and we access
            # a non restricted field (partner_id) first it is not fetched and will require an additional query to be retrieved.
            visitor.access_token

        if not self.env.user._is_public():
            partner_id = self.env.user.partner_id
            if not visitor or visitor.partner_id and visitor.partner_id != partner_id:
                # Partner and no cookie or wrong cookie
                visitor = Visitor.with_context(active_test=False).search([
                    ('partner_id', '=', partner_id.id)
                ])
        elif visitor and visitor.partner_id:  # Cookie associated to a Partner
            # Public user carrying a partner-bound cookie: discard the match.
            visitor = Visitor

        if force_create and not visitor:
            visitor = self._create_visitor()

        return visitor

    def _handle_webpage_dispatch(self, response, website_page):
        # get visitor. Done here to avoid having to do it multiple times in case of override.
        visitor_sudo = self._get_visitor_from_request(force_create=True)
        if request.httprequest.cookies.get('visitor_uuid',
                                           '') != visitor_sudo.access_token:
            # (Re)issue the identifying cookie for one year.
            expiration_date = datetime.now() + timedelta(days=365)
            response.set_cookie('visitor_uuid',
                                visitor_sudo.access_token,
                                expires=expiration_date)
        self._handle_website_page_visit(response, website_page, visitor_sudo)

    def _handle_website_page_visit(self, response, website_page, visitor_sudo):
        """ Called on dispatch. This will create a website.visitor if the
        http request object is a tracked website page or a tracked view.
        Only on tracked elements to avoid having too much operations done on
        every page or other http requests.
        Note: The side effect is that the last_connection_datetime is updated
        ONLY on tracked elements."""
        url = request.httprequest.url
        website_track_values = {
            'url': url,
            'visit_datetime': datetime.now(),
        }
        if website_page:
            website_track_values['page_id'] = website_page.id
            domain = [('page_id', '=', website_page.id)]
        else:
            domain = [('url', '=', url)]
        visitor_sudo._add_tracking(domain, website_track_values)
        # Keep the visitor's language in sync with the request language.
        if visitor_sudo.lang_id.id != request.lang.id:
            visitor_sudo.write({'lang_id': request.lang.id})

    def _add_tracking(self, domain, website_track_values):
        """ Add the track and update the visitor"""
        domain = expression.AND([domain, [('visitor_id', '=', self.id)]])
        last_view = self.env['website.track'].sudo().search(domain, limit=1)
        # Deduplicate: only record a new track if the same page/url was not
        # already seen within the last 30 minutes.
        if not last_view or last_view.visit_datetime < datetime.now(
        ) - timedelta(minutes=30):
            website_track_values['visitor_id'] = self.id
            self.env['website.track'].create(website_track_values)
        self._update_visitor_last_visit()

    def _create_visitor(self, website_track_values=None):
        """ Create a visitor and add a track to it if website_track_values is set."""
        # Country comes from the GeoIP data on the session, when available.
        country_code = request.session.get('geoip',
                                           {}).get('country_code', False)
        country_id = request.env['res.country'].sudo().search(
            [('code', '=', country_code)],
            limit=1).id if country_code else False
        vals = {
            'lang_id': request.lang.id,
            'country_id': country_id,
            'website_id': request.website.id,
        }
        if not self.env.user._is_public():
            vals['partner_id'] = self.env.user.partner_id.id
            vals['name'] = self.env.user.partner_id.name
        if website_track_values:
            vals['website_track_ids'] = [(0, 0, website_track_values)]
        return self.sudo().create(vals)

    def _cron_archive_visitors(self):
        # Archive (active=False) visitors with no connection for 7 days.
        one_week_ago = datetime.now() - timedelta(days=7)
        visitors_to_archive = self.env['website.visitor'].sudo().search([
            ('last_connection_datetime', '<', one_week_ago)
        ])
        visitors_to_archive.write({'active': False})

    def _update_visitor_last_visit(self):
        """ We need to do this part here to avoid concurrent updates error. """
        try:
            with self.env.cr.savepoint():
                # NOWAIT: if another request holds the row, fail immediately
                # instead of blocking; the except below then skips the update.
                query_lock = "SELECT * FROM website_visitor where id = %s FOR NO KEY UPDATE NOWAIT"
                self.env.cr.execute(query_lock, (self.id, ),
                                    log_exceptions=False)

                date_now = datetime.now()
                query = "UPDATE website_visitor SET "
                # More than 8 hours since the last connection counts as a new visit.
                if self.last_connection_datetime < (date_now -
                                                    timedelta(hours=8)):
                    query += "visit_count = visit_count + 1,"
                query += """
                    active = True,
                    last_connection_datetime = %s
                    WHERE id = %s
                """
                self.env.cr.execute(query, (date_now, self.id),
                                    log_exceptions=False)
        except Exception:
            # Deliberate best-effort: losing one last-visit update under
            # concurrency is acceptable; never break the page dispatch.
            pass
class MassMailing(models.Model): """ MassMailing models a wave of emails for a mass mailign campaign. A mass mailing is an occurence of sending emails. """ _name = 'mailing.mailing' _description = 'Mass Mailing' _inherit = ['mail.thread', 'mail.activity.mixin'] # number of periods for tracking mail_mail statistics _period_number = 6 _order = 'sent_date DESC' _inherits = {'utm.source': 'source_id'} _rec_name = "subject" @api.model def _get_default_mail_server_id(self): server_id = self.env['ir.config_parameter'].sudo().get_param( 'mass_mailing.mail_server_id') try: server_id = literal_eval(server_id) if server_id else False return self.env['ir.mail_server'].search([('id', '=', server_id) ]).id except ValueError: return False @api.model def default_get(self, fields): res = super(MassMailing, self).default_get(fields) if 'reply_to_mode' in fields and not 'reply_to_mode' in res and res.get( 'mailing_model_real'): if res['mailing_model_real'] in ['res.partner', 'mailing.contact']: res['reply_to_mode'] = 'email' else: res['reply_to_mode'] = 'thread' return res active = fields.Boolean(default=True, tracking=True) subject = fields.Char('Subject', help='Subject of emails to send', required=True, translate=True) email_from = fields.Char( string='Send From', required=True, default=lambda self: self.env['mail.message']._get_default_from()) sent_date = fields.Datetime(string='Sent Date', copy=False) schedule_date = fields.Datetime(string='Scheduled for', tracking=True) # don't translate 'body_arch', the translations are only on 'body_html' body_arch = fields.Html(string='Body', translate=False) body_html = fields.Html(string='Body converted to be send by mail', sanitize_attributes=False) attachment_ids = fields.Many2many('ir.attachment', 'mass_mailing_ir_attachments_rel', 'mass_mailing_id', 'attachment_id', string='Attachments') keep_archives = fields.Boolean(string='Keep Archives') campaign_id = fields.Many2one('utm.campaign', string='UTM Campaign') source_id = 
fields.Many2one( 'utm.source', string='Source', required=True, ondelete='cascade', help= "This is the link source, e.g. Search Engine, another domain, or name of email list" ) medium_id = fields.Many2one('utm.medium', string='Medium', help="Delivery method: Email") clicks_ratio = fields.Integer(compute="_compute_clicks_ratio", string="Number of Clicks") state = fields.Selection([('draft', 'Draft'), ('in_queue', 'In Queue'), ('sending', 'Sending'), ('done', 'Sent')], string='Status', required=True, tracking=True, copy=False, default='draft', group_expand='_group_expand_states') color = fields.Integer(string='Color Index') user_id = fields.Many2one('res.users', string='Responsible', tracking=True, default=lambda self: self.env.user) # mailing options mailing_type = fields.Selection([('mail', 'Email')], string="Mailing Type", default="mail", required=True) reply_to_mode = fields.Selection([('thread', 'Recipient Followers'), ('email', 'Specified Email Address')], string='Reply-To Mode', required=True) reply_to = fields.Char( string='Reply To', help='Preferred Reply-To Address', default=lambda self: self.env['mail.message']._get_default_from()) # recipients mailing_model_real = fields.Char(compute='_compute_model', string='Recipients Real Model', default='mailing.contact', required=True) mailing_model_id = fields.Many2one( 'ir.model', string='Recipients Model', domain=[('model', 'in', MASS_MAILING_BUSINESS_MODELS)], default=lambda self: self.env.ref('mass_mailing.model_mailing_list' ).id) mailing_model_name = fields.Char(related='mailing_model_id.model', string='Recipients Model Name', readonly=True, related_sudo=True) mailing_domain = fields.Char(string='Domain', default=[]) mail_server_id = fields.Many2one( 'ir.mail_server', string='Mail Server', default=_get_default_mail_server_id, help= "Use a specific mail server in priority. Otherwise Harpiya relies on the first outgoing mail server available (based on their sequencing) as it does for normal mails." 
) contact_list_ids = fields.Many2many('mailing.list', 'mail_mass_mailing_list_rel', string='Mailing Lists') contact_ab_pc = fields.Integer( string='A/B Testing percentage', help= 'Percentage of the contacts that will be mailed. Recipients will be taken randomly.', default=100) unique_ab_testing = fields.Boolean( string='Allow A/B Testing', default=False, help= 'If checked, recipients will be mailed only once for the whole campaign. ' 'This lets you send different mailings to randomly selected recipients and test ' 'the effectiveness of the mailings, without causing duplicate messages.' ) # statistics data mailing_trace_ids = fields.One2many('mailing.trace', 'mass_mailing_id', string='Emails Statistics') total = fields.Integer(compute="_compute_total") scheduled = fields.Integer(compute="_compute_statistics") expected = fields.Integer(compute="_compute_statistics") ignored = fields.Integer(compute="_compute_statistics") sent = fields.Integer(compute="_compute_statistics") delivered = fields.Integer(compute="_compute_statistics") opened = fields.Integer(compute="_compute_statistics") clicked = fields.Integer(compute="_compute_statistics") replied = fields.Integer(compute="_compute_statistics") bounced = fields.Integer(compute="_compute_statistics") failed = fields.Integer(compute="_compute_statistics") received_ratio = fields.Integer(compute="_compute_statistics", string='Received Ratio') opened_ratio = fields.Integer(compute="_compute_statistics", string='Opened Ratio') replied_ratio = fields.Integer(compute="_compute_statistics", string='Replied Ratio') bounced_ratio = fields.Integer(compute="_compute_statistics", string='Bounced Ratio') next_departure = fields.Datetime(compute="_compute_next_departure", string='Scheduled date') def _compute_total(self): for mass_mailing in self: mass_mailing.total = len(mass_mailing.sudo()._get_recipients()) def _compute_clicks_ratio(self): self.env.cr.execute( """ SELECT COUNT(DISTINCT(stats.id)) AS nb_mails, 
COUNT(DISTINCT(clicks.mailing_trace_id)) AS nb_clicks, stats.mass_mailing_id AS id FROM mailing_trace AS stats LEFT OUTER JOIN link_tracker_click AS clicks ON clicks.mailing_trace_id = stats.id WHERE stats.mass_mailing_id IN %s GROUP BY stats.mass_mailing_id """, (tuple(self.ids), )) mass_mailing_data = self.env.cr.dictfetchall() mapped_data = dict([(m['id'], 100 * m['nb_clicks'] / m['nb_mails']) for m in mass_mailing_data]) for mass_mailing in self: mass_mailing.clicks_ratio = mapped_data.get(mass_mailing.id, 0) @api.depends('mailing_model_id') def _compute_model(self): for record in self: record.mailing_model_real = ( record.mailing_model_name != 'mailing.list' ) and record.mailing_model_name or 'mailing.contact' def _compute_statistics(self): """ Compute statistics of the mass mailing """ self.env.cr.execute( """ SELECT m.id as mailing_id, COUNT(s.id) AS expected, COUNT(CASE WHEN s.sent is not null THEN 1 ELSE null END) AS sent, COUNT(CASE WHEN s.scheduled is not null AND s.sent is null AND s.exception is null AND s.ignored is null AND s.bounced is null THEN 1 ELSE null END) AS scheduled, COUNT(CASE WHEN s.scheduled is not null AND s.sent is null AND s.exception is null AND s.ignored is not null THEN 1 ELSE null END) AS ignored, COUNT(CASE WHEN s.sent is not null AND s.exception is null AND s.bounced is null THEN 1 ELSE null END) AS delivered, COUNT(CASE WHEN s.opened is not null THEN 1 ELSE null END) AS opened, COUNT(CASE WHEN s.clicked is not null THEN 1 ELSE null END) AS clicked, COUNT(CASE WHEN s.replied is not null THEN 1 ELSE null END) AS replied, COUNT(CASE WHEN s.bounced is not null THEN 1 ELSE null END) AS bounced, COUNT(CASE WHEN s.exception is not null THEN 1 ELSE null END) AS failed FROM mailing_trace s RIGHT JOIN mailing_mailing m ON (m.id = s.mass_mailing_id) WHERE m.id IN %s GROUP BY m.id """, (tuple(self.ids), )) for row in self.env.cr.dictfetchall(): total = row['expected'] = (row['expected'] - row['ignored']) or 1 row['received_ratio'] = 100.0 
* row['delivered'] / total row['opened_ratio'] = 100.0 * row['opened'] / total row['clicks_ratio'] = 100.0 * row['clicked'] / total row['replied_ratio'] = 100.0 * row['replied'] / total row['bounced_ratio'] = 100.0 * row['bounced'] / total self.browse(row.pop('mailing_id')).update(row) def _compute_next_departure(self): cron_next_call = self.env.ref( 'mass_mailing.ir_cron_mass_mailing_queue').sudo().nextcall str2dt = fields.Datetime.from_string cron_time = str2dt(cron_next_call) for mass_mailing in self: if mass_mailing.schedule_date: schedule_date = str2dt(mass_mailing.schedule_date) mass_mailing.next_departure = max(schedule_date, cron_time) else: mass_mailing.next_departure = cron_time @api.onchange('mailing_model_name', 'contact_list_ids') def _onchange_model_and_list(self): mailing_domain = literal_eval( self.mailing_domain) if self.mailing_domain else [] if self.mailing_model_name: if mailing_domain: try: self.env[self.mailing_model_name].search(mailing_domain, limit=1) except: mailing_domain = [] if not mailing_domain: if self.mailing_model_name == 'mailing.list' and self.contact_list_ids: mailing_domain = [('list_ids', 'in', self.contact_list_ids.ids)] elif 'is_blacklisted' in self.env[ self. mailing_model_name]._fields and not self.mailing_domain: mailing_domain = [('is_blacklisted', '=', False)] elif 'opt_out' in self.env[ self. 
# NOTE(review): this chunk opens mid-way through the MassMailing model.
# The statement just below is the tail of ``_onchange_model_and_list``;
# its opening lines (presumably ``elif 'opt_out' in self.env[self.``)
# live above this chunk and are not visible here — confirm against the
# full file before editing.
                mailing_model_name]._fields and not self.mailing_domain:
                mailing_domain = [('opt_out', '=', False)]
        else:
            mailing_domain = []
        # The domain is stored as its repr() string; it is re-read later
        # with safe_eval in _get_recipients().
        self.mailing_domain = repr(mailing_domain)

    @api.onchange('mailing_type')
    def _onchange_mailing_type(self):
        # Default the UTM medium to "Email" as soon as the mailing is an email.
        if self.mailing_type == 'mail' and not self.medium_id:
            self.medium_id = self.env.ref('utm.utm_medium_email').id

    # ------------------------------------------------------
    # ORM
    # ------------------------------------------------------

    @api.model
    def create(self, values):
        """Create a mailing: derive a fallback name from the subject,
        replace inline base64 images in the body by attachment URLs, and
        default the UTM medium for email mailings."""
        if values.get('subject') and not values.get('name'):
            # "<subject> <creation datetime>" keeps auto-named mailings unique.
            values['name'] = "%s %s" % (
                values['subject'],
                datetime.strftime(fields.datetime.now(),
                                  tools.DEFAULT_SERVER_DATETIME_FORMAT))
        if values.get('body_html'):
            values['body_html'] = self._convert_inline_images_to_urls(
                values['body_html'])
        if 'medium_id' not in values and values.get('mailing_type',
                                                    'mail') == 'mail':
            values['medium_id'] = self.env.ref('utm.utm_medium_email').id
        return super(MassMailing, self).create(values)

    def write(self, values):
        # Keep the body free of inline base64 images on updates as well.
        if values.get('body_html'):
            values['body_html'] = self._convert_inline_images_to_urls(
                values['body_html'])
        return super(MassMailing, self).write(values)

    @api.returns('self', lambda value: value.id)
    def copy(self, default=None):
        """Duplicate the mailing with a "(copy)" name and the same contact
        lists, then recompute the recipient domain on the copy."""
        self.ensure_one()
        default = dict(default or {},
                       name=_('%s (copy)') % self.name,
                       contact_list_ids=self.contact_list_ids.ids)
        res = super(MassMailing, self).copy(default=default)
        # Re-evaluating the domain
        res._onchange_model_and_list()
        return res

    def _group_expand_states(self, states, domain, order):
        # Always show every state column in grouped (kanban) views.
        return [key for key, val in type(self).state.selection]

    # ------------------------------------------------------
    # ACTIONS
    # ------------------------------------------------------

    def action_duplicate(self):
        """Duplicate the mailing and open the copy in edit mode."""
        self.ensure_one()
        mass_mailing_copy = self.copy()
        if mass_mailing_copy:
            context = dict(self.env.context)
            context['form_view_initial_mode'] = 'edit'
            return {
                'type': 'ir.actions.act_window',
                'view_mode': 'form',
                'res_model': 'mailing.mailing',
                'res_id': mass_mailing_copy.id,
                'context': context,
            }
        return False

    def action_test(self):
        """Open the "Test Mailing" wizard pre-filled with this mailing."""
        self.ensure_one()
        ctx = dict(self.env.context, default_mass_mailing_id=self.id)
        return {
            'name': _('Test Mailing'),
            'type': 'ir.actions.act_window',
            'view_mode': 'form',
            'res_model': 'mailing.mailing.test',
            'target': 'new',
            'context': ctx,
        }

    def action_schedule(self):
        """Open the schedule-date wizard for this mailing."""
        self.ensure_one()
        action = self.env.ref(
            'mass_mailing.mailing_mailing_schedule_date_action').read()[0]
        action['context'] = dict(self.env.context,
                                 default_mass_mailing_id=self.id)
        return action

    def action_put_in_queue(self):
        # Queued mailings are picked up by _process_mass_mailing_queue (cron).
        self.write({'state': 'in_queue'})

    def action_cancel(self):
        # Back to draft and forget any planned schedule date.
        self.write({'state': 'draft', 'schedule_date': False})

    def action_retry_failed(self):
        """Drop the mail.mail records (and their traces) that bounced with
        an exception, then re-queue the mailing so they are re-generated."""
        failed_mails = self.env['mail.mail'].sudo().search([
            ('mailing_id', 'in', self.ids),
            ('state', '=', 'exception')
        ])
        failed_mails.mapped('mailing_trace_ids').unlink()
        failed_mails.unlink()
        self.write({'state': 'in_queue'})

    def action_view_traces_scheduled(self):
        return self._action_view_traces_filtered('scheduled')

    def action_view_traces_ignored(self):
        return self._action_view_traces_filtered('ignored')

    def action_view_traces_failed(self):
        return self._action_view_traces_filtered('failed')

    def _action_view_traces_filtered(self, view_filter):
        """Open the trace list for this mailing with the given search
        filter (e.g. 'scheduled', 'ignored', 'failed') enabled."""
        action = self.env.ref('mass_mailing.mailing_trace_action').read()[0]
        action['name'] = _('%s Traces') % (self.name)
        action['context'] = {
            'search_default_mass_mailing_id': self.id,
        }
        filter_key = 'search_default_filter_%s' % (view_filter)
        action['context'][filter_key] = True
        return action

    def action_view_sent(self):
        return self._action_view_documents_filtered('sent')

    def action_view_opened(self):
        return self._action_view_documents_filtered('opened')

    def action_view_replied(self):
        return self._action_view_documents_filtered('replied')

    def action_view_bounced(self):
        return self._action_view_documents_filtered('bounced')

    def action_view_clicked(self):
        return self._action_view_documents_filtered('clicked')

    def action_view_delivered(self):
        return self._action_view_documents_filtered('delivered')

    def _action_view_documents_filtered(self, view_filter):
        """Open the target-model records whose trace matches *view_filter*.

        'delivered' is derived (sent and not bounced); the other filters
        read the homonymous field on the trace records directly.
        """
        if view_filter in ('sent', 'opened', 'replied', 'bounced', 'clicked'):
            opened_stats = self.mailing_trace_ids.filtered(
                lambda stat: stat[view_filter])
        elif view_filter == ('delivered'):
            opened_stats = self.mailing_trace_ids.filtered(
                lambda stat: stat.sent and not stat.bounced)
        else:
            # Unknown filter: show nothing rather than everything.
            opened_stats = self.env['mailing.trace']
        res_ids = opened_stats.mapped('res_id')
        model_name = self.env['ir.model']._get(
            self.mailing_model_real).display_name
        return {
            'name': model_name,
            'type': 'ir.actions.act_window',
            'view_mode': 'tree',
            'res_model': self.mailing_model_real,
            'domain': [('id', 'in', res_ids)],
            'context': dict(self._context, create=False)
        }

    def update_opt_out(self, email, list_ids, value):
        """Set the opt-out flag to *value* for *email* on the given mailing
        lists and log a message on each affected contact.

        Archived contacts are included (active_test=False).
        """
        if len(list_ids) > 0:
            model = self.env['mailing.contact'].with_context(active_test=False)
            records = model.search([('email_normalized', '=',
                                     tools.email_normalize(email))])
            opt_out_records = self.env['mailing.contact.subscription'].search([
                ('contact_id', 'in', records.ids),
                ('list_id', 'in', list_ids),
                ('opt_out', '!=', value)
            ])
            opt_out_records.write({'opt_out': value})
            message = _('The recipient <strong>unsubscribed from %s</strong> mailing list(s)') \
                if value else _('The recipient <strong>subscribed to %s</strong> mailing list(s)')
            for record in records:
                # filter the list_id by record
                record_lists = opt_out_records.filtered(
                    lambda rec: rec.contact_id.id == record.id)
                if len(record_lists) > 0:
                    # NOTE(review): `message` is already translated above,
                    # yet the %-formatted result is wrapped in _() again;
                    # also `list` shadows the builtin — confirm intent.
                    record.sudo().message_post(body=_(message % ', '.join(
                        str(list.name)
                        for list in record_lists.mapped('list_id'))))

    # ------------------------------------------------------
    # Email Sending
    # ------------------------------------------------------

    def _get_opt_out_list(self):
        """Returns a set of emails opted-out in target model"""
        self.ensure_one()
        opt_out = {}
        target = self.env[self.mailing_model_real]
        if self.mailing_model_real == "mailing.contact":
            # if user is opt_out on One list but not on another
            # or if two user with same email address, one opted in and the other one opted out, send the mail anyway
            # TODO DBE Fixme : Optimise the following to get real opt_out and opt_in
            target_list_contacts = self.env[
                'mailing.contact.subscription'].search([
                    ('list_id', 'in', self.contact_list_ids.ids)
                ])
            opt_out_contacts = target_list_contacts.filtered(
                lambda rel: rel.opt_out).mapped('contact_id.email_normalized')
            opt_in_contacts = target_list_contacts.filtered(
                lambda rel: not rel.opt_out).mapped(
                    'contact_id.email_normalized')
            # Opt-in on any list wins over opt-out on another.
            opt_out = set(c for c in opt_out_contacts
                          if c not in opt_in_contacts)
            _logger.info("Mass-mailing %s targets %s, blacklist: %s emails",
                         self, target._name, len(opt_out))
        else:
            # NOTE(review): for non-contact models this returns the empty
            # dict initialised above, not a set — callers only test
            # membership, so it behaves the same; confirm before changing.
            _logger.info(
                "Mass-mailing %s targets %s, no opt out list available",
                self, target._name)
        return opt_out

    def _get_link_tracker_values(self):
        """Values propagated to link trackers built for this mailing
        (mailing id plus any campaign / UTM source / UTM medium)."""
        self.ensure_one()
        vals = {'mass_mailing_id': self.id}
        if self.campaign_id:
            vals['campaign_id'] = self.campaign_id.id
        if self.source_id:
            vals['source_id'] = self.source_id.id
        if self.medium_id:
            vals['medium_id'] = self.medium_id.id
        return vals

    def _get_seen_list(self):
        """Returns a set of emails already targeted by current mailing/campaign (no duplicates)"""
        self.ensure_one()
        target = self.env[self.mailing_model_real]
        # avoid loading a large number of records in memory
        # + use a basic heuristic for extracting emails
        query = """
            SELECT lower(substring(t.%(mail_field)s, '([^ ,;<@]+@[^> ,;]+)'))
              FROM mailing_trace s
              JOIN %(target)s t ON (s.res_id = t.id)
             WHERE substring(t.%(mail_field)s, '([^ ,;<@]+@[^> ,;]+)') IS NOT NULL
        """
        # Apply same 'get email field' rule from mail_thread.message_get_default_recipients
        if 'partner_id' in target._fields:
            mail_field = 'email'
            query = """
                SELECT lower(substring(p.%(mail_field)s, '([^ ,;<@]+@[^> ,;]+)'))
                  FROM mailing_trace s
                  JOIN %(target)s t ON (s.res_id = t.id)
                  JOIN res_partner p ON (t.partner_id = p.id)
                 WHERE substring(p.%(mail_field)s, '([^ ,;<@]+@[^> ,;]+)') IS NOT NULL
            """
        elif issubclass(type(target), self.pool['mail.address.mixin']):
            mail_field = 'email_normalized'
        elif 'email_from' in target._fields:
            mail_field = 'email_from'
        elif 'partner_email' in target._fields:
            mail_field = 'partner_email'
        elif 'email' in target._fields:
            mail_field = 'email'
        else:
            raise UserError(
                _("Unsupported mass mailing model %s") %
                self.mailing_model_id.name)
        if self.unique_ab_testing:
            # A/B testing: de-duplicate across the whole campaign.
            query += """
               AND s.campaign_id = %%(mailing_campaign_id)s;
            """
        else:
            query += """
               AND s.mass_mailing_id = %%(mailing_id)s
               AND s.model = %%(target_model)s;
            """
        # Table and column names are %-interpolated (they come from the
        # registry, not the user); the values go through query parameters.
        query = query % {'target': target._table, 'mail_field': mail_field}
        params = {
            'mailing_id': self.id,
            'mailing_campaign_id': self.campaign_id.id,
            'target_model': self.mailing_model_real
        }
        self._cr.execute(query, params)
        seen_list = set(m[0] for m in self._cr.fetchall())
        _logger.info("Mass-mailing %s has already reached %s %s emails", self,
                     len(seen_list), target._name)
        return seen_list

    def _get_mass_mailing_context(self):
        """Returns extra context items with pre-filled blacklist and seen list for massmailing"""
        return {
            'mass_mailing_opt_out_list': self._get_opt_out_list(),
            'mass_mailing_seen_list': self._get_seen_list(),
            'post_convert_links': self._get_link_tracker_values(),
        }

    def _get_recipients(self):
        """Return the ids of the records targeted by the mailing domain,
        optionally reduced to a random A/B-testing sample."""
        if self.mailing_domain:
            domain = safe_eval(self.mailing_domain)
            try:
                res_ids = self.env[self.mailing_model_real].search(domain).ids
            except ValueError:
                # Invalid domain for the model: target nobody but keep going.
                res_ids = []
                _logger.exception(
                    'Cannot get the mass mailing recipients, model: %s, domain: %s',
                    self.mailing_model_real, domain)
        else:
            res_ids = []
            domain = [('id', 'in', res_ids)]
        # randomly choose a fragment
        if self.contact_ab_pc < 100:
            contact_nbr = self.env[self.mailing_model_real].search_count(
                domain)
            topick = int(contact_nbr / 100.0 * self.contact_ab_pc)
            if self.campaign_id and self.unique_ab_testing:
                # Exclude recipients already mailed by the campaign.
                already_mailed = self.campaign_id._get_mailing_recipients()[
                    self.campaign_id.id]
            else:
                already_mailed = set([])
            remaining = set(res_ids).difference(already_mailed)
            if topick > len(remaining):
                topick = len(remaining)
            res_ids = random.sample(remaining, topick)
        return res_ids

    def _get_remaining_recipients(self):
        """Recipients from _get_recipients() minus those that already have
        a trace for this mailing (i.e. were already processed)."""
        res_ids = self._get_recipients()
        already_mailed = self.env['mailing.trace'].search_read(
            [('model', '=', self.mailing_model_real),
             ('res_id', 'in', res_ids), ('mass_mailing_id', '=', self.id)],
            ['res_id'])
        done_res_ids = [record['res_id'] for record in already_mailed]
        return [rid for rid in res_ids if rid not in done_res_ids]

    def action_send_mail(self, res_ids=None):
        """Send each mailing in *self* to *res_ids* (defaults to the
        remaining recipients) through the mass-mail composer, then mark it
        done.

        :raises UserError: when no recipient is left to send to.
        """
        author_id = self.env.user.partner_id.id
        # NOTE(review): `res_ids` is shared across iterations — when called
        # on a multi-record set with res_ids=None, the first mailing's
        # recipients are reused for the others. Confirm single-record usage.
        for mailing in self:
            if not res_ids:
                res_ids = mailing._get_remaining_recipients()
            if not res_ids:
                raise UserError(_('There are no recipients selected.'))
            composer_values = {
                'author_id': author_id,
                'attachment_ids': [(4, attachment.id)
                                   for attachment in mailing.attachment_ids],
                'body': mailing.body_html,
                'subject': mailing.subject,
                'model': mailing.mailing_model_real,
                'email_from': mailing.email_from,
                'record_name': False,
                'composition_mode': 'mass_mail',
                'mass_mailing_id': mailing.id,
                'mailing_list_ids': [(4, l.id)
                                     for l in mailing.contact_list_ids],
                'no_auto_thread': mailing.reply_to_mode != 'thread',
                'template_id': None,
                'mail_server_id': mailing.mail_server_id.id,
            }
            if mailing.reply_to_mode == 'email':
                composer_values['reply_to'] = mailing.reply_to
            composer = self.env['mail.compose.message'].with_context(
                active_ids=res_ids).create(composer_values)
            extra_context = self._get_mass_mailing_context()
            composer = composer.with_context(active_ids=res_ids,
                                             **extra_context)
            # auto-commit except in testing mode
            auto_commit = not getattr(threading.currentThread(), 'testing',
                                      False)
            composer.send_mail(auto_commit=auto_commit)
            mailing.write({
                'state': 'done',
                'sent_date': fields.Datetime.now()
            })
        return True

    def convert_links(self):
        """Return {mailing id: body with links converted to tracked links},
        excluding unsubscribe links from tracking."""
        res = {}
        for mass_mailing in self:
            html = mass_mailing.body_html if mass_mailing.body_html else ''
            vals = {'mass_mailing_id': mass_mailing.id}
            if mass_mailing.campaign_id:
                vals['campaign_id'] = mass_mailing.campaign_id.id
            if mass_mailing.source_id:
                vals['source_id'] = mass_mailing.source_id.id
            if mass_mailing.medium_id:
                vals['medium_id'] = mass_mailing.medium_id.id
            res[mass_mailing.id] = self.env['link.tracker'].convert_links(
                html, vals, blacklist=['/unsubscribe_from_list'])
        return res

    @api.model
    def _process_mass_mailing_queue(self):
        """Cron entry point: send every queued/sending mailing whose
        schedule date is unset or past, impersonating its last writer."""
        mass_mailings = self.search([('state', 'in', ('in_queue', 'sending')),
                                     '|',
                                     ('schedule_date', '<',
                                      fields.Datetime.now()),
                                     ('schedule_date', '=', False)])
        for mass_mailing in mass_mailings:
            # Run with the writer's user context (lang, tz, ...).
            user = mass_mailing.write_uid or self.env.user
            mass_mailing = mass_mailing.with_context(
                **user.with_user(user).context_get())
            if len(mass_mailing._get_remaining_recipients()) > 0:
                mass_mailing.state = 'sending'
                mass_mailing.action_send_mail()
            else:
                # Nothing left to send: close the mailing.
                mass_mailing.write({
                    'state': 'done',
                    'sent_date': fields.Datetime.now()
                })

    # ------------------------------------------------------
    # TOOLS
    # ------------------------------------------------------

    def _unsubscribe_token(self, res_id, email):
        """Generate a secure hash for this mailing list and parameters.

        This is appended to the unsubscription URL and then checked at
        unsubscription time to ensure no malicious unsubscriptions are
        performed.

        :param int res_id:
            ID of the resource that will be unsubscribed.

        :param str email:
            Email of the resource that will be unsubscribed.
        """
        secret = self.env["ir.config_parameter"].sudo().get_param(
            "database.secret")
        token = (self.env.cr.dbname, self.id, int(res_id), tools.ustr(email))
        return hmac.new(secret.encode('utf-8'),
                        repr(token).encode('utf-8'),
                        hashlib.sha512).hexdigest()

    def _convert_inline_images_to_urls(self, body_html):
        """Find inline base64-encoded images, turn each into an
        ir.attachment, and replace the inline image source with a
        tokenised URL to that attachment. Returns the (possibly
        unchanged) HTML body."""

        def _image_to_url(b64image: bytes):
            """Store an image in an attachment and return its URL."""
            attachment = self.env['ir.attachment'].create({
                'datas': b64image,
                'name': "cropped_image_mailing_{}".format(self.id),
                'type': 'binary',
            })
            attachment.generate_access_token()
            return '/web/image/%s?access_token=%s' % (
                attachment.id, attachment.access_token)

        modified = False
        root = lxml.html.fromstring(body_html)
        for node in root.iter('img'):
            match = image_re.match(node.attrib.get('src', ''))
            if match:
                mime = match.group(1)  # unused
                image = match.group(2).encode()  # base64 image as bytes
                node.attrib['src'] = _image_to_url(image)
                modified = True
        if modified:
            # NOTE(review): lxml.html.tostring returns bytes by default,
            # while the unmodified branch returns the original str.
            return lxml.html.tostring(root)
        return body_html
class FleetVehicle(models.Model):
    """A vehicle of the fleet: identification, driver assignment, odometer
    tracking, cost/contract/service counters and contract-renewal alerts."""
    _inherit = ['mail.thread', 'mail.activity.mixin']
    _name = 'fleet.vehicle'
    _description = 'Vehicle'
    _order = 'license_plate asc, acquisition_date asc'

    def _get_default_state(self):
        # Default to the "Registered" state; False when the XML record is
        # missing (e.g. data not loaded).
        state = self.env.ref('fleet.fleet_vehicle_state_registered',
                             raise_if_not_found=False)
        return state if state and state.id else False

    # --- Identification -------------------------------------------------
    name = fields.Char(compute="_compute_vehicle_name", store=True)
    active = fields.Boolean('Active', default=True, tracking=True)
    company_id = fields.Many2one('res.company', 'Company',
                                 default=lambda self: self.env.company)
    currency_id = fields.Many2one('res.currency',
                                  related='company_id.currency_id')
    license_plate = fields.Char(tracking=True, help='License plate number of the vehicle (i = plate number for a car)')
    vin_sn = fields.Char('Chassis Number', help='Unique number written on the vehicle motor (VIN/SN number)', copy=False)
    # --- Drivers ---------------------------------------------------------
    driver_id = fields.Many2one('res.partner', 'Driver', tracking=True, help='Driver of the vehicle', copy=False)
    future_driver_id = fields.Many2one('res.partner', 'Future Driver', tracking=True, help='Next Driver of the vehicle', copy=False, domain="['|', ('company_id', '=', False), ('company_id', '=', company_id)]")
    model_id = fields.Many2one('fleet.vehicle.model', 'Model', tracking=True, required=True, help='Model of the vehicle')
    manager_id = fields.Many2one('res.users', related='model_id.manager_id')
    brand_id = fields.Many2one('fleet.vehicle.model.brand', 'Brand', related="model_id.brand_id", store=True, readonly=False)
    # --- Logs and counters (all counters share _compute_count_all) -------
    log_drivers = fields.One2many('fleet.vehicle.assignation.log', 'vehicle_id', string='Assignation Logs')
    log_fuel = fields.One2many('fleet.vehicle.log.fuel', 'vehicle_id', 'Fuel Logs')
    log_services = fields.One2many('fleet.vehicle.log.services', 'vehicle_id', 'Services Logs')
    log_contracts = fields.One2many('fleet.vehicle.log.contract', 'vehicle_id', 'Contracts')
    cost_count = fields.Integer(compute="_compute_count_all", string="Costs")
    contract_count = fields.Integer(compute="_compute_count_all", string='Contract Count')
    service_count = fields.Integer(compute="_compute_count_all", string='Services')
    fuel_logs_count = fields.Integer(compute="_compute_count_all", string='Fuel Log Count')
    odometer_count = fields.Integer(compute="_compute_count_all", string='Odometer')
    history_count = fields.Integer(compute="_compute_count_all", string="Drivers History Count")
    next_assignation_date = fields.Date('Assignation Date', help='This is the date at which the car will be available, if not set it means available instantly')
    acquisition_date = fields.Date('Immatriculation Date', required=False, default=fields.Date.today, help='Date when the vehicle has been immatriculated')
    first_contract_date = fields.Date(string="First Contract Date", default=fields.Date.today)
    # --- Characteristics --------------------------------------------------
    color = fields.Char(help='Color of the vehicle')
    state_id = fields.Many2one('fleet.vehicle.state', 'State', default=_get_default_state, group_expand='_read_group_stage_ids', tracking=True, help='Current state of the vehicle', ondelete="set null")
    location = fields.Char(help='Location of the vehicle (garage, ...)')
    seats = fields.Integer('Seats Number', help='Number of seats of the vehicle')
    model_year = fields.Char('Model Year', help='Year of the model')
    doors = fields.Integer('Doors Number', help='Number of doors of the vehicle', default=5)
    tag_ids = fields.Many2many('fleet.vehicle.tag', 'fleet_vehicle_vehicle_tag_rel', 'vehicle_tag_id', 'tag_id', 'Tags', copy=False)
    # Computed from / written back to fleet.vehicle.odometer records.
    odometer = fields.Float(compute='_get_odometer', inverse='_set_odometer', string='Last Odometer', help='Odometer measure of the vehicle at the moment of this log')
    odometer_unit = fields.Selection([
        ('kilometers', 'Kilometers'),
        ('miles', 'Miles')
    ], 'Odometer Unit', default='kilometers', help='Unit of the odometer ', required=True)
    transmission = fields.Selection([('manual', 'Manual'), ('automatic', 'Automatic')], 'Transmission', help='Transmission Used by the vehicle')
    fuel_type = fields.Selection([
        ('gasoline', 'Gasoline'),
        ('diesel', 'Diesel'),
        ('lpg', 'LPG'),
        ('electric', 'Electric'),
        ('hybrid', 'Hybrid')
    ], 'Fuel Type', help='Fuel Used by the vehicle')
    horsepower = fields.Integer()
    horsepower_tax = fields.Float('Horsepower Taxation')
    power = fields.Integer('Power', help='Power in kW of the vehicle')
    co2 = fields.Float('CO2 Emissions', help='CO2 emissions of the vehicle')
    image_128 = fields.Image(related='model_id.image_128', readonly=False)
    # --- Contract renewal alerts (one compute fills all four fields) -----
    contract_renewal_due_soon = fields.Boolean(compute='_compute_contract_reminder', search='_search_contract_renewal_due_soon', string='Has Contracts to renew', multi='contract_info')
    contract_renewal_overdue = fields.Boolean(compute='_compute_contract_reminder', search='_search_get_overdue_contract_reminder', string='Has Contracts Overdue', multi='contract_info')
    contract_renewal_name = fields.Text(compute='_compute_contract_reminder', string='Name of contract to renew soon', multi='contract_info')
    contract_renewal_total = fields.Text(compute='_compute_contract_reminder', string='Total of contracts due or overdue minus one', multi='contract_info')
    car_value = fields.Float(string="Catalog Value (VAT Incl.)", help='Value of the bought vehicle')
    net_car_value = fields.Float(string="Purchase Value", help="Purchase Value of the car")
    residual_value = fields.Float()
    plan_to_change_car = fields.Boolean(related='driver_id.plan_to_change_car', store=True, readonly=False)

    @api.depends('model_id.brand_id.name', 'model_id.name', 'license_plate')
    def _compute_vehicle_name(self):
        """Display name: "<brand>/<model>/<plate or 'No Plate'>"."""
        for record in self:
            record.name = (record.model_id.brand_id.name or '') + '/' + (record.model_id.name or '') + '/' + (record.license_plate or _('No Plate'))

    def _get_odometer(self):
        """Compute `odometer` as the highest recorded odometer value
        (search ordered by value desc, limit 1); 0 when no log exists."""
        FleetVehicalOdometer = self.env['fleet.vehicle.odometer']
        for record in self:
            vehicle_odometer = FleetVehicalOdometer.search([('vehicle_id', '=', record.id)], limit=1, order='value desc')
            if vehicle_odometer:
                record.odometer = vehicle_odometer.value
            else:
                record.odometer = 0

    def _set_odometer(self):
        """Inverse of `odometer`: store each non-zero value as a new
        odometer log entry dated today."""
        for record in self:
            if record.odometer:
                date = fields.Date.context_today(record)
                data = {'value': record.odometer, 'date': date, 'vehicle_id': record.id}
                self.env['fleet.vehicle.odometer'].create(data)

    def _compute_count_all(self):
        """Fill every stat-button counter with one search_count per
        vehicle and per related model."""
        Odometer = self.env['fleet.vehicle.odometer']
        LogFuel = self.env['fleet.vehicle.log.fuel']
        LogService = self.env['fleet.vehicle.log.services']
        LogContract = self.env['fleet.vehicle.log.contract']
        Cost = self.env['fleet.vehicle.cost']
        for record in self:
            record.odometer_count = Odometer.search_count([('vehicle_id', '=', record.id)])
            record.fuel_logs_count = LogFuel.search_count([('vehicle_id', '=', record.id)])
            record.service_count = LogService.search_count([('vehicle_id', '=', record.id)])
            # Closed contracts are not counted.
            record.contract_count = LogContract.search_count([('vehicle_id', '=', record.id), ('state', '!=', 'closed')])
            # Only root costs (no parent) are counted.
            record.cost_count = Cost.search_count([('vehicle_id', '=', record.id), ('parent_id', '=', False)])
            record.history_count = self.env['fleet.vehicle.assignation.log'].search_count([('vehicle_id', '=', record.id)])

    @api.depends('log_contracts')
    def _compute_contract_reminder(self):
        """Compute the four contract_renewal_* fields from the vehicle's
        open/diesoon/expired contracts, using the configurable
        `hr_fleet.delay_alert_contract` window (default 30 days)."""
        params = self.env['ir.config_parameter'].sudo()
        delay_alert_contract = int(params.get_param('hr_fleet.delay_alert_contract', default=30))
        for record in self:
            overdue = False
            due_soon = False
            total = 0
            name = ''
            for element in record.log_contracts:
                if element.state in ('open', 'diesoon', 'expired') and element.expiration_date:
                    current_date_str = fields.Date.context_today(record)
                    due_time_str = element.expiration_date
                    current_date = fields.Date.from_string(current_date_str)
                    due_time = fields.Date.from_string(due_time_str)
                    diff_time = (due_time - current_date).days
                    if diff_time < 0:
                        overdue = True
                        total += 1
                    # NOTE(review): an overdue contract also satisfies this
                    # condition, so it increments `total` twice — confirm
                    # this double count is intended.
                    if diff_time < delay_alert_contract:
                        due_soon = True
                        total += 1
            if overdue or due_soon:
                log_contract = self.env['fleet.vehicle.log.contract'].search([
                    ('vehicle_id', '=', record.id),
                    ('state', 'in', ('open', 'diesoon', 'expired'))
                ], limit=1, order='expiration_date asc')
                if log_contract:
                    # we display only the name of the oldest overdue/due soon contract
                    name = log_contract.cost_subtype_id.name
            record.contract_renewal_overdue = overdue
            record.contract_renewal_due_soon = due_soon
            record.contract_renewal_total = total - 1  # we remove 1 from the real total for display purposes
            record.contract_renewal_name = name

    def _search_contract_renewal_due_soon(self, operator, value):
        """Search method for `contract_renewal_due_soon`: vehicles having a
        contract expiring between today and today + alert delay.

        Only '='/'!='/'<>' with a boolean value are supported.
        """
        params = self.env['ir.config_parameter'].sudo()
        delay_alert_contract = int(params.get_param('hr_fleet.delay_alert_contract', default=30))
        res = []
        assert operator in ('=', '!=', '<>') and value in (True, False), 'Operation not supported'
        if (operator == '=' and value is True) or (operator in ('<>', '!=') and value is False):
            search_operator = 'in'
        else:
            search_operator = 'not in'
        today = fields.Date.context_today(self)
        datetime_today = fields.Datetime.from_string(today)
        limit_date = fields.Datetime.to_string(datetime_today + relativedelta(days=+delay_alert_contract))
        # Raw SQL for performance: one aggregate over the cost/contract join.
        self.env.cr.execute("""SELECT cost.vehicle_id,
                        count(contract.id) AS contract_number
                        FROM fleet_vehicle_cost cost
                        LEFT JOIN fleet_vehicle_log_contract contract ON contract.cost_id = cost.id
                        WHERE contract.expiration_date IS NOT NULL
                          AND contract.expiration_date > %s
                          AND contract.expiration_date < %s
                          AND contract.state IN ('open', 'diesoon', 'expired')
                        GROUP BY cost.vehicle_id""", (today, limit_date))
        res_ids = [x[0] for x in self.env.cr.fetchall()]
        res.append(('id', search_operator, res_ids))
        return res

    def _search_get_overdue_contract_reminder(self, operator, value):
        """Search method for `contract_renewal_overdue`: vehicles having a
        contract already past its expiration date."""
        res = []
        assert operator in ('=', '!=', '<>') and value in (True, False), 'Operation not supported'
        if (operator == '=' and value is True) or (operator in ('<>', '!=') and value is False):
            search_operator = 'in'
        else:
            search_operator = 'not in'
        today = fields.Date.context_today(self)
        self.env.cr.execute('''SELECT cost.vehicle_id,
                        count(contract.id) AS contract_number
                        FROM fleet_vehicle_cost cost
                        LEFT JOIN fleet_vehicle_log_contract contract ON contract.cost_id = cost.id
                        WHERE contract.expiration_date IS NOT NULL
                          AND contract.expiration_date < %s
                          AND contract.state IN ('open', 'diesoon', 'expired')
                        GROUP BY cost.vehicle_id ''', (today,))
        res_ids = [x[0] for x in self.env.cr.fetchall()]
        res.append(('id', search_operator, res_ids))
        return res

    @api.model
    def create(self, vals):
        """On creation, open a driver-assignation log entry and flag the
        future driver's partner as planning a car change (unless the
        vehicle is already in the waiting list state)."""
        res = super(FleetVehicle, self).create(vals)
        if 'driver_id' in vals and vals['driver_id']:
            res.create_driver_history(vals['driver_id'])
        if 'future_driver_id' in vals and vals['future_driver_id']:
            state_waiting_list = self.env.ref('fleet.fleet_vehicle_state_waiting_list', raise_if_not_found=False)
            states = res.mapped('state_id').ids
            if not state_waiting_list or state_waiting_list.id not in states:
                future_driver = self.env['res.partner'].browse(vals['future_driver_id'])
                # sudo: the fleet user may not have write access on partners.
                future_driver.sudo().write({'plan_to_change_car': True})
        return res

    def write(self, vals):
        """Log driver changes, flag the new future driver, and archive the
        contracts when the vehicle itself gets archived."""
        if 'driver_id' in vals and vals['driver_id']:
            driver_id = vals['driver_id']
            # Only vehicles whose driver actually changes get a new log.
            self.filtered(lambda v: v.driver_id.id != driver_id).create_driver_history(driver_id)
        if 'future_driver_id' in vals and vals['future_driver_id']:
            state_waiting_list = self.env.ref('fleet.fleet_vehicle_state_waiting_list', raise_if_not_found=False)
            states = self.mapped('state_id').ids if 'state_id' not in vals else [vals['state_id']]
            if not state_waiting_list or state_waiting_list.id not in states:
                future_driver = self.env['res.partner'].browse(vals['future_driver_id'])
                future_driver.sudo().write({'plan_to_change_car': True})
        res = super(FleetVehicle, self).write(vals)
        if 'active' in vals and not vals['active']:
            self.mapped('log_contracts').write({'active': False})
        return res

    def _close_driver_history(self):
        """Close (date_end = today) the open assignation logs of the
        current drivers of these vehicles."""
        self.env['fleet.vehicle.assignation.log'].search([
            ('vehicle_id', 'in', self.ids),
            ('driver_id', 'in', self.mapped('driver_id').ids),
            ('date_end', '=', False)
        ]).write({'date_end': fields.Date.today()})

    def create_driver_history(self, driver_id):
        """Open a new assignation log (starting today) linking *driver_id*
        to each vehicle in *self*."""
        for vehicle in self:
            self.env['fleet.vehicle.assignation.log'].create({
                'vehicle_id': vehicle.id,
                'driver_id': driver_id,
                'date_start': fields.Date.today(),
            })

    def action_accept_driver_change(self):
        """Promote each vehicle's future driver to current driver."""
        # Find all the vehicles for which the driver is the future_driver_id
        # remove their driver_id and close their history using current date
        vehicles = self.search([('driver_id', 'in', self.mapped('future_driver_id').ids)])
        vehicles.write({'driver_id': False})
        vehicles._close_driver_history()
        for vehicle in self:
            vehicle.future_driver_id.sudo().write({'plan_to_change_car': False})
            vehicle.driver_id = vehicle.future_driver_id
            vehicle.future_driver_id = False

    @api.model
    def _read_group_stage_ids(self, stages, domain, order):
        # Kanban group_expand: always show every vehicle state column.
        return self.env['fleet.vehicle.state'].search([], order=order)

    @api.model
    def _name_search(self, name, args=None, operator='ilike', limit=100, name_get_uid=None):
        """Search by vehicle name OR driver name."""
        args = args or []
        if operator == 'ilike' and not (name or '').strip():
            domain = []
        else:
            domain = ['|', ('name', operator, name), ('driver_id.name', operator, name)]
        rec = self._search(expression.AND([domain, args]), limit=limit, access_rights_uid=name_get_uid)
        return models.lazy_name_get(self.browse(rec).with_user(name_get_uid))

    def return_action_to_open(self):
        """ This opens the xml view specified in xml_id for the current vehicle """
        self.ensure_one()
        xml_id = self.env.context.get('xml_id')
        if xml_id:
            res = self.env['ir.actions.act_window'].for_xml_id('fleet', xml_id)
            res.update(
                context=dict(self.env.context, default_vehicle_id=self.id, group_by=False),
                domain=[('vehicle_id', '=', self.id)]
            )
            return res
        return False

    def act_show_log_cost(self):
        """ This opens log view to view and add new log for this vehicle, groupby default to only show effective costs
            @return: the costs log view
        """
        self.ensure_one()
        copy_context = dict(self.env.context)
        copy_context.pop('group_by', None)
        res = self.env['ir.actions.act_window'].for_xml_id('fleet', 'fleet_vehicle_costs_action')
        res.update(
            context=dict(copy_context, default_vehicle_id=self.id, search_default_parent_false=True),
            domain=[('vehicle_id', '=', self.id)]
        )
        return res

    def _track_subtype(self, init_values):
        # Use the dedicated "driver updated" subtype when a driver changed.
        self.ensure_one()
        if 'driver_id' in init_values or 'future_driver_id' in init_values:
            return self.env.ref('fleet.mt_fleet_driver_updated')
        return super(FleetVehicle, self)._track_subtype(init_values)

    def open_assignation_logs(self):
        """Open the driver-assignation history of this vehicle."""
        self.ensure_one()
        return {
            'type': 'ir.actions.act_window',
            'name': 'Assignation Logs',
            'view_mode': 'tree',
            'res_model': 'fleet.vehicle.assignation.log',
            'domain': [('vehicle_id', '=', self.id)],
            'context': {'default_driver_id': self.driver_id.id, 'default_vehicle_id': self.id}
        }
class Job(models.Model):
    """A job position: tracks current head count and recruitment targets."""
    _name = "hr.job"
    _description = "Job Position"
    _inherit = ['mail.thread']

    name = fields.Char(string='Job Position', required=True, index=True, translate=True)
    # Computed: current head count + expected recruits.
    expected_employees = fields.Integer(
        compute='_compute_employees',
        string='Total Forecasted Employees',
        store=True,
        help=
        'Expected number of employees for this job position after new recruitment.'
    )
    no_of_employee = fields.Integer(
        compute='_compute_employees',
        string="Current Number of Employees",
        store=True,
        help='Number of employees currently occupying this job position.')
    no_of_recruitment = fields.Integer(
        string='Expected New Employees',
        copy=False,
        help='Number of new employees you expect to recruit.',
        default=1)
    no_of_hired_employee = fields.Integer(
        string='Hired Employees',
        copy=False,
        help=
        'Number of hired employees for this job position during recruitment phase.'
    )
    employee_ids = fields.One2many('hr.employee',
                                   'job_id',
                                   string='Employees',
                                   groups='base.group_user')
    description = fields.Text(string='Job Description')
    requirements = fields.Text('Requirements')
    department_id = fields.Many2one(
        'hr.department',
        string='Department',
        domain=
        "['|', ('company_id', '=', False), ('company_id', '=', company_id)]")
    company_id = fields.Many2one('res.company',
                                 string='Company',
                                 default=lambda self: self.env.company)
    state = fields.Selection(
        [('recruit', 'Recruitment in Progress'), ('open', 'Not Recruiting')],
        string='Status',
        readonly=True,
        required=True,
        tracking=True,
        copy=False,
        default='recruit',
        help=
        "Set whether the recruitment process is open or closed for this job position."
    )

    _sql_constraints = [
        ('name_company_uniq', 'unique(name, company_id, department_id)',
         'The name of the job position must be unique per department in company!'
         ),
    ]

    @api.depends('no_of_recruitment', 'employee_ids.job_id',
                 'employee_ids.active')
    def _compute_employees(self):
        """Count (active) employees per job in one read_group and derive
        the forecast as count + expected recruits."""
        employee_data = self.env['hr.employee'].read_group(
            [('job_id', 'in', self.ids)], ['job_id'], ['job_id'])
        result = dict((data['job_id'][0], data['job_id_count'])
                      for data in employee_data)
        for job in self:
            job.no_of_employee = result.get(job.id, 0)
            job.expected_employees = result.get(job.id,
                                                0) + job.no_of_recruitment

    @api.model
    def create(self, values):
        """ We don't want the current user to be follower of all created job """
        return super(
            Job, self.with_context(mail_create_nosubscribe=True)).create(values)

    @api.returns('self', lambda value: value.id)
    def copy(self, default=None):
        """Duplicate the job, defaulting the copy's name to "<name> (copy)"
        unless a name is supplied in *default*."""
        self.ensure_one()
        default = dict(default or {})
        if 'name' not in default:
            default['name'] = _("%s (copy)") % (self.name)
        return super(Job, self).copy(default=default)

    def set_recruit(self):
        """Open recruitment; ensure at least one expected recruit."""
        for record in self:
            no_of_recruitment = 1 if record.no_of_recruitment == 0 else record.no_of_recruitment
            record.write({
                'state': 'recruit',
                'no_of_recruitment': no_of_recruitment
            })
        return True

    def set_open(self):
        """Close recruitment and reset the recruitment counters."""
        return self.write({
            'state': 'open',
            'no_of_recruitment': 0,
            'no_of_hired_employee': 0
        })
class FetchmailServer(models.Model): """Incoming POP/IMAP mail server account""" _name = 'fetchmail.server' _description = 'Incoming Mail Server' _order = 'priority' name = fields.Char('Name', required=True) active = fields.Boolean('Active', default=True) state = fields.Selection([ ('draft', 'Not Confirmed'), ('done', 'Confirmed'), ], string='Status', index=True, readonly=True, copy=False, default='draft') server = fields.Char(string='Server Name', readonly=True, help="Hostname or IP of the mail server", states={'draft': [('readonly', False)]}) port = fields.Integer(readonly=True, states={'draft': [('readonly', False)]}) server_type = fields.Selection([ ('pop', 'POP Server'), ('imap', 'IMAP Server'), ('local', 'Local Server'), ], string='Server Type', index=True, required=True, default='pop') is_ssl = fields.Boolean( 'SSL/TLS', help= "Connections are encrypted with SSL/TLS through a dedicated port (default: IMAPS=993, POP3S=995)" ) attach = fields.Boolean( 'Keep Attachments', help="Whether attachments should be downloaded. " "If not enabled, incoming emails will be stripped of any attachments before being processed", default=True) original = fields.Boolean( 'Keep Original', help= "Whether a full original copy of each email should be kept for reference " "and attached to each processed message. This will usually double the size of your message database." ) date = fields.Datetime(string='Last Fetch Date', readonly=True) user = fields.Char(string='Username', readonly=True, states={'draft': [('readonly', False)]}) password = fields.Char(readonly=True, states={'draft': [('readonly', False)]}) object_id = fields.Many2one( 'ir.model', string="Create a New Record", help="Process each incoming mail as part of a conversation " "corresponding to this document type. 
This will create " "new documents for new conversations, or attach follow-up " "emails to the existing conversations (documents).")
    # Lower value = processed earlier by the fetch cron; editable only in draft.
    priority = fields.Integer(
        string='Server Priority', readonly=True,
        states={'draft': [('readonly', False)]},
        help="Defines the order of processing, lower values mean higher priority",
        default=5)
    # Mails fetched through this server (inverse of mail.mail.fetchmail_server_id).
    message_ids = fields.One2many('mail.mail', 'fetchmail_server_id',
                                  string='Messages', readonly=True)
    # Human-readable MTA setup instructions, regenerated by onchange_server_type().
    configuration = fields.Text('Configuration', readonly=True)
    script = fields.Char(readonly=True,
                         default='/mail/static/scripts/harpiya-mailgate.py')

    @api.onchange('server_type', 'is_ssl', 'object_id')
    def onchange_server_type(self):
        """Reset the default port for the chosen protocol/SSL combination and
        rebuild the mail-gateway configuration help text."""
        self.port = 0
        if self.server_type == 'pop':
            self.port = self.is_ssl and 995 or 110
        elif self.server_type == 'imap':
            self.port = self.is_ssl and 993 or 143
        else:
            # 'local' style server: no host needed.
            self.server = ''
        conf = {
            'dbname': self.env.cr.dbname,
            'uid': self.env.uid,
            'model': self.object_id.model if self.object_id else 'MODELNAME'
        }
        # NOTE: kept as a single-line template exactly as found in source.
        self.configuration = """Use the below script with the following command line options with your Mail Transport Agent (MTA) harpiya-mailgate.py --host=HOSTNAME --port=PORT -u %(uid)d -p PASSWORD -d %(dbname)s Example configuration for the postfix mta running locally: /etc/postfix/virtual_aliases: @youdomain harpiya_mailgate@localhost /etc/aliases: harpiya_mailgate: "|/path/to/harpiya-mailgate.py --host=localhost -u %(uid)d -p PASSWORD -d %(dbname)s" """ % conf

    @api.model
    def create(self, values):
        # Keep the fetch cron's active state in sync with the server list.
        res = super(FetchmailServer, self).create(values)
        self._update_cron()
        return res

    def write(self, values):
        # State changes (draft/done) may enable or disable the fetch cron.
        res = super(FetchmailServer, self).write(values)
        self._update_cron()
        return res

    def unlink(self):
        res = super(FetchmailServer, self).unlink()
        self._update_cron()
        return res

    def set_draft(self):
        """Put the server back in draft so its settings become editable."""
        self.write({'state': 'draft'})
        return True

    def connect(self):
        """Open and return an authenticated IMAP4(_SSL) or POP3(_SSL) connection
        for this single server record.

        :raises: whatever imaplib/poplib raise on connection/login failure.
        """
        self.ensure_one()
        if self.server_type == 'imap':
            if self.is_ssl:
                connection = IMAP4_SSL(self.server, int(self.port))
            else:
                connection = IMAP4(self.server, int(self.port))
            connection.login(self.user, self.password)
        elif self.server_type == 'pop':
            if self.is_ssl:
                connection = POP3_SSL(self.server, int(self.port))
            else:
                connection = POP3(self.server, int(self.port))
            #TODO: use this to remove only unread messages
            #connection.user("recent:"+server.user)
            connection.user(self.user)
            connection.pass_(self.password)
        # Add timeout on socket
        # NOTE(review): MAIL_TIMEOUT is a module-level constant defined outside
        # this chunk — confirm its value/units at the top of the file.
        connection.sock.settimeout(MAIL_TIMEOUT)
        return connection

    def button_confirm_login(self):
        """Test the connection settings; mark the server 'done' on success,
        raise a UserError on failure."""
        for server in self:
            try:
                connection = server.connect()
                server.write({'state': 'done'})
            except Exception as err:
                _logger.info("Failed to connect to %s server %s.",
                             server.server_type,
                             server.name,
                             exc_info=True)
                raise UserError(
                    _("Connection test failed: %s") % tools.ustr(err))
            finally:
                try:
                    # NOTE(review): if connect() raised before assigning,
                    # 'connection' is unbound here; the resulting NameError is
                    # swallowed by the nested except below. Intentional-looking
                    # but worth confirming.
                    if connection:
                        if server.server_type == 'imap':
                            connection.close()
                        elif server.server_type == 'pop':
                            connection.quit()
                except Exception:
                    # ignored, just a consequence of the previous exception
                    pass
        return True

    @api.model
    def _fetch_mails(self):
        """ Method called by cron to fetch mails from servers """
        return self.search([('state', '=', 'done'),
                            ('server_type', 'in',
                             ['pop', 'imap'])]).fetch_mail()

    def fetch_mail(self):
        """ WARNING: meant for cron usage only - will commit() after each email!
        """
        additionnal_context = {'fetchmail_cron_running': True}
        MailThread = self.env['mail.thread']
        for server in self:
            _logger.info('start checking for new emails on %s server %s',
                         server.server_type, server.name)
            additionnal_context['default_fetchmail_server_id'] = server.id
            additionnal_context['server_type'] = server.server_type
            count, failed = 0, 0
            imap_server = None
            pop_server = None
            if server.server_type == 'imap':
                try:
                    imap_server = server.connect()
                    imap_server.select()
                    # Only fetch messages the mailbox reports as unseen.
                    result, data = imap_server.search(None, '(UNSEEN)')
                    for num in data[0].split():
                        res_id = None
                        result, data = imap_server.fetch(num, '(RFC822)')
                        # Clear \Seen first (FETCH may have set it), so a crash
                        # mid-processing leaves the mail unread for retry.
                        imap_server.store(num, '-FLAGS', '\\Seen')
                        try:
                            res_id = MailThread.with_context(
                                **additionnal_context).message_process(
                                    server.object_id.model,
                                    data[0][1],
                                    save_original=server.original,
                                    strip_attachments=(not server.attach))
                        except Exception:
                            _logger.info(
                                'Failed to process mail from %s server %s.',
                                server.server_type,
                                server.name,
                                exc_info=True)
                            failed += 1
                        # Mark processed (even on failure) and persist per-mail.
                        imap_server.store(num, '+FLAGS', '\\Seen')
                        self._cr.commit()
                        count += 1
                    _logger.info(
                        "Fetched %d email(s) on %s server %s; %d succeeded, %d failed.",
                        count, server.server_type, server.name,
                        (count - failed), failed)
                except Exception:
                    _logger.info(
                        "General failure when trying to fetch mail from %s server %s.",
                        server.server_type,
                        server.name,
                        exc_info=True)
                finally:
                    if imap_server:
                        imap_server.close()
                        imap_server.logout()
            elif server.server_type == 'pop':
                try:
                    # Reconnect per batch: POP deletions only take effect on QUIT.
                    while True:
                        pop_server = server.connect()
                        (num_messages, total_size) = pop_server.stat()
                        pop_server.list()
                        # NOTE(review): MAX_POP_MESSAGES is a module-level
                        # constant defined outside this chunk.
                        for num in range(
                                1, min(MAX_POP_MESSAGES, num_messages) + 1):
                            (header, messages, octets) = pop_server.retr(num)
                            message = (b'\n').join(messages)
                            res_id = None
                            try:
                                res_id = MailThread.with_context(
                                    **additionnal_context).message_process(
                                        server.object_id.model,
                                        message,
                                        save_original=server.original,
                                        strip_attachments=(not server.attach))
                                # Delete only after successful processing.
                                pop_server.dele(num)
                            except Exception:
                                _logger.info(
                                    'Failed to process mail from %s server %s.',
                                    server.server_type,
                                    server.name,
                                    exc_info=True)
                                failed += 1
                            self.env.cr.commit()
                        if num_messages < MAX_POP_MESSAGES:
                            break
                        pop_server.quit()
                    _logger.info(
                        "Fetched %d email(s) on %s server %s; %d succeeded, %d failed.",
                        num_messages, server.server_type, server.name,
                        (num_messages - failed), failed)
                except Exception:
                    _logger.info(
                        "General failure when trying to fetch mail from %s server %s.",
                        server.server_type,
                        server.name,
                        exc_info=True)
                finally:
                    if pop_server:
                        pop_server.quit()
            # Record when this server was last polled.
            server.write({'date': fields.Datetime.now()})
        return True

    @api.model
    def _update_cron(self):
        # Skip while the cron itself is running to avoid toggling it mid-fetch.
        if self.env.context.get('fetchmail_cron_running'):
            return
        try:
            # Enabled/Disable cron based on the number of 'done' server of type pop or imap
            cron = self.env.ref('fetchmail.ir_cron_mail_gateway_action')
            cron.toggle(model=self._name,
                        domain=[('state', '=', 'done'),
                                ('server_type', 'in', ['pop', 'imap'])])
        except ValueError:
            # XML-id missing (e.g. during module uninstall): nothing to toggle.
            pass
class Digest(models.Model):
    """Periodic KPI digest emailed to the subscribed internal users.

    Each enabled ``kpi_*`` boolean field has a matching ``kpi_*_value``
    computed field; :meth:`compute_kpis` evaluates those values over several
    timeframes and the mail template renders them.
    """
    _name = 'digest.digest'
    _description = 'Digest'

    # Digest description
    name = fields.Char(string='Name', required=True, translate=True)
    # Only internal (non-share) users may receive the digest.
    user_ids = fields.Many2many('res.users', string='Recipients',
                                domain="[('share', '=', False)]")
    periodicity = fields.Selection(
        [('weekly', 'Weekly'), ('monthly', 'Monthly'), ('quarterly', 'Quarterly')],
        string='Periodicity', default='weekly', required=True)
    next_run_date = fields.Date(string='Next Send Date')
    template_id = fields.Many2one(
        'mail.template', string='Email Template',
        domain="[('model','=','digest.digest')]",
        default=lambda self: self.env.ref('digest.digest_mail_template'),
        required=True)
    currency_id = fields.Many2one(related="company_id.currency_id",
                                  string='Currency', readonly=False)
    company_id = fields.Many2one('res.company', string='Company',
                                 default=lambda self: self.env.company.id)
    available_fields = fields.Char(compute='_compute_available_fields')
    is_subscribed = fields.Boolean('Is user subscribed',
                                   compute='_compute_is_subscribed')
    state = fields.Selection(
        [('activated', 'Activated'), ('deactivated', 'Deactivated')],
        string='Status', readonly=True, default='activated')
    # First base-related KPIs
    kpi_res_users_connected = fields.Boolean('Connected Users')
    kpi_res_users_connected_value = fields.Integer(
        compute='_compute_kpi_res_users_connected_value')
    kpi_mail_message_total = fields.Boolean('Messages')
    kpi_mail_message_total_value = fields.Integer(
        compute='_compute_kpi_mail_message_total_value')

    def _compute_is_subscribed(self):
        # True when the current user is among the digest's recipients.
        for digest in self:
            digest.is_subscribed = self.env.user in digest.user_ids

    def _compute_available_fields(self):
        # Comma-separated list of the *_value counterparts of every enabled
        # KPI checkbox (kpi_*, x_kpi_*, x_studio_kpi_* boolean fields).
        for digest in self:
            kpis_values_fields = [
                field_name + '_value'
                for field_name, field in digest._fields.items()
                if field.type == 'boolean'
                and field_name.startswith(('kpi_', 'x_kpi_', 'x_studio_kpi_'))
                and digest[field_name]
            ]
            digest.available_fields = ', '.join(kpis_values_fields)

    def _get_kpi_compute_parameters(self):
        """Return ``(start, end, company)`` pulled from the context that
        :meth:`compute_kpis` sets before reading each ``kpi_*_value``."""
        return (fields.Date.to_string(self._context.get('start_date')),
                fields.Date.to_string(self._context.get('end_date')),
                self._context.get('company'))

    def _compute_kpi_res_users_connected_value(self):
        # Users of the context company who logged in inside [start, end).
        for record in self:
            start, end, company = record._get_kpi_compute_parameters()
            user_connected = self.env['res.users'].search_count([
                ('company_id', '=', company.id),
                ('login_date', '>=', start),
                ('login_date', '<', end)])
            record.kpi_res_users_connected_value = user_connected

    def _compute_kpi_mail_message_total_value(self):
        # Comment/email messages of the "Discussions" subtype in [start, end).
        discussion_subtype_id = self.env.ref('mail.mt_comment').id
        for record in self:
            start, end, company = record._get_kpi_compute_parameters()
            total_messages = self.env['mail.message'].search_count([
                ('create_date', '>=', start),
                ('create_date', '<', end),
                ('subtype_id', '=', discussion_subtype_id),
                ('message_type', 'in', ['comment', 'email'])])
            record.kpi_mail_message_total_value = total_messages

    @api.onchange('periodicity')
    def _onchange_periodicity(self):
        self.next_run_date = self._get_next_run_date()

    @api.model
    def create(self, vals):
        # First send is scheduled 3 days after creation regardless of periodicity.
        vals['next_run_date'] = date.today() + relativedelta(days=3)
        return super(Digest, self).create(vals)

    def action_subscribe(self):
        if self.env.user not in self.user_ids:
            self.sudo().user_ids |= self.env.user

    def action_unsubcribe(self):
        if self.env.user in self.user_ids:
            self.sudo().user_ids -= self.env.user

    def action_activate(self):
        self.state = 'activated'

    def action_deactivate(self):
        self.state = 'deactivated'

    def action_send(self):
        """Send the digest template to every recipient, then schedule the
        next run.

        :raises: re-raises template send errors (raise_exception=True).
        """
        for digest in self:
            for user in digest.user_ids:
                subject = '%s: %s' % (user.company_id.name, digest.name)
                digest.template_id.with_context(user=user).send_mail(
                    digest.id, force_send=True, raise_exception=True,
                    email_values={'email_to': user.email, 'subject': subject})
            digest.next_run_date = digest._get_next_run_date()

    def compute_kpis(self, company, user):
        """Evaluate every enabled KPI over each timeframe, as *user*.

        :return dict: {timeframe_name: {kpi_field: {kpi_field: value,
                       'margin': percent_change_vs_previous_period}}}
        """
        self.ensure_one()
        res = {}
        for tf_name, tf in self._compute_timeframes(company).items():
            digest = self.with_context(
                start_date=tf[0][0], end_date=tf[0][1],
                company=company).with_user(user)
            previous_digest = self.with_context(
                start_date=tf[1][0], end_date=tf[1][1],
                company=company).with_user(user)
            kpis = {}
            for field_name, field in self._fields.items():
                if field.type == 'boolean' and field_name.startswith(
                        ('kpi_', 'x_kpi_', 'x_studio_kpi_')) and self[field_name]:
                    try:
                        compute_value = digest[field_name + '_value']
                        # Context start and end date is different each time so invalidate to recompute.
                        digest.invalidate_cache([field_name + '_value'])
                        previous_value = previous_digest[field_name + '_value']
                        # Context start and end date is different each time so invalidate to recompute.
                        previous_digest.invalidate_cache([field_name + '_value'])
                    except AccessError:
                        # no access rights -> just skip that digest details from that user's digest email
                        continue
                    margin = self._get_margin_value(compute_value, previous_value)
                    if self._fields[field_name + '_value'].type == 'monetary':
                        converted_amount = self._format_human_readable_amount(compute_value)
                        kpis.update({field_name: {
                            field_name: self._format_currency_amount(
                                converted_amount, company.currency_id),
                            'margin': margin}})
                    else:
                        kpis.update({field_name: {
                            field_name: compute_value, 'margin': margin}})
                res.update({tf_name: kpis})
        return res

    def compute_tips(self, company, user):
        """Pick one tip the user has not seen yet (restricted to the user's
        groups), mark it as seen, and return its rendered HTML (or False)."""
        tip = self.env['digest.tip'].search(
            [('user_ids', '!=', user.id), '|',
             ('group_id', 'in', user.groups_id.ids),
             ('group_id', '=', False)], limit=1)
        if not tip:
            return False
        tip.user_ids += user
        body = tools.html_sanitize(tip.tip_description)
        tip_description = self.env['mail.template']._render_template(
            body, 'digest.tip', self.id)
        return tip_description

    def compute_kpis_actions(self, company, user):
        """ Give an optional action to display in digest email linked to some KPIs.

        :return dict: key: kpi name (field name), value: an action that will be
          concatenated with /web#action={action}
        """
        return {}

    def _get_next_run_date(self):
        # Map the periodicity selection to a calendar delta from today.
        self.ensure_one()
        if self.periodicity == 'weekly':
            delta = relativedelta(weeks=1)
        elif self.periodicity == 'monthly':
            delta = relativedelta(months=1)
        elif self.periodicity == 'quarterly':
            delta = relativedelta(months=3)
        return date.today() + delta

    def _compute_timeframes(self, company):
        """Return {name: ((start, end), (prev_start, prev_end))} date pairs
        for yesterday / last week / last month, in the company calendar's
        timezone when one is set."""
        now = datetime.utcnow()
        tz_name = company.resource_calendar_id.tz
        if tz_name:
            now = pytz.timezone(tz_name).localize(now)
        start_date = now.date()
        return {
            'yesterday': (
                (start_date + relativedelta(days=-1), start_date),
                (start_date + relativedelta(days=-2),
                 start_date + relativedelta(days=-1))),
            'lastweek': (
                (start_date + relativedelta(weeks=-1), start_date),
                (start_date + relativedelta(weeks=-2),
                 start_date + relativedelta(weeks=-1))),
            'lastmonth': (
                (start_date + relativedelta(months=-1), start_date),
                (start_date + relativedelta(months=-2),
                 start_date + relativedelta(months=-1))),
        }

    def _get_margin_value(self, value, previous_value=0.0):
        # Percent change vs previous period; 0.0 when either side is 0 or unchanged.
        margin = 0.0
        if (value != previous_value) and (value != 0.0 and previous_value != 0.0):
            margin = float_round(
                (float(value - previous_value) / previous_value or 1) * 100,
                precision_digits=2)
        return margin

    def _format_currency_amount(self, amount, currency_id):
        # Place the currency symbol before or after per the currency setting.
        pre = currency_id.position == 'before'
        symbol = u'{symbol}'.format(symbol=currency_id.symbol or '')
        return u'{pre}{0}{post}'.format(
            amount, pre=symbol if pre else '', post=symbol if not pre else '')

    def _format_human_readable_amount(self, amount, suffix=''):
        # 12_300 -> "12.30K", 4_500_000 -> "4.50M", ... up to "T".
        for unit in ['', 'K', 'M', 'G']:
            if abs(amount) < 1000.0:
                return "%3.2f%s%s" % (amount, unit, suffix)
            amount /= 1000.0
        return "%.2f%s%s" % (amount, 'T', suffix)

    @api.model
    def _cron_send_digest_email(self):
        """Cron entry point: send every activated digest due today.

        A delivery failure for one digest is logged and does not block the
        others; the failed digest keeps its next_run_date and is retried by
        the next cron run.
        """
        digests = self.search([('next_run_date', '=', fields.Date.today()),
                               ('state', '=', 'activated')])
        for digest in digests:
            try:
                digest.action_send()
            except MailDeliveryException:
                # BUGFIX: the %d placeholder previously had no argument, so the
                # log line literally printed "%d". Pass digest.id lazily.
                _logger.warning(
                    'MailDeliveryException while sending digest %d. Digest is now scheduled for next cron update.',
                    digest.id)
class MaintenanceEquipmentCategory(models.Model):
    """Category of maintenance equipment, with a mail alias that creates
    maintenance requests from incoming email."""
    _name = 'maintenance.equipment.category'
    _inherit = ['mail.alias.mixin', 'mail.thread']
    _description = 'Maintenance Equipment Category'

    @api.depends('equipment_ids')
    def _compute_fold(self):
        # fix mutual dependency: 'fold' depends on 'equipment_count', which is
        # computed with a read_group(), which retrieves 'fold'!
        self.fold = False
        for category in self:
            # Fold kanban columns for categories that have no equipment.
            category.fold = False if category.equipment_count else True

    name = fields.Char('Category Name', required=True, translate=True)
    company_id = fields.Many2one('res.company', string='Company',
                                 default=lambda self: self.env.company)
    technician_user_id = fields.Many2one('res.users', 'Responsible',
                                         tracking=True,
                                         default=lambda self: self.env.uid)
    color = fields.Integer('Color Index')
    note = fields.Text('Comments', translate=True)
    equipment_ids = fields.One2many('maintenance.equipment', 'category_id',
                                    string='Equipments', copy=False)
    equipment_count = fields.Integer(string="Equipment",
                                     compute='_compute_equipment_count')
    maintenance_ids = fields.One2many('maintenance.request', 'category_id',
                                      copy=False)
    maintenance_count = fields.Integer(string="Maintenance Count",
                                       compute='_compute_maintenance_count')
    alias_id = fields.Many2one(
        'mail.alias', 'Alias', ondelete='restrict', required=True,
        help="Email alias for this equipment category. New emails will automatically "
             "create a new equipment under this category.")
    fold = fields.Boolean(string='Folded in Maintenance Pipe',
                          compute='_compute_fold', store=True)

    def _compute_equipment_count(self):
        # One read_group over all categories, then map count per category id.
        equipment_data = self.env['maintenance.equipment'].read_group(
            [('category_id', 'in', self.ids)], ['category_id'], ['category_id'])
        mapped_data = dict([(m['category_id'][0], m['category_id_count'])
                            for m in equipment_data])
        for category in self:
            category.equipment_count = mapped_data.get(category.id, 0)

    def _compute_maintenance_count(self):
        # Same batched pattern as _compute_equipment_count, on requests.
        maintenance_data = self.env['maintenance.request'].read_group(
            [('category_id', 'in', self.ids)], ['category_id'], ['category_id'])
        mapped_data = dict([(m['category_id'][0], m['category_id_count'])
                            for m in maintenance_data])
        for category in self:
            category.maintenance_count = mapped_data.get(category.id, 0)

    @api.model
    def create(self, vals):
        # Context keys consumed by mail.alias.mixin to build the alias record.
        self = self.with_context(alias_model_name='maintenance.request',
                                 alias_parent_model_name=self._name)
        if not vals.get('alias_name'):
            # Default the email alias local part to the category name.
            vals['alias_name'] = vals.get('name')
        category_id = super(MaintenanceEquipmentCategory, self).create(vals)
        # Point the alias back at this category so inbound mails get
        # category_id pre-set on the created maintenance request.
        category_id.alias_id.write({
            'alias_parent_thread_id': category_id.id,
            'alias_defaults': {
                'category_id': category_id.id
            }
        })
        return category_id

    def unlink(self):
        """Forbid deletion of non-empty categories; delete the mail aliases
        only after the categories themselves are gone (ondelete='restrict')."""
        MailAlias = self.env['mail.alias']
        for category in self:
            if category.equipment_ids or category.maintenance_ids:
                raise UserError(
                    _("You cannot delete an equipment category containing equipments or maintenance requests."
                      ))
            MailAlias += category.alias_id
        res = super(MaintenanceEquipmentCategory, self).unlink()
        MailAlias.unlink()
        return res

    def get_alias_model_name(self, vals):
        # Inbound mail on this alias creates maintenance requests by default.
        return vals.get('alias_model', 'maintenance.request')

    def get_alias_values(self):
        # Ensure alias-created requests are linked to this category.
        values = super(MaintenanceEquipmentCategory, self).get_alias_values()
        values['alias_defaults'] = {'category_id': self.id}
        return values