def action_feedback_schedule_next(self, feedback=False):
    """Mark the activity as done and either auto-chain the next activity
    (when ``force_next`` is set on the activity type) or return the
    "Schedule an Activity" wizard action for the user.

    :param feedback: optional feedback text logged when closing the activity
    :return: False when the next activity was created automatically,
        otherwise an act_window dict opening the scheduling wizard
    """
    next_ctx = dict(clean_context(self.env.context))
    next_ctx.update(
        default_previous_activity_type_id=self.activity_type_id.id,
        activity_previous_deadline=self.date_deadline,
        default_res_id=self.res_id,
        default_res_model=self.res_model,
    )
    chain_next = self.force_next
    # Closing the activity unlinks it: never touch ``self`` below this line.
    self.action_feedback(feedback)
    if not chain_next:
        return {
            'name': _('Schedule an Activity'),
            'context': next_ctx,
            'view_type': 'form',
            'view_mode': 'form',
            'res_model': 'mail.activity',
            'views': [(False, 'form')],
            'type': 'ir.actions.act_window',
            'target': 'new',
        }
    # Build the chained activity through new() so the onchange logic fills
    # in the type-dependent defaults before the actual create().
    activity_model = self.env['mail.activity'].with_context(next_ctx)
    draft = activity_model.new(activity_model.default_get(activity_model.fields_get()))
    draft._onchange_previous_activity_type_id()
    draft._onchange_activity_type_id()
    activity_model.create(draft._convert_to_write(draft._cache))
    return False
def launch_replenishment(self):
    """Run a manual replenishment for the wizard's product.

    The requested quantity is first converted into the product's reference
    unit of measure, then the procurement group is run against the
    warehouse stock location.

    :raises UserError: propagated unchanged from the procurement run.
    """
    uom_reference = self.product_id.uom_id
    # Procurements are always expressed in the product's reference UoM.
    self.quantity = self.product_uom_id._compute_quantity(self.quantity, uom_reference)
    # Fix: the original caught UserError only to re-raise it wrapped in a
    # new UserError, which double-wrapped the message and discarded the
    # original traceback. Letting it propagate unchanged is equivalent for
    # callers (same exception type) and keeps the original message intact.
    self.env['procurement.group'].with_context(clean_context(self.env.context)).run(
        self.product_id,
        self.quantity,
        uom_reference,
        self.warehouse_id.lot_stock_id,  # Location
        "Manual Replenishment",  # Name
        "Manual Replenishment",  # Origin
        self._prepare_run_values()  # Values
    )
def action_feedback_schedule_next(self, feedback=False):
    """Close the activity with ``feedback`` and propose scheduling the
    next one.

    :param feedback: optional feedback text logged with the done activity
    :return: False when a follow-up activity was chained automatically,
        otherwise the act_window opening the scheduling wizard
    """
    wizard_ctx = dict(clean_context(self.env.context))
    wizard_ctx.update({
        'default_previous_activity_type_id': self.activity_type_id.id,
        'activity_previous_deadline': self.date_deadline,
        'default_res_id': self.res_id,
        'default_res_model': self.res_model,
    })
    # _action_done() unlinks the activity: ``self`` is dead past this point.
    _messages, chained_activities = self._action_done(feedback=feedback)
    if chained_activities:
        return False
    return {
        'name': _('Schedule an Activity'),
        'context': wizard_ctx,
        'view_type': 'form',
        'view_mode': 'form',
        'res_model': 'mail.activity',
        'views': [(False, 'form')],
        'type': 'ir.actions.act_window',
        'target': 'new',
    }
def action_feedback_schedule_next(self, feedback=False):
    """Done-and-schedule-next: mark the activity as done, then return the
    scheduling wizard action unless a follow-up activity was auto-created.

    :param feedback: optional feedback text logged with the done activity
    :return: act_window dict for the wizard, or False when chained
    """
    action_ctx = dict(
        clean_context(self.env.context),
        default_previous_activity_type_id=self.activity_type_id.id,
        activity_previous_deadline=self.date_deadline,
        default_res_id=self.res_id,
        default_res_model=self.res_model,
    )
    # NOTE: _action_done() deletes the activity; do not use ``self`` below.
    _messages, chained = self._action_done(feedback=feedback)
    if chained:
        return False
    wizard_action = {
        'name': _('Schedule an Activity'),
        'context': action_ctx,
        'view_mode': 'form',
        'res_model': 'mail.activity',
        'views': [(False, 'form')],
        'type': 'ir.actions.act_window',
        'target': 'new',
    }
    return wizard_action
def create(self, values_list):
    """Override of mail.message creation.

    Before delegating to ``super()``, each values dict is completed:
    ``email_from`` / ``message_id`` / ``reply_to`` / ``record_name`` are
    computed when missing, and base64 data-URL images embedded in the body
    are extracted into ``ir.attachment`` records (replaced in the body by
    an access-token URL).  ``tracking_value_ids`` commands are popped and
    re-created as sudo after the create to avoid access-rights issues.
    Attachment read access is checked for every linked attachment.
    """
    tracking_values_list = []
    for values in values_list:
        if 'email_from' not in values:  # needed to compute reply_to
            author_id, email_from = self.env['mail.thread']._message_compute_author(values.get('author_id'), email_from=None, raise_exception=False)
            values['email_from'] = email_from
        if not values.get('message_id'):
            values['message_id'] = self._get_message_id(values)
        if 'reply_to' not in values:
            values['reply_to'] = self._get_reply_to(values)
        if 'record_name' not in values and 'default_record_name' not in self.env.context:
            values['record_name'] = self._get_record_name(values)
        if 'attachment_ids' not in values:
            values['attachment_ids'] = []
        # extract base64 images embedded in the body into attachments
        if 'body' in values:
            # clean_context() strips default_* keys so they do not leak
            # into the created ir.attachment records
            Attachments = self.env['ir.attachment'].with_context(clean_context(self._context))
            data_to_url = {}

            def base64_to_boundary(match):
                # key is the base64 payload itself: identical images in the
                # same body are only turned into one attachment
                key = match.group(2)
                if not data_to_url.get(key):
                    name = match.group(4) if match.group(4) else 'image%s' % len(data_to_url)
                    try:
                        attachment = Attachments.create({
                            'name': name,
                            'datas': match.group(2),
                            'res_model': values.get('model'),
                            'res_id': values.get('res_id'),
                        })
                    except binascii_error:
                        _logger.warning("Impossible to create an attachment out of badly formated base64 embedded image. Image has been removed.")
                        return match.group(3)  # group(3) is the url ending single/double quote matched by the regexp
                    else:
                        attachment.generate_access_token()
                        values['attachment_ids'].append((4, attachment.id))
                        data_to_url[key] = ['/web/image/%s?access_token=%s' % (attachment.id, attachment.access_token), name]
                return '%s%s alt="%s"' % (data_to_url[key][0], match.group(3), data_to_url[key][1])

            values['body'] = _image_dataurl.sub(base64_to_boundary, tools.ustr(values['body']))
        # delegate creation of tracking after the create as sudo to avoid access rights issues
        tracking_values_list.append(values.pop('tracking_value_ids', False))
    messages = super(Message, self).create(values_list)
    # Check read access on every attachment referenced by int / (4, id) /
    # (6, 0, ids) commands; any other command kind falls back on reading
    # the attachment_ids of the freshly-created messages.
    check_attachment_access = []
    if all(isinstance(command, int) or command[0] in (4, 6) for values in values_list for command in values.get('attachment_ids')):
        for values in values_list:
            for command in values.get('attachment_ids'):
                if isinstance(command, int):
                    check_attachment_access += [command]
                elif command[0] == 6:
                    check_attachment_access += command[2]
                else:  # command[0] == 4:
                    check_attachment_access += [command[1]]
    else:
        check_attachment_access = messages.mapped('attachment_ids').ids  # fallback on read if any unknow command
    if check_attachment_access:
        self.env['ir.attachment'].browse(check_attachment_access).check(mode='read')
    for message, values, tracking_values_cmd in zip(messages, values_list, tracking_values_list):
        if tracking_values_cmd:
            # (0, 0, vals) commands are bulk-created as sudo with the
            # message id injected; any other command shape is written as-is
            vals_lst = [dict(cmd[2], mail_message_id=message.id) for cmd in tracking_values_cmd if len(cmd) == 3 and cmd[0] == 0]
            other_cmd = [cmd for cmd in tracking_values_cmd if len(cmd) != 3 or cmd[0] != 0]
            if vals_lst:
                self.env['mail.tracking.value'].sudo().create(vals_lst)
            if other_cmd:
                message.sudo().write({'tracking_value_ids': tracking_values_cmd})
        if message.is_thread_message(values):
            message._invalidate_documents(values.get('model'), values.get('res_id'))
    return messages
def action_feedback(self, feedback=False, attachment_ids=None):
    """Mark the activity as done and post ``feedback`` on the related record.

    :param feedback: optional feedback text logged with the done activity
    :param attachment_ids: optional attachment ids linked to the message
    :return: id of the first posted message, or False when none was posted
    """
    # Strip default_* keys from the context so they do not leak into the
    # records created while marking the activity as done.
    self = self.with_context(clean_context(self.env.context))
    messages, _next_activities = self._action_done(
        feedback=feedback, attachment_ids=attachment_ids)
    # Fix: conditional expression instead of the error-prone
    # ``x and x[0] or False`` idiom (same observable result here, since a
    # record id is always truthy), matching the sibling implementation.
    return messages.ids[0] if messages.ids else False
def action_import(self):
    """Import each lines of "contact_list" as a new contact.

    Parses the pasted text into (name, email) pairs, deduplicates them,
    links already-known contacts to the selected mailing lists and creates
    the remaining ones.  Returns a client notification action describing
    the outcome (error, too-large input, nothing new, or success with the
    list of created contacts as the follow-up action).
    """
    self.ensure_one()
    contacts = tools.email_split_tuples(self.contact_list)
    if not contacts:
        return {
            'type': 'ir.actions.client',
            'tag': 'display_notification',
            'params': {
                'message': _('No valid email address found.'),
                'next': {'type': 'ir.actions.act_window_close'},
                'sticky': False,
                'type': 'warning',
            }
        }
    if len(contacts) > 5000:
        # Fix: the original message read "You have to much emails" —
        # corrected to grammatical English.
        return {
            'type': 'ir.actions.client',
            'tag': 'display_notification',
            'params': {
                'message': _('You have too many emails, please upload a file.'),
                'type': 'warning',
                'sticky': False,
                'next': self.action_open_base_import(),
            }
        }
    all_emails = list({values[1].lower() for values in contacts})
    existing_contacts = self.env['mailing.contact'].search([
        ('email_normalized', 'in', all_emails),
        ('list_ids', 'in', self.mailing_list_ids.ids),
    ])
    existing_contacts = {
        contact.email_normalized: contact
        for contact in existing_contacts
    }
    # Remove duplicated record, keep only the first non-empty name for each email address
    unique_contacts = {}
    for name, email in contacts:
        email = email.lower()
        if unique_contacts.get(email, {}).get('name'):
            continue
        # Known contact not yet on every selected list: just extend its lists
        # (``<`` on recordsets is "strict subset").
        if email in existing_contacts and not self.mailing_list_ids < existing_contacts[email].list_ids:
            existing_contacts[email].list_ids |= self.mailing_list_ids
        if email not in existing_contacts:
            unique_contacts[email] = {
                'name': name,
                'list_ids': self.mailing_list_ids.ids,
            }
    if not unique_contacts:
        return {
            'type': 'ir.actions.client',
            'tag': 'display_notification',
            'params': {
                'message': _('No contacts were imported. All email addresses are already in the mailing list.'),
                'next': {'type': 'ir.actions.act_window_close'},
                'sticky': False,
                'type': 'warning',
            }
        }
    # clean_context() keeps wizard default_* keys out of the new contacts
    new_contacts = self.env['mailing.contact'].with_context(clean_context(self.env.context)).create([
        {
            'email': email,
            **values,
        } for email, values in unique_contacts.items()
    ])
    ignored = len(contacts) - len(unique_contacts)
    return {
        'type': 'ir.actions.client',
        'tag': 'display_notification',
        'params': {
            'message': (
                _('%i Contacts have been imported.', len(unique_contacts))
                + (_(' %i duplicates have been ignored.', ignored) if ignored else '')
            ),
            'type': 'success',
            'sticky': False,
            'next': {
                'context': self.env.context,
                'domain': [('id', 'in', new_contacts.ids)],
                'name': _('New contacts imported'),
                'res_model': 'mailing.contact',
                'type': 'ir.actions.act_window',
                'view_mode': 'list',
                'views': [[False, 'list'], [False, 'form']],
            },
        }
    }
def mark_done_activity_with_followup(self):
    """Mark a CRM-lead activity as done and drive the follow-up workflow.

    Depending on the activity result:
    - while unused follow-ups remain on the activity type, move the lead to
      the result's destination stage and schedule the next follow-up
      activity (deadline offset by ``hours_to_followup``);
    - once all follow-ups are consumed (or the result is not a follow-up),
      advance the lead to the result's final stage — or, lacking one, to
      the next stage by sequence — and schedule the activity type attached
      to that stage.

    :return: True when a new activity was created, None otherwise
    :raises UserError: if the activity has no result set
    """
    self.ensure_one()
    if self.res_model != 'crm.lead':
        return
    activity_result = self.activity_result_id
    if not activity_result:
        raise UserError(_("Missing result of activity to make it done."))
    activity_type = self.activity_type_id
    activity_type_followups = activity_type.followup_ids
    is_followup_result = activity_result.follow_up
    current_lead = self.env['crm.lead'].browse(self.res_id)
    # History entries already produced by this activity type's follow-ups:
    # their count tells how far along the follow-up chain we are.
    activity_history_ids = current_lead.crm_activity_history_ids.filtered(
        lambda act_ht: act_ht.followup_id in activity_type_followups and act_ht.activity_type_id == self.activity_type_id)
    ctx = dict(
        clean_context(self.env.context),
        default_previous_activity_type_id=self.activity_type_id.id,
        default_res_id=self.res_id,
        default_res_model=self.res_model,
        activity_previous_deadline=self.date_deadline,
    )
    previous_deadline = fields.Date.from_string(self.date_deadline)
    # Configuration line matching the chosen result on this activity type
    possible_activity_type_result = \
        activity_type.possible_result_ids.filtered(
            lambda ps: ps.result_id == activity_result
        )
    possible_activity_type_result = possible_activity_type_result and \
        possible_activity_type_result[0] or False
    next_stage = destination_stage = possible_activity_type_result \
        and possible_activity_type_result.destination_stage_id or False
    if activity_type_followups and is_followup_result:
        if len(activity_history_ids) < len(activity_type_followups):
            # Update stage for lead
            if destination_stage \
                    and destination_stage != current_lead.stage_id:
                current_lead.stage_id = destination_stage
            previous_followups = activity_history_ids.mapped('followup_id')
            un_used_followups = [
                flu for flu in activity_type_followups
                if flu not in previous_followups
            ]
            followup = un_used_followups and un_used_followups[0] or False
            # Next deadline = previous deadline + configured hour offset
            new_deadline = previous_deadline + relativedelta(
                **{'hours': followup.hours_to_followup})
            ctx.update(
                dict(
                    default_date_deadline=fields.Date.to_string(
                        new_deadline),
                    default_followup_id=followup.id,
                    default_activity_type_id=self.activity_type_id.id,
                    default_summary=self.activity_type_id.summary,
                ))
            # Create new followup activity
            Activity = self.env['mail.activity'].with_context(ctx)
            res = Activity.new(Activity.default_get(Activity.fields_get()))
            Activity.create(res._convert_to_write(res._cache))
            return True
        else:
            # Update next stage when reached the last follow up
            next_stage = possible_activity_type_result \
                and possible_activity_type_result.final_stage_id or False
    # Update next_state and create new activity
    if not next_stage:
        # Fallback: first stage at or after the lead's current sequence
        next_stage = self.env['crm.stage'].search(
            [('sequence', '>=', current_lead.stage_id.sequence)],
            order="sequence", limit=1)
    if not next_stage:
        return
    if current_lead.stage_id != next_stage:
        current_lead.stage_id = next_stage
    next_activity_type = self.env['mail.activity.type'].search(
        [('stage_id', '=', next_stage.id)], order="sequence", limit=1)
    if not next_activity_type:
        return
    ctx.update(
        dict(
            activity_previous_deadline=previous_deadline,
            default_activity_type_id=next_activity_type.id,
        ))
    Activity = self.env['mail.activity'].with_context(ctx)
    res = Activity.new(Activity.default_get(Activity.fields_get()))
    res._onchange_activity_type_id()
    Activity.create(res._convert_to_write(res._cache))
    return True
def action_feedback(self, feedback=False, attachment_ids=None):
    """Mark the activity as done, posting ``feedback`` (with the optional
    attachments) on the related record.

    :param feedback: optional feedback text logged with the done activity
    :param attachment_ids: optional attachment ids linked to the message
    :return: id of the posted message, or False when none was posted
    """
    # Drop default_* context keys before closing the activity so they do
    # not leak into the records created along the way.
    cleaned = self.with_context(clean_context(self.env.context))
    messages, _next_activities = cleaned._action_done(
        feedback=feedback,
        attachment_ids=attachment_ids,
    )
    if messages:
        return messages[0].id
    return False
def _create(self, data_list):
    """ Create records from the stored field values in ``data_list``.

    Low-level ORM create: inserts one SQL row per item of ``data_list``,
    primes the cache for the new records, updates many2one inverses,
    rebuilds parent_path, triggers recomputation, writes non-column
    (``other``) fields, validates Python constraints, checks access rules
    and finally stores translations for translated fields.

    Each ``data`` dict is expected to carry at least ``stored``,
    ``inherited``, ``protected`` and ``audit_hash`` keys (prepared by the
    caller); ``data['record']`` is filled in here.
    """
    assert data_list
    cr = self.env.cr
    quote = '"{}"'.format

    # insert rows
    ids = []                        # ids of created records
    other_fields = set()            # non-column fields
    translated_fields = set()       # translated fields

    # column names, formats and values (for common fields)
    columns0 = [('id', "nextval(%s)", self._sequence)]
    # NOTE(review): log-access columns are skipped when the first data dict
    # carries an 'audit_hash' — presumably audit-hashed imports provide
    # their own create/write metadata; confirm against the caller.
    if self._log_access and not data_list[0]['audit_hash']:
        columns0.append(('create_uid', "%s", self._uid))
        columns0.append(
            ('create_date', "%s", AsIs("(now() at time zone 'UTC')")))
        columns0.append(('write_uid', "%s", self._uid))
        columns0.append(
            ('write_date', "%s", AsIs("(now() at time zone 'UTC')")))

    for data in data_list:
        # determine column values
        stored = data['stored']
        columns = [column for column in columns0 if column[0] not in stored]
        for name, val in sorted(stored.items()):
            field = self._fields[name]
            assert field.store
            if field.column_type:
                col_val = field.convert_to_column(val, self, stored)
                columns.append((name, field.column_format, col_val))
                if field.translate is True:
                    translated_fields.add(field)
            else:
                other_fields.add(field)

        # insert a row with the given columns
        query = "INSERT INTO {} ({}) VALUES ({}) RETURNING id".format(
            quote(self._table),
            ", ".join(quote(name) for name, fmt, val in columns),
            ", ".join(fmt for name, fmt, val in columns),
        )
        params = [val for name, fmt, val in columns]
        cr.execute(query, params)
        ids.append(cr.fetchone()[0])

    # put the new records in cache, and update inverse fields, for many2one
    #
    # cachetoclear is an optimization to avoid modified()'s cost until
    # other_fields are processed
    cachetoclear = []
    records = self.browse(ids)
    inverses_update = defaultdict(list)  # {(field, value): ids}
    for data, record in zip(data_list, records):
        data['record'] = record
        # DLE P104: test_inherit.py, test_50_search_one2many
        vals = dict(
            {k: v for d in data['inherited'].values() for k, v in d.items()},
            **data['stored'])
        set_vals = list(vals) + LOG_ACCESS_COLUMNS + \
            [self.CONCURRENCY_CHECK_FIELD, 'id', 'parent_path']
        for field in self._fields.values():
            if field.type in ('one2many', 'many2many'):
                self.env.cache.set(record, field, ())
            elif field.related and not field.column_type:
                self.env.cache.set(record, field,
                                   field.convert_to_cache(None, record))
            # DLE P123: `test_adv_activity`, `test_message_assignation_inbox`, `test_message_log`, `test_create_mail_simple`, ...
            # Set `mail.message.parent_id` to False in cache so it doesn't do the useless SELECT when computing the modified of `child_ids`
            # in other words, if `parent_id` is not set, no other message `child_ids` are impacted.
            # + avoid the fetch of fields which are False. e.g. if a boolean field is not passed in vals and as no default set in the field attributes,
            # then we know it can be set to False in the cache in the case
            # of a create.
            elif field.name not in set_vals and not field.compute:
                self.env.cache.set(record, field,
                                   field.convert_to_cache(None, record))
        for fname, value in vals.items():
            field = self._fields[fname]
            if field.type in ('one2many', 'many2many'):
                cachetoclear.append((record, field))
            else:
                cache_value = field.convert_to_cache(value, record)
                self.env.cache.set(record, field, cache_value)
                if field.type in ('many2one', 'many2one_reference'
                                  ) and record._field_inverses[field]:
                    inverses_update[(field, cache_value)].append(record.id)

    for (field, value), record_ids in inverses_update.items():
        field._update_inverses(self.browse(record_ids), value)

    # update parent_path
    records._parent_store_create()

    # protect fields being written against recomputation
    protected = [(data['protected'], data['record']) for data in data_list]
    with self.env.protecting(protected):
        # mark computed fields as todo
        records.modified(self._fields, create=True)

        if other_fields:
            # discard default values from context for other fields
            others = records.with_context(clean_context(self._context))
            for field in sorted(other_fields, key=attrgetter('_sequence')):
                field.create([(other, data['stored'][field.name])
                              for other, data in zip(others, data_list)
                              if field.name in data['stored']])
            # mark fields to recompute
            records.modified([field.name for field in other_fields],
                             create=True)

        # if value in cache has not been updated by other_fields, remove it
        for record, field in cachetoclear:
            if self.env.cache.contains(
                    record, field) and not self.env.cache.get(record, field):
                self.env.cache.remove(record, field)

    # check Python constraints for stored fields
    records._validate_fields(name for data in data_list
                             for name in data['stored'])
    records.check_access_rule('create')

    # add translations
    if self.env.lang and self.env.lang != 'en_US':
        Translations = self.env['ir.translation']
        for field in translated_fields:
            tname = "%s,%s" % (field.model_name, field.name)
            for data in data_list:
                if field.name in data['stored']:
                    record = data['record']
                    val = data['stored'][field.name]
                    Translations._set_ids(tname, 'model', self.env.lang,
                                          record.ids, val, val)
    return records
def _save_form(self, ocr_results, no_ref=False):
    """Fill the invoice form from OCR extraction results.

    Opens the move through a ``Form`` so all onchanges run as they would
    in the UI, then fills partner (matched by VAT, by name, or created
    from the VAT), bank account (created from IBAN/SWIFT when unknown),
    dates, reference, currency, payment reference and invoice lines.
    Finally, small rounding differences between the OCR total and the
    computed total are absorbed into a tax line.

    :param ocr_results: dict of OCR feature results; each feature exposes
        a ``selected_value.content`` entry when detected
    :param no_ref: when True, never overwrite/assign the move reference
    :raises ValidationError: when lines cannot be created because the
        journal's default accounts are not configured
    """
    # Pull every OCR feature, defaulting to "" (or None/[] below) when the
    # feature was not detected.
    supplier_ocr = ocr_results['supplier']['selected_value'][
        'content'] if 'supplier' in ocr_results else ""
    date_ocr = ocr_results['date']['selected_value'][
        'content'] if 'date' in ocr_results else ""
    due_date_ocr = ocr_results['due_date']['selected_value'][
        'content'] if 'due_date' in ocr_results else ""
    total_ocr = ocr_results['total']['selected_value'][
        'content'] if 'total' in ocr_results else ""
    subtotal_ocr = ocr_results['subtotal']['selected_value'][
        'content'] if 'subtotal' in ocr_results else ""
    invoice_id_ocr = ocr_results['invoice_id']['selected_value'][
        'content'] if 'invoice_id' in ocr_results else ""
    currency_ocr = ocr_results['currency']['selected_value'][
        'content'] if 'currency' in ocr_results else ""
    vat_number_ocr = ocr_results['VAT_Number']['selected_value'][
        'content'] if 'VAT_Number' in ocr_results else ""
    payment_ref_ocr = ocr_results['payment_ref']['selected_value'][
        'content'] if 'payment_ref' in ocr_results else ""
    iban_ocr = ocr_results['iban']['selected_value'][
        'content'] if 'iban' in ocr_results else ""
    SWIFT_code_ocr = json.loads(
        ocr_results['SWIFT_code']['selected_value']
        ['content']) if 'SWIFT_code' in ocr_results else None
    invoice_lines = ocr_results[
        'invoice_lines'] if 'invoice_lines' in ocr_results else []
    vals_invoice_lines = self._get_invoice_lines(invoice_lines,
                                                 subtotal_ocr)
    if 'default_journal_id' in self._context:
        self_ctx = self
    else:
        # we need to make sure the type is in the context as _get_default_journal uses it
        self_ctx = self.with_context(
            default_move_type=self.move_type
        ) if 'default_move_type' not in self._context else self
        self_ctx = self_ctx.with_company(self.company_id.id)
        self_ctx = self_ctx.with_context(
            default_journal_id=self_ctx._get_default_journal().id)
    with Form(self_ctx) as move_form:
        if not move_form.partner_id:
            if vat_number_ocr:
                # 1) exact VAT match; 2) VAT without its 2-letter country
                # prefix; 3) fuzzy scan over up to 1000 partners with the
                # common national prefixes (BTW/MWST/ABN) stripped.
                partner_vat = self.env["res.partner"].search(
                    [("vat", "=ilike", vat_number_ocr)], limit=1)
                if not partner_vat:
                    partner_vat = self.env["res.partner"].search(
                        [("vat", "=ilike", vat_number_ocr[2:])], limit=1)
                if not partner_vat:
                    for partner in self.env["res.partner"].search(
                            [("vat", "!=", False)], limit=1000):
                        vat = partner.vat.upper()
                        vat_cleaned = vat.replace("BTW", "").replace(
                            "MWST", "").replace("ABN", "")
                        vat_cleaned = re.sub(r'[^A-Z0-9]', '', vat_cleaned)
                        if vat_cleaned == vat_number_ocr or vat_cleaned == vat_number_ocr[
                                2:]:
                            partner_vat = partner
                            break
                if partner_vat:
                    move_form.partner_id = partner_vat
            if not move_form.partner_id:
                # Fall back on a name-based match with the OCR supplier
                partner_id = self.find_partner_id_with_name(supplier_ocr)
                if partner_id != 0:
                    move_form.partner_id = self.env["res.partner"].browse(
                        partner_id)
            if not move_form.partner_id and vat_number_ocr:
                # Last resort: create the supplier from the VAT number
                created_supplier = self._create_supplier_from_vat(
                    vat_number_ocr)
                if created_supplier:
                    move_form.partner_id = created_supplier
        if iban_ocr and not move_form.partner_bank_id:
            bank_account = self.env['res.partner.bank'].search(
                [('acc_number', '=ilike', iban_ocr)])
            if bank_account.exists():
                # NOTE(review): this compares a res.partner record with an
                # id (``partner_id == move_form.partner_id.id``) — looks
                # like it can never match; confirm intended behavior.
                if bank_account.partner_id == move_form.partner_id.id:
                    move_form.partner_bank_id = bank_account
            else:
                vals = {
                    'partner_id': move_form.partner_id.id,
                    'acc_number': iban_ocr
                }
                if SWIFT_code_ocr:
                    bank_id = self.env['res.bank'].search(
                        [('bic', '=', SWIFT_code_ocr['bic'])], limit=1)
                    if bank_id.exists():
                        vals['bank_id'] = bank_id.id
                    # Unknown BIC: create the bank only when the OCR
                    # service verified the BIC against its country
                    if not bank_id.exists(
                    ) and SWIFT_code_ocr['verified_bic']:
                        country_id = self.env[
                            'res.country'].search([(
                                'code', '=',
                                SWIFT_code_ocr['country_code'])
                            ], limit=1)
                        if country_id.exists():
                            vals['bank_id'] = self.env[
                                'res.bank'].create({
                                    'name': SWIFT_code_ocr['name'],
                                    'country': country_id.id,
                                    'city': SWIFT_code_ocr['city'],
                                    'bic': SWIFT_code_ocr['bic']
                                }).id
                move_form.partner_bank_id = self.with_context(
                    clean_context(self.env.context)
                ).env['res.partner.bank'].create(vals)
        due_date_move_form = move_form.invoice_date_due  # remember the due_date, as it could be modified by the onchange() of invoice_date
        context_create_date = str(
            fields.Date.context_today(self, self.create_date))
        # Only overwrite dates that are unset or still at their default
        # (the record's creation date in the user's timezone)
        if date_ocr and (not move_form.invoice_date
                         or move_form.invoice_date == context_create_date):
            move_form.invoice_date = date_ocr
        if due_date_ocr and (not due_date_move_form
                             or due_date_move_form == context_create_date):
            move_form.invoice_date_due = due_date_ocr
        if not move_form.ref and not no_ref:
            move_form.ref = invoice_id_ocr
        if self.user_has_groups('base.group_multi_currency') and (
                not move_form.currency_id
                or move_form.currency_id == self._get_default_currency()):
            # Match the OCR currency on label, name or symbol
            currency = self.env["res.currency"].search([
                '|', '|', ('currency_unit_label', 'ilike', currency_ocr),
                ('name', 'ilike', currency_ocr),
                ('symbol', 'ilike', currency_ocr)
            ], limit=1)
            if currency:
                move_form.currency_id = currency
        if payment_ref_ocr and not move_form.payment_reference:
            move_form.payment_reference = payment_ref_ocr
        if not move_form.invoice_line_ids:
            for i, line_val in enumerate(vals_invoice_lines):
                with move_form.invoice_line_ids.new() as line:
                    line.name = line_val['name']
                    line.price_unit = line_val['price_unit']
                    line.quantity = line_val['quantity']
                    if not line.account_id:
                        raise ValidationError(
                            _(
                                "The OCR module is not able to generate the invoice lines because the default accounts are not correctly set on the %s journal.",
                                move_form.journal_id.name_get()[0][1]))
                # Re-open the line to replace the default taxes with the
                # OCR-detected ones (adjusting price for included taxes)
                with move_form.invoice_line_ids.edit(i) as line:
                    line.tax_ids.clear()
                    for taxes_record in line_val['tax_ids']:
                        if taxes_record.price_include:
                            line.price_unit *= 1 + taxes_record.amount / 100
                        line.tax_ids.add(taxes_record)
        # if the total on the invoice doesn't match the total computed by Odoo, adjust the taxes so that it matches
        for i in range(len(move_form.line_ids)):
            with move_form.line_ids.edit(i) as line:
                if line.tax_repartition_line_id and total_ocr:
                    rounding_error = move_form.amount_total - total_ocr
                    # Tolerate at most one rounding unit per invoice line
                    threshold = len(vals_invoice_lines
                                    ) * move_form.currency_id.rounding
                    if not move_form.currency_id.is_zero(
                            rounding_error) and abs(
                                rounding_error) < threshold:
                        line.debit -= rounding_error
                        break