def _run_scheduler_tasks(self, use_new_cursor=False, company_id=False):
    """Execute the recurring stock-scheduler jobs in three phases:
    replenish orderpoints, reserve confirmed moves, then clean up quants.

    :param use_new_cursor: when True, commit after each phase (and each
        assignment chunk) so a long run does not hold one huge transaction.
    :param company_id: optional company to restrict the run to.
    """
    # Phase 1 -- minimum stock rules (reordering rules).
    orderpoint_domain = self._get_orderpoint_domain(company_id=company_id)
    orderpoints = self.env['stock.warehouse.orderpoint'].search(orderpoint_domain)
    # qty_* fields depend on datetime.now(); force a fresh computation.
    orderpoints.sudo()._compute_qty_to_order()
    orderpoints.sudo()._procure_orderpoint_confirm(
        use_new_cursor=use_new_cursor,
        company_id=company_id,
        raise_user_error=False,
    )
    if use_new_cursor:
        self._cr.commit()

    # Phase 2 -- try to reserve every confirmed stock move.
    assign_domain = self._get_moves_to_assign_domain(company_id)
    moves_to_assign = self.env['stock.move'].search(
        assign_domain, limit=None, order='priority desc, date asc, id asc')
    # Work in chunks of 100 to keep memory and transaction size bounded.
    for chunk_ids in split_every(100, moves_to_assign.ids):
        self.env['stock.move'].browse(chunk_ids).sudo()._action_assign()
        if use_new_cursor:
            self._cr.commit()

    # Phase 3 -- merge duplicated quants.
    self.env['stock.quant']._quant_tasks()

    if use_new_cursor:
        self._cr.commit()
def _run_scheduler_tasks(self, use_new_cursor=False, company_id=False):
    """Legacy scheduler pass: confirm orderpoints, assign confirmed moves,
    retry procurement for moves in exception, then merge quants.

    :param use_new_cursor: when True, commit between phases so the run is
        split across several transactions.
    :param company_id: optional company to restrict the run to.
    """
    # Minimum stock rules.
    self.sudo()._procure_orderpoint_confirm(use_new_cursor=use_new_cursor, company_id=company_id)

    # Reserve every confirmed move, chunked to bound transaction size.
    confirmed_moves = self.env['stock.move'].search(
        [('state', '=', 'confirmed')], limit=None,
        order='priority desc, date_expected asc')
    for chunk_ids in split_every(100, confirmed_moves.ids):
        self.env['stock.move'].browse(chunk_ids)._action_assign()
        if use_new_cursor:
            self._cr.commit()

    # Retry procurement for moves stuck in exception. Each attempt runs in
    # its own savepoint so one failure does not roll back the others; a
    # UserError is turned into a scheduled activity instead of aborting.
    exception_moves = self.env['stock.move'].search(self._get_exceptions_domain())
    for move in exception_moves:
        values = move._prepare_procurement_values()
        try:
            with self._cr.savepoint():
                origin = (
                    (move.group_id and (move.group_id.name + ":") or "")
                    + (move.rule_id and move.rule_id.name
                       or move.origin or move.picking_id.name or "/")
                )
                self.run(
                    move.product_id, move.product_uom_qty, move.product_uom,
                    move.location_id,
                    move.rule_id and move.rule_id.name or "/",
                    origin, values,
                )
        except UserError as error:
            self.env['procurement.rule']._log_next_activity(move.product_id, error.name)
    if use_new_cursor:
        self._cr.commit()

    # Merge duplicated quants.
    self.env['stock.quant']._merge_quants()
def _notify_send(self, body, subject, recipients, **mail_values):
    """Create outgoing ``mail.mail`` records notifying ``recipients``,
    batched in chunks of 50 recipients per mail.

    :param body: HTML body shared by every created mail.
    :param subject: subject shared by every created mail.
    :param recipients: recordset of partners to notify.
    :param mail_values: extra values forwarded to ``mail.mail.create``;
        may contain ``mail_message_id`` pointing at the source message.
    :return: tuple ``(emails, recipients_nbr)`` -- the created ``mail.mail``
        recordset and the total number of recipients.
    """
    emails = self.env['mail.mail']
    recipients_nbr = len(recipients)

    # TDE FIXME: missing message parameter. So we find mail_message_id in
    # mail_values and browse it; it should already be in the cache so this
    # should not impact performance.
    # Fix: the message lookup and target-record resolution do not depend on
    # the chunk, so hoist them out of the loop instead of redoing them for
    # every 50 recipients.
    mail_message_id = mail_values.get('mail_message_id')
    message = self.env['mail.message'].browse(mail_message_id) if mail_message_id else None
    target = None
    if message and message.model and message.res_id and message.model in self.env and hasattr(
            self.env[message.model], 'message_get_recipient_values'):
        target = self.env[message.model].browse(message.res_id)

    # Loop-invariant base values; per-chunk recipient values are merged on
    # top last, preserving the original override order
    # (body/subject < mail_values < recipient_values).
    base_values = dict({'body_html': body, 'subject': subject}, **mail_values)

    for email_chunk in split_every(50, recipients.ids):
        if target is not None:
            recipient_values = target.message_get_recipient_values(
                notif_message=message, recipient_ids=email_chunk)
        else:
            recipient_values = self.env['mail.thread'].message_get_recipient_values(
                notif_message=None, recipient_ids=email_chunk)
        create_values = dict(base_values, **recipient_values)
        emails |= self.env['mail.mail'].create(create_values)
    return emails, recipients_nbr