class PosDetails(models.TransientModel):
    """Wizard gathering the parameters (period, POS configs) of the Point of
    Sale details report before rendering it."""
    _name = 'pos.details.wizard'
    _description = 'Point of Sale Details Report'

    def _default_start_date(self):
        """ Find the earliest start_date of the latests sessions """
        # restrict to configs available to the user
        config_ids = self.env['pos.config'].search([]).ids
        # exclude configs has not been opened for 2 days
        self.env.cr.execute(
            """
            SELECT max(start_at) as start, config_id
            FROM pos_session
            WHERE config_id = ANY(%s)
              AND start_at > (NOW() - INTERVAL '2 DAYS')
            GROUP BY config_id
            """, (config_ids, ))
        latest_start_dates = [
            res['start'] for res in self.env.cr.dictfetchall()
        ]
        # earliest of the latest sessions
        return latest_start_dates and min(
            latest_start_dates) or fields.Datetime.now()

    start_date = fields.Datetime(required=True, default=_default_start_date)
    end_date = fields.Datetime(required=True, default=fields.Datetime.now)
    pos_config_ids = fields.Many2many(
        'pos.config', 'pos_detail_configs',
        default=lambda s: s.env['pos.config'].search([]))

    @api.onchange('start_date')
    def _onchange_start_date(self):
        # keep the interval consistent: end can never precede start
        if self.start_date and self.end_date and self.end_date < self.start_date:
            self.end_date = self.start_date

    @api.onchange('end_date')
    def _onchange_end_date(self):
        # FIX: also guard on start_date — comparing a datetime against an
        # unset (False) start_date raises a TypeError in Python 3. Mirrors
        # the guard used in _onchange_start_date.
        if self.start_date and self.end_date and self.end_date < self.start_date:
            self.start_date = self.end_date

    @api.multi
    def generate_report(self):
        """Render the POS sale details report for the selected period/configs.

        :raises UserError: when the company has no logo or no external
            report layout configured (both required by the report).
        """
        if (not self.env.user.company_id.logo):
            raise UserError(
                _("You have to set a logo or a layout for your company."))
        elif (not self.env.user.company_id.external_report_layout_id):
            raise UserError(
                _("You have to set your reports's header and footer layout."))
        data = {
            'date_start': self.start_date,
            'date_stop': self.end_date,
            'config_ids': self.pos_config_ids.ids
        }
        return self.env.ref('point_of_sale.sale_details_report').report_action(
            [], data=data)
class MrpWorkcenterProductivity(models.Model):
    """Log of productive/unproductive time spent on a work center."""
    _name = "mrp.workcenter.productivity"
    _description = "Workcenter Productivity Log"
    _order = "id desc"
    _rec_name = "loss_id"

    # FIX: readonly expects a boolean, not the string 'True'
    production_id = fields.Many2one('mrp.production',
                                    string='Manufacturing Order',
                                    related='workorder_id.production_id',
                                    readonly=True)
    workcenter_id = fields.Many2one('mrp.workcenter', "Work Center",
                                    required=True)
    workorder_id = fields.Many2one('mrp.workorder', 'Work Order')
    user_id = fields.Many2one('res.users', "User",
                              default=lambda self: self.env.uid)
    loss_id = fields.Many2one('mrp.workcenter.productivity.loss',
                              "Loss Reason", ondelete='restrict',
                              required=True)
    # FIX: the first positional argument of fields.Selection is the selection
    # list itself; "Effectiveness" is the field label and must be passed as
    # string=. The selection values come from the related loss_id.loss_type.
    loss_type = fields.Selection(string="Effectiveness",
                                 related='loss_id.loss_type',
                                 store=True, readonly=False)
    description = fields.Text('Description')
    date_start = fields.Datetime('Start Date', default=fields.Datetime.now,
                                 required=True)
    date_end = fields.Datetime('End Date')
    duration = fields.Float('Duration', compute='_compute_duration',
                            store=True)

    @api.depends('date_end', 'date_start')
    def _compute_duration(self):
        """Duration in minutes between date_start and date_end.

        For non-productive time on a work center with a working calendar,
        only the calendar's working hours are counted.
        """
        for blocktime in self:
            if blocktime.date_end:
                d1 = fields.Datetime.from_string(blocktime.date_start)
                d2 = fields.Datetime.from_string(blocktime.date_end)
                diff = d2 - d1
                if (blocktime.loss_type not in ('productive', 'performance')
                        ) and blocktime.workcenter_id.resource_calendar_id:
                    r = blocktime.workcenter_id.get_work_days_data(d1, d2)['hours']
                    blocktime.duration = round(r * 60, 2)
                else:
                    blocktime.duration = round(diff.total_seconds() / 60.0, 2)
            else:
                # open-ended block: no duration yet
                blocktime.duration = 0.0

    @api.multi
    def button_block(self):
        # End every running work order on the blocked work center.
        self.ensure_one()
        self.workcenter_id.order_ids.end_all()
class User(models.Model):
    """Extends res.users with the Google Calendar synchronization state."""
    _inherit = 'res.users'

    # OAuth tokens used against the Google Calendar API; never copied when
    # duplicating a user.
    google_calendar_rtoken = fields.Char('Refresh Token', copy=False)
    google_calendar_token = fields.Char('User token', copy=False)
    google_calendar_token_validity = fields.Datetime(
        'Token Validity', copy=False)
    # Bookkeeping of the synchronization itself.
    google_calendar_last_sync_date = fields.Datetime(
        'Last synchro date', copy=False)
    google_calendar_cal_id = fields.Char(
        'Calendar ID',
        copy=False,
        help='Last Calendar ID who has been synchronized. If it is changed, we remove all links between GoogleID and Swerp Google Internal ID')
class Partner(models.Model):
    """Extends res.partner with calendar notification/attendee helpers."""
    _inherit = 'res.partner'

    calendar_last_notif_ack = fields.Datetime(
        'Last notification marked as read from base Calendar',
        default=fields.Datetime.now)

    @api.multi
    def get_attendee_detail(self, meeting_id):
        """ Return a list of tuple (id, name, status)
            Used by base_calendar.js : Many2ManyAttendee
        """
        datas = []
        meeting = None
        if meeting_id:
            # meeting_id may be a "virtual" recurrence id; resolve to real ids
            meeting = self.env['calendar.event'].browse(
                get_real_ids(meeting_id))
        for partner in self:
            data = partner.name_get()[0]
            # [id, display_name, attendee_state, color]
            data = [data[0], data[1], False, partner.color]
            if meeting:
                for attendee in meeting.attendee_ids:
                    if attendee.partner_id.id == partner.id:
                        data[2] = attendee.state
            datas.append(data)
        return datas

    @api.model
    def _set_calendar_last_notif_ack(self):
        """Stamp the current user's partner with 'now' as the last time
        calendar notifications were acknowledged."""
        partner = self.env['res.users'].browse(
            self.env.context.get('uid', self.env.uid)).partner_id
        # FIX: use fields.Datetime.now() like the rest of the calendar code
        # instead of a raw datetime.now(), so the value goes through the
        # ORM's canonical datetime handling consistently.
        partner.write({'calendar_last_notif_ack': fields.Datetime.now()})
        return
class Attendee(models.Model):
    """Extends calendar.attendee with the Google-synchronization metadata."""
    _inherit = 'calendar.attendee'

    google_internal_event_id = fields.Char('Google Calendar Event Id')
    oe_synchro_date = fields.Datetime('Swerp Synchro Date')

    _sql_constraints = [
        ('google_id_uniq',
         'unique(google_internal_event_id,partner_id,event_id)',
         'Google ID should be unique!')
    ]

    @api.multi
    def write(self, values):
        """Flag the related meeting(s) as needing a Google re-sync whenever
        an attendee changes, unless the change itself comes from Google
        (curr_attendee) or from a brand-new meeting (NewMeeting)."""
        # If attendees are updated, we need to specify that next synchro need an action
        # Except if it come from an update_from_google
        skip_stamp = (self._context.get('curr_attendee', False)
                      or self._context.get('NewMeeting', False))
        for attendee in self:
            target_event_id = values.get('event_id', attendee.event_id.id)
            if not skip_stamp:
                self.env['calendar.event'].browse(target_event_id).write(
                    {'oe_update_date': fields.Datetime.now()})
        return super(Attendee, self).write(values)
class LeadTest(models.Model):
    """Test-only lead model exercised by the base_automation test suite."""
    _name = "base.automation.lead.test"
    _description = "Automated Rule Test"

    name = fields.Char(string='Subject', required=True, index=True)
    user_id = fields.Many2one('res.users', string='Responsible')
    state = fields.Selection(
        [('draft', 'New'),
         ('cancel', 'Cancelled'),
         ('open', 'In Progress'),
         ('pending', 'Pending'),
         ('done', 'Closed')],
        string="Status", readonly=True, default='draft')
    active = fields.Boolean(default=True)
    partner_id = fields.Many2one('res.partner', string='Partner')
    date_action_last = fields.Datetime(string='Last Action', readonly=True)
    customer = fields.Boolean(related='partner_id.customer',
                              readonly=True, store=True)
    line_ids = fields.One2many('base.automation.line.test', 'lead_id')
    priority = fields.Boolean()
    deadline = fields.Boolean(compute='_compute_deadline', store=True)
    is_assigned_to_admin = fields.Boolean(string='Assigned to admin user')

    @api.depends('priority')
    def _compute_deadline(self):
        # Priority leads get a deadline three days after creation;
        # non-priority leads have none.
        for record in self:
            if record.priority:
                record.deadline = fields.Datetime.from_string(
                    record.create_date) + relativedelta.relativedelta(days=3)
            else:
                record.deadline = False
class ConverterTest(models.Model):
    """Test model exposing one field per type the web_editor converters
    must handle."""
    _name = 'web_editor.converter.test'
    _description = 'Web Editor Converter Test'

    # disable translation export for those brilliant field labels and values
    _translate = False

    # Scalar types.
    char = fields.Char()
    integer = fields.Integer()
    float = fields.Float()
    numeric = fields.Float(digits=(16, 2))
    # Relational and binary types.
    many2one = fields.Many2one('web_editor.converter.test.sub')
    binary = fields.Binary()
    # Temporal types.
    date = fields.Date()
    datetime = fields.Datetime()
    # Selections with integer keys and with string keys.
    selection = fields.Selection([
        (1, "réponse A"),
        (2, "réponse B"),
        (3, "réponse C"),
        (4, "réponse <D>"),
    ])
    selection_str = fields.Selection([
        ('A', "Qu'il n'est pas arrivé à Toronto"),
        ('B', "Qu'il était supposé arriver à Toronto"),
        ('C', "Qu'est-ce qu'il fout ce maudit pancake, tabernacle ?"),
        ('D', "La réponse D"),
    ], string=u"Lorsqu'un pancake prend l'avion à destination de Toronto et "
              u"qu'il fait une escale technique à St Claude, on dit:")
    # Rich and plain text.
    html = fields.Html()
    text = fields.Text()
class IrLogging(models.Model):
    """Persistent log entries, fed mostly by raw SQL (see --log-db)."""
    _name = 'ir.logging'
    _description = 'Logging'
    _order = 'id DESC'

    # The _log_access fields are defined manually for the following reasons:
    #
    # - The entries in ir_logging are filled in with sql queries bypassing the orm. As the --log-db
    #   cli option allows to insert ir_logging entries into a remote database, the one2many *_uid
    #   fields make no sense in the first place but we will keep it for backward compatibility.
    #
    # - Also, when an ir_logging entry is triggered by the orm (when using --log-db) at the moment
    #   it is making changes to the res.users model, the ALTER TABLE will aquire an exclusive lock
    #   on res_users, preventing the ir_logging INSERT to be processed, hence the ongoing module
    #   install/update will hang forever as the orm is blocked by the ir_logging query that will
    #   never occur.
    create_uid = fields.Integer(string='Created by', readonly=True)
    create_date = fields.Datetime(string='Created on', readonly=True)
    write_uid = fields.Integer(string='Last Updated by', readonly=True)
    write_date = fields.Datetime(string='Last Updated on', readonly=True)
    name = fields.Char(required=True)
    type = fields.Selection([('client', 'Client'), ('server', 'Server')],
                            required=True, index=True)
    dbname = fields.Char(string='Database Name', index=True)
    level = fields.Char(index=True)
    message = fields.Text(required=True)
    path = fields.Char(required=True)
    func = fields.Char(string='Function', required=True)
    line = fields.Char(required=True)

    @api.model_cr
    def init(self):
        """At install/upgrade time, drop the legacy foreign-key constraint on
        write_uid (see the rationale above: raw inserts bypass the orm and a
        remote --log-db database has no res_users to reference)."""
        super(IrLogging, self).init()
        # probe the catalog for the legacy constraint before touching the table
        self._cr.execute(
            "select 1 from information_schema.constraint_column_usage where table_name = 'ir_logging' and constraint_name = 'ir_logging_write_uid_fkey'"
        )
        if self._cr.rowcount:
            # DROP CONSTRAINT unconditionally takes an ACCESS EXCLUSIVE lock
            # on the table, even "IF EXISTS" is set and not matching; disabling
            # the relevant trigger instead acquires SHARE ROW EXCLUSIVE, which
            # still conflicts with the ROW EXCLUSIVE needed for an insert
            # NOTE(review): the comment above describes a trigger-disabling
            # strategy, but the statement below actually performs the DROP
            # CONSTRAINT — confirm which locking behavior is intended.
            self._cr.execute(
                "ALTER TABLE ir_logging DROP CONSTRAINT ir_logging_write_uid_fkey"
            )
class ImLivechatReportOperator(models.Model):
    """ Livechat Support Report on the Operator """
    _name = "im_livechat.report.operator"
    _description = "Livechat Support Operator Report"
    _order = 'livechat_channel_id, partner_id'
    # read-only reporting model backed by the SQL view created in init()
    _auto = False

    partner_id = fields.Many2one('res.partner', 'Operator', readonly=True)
    livechat_channel_id = fields.Many2one('im_livechat.channel', 'Channel',
                                          readonly=True)
    nbr_channel = fields.Integer('# of Sessions', readonly=True,
                                 group_operator="sum",
                                 help="Number of conversation")
    channel_id = fields.Many2one('mail.channel', 'Conversation',
                                 readonly=True)
    start_date = fields.Datetime('Start Date of session', readonly=True,
                                 help="Start date of the conversation")
    time_to_answer = fields.Float(
        'Time to answer',
        digits=(16, 2),
        readonly=True,
        group_operator="avg",
        help="Average time to give the first answer to the visitor")
    duration = fields.Float('Average duration',
                            digits=(16, 2),
                            readonly=True,
                            group_operator="avg",
                            help="Duration of the conversation (in seconds)")

    @api.model_cr
    def init(self):
        """(Re)create the SQL view backing this reporting model: one row per
        (operator, livechat channel, conversation), with session counts,
        duration and time-to-first-answer aggregates."""
        # Note : start_date_hour must be remove when the read_group will allow grouping on the hour of a datetime. Don't forget to change the view !
        tools.drop_view_if_exists(self.env.cr, 'im_livechat_report_operator')
        self.env.cr.execute("""
            CREATE OR REPLACE VIEW im_livechat_report_operator AS (
                SELECT
                    row_number() OVER () AS id,
                    P.id as partner_id,
                    L.id as livechat_channel_id,
                    count(C.id) as nbr_channel,
                    C.id as channel_id,
                    C.create_date as start_date,
                    EXTRACT('epoch' FROM (max((SELECT (max(M.create_date)) FROM mail_message M JOIN mail_message_mail_channel_rel R ON (R.mail_message_id = M.id) WHERE R.mail_channel_id = C.id))-C.create_date)) as duration,
                    EXTRACT('epoch' from ((SELECT min(M.create_date) FROM mail_message M, mail_message_mail_channel_rel R WHERE M.author_id=P.id AND R.mail_channel_id = C.id AND R.mail_message_id = M.id)-(SELECT min(M.create_date) FROM mail_message M, mail_message_mail_channel_rel R WHERE M.author_id IS NULL AND R.mail_channel_id = C.id AND R.mail_message_id = M.id))) as time_to_answer
                FROM im_livechat_channel_im_user O
                    JOIN res_users U ON (O.user_id = U.id)
                    JOIN res_partner P ON (U.partner_id = P.id)
                    JOIN im_livechat_channel L ON (L.id = O.channel_id)
                    JOIN mail_channel C ON (C.livechat_channel_id = L.id)
                    JOIN mail_channel_partner MCP ON (MCP.partner_id = P.id AND MCP.channel_id = C.id)
                GROUP BY P.id, L.id, C.id, C.create_date
            )
        """)
class ComplexModel(models.Model):
    """Test model mixing the basic field types the base_import tests need."""
    _name = name('complex')
    _description = 'Tests: Base Import Model Complex'

    # Numeric fields.
    f = fields.Float()
    m = fields.Monetary()
    # Text field.
    c = fields.Char()
    # Currency backing the monetary field above.
    currency_id = fields.Many2one('res.currency')
    # Temporal fields.
    d = fields.Date()
    dt = fields.Datetime()
class StockQuant(models.Model):
    """Extends stock.quant with lot expiry data for FEFO removal."""
    _inherit = 'stock.quant'

    removal_date = fields.Datetime(related='lot_id.removal_date',
                                   store=True, readonly=False)

    @api.model
    def _get_removal_strategy_order(self, removal_strategy):
        """FEFO orders quants by removal date first; any other strategy is
        delegated to the parent implementation."""
        if removal_strategy != 'fefo':
            return super(StockQuant,
                         self)._get_removal_strategy_order(removal_strategy)
        return 'removal_date, in_date, id'
class pos_config(models.Model):
    """Extends pos.config with a per-user product cache for faster POS
    loading."""
    _inherit = 'pos.config'

    @api.one
    @api.depends('cache_ids')
    def _get_oldest_cache_time(self):
        """Compute the write date of the oldest cache entry for this config."""
        pos_cache = self.env['pos.cache']
        oldest_cache = pos_cache.search([('config_id', '=', self.id)],
                                        order='write_date',
                                        limit=1)
        if oldest_cache:
            self.oldest_cache_time = oldest_cache.write_date
        else:
            # FIX: a compute method must assign its field on every branch;
            # leave it unset when no cache exists yet.
            self.oldest_cache_time = False

    # Use a related model to avoid the load of the cache when the pos load his config
    cache_ids = fields.One2many('pos.cache', 'config_id')
    oldest_cache_time = fields.Datetime(compute='_get_oldest_cache_time',
                                        string='Oldest cache time',
                                        readonly=True)

    def _get_cache_for_user(self):
        """Return this config's cache record computed by the current user,
        or None when there is none yet."""
        pos_cache = self.env['pos.cache']
        cache_for_user = pos_cache.search([
            ('id', 'in', self.cache_ids.ids),
            ('compute_user_id', '=', self.env.uid)
        ])
        if cache_for_user:
            return cache_for_user[0]
        else:
            return None

    @api.multi
    def get_products_from_cache(self, fields, domain):
        """Return the cached product data for (fields, domain), creating the
        cache record for the current user on first access.

        :param fields: list of product field names to read
        :param domain: product search domain
        """
        cache_for_user = self._get_cache_for_user()
        if cache_for_user:
            return cache_for_user.get_cache(domain, fields)
        else:
            pos_cache = self.env['pos.cache']
            pos_cache.create({
                'config_id': self.id,
                'product_domain': str(domain),
                'product_fields': str(fields),
                'compute_user_id': self.env.uid
            })
            new_cache = self._get_cache_for_user()
            return new_cache.get_cache(domain, fields)

    @api.one
    def delete_cache(self):
        # throw away the old caches
        self.cache_ids.unlink()
class MassMailingReport(models.Model):
    """Read-only reporting model backed by the SQL view created in init()."""
    _name = 'mail.statistics.report'
    _auto = False
    _description = 'Mass Mailing Statistics'

    # FIX: keyword was misspelled 'stirng', silently dropping the label
    scheduled_date = fields.Datetime(string='Scheduled Date', readonly=True)
    name = fields.Char(string='Mass Mail', readonly=True)
    campaign = fields.Char(string='Mass Mail Campaign', readonly=True)
    # Aggregated counters coming from the view below.
    sent = fields.Integer(readonly=True)
    delivered = fields.Integer(readonly=True)
    opened = fields.Integer(readonly=True)
    bounced = fields.Integer(readonly=True)
    replied = fields.Integer(readonly=True)
    clicked = fields.Integer(readonly=True)
    state = fields.Selection([('draft', 'Draft'), ('test', 'Tested'),
                              ('done', 'Sent')],
                             string='Status', readonly=True)
    email_from = fields.Char('From', readonly=True)

    @api.model_cr
    def init(self):
        """Mass Mail Statistical Report: based on mail.mail.statistics that models the various
        statistics collected for each mailing, and mail.mass_mailing model that models the
        various mailing performed. """
        tools.drop_view_if_exists(self.env.cr, 'mail_statistics_report')
        self.env.cr.execute("""
            CREATE OR REPLACE VIEW mail_statistics_report AS (
                SELECT
                    min(ms.id) as id,
                    ms.scheduled as scheduled_date,
                    utm_source.name as name,
                    utm_campaign.name as campaign,
                    count(ms.bounced) as bounced,
                    count(ms.sent) as sent,
                    (count(ms.sent) - count(ms.bounced)) as delivered,
                    count(ms.opened) as opened,
                    count(ms.replied) as replied,
                    count(ms.clicked) as clicked,
                    mm.state,
                    mm.email_from
                FROM
                    mail_mail_statistics as ms
                    left join mail_mass_mailing as mm ON (ms.mass_mailing_id=mm.id)
                    left join mail_mass_mailing_campaign as mc ON (ms.mass_mailing_campaign_id=mc.id)
                    left join utm_campaign as utm_campaign ON (mc.campaign_id = utm_campaign.id)
                    left join utm_source as utm_source ON (mm.source_id = utm_source.id)
                GROUP BY ms.scheduled, utm_source.name, utm_campaign.name, mm.state, mm.email_from
            )""")
class Meeting(models.Model):
    """Extends calendar.event with the Google-synchronization bookkeeping."""
    _inherit = "calendar.event"

    oe_update_date = fields.Datetime('Swerp Update Date')

    @api.model
    def get_fields_need_update_google(self):
        """Return the field names whose modification requires pushing the
        event back to Google."""
        recurrent_fields = self._get_recurrent_fields()
        return recurrent_fields + [
            'name', 'description', 'allday', 'start', 'date_end', 'stop',
            'attendee_ids', 'alarm_ids', 'location', 'privacy', 'active',
            'start_date', 'start_datetime', 'stop_date', 'stop_datetime'
        ]

    @api.multi
    def write(self, values):
        """Stamp oe_update_date when a Google-synchronized field changes,
        unless the write originates from Google (NewMeeting context) or
        carries an explicit stamp already."""
        sync_fields = set(self.get_fields_need_update_google())
        # FIX: use a real set intersection. The previous `set(values) and
        # sync_fields` was a boolean `and` that is truthy for ANY non-empty
        # write, so every write stamped oe_update_date even when no
        # Google-synchronized field was touched.
        if (set(values) & sync_fields
            ) and 'oe_update_date' not in values and 'NewMeeting' not in self._context:
            if 'oe_update_date' in self._context:
                values['oe_update_date'] = self._context.get('oe_update_date')
            else:
                values['oe_update_date'] = fields.Datetime.now()
        return super(Meeting, self).write(values)

    @api.multi
    @api.returns('self', lambda value: value.id)
    def copy(self, default=None):
        """Duplicate the event, adjusting the Google sync stamp depending on
        why the copy is made (recurrence detach vs plain copy)."""
        default = default or {}
        if default.get('write_type', False):
            del default['write_type']
        elif default.get('recurrent_id', False):
            default['oe_update_date'] = fields.Datetime.now()
        else:
            default['oe_update_date'] = False
        return super(Meeting, self).copy(default)

    @api.multi
    def unlink(self, can_be_deleted=False):
        # Google-synchronized events are archived rather than hard-deleted
        # by default (can_be_deleted=False).
        return super(Meeting, self).unlink(can_be_deleted=can_be_deleted)
class MassMailingScheduleDate(models.TransientModel):
    """Wizard scheduling a mass mailing to be sent at a future date."""
    _name = 'mass.mailing.schedule.date'
    _description = 'Mass Mailing Scheduling'

    schedule_date = fields.Datetime(string='Schedule in the Future')
    mass_mailing_id = fields.Many2one('mail.mass_mailing', required=True)

    @api.constrains('schedule_date')
    def _check_schedule_date(self):
        """Forbid scheduling in the past.

        :raises ValidationError: when schedule_date is before now.
        """
        for scheduler in self:
            # FIX: schedule_date is not required — guard the unset (False)
            # case, which would otherwise raise a TypeError when compared
            # with a datetime in Python 3.
            if scheduler.schedule_date and scheduler.schedule_date < fields.Datetime.now():
                raise ValidationError(
                    _('Please select a date equal/or greater than the current date.'
                      ))

    def set_schedule_date(self):
        # Queue the mailing for the chosen date.
        self.mass_mailing_id.write({
            'schedule_date': self.schedule_date,
            'state': 'in_queue'
        })
class MailTestFull(models.Model):
    """ This model can be used in tests when complex chatter features are
        required like modeling tasks or tickets. """
    _description = 'Full Chatter Model'
    _name = 'mail.test.full'
    _inherit = ['mail.thread']

    name = fields.Char()
    email_from = fields.Char(track_visibility='always')
    count = fields.Integer(default=1)
    datetime = fields.Datetime(default=fields.Datetime.now)
    mail_template = fields.Many2one('mail.template', 'Template')
    customer_id = fields.Many2one('res.partner', 'Customer',
                                  track_visibility='onchange',
                                  track_sequence=2)
    user_id = fields.Many2one('res.users', 'Responsible',
                              track_visibility='onchange',
                              track_sequence=1)
    umbrella_id = fields.Many2one('mail.test', track_visibility='onchange')

    def _track_template(self, tracking):
        """Pick the tracking template/view to render, based on which fields
        changed on the first record of the batch."""
        res = super(MailTestFull, self)._track_template(tracking)
        first = self[0]
        changes, tracking_value_ids = tracking[first.id]
        if 'customer_id' in changes and first.mail_template:
            res['customer_id'] = (first.mail_template, {
                'composition_mode': 'mass_mail'
            })
        elif 'datetime' in changes:
            res['datetime'] = ('test_mail.mail_test_full_tracking_view', {
                'composition_mode': 'mass_mail'
            })
        return res

    def _track_subtype(self, init_values):
        """Use the umbrella-update subtype when the umbrella changed."""
        self.ensure_one()
        umbrella_changed = 'umbrella_id' in init_values and self.umbrella_id
        if umbrella_changed:
            return 'test_mail.st_mail_test_full_umbrella_upd'
        return super(MailTestFull, self)._track_subtype(init_values)
class EventMailRegistration(models.Model):
    """Per-attendee instance of an event mail scheduler."""
    _name = 'event.mail.registration'
    _description = 'Registration Mail Scheduler'
    _rec_name = 'scheduler_id'
    _order = 'scheduled_date DESC'

    scheduler_id = fields.Many2one('event.mail', 'Mail Scheduler',
                                   required=True, ondelete='cascade')
    registration_id = fields.Many2one('event.registration', 'Attendee',
                                      required=True, ondelete='cascade')
    scheduled_date = fields.Datetime('Scheduled Time',
                                     compute='_compute_scheduled_date',
                                     store=True)
    mail_sent = fields.Boolean('Mail Sent')

    @api.one
    def execute(self):
        """Send the scheduler's template to the attendee, at most once and
        only for confirmed/attended registrations."""
        if self.mail_sent or self.registration_id.state not in ('open', 'done'):
            return
        self.scheduler_id.template_id.send_mail(self.registration_id.id)
        self.write({'mail_sent': True})

    @api.one
    @api.depends('registration_id', 'scheduler_id.interval_unit',
                 'scheduler_id.interval_type')
    def _compute_scheduled_date(self):
        """Send time = registration open date (or now) + scheduler interval."""
        if not self.registration_id:
            self.scheduled_date = False
            return
        base_dt = self.registration_id.date_open or fields.Datetime.now()
        delta = _INTERVALS[self.scheduler_id.interval_unit](
            self.scheduler_id.interval_nbr)
        self.scheduled_date = base_dt + delta
class StockQuantityHistory(models.TransientModel):
    """Wizard showing stock quantities either as of now or at a past date."""
    _name = 'stock.quantity.history'
    _description = 'Stock Quantity History'

    compute_at_date = fields.Selection(
        [(0, 'Current Inventory'), (1, 'At a Specific Date')],
        string="Compute",
        help="Choose to analyze the current inventory or from a specific date in the past."
    )
    date = fields.Datetime(
        'Inventory at Date',
        help="Choose a date to get the inventory at that date",
        default=fields.Datetime.now)

    def open_table(self):
        """Open the relevant product/quant view for the chosen mode."""
        self.ensure_one()

        if not self.compute_at_date:
            # Current inventory: clean up quants, then open the quants action.
            self.env['stock.quant']._merge_quants()
            self.env['stock.quant']._unlink_zero_quants()
            return self.env.ref('stock.quantsact').read()[0]

        tree_view_id = self.env.ref('stock.view_stock_product_tree').id
        form_view_id = self.env.ref(
            'stock.product_form_view_procurement_button').id
        # We pass `to_date` in the context so that `qty_available` will be computed across
        # moves until date.
        return {
            'type': 'ir.actions.act_window',
            'views': [(tree_view_id, 'tree'), (form_view_id, 'form')],
            'view_mode': 'tree,form',
            'name': _('Products'),
            'res_model': 'product.product',
            'domain': "[('type', '=', 'product')]",
            'context': dict(self.env.context, to_date=self.date),
        }
class PurchaseRequisition(models.Model):
    """Purchase agreement (call for tender / blanket order) grouping the
    requested product lines and the purchase orders issued against it."""
    _name = "purchase.requisition"
    _description = "Purchase Requisition"
    _inherit = ['mail.thread']
    _order = "id desc"

    def _get_picking_in(self):
        # Default incoming operation type: the standard one if it belongs to
        # the default company, otherwise the company's first incoming type.
        pick_in = self.env.ref('stock.picking_type_in',
                               raise_if_not_found=False)
        company = self.env['res.company']._company_default_get(
            'purchase.requisition')
        if not pick_in or pick_in.sudo(
        ).warehouse_id.company_id.id != company.id:
            pick_in = self.env['stock.picking.type'].search(
                [('warehouse_id.company_id', '=', company.id),
                 ('code', '=', 'incoming')],
                limit=1,
            )
        return pick_in

    def _get_type_id(self):
        # Default agreement type: the first configured requisition type.
        return self.env['purchase.requisition.type'].search([], limit=1)

    name = fields.Char(string='Agreement Reference', required=True,
                       copy=False, default='New', readonly=True)
    origin = fields.Char(string='Source Document')
    order_count = fields.Integer(compute='_compute_orders_number',
                                 string='Number of Orders')
    vendor_id = fields.Many2one('res.partner', string="Vendor")
    type_id = fields.Many2one('purchase.requisition.type',
                              string="Agreement Type", required=True,
                              default=_get_type_id)
    ordering_date = fields.Date(string="Ordering Date",
                                track_visibility='onchange')
    date_end = fields.Datetime(string='Agreement Deadline',
                               track_visibility='onchange')
    schedule_date = fields.Date(
        string='Delivery Date',
        index=True,
        help="The expected and scheduled delivery date where all the products are received",
        track_visibility='onchange')
    user_id = fields.Many2one('res.users', string='Purchase Representative',
                              default=lambda self: self.env.user)
    description = fields.Text()
    company_id = fields.Many2one('res.company', string='Company',
                                 required=True,
                                 default=lambda self: self.env['res.company'].
                                 _company_default_get('purchase.requisition'))
    purchase_ids = fields.One2many('purchase.order', 'requisition_id',
                                   string='Purchase Orders',
                                   states={'done': [('readonly', True)]})
    line_ids = fields.One2many('purchase.requisition.line', 'requisition_id',
                               string='Products to Purchase',
                               states={'done': [('readonly', True)]},
                               copy=True)
    warehouse_id = fields.Many2one('stock.warehouse', string='Warehouse')
    state = fields.Selection(PURCHASE_REQUISITION_STATES, 'Status',
                             track_visibility='onchange', required=True,
                             copy=False, default='draft')
    state_blanket_order = fields.Selection(PURCHASE_REQUISITION_STATES,
                                           compute='_set_state')
    picking_type_id = fields.Many2one('stock.picking.type', 'Operation Type',
                                      required=True, default=_get_picking_in)
    is_quantity_copy = fields.Selection(related='type_id.quantity_copy',
                                        readonly=True)
    currency_id = fields.Many2one(
        'res.currency', 'Currency', required=True,
        default=lambda self: self.env.user.company_id.currency_id.id)

    @api.depends('state')
    def _set_state(self):
        # Mirror of 'state' used by blanket-order views.
        for requisition in self:
            requisition.state_blanket_order = requisition.state

    @api.onchange('vendor_id')
    def _onchange_vendor(self):
        """Align the currency with the vendor and warn when the vendor
        already has an open blanket order."""
        if not self.vendor_id:
            self.currency_id = self.env.user.company_id.currency_id.id
        else:
            self.currency_id = self.vendor_id.property_purchase_currency_id.id or self.env.user.company_id.currency_id.id

        requisitions = self.env['purchase.requisition'].search([
            ('vendor_id', '=', self.vendor_id.id),
            ('state', '=', 'ongoing'),
            ('type_id.quantity_copy', '=', 'none'),
        ])
        if any(requisitions):
            title = _("Warning for %s") % self.vendor_id.name
            message = _(
                "There is already an open blanket order for this supplier. We suggest you to use to complete this open blanket order instead of creating a new one."
            )
            warning = {'title': title, 'message': message}
            return {'warning': warning}

    @api.multi
    @api.depends('purchase_ids')
    def _compute_orders_number(self):
        # Number of purchase orders issued against the agreement.
        for requisition in self:
            requisition.order_count = len(requisition.purchase_ids)

    @api.multi
    def action_cancel(self):
        # try to set all associated quotations to cancel state
        for requisition in self:
            for requisition_line in requisition.line_ids:
                requisition_line.supplier_info_ids.unlink()
            requisition.purchase_ids.button_cancel()
            for po in requisition.purchase_ids:
                po.message_post(body=_(
                    'Cancelled by the agreement associated to this quotation.')
                )
        self.write({'state': 'cancel'})

    @api.multi
    def action_in_progress(self):
        """Confirm the agreement: blanket orders (quantity_copy == 'none'
        with a vendor) require priced, quantified lines and become
        'ongoing'; tenders become 'in_progress'. Also assigns the sequence
        reference on first confirmation.

        :raises UserError: on missing lines, or blanket-order lines without
            price or quantity.
        """
        self.ensure_one()
        if not all(obj.line_ids for obj in self):
            raise UserError(
                _("You cannot confirm agreement '%s' because there is no product line."
                  ) % self.name)
        if self.type_id.quantity_copy == 'none' and self.vendor_id:
            for requisition_line in self.line_ids:
                if requisition_line.price_unit <= 0.0:
                    raise UserError(
                        _('You cannot confirm the blanket order without price.'
                          ))
                if requisition_line.product_qty <= 0.0:
                    raise UserError(
                        _('You cannot confirm the blanket order without quantity.'
                          ))
                requisition_line.create_supplier_info()
            self.write({'state': 'ongoing'})
        else:
            self.write({'state': 'in_progress'})
        # Set the sequence number regarding the requisition type
        if self.name == 'New':
            if self.is_quantity_copy != 'none':
                self.name = self.env['ir.sequence'].next_by_code(
                    'purchase.requisition.purchase.tender')
            else:
                self.name = self.env['ir.sequence'].next_by_code(
                    'purchase.requisition.blanket.order')

    @api.multi
    def action_open(self):
        # Move the agreement to the bid-selection stage.
        self.write({'state': 'open'})

    def action_draft(self):
        # Reset to draft; the reference will be re-assigned on confirmation.
        self.ensure_one()
        self.name = 'New'
        self.write({'state': 'draft'})

    @api.multi
    def action_done(self):
        """
        Generate all purchase order based on selected lines, should only be called on one agreement at a time
        """
        if any(purchase_order.state in ['draft', 'sent', 'to approve']
               for purchase_order in self.mapped('purchase_ids')):
            raise UserError(
                _('You have to cancel or validate every RfQ before closing the purchase requisition.'
                  ))
        for requisition in self:
            for requisition_line in requisition.line_ids:
                requisition_line.supplier_info_ids.unlink()
        self.write({'state': 'done'})

    def _prepare_tender_values(self, product_id, product_qty, product_uom,
                               location_id, name, origin, values):
        # Values used to create a requisition from a procurement
        # (one line per requested product).
        return {
            'origin': origin,
            'date_end': values['date_planned'],
            'warehouse_id': values.get('warehouse_id') and values['warehouse_id'].id or False,
            'company_id': values['company_id'].id,
            'line_ids': [(0, 0, {
                'product_id': product_id.id,
                'product_uom_id': product_uom.id,
                'product_qty': product_qty,
                'move_dest_id': values.get('move_dest_ids') and values['move_dest_ids'][0].id or False,
            })],
        }

    def unlink(self):
        """Only draft/cancelled agreements may be deleted.

        :raises UserError: when any record is in another state.
        """
        if any(requisition.state not in ('draft', 'cancel')
               for requisition in self):
            raise UserError(_('You can only delete draft requisitions.'))
        # Draft requisitions could have some requisition lines.
        self.mapped('line_ids').unlink()
        return super(PurchaseRequisition, self).unlink()
class AccountClosing(models.Model):
    """
    This object holds an interval total and a grand total of the accounts of type receivable
    for a company, as well as the last account_move that has been counted in a previous object
    It takes its earliest brother to infer from when the computation needs to be done
    in order to compute its own data.
    """
    _name = 'account.sale.closing'
    _order = 'date_closing_stop desc, sequence_number desc'
    _description = "Sale Closing"

    name = fields.Char(help="Frequency and unique sequence number", required=True)
    # One closing chain is maintained per company (see _automated_closing).
    company_id = fields.Many2one('res.company', string='Company', readonly=True, required=True)
    date_closing_stop = fields.Datetime(
        string="Closing Date",
        help='Date to which the values are computed',
        readonly=True,
        required=True)
    date_closing_start = fields.Datetime(
        string="Starting Date",
        help='Date from which the total interval is computed',
        readonly=True,
        required=True)
    frequency = fields.Selection(string='Closing Type',
                                 selection=[('daily', 'Daily'),
                                            ('monthly', 'Monthly'),
                                            ('annually', 'Annual')],
                                 readonly=True,
                                 required=True)
    total_interval = fields.Monetary(
        string="Period Total",
        help='Total in receivable accounts during the interval, excluding overlapping periods',
        readonly=True,
        required=True)
    cumulative_total = fields.Monetary(
        string="Cumulative Grand Total",
        help='Total in receivable accounts since the beginnig of times',
        readonly=True,
        required=True)
    # Anti-tamper sequence number taken from the company's closing sequence.
    sequence_number = fields.Integer('Sequence #', readonly=True, required=True)
    last_move_id = fields.Many2one(
        'account.move',
        string='Last journal entry',
        help='Last Journal entry included in the grand total',
        readonly=True)
    last_move_hash = fields.Char(
        string='Last journal entry\'s inalteralbility hash', readonly=True)
    currency_id = fields.Many2one('res.currency',
                                  string='Currency',
                                  help="The company's currency",
                                  readonly=True,
                                  related='company_id.currency_id',
                                  store=True)

    def _query_for_aml(self, company, first_move_sequence_number, date_start):
        """Aggregate the receivable move lines of posted sale-journal entries.

        :param company: company record whose lines are aggregated
        :param first_move_sequence_number: l10n_fr secure sequence number of the
            last move already counted by a previous closing; only moves strictly
            after it are fetched. ``False``/``None`` means no previous closing.
        :param date_start: fallback lower date bound used only when no previous
            closing exists (i.e. first computation after module installation)
        :return: dict with keys ``move_ids``, ``line_ids`` (arrays, newest move
            first) and ``balance`` (sum of the selected lines' balances)
        """
        params = {'company_id': company.id}
        # Restrict to receivable lines of posted entries in sale journals.
        query = '''WITH aggregate AS (SELECT m.id AS move_id,
                    aml.balance AS balance,
                    aml.id as line_id
                    FROM account_move_line aml
                    JOIN account_journal j ON aml.journal_id = j.id
                    JOIN account_account acc ON acc.id = aml.account_id
                    JOIN account_account_type t ON (t.id = acc.user_type_id AND t.type = 'receivable')
                    JOIN account_move m ON m.id = aml.move_id
                    WHERE j.type = 'sale'
                        AND aml.company_id = %(company_id)s
                        AND m.state = 'posted'
                    '''
        if first_move_sequence_number is not False and first_move_sequence_number is not None:
            params['first_move_sequence_number'] = first_move_sequence_number
            query += '''AND m.l10n_fr_secure_sequence_number > %(first_move_sequence_number)s'''
        elif date_start:
            #the first time we compute the closing, we consider only from the installation of the module
            params['date_start'] = date_start
            query += '''AND m.date >= %(date_start)s'''
        # DESC ordering puts the most recent secured move first in move_ids.
        query += " ORDER BY m.l10n_fr_secure_sequence_number DESC) "
        query += '''SELECT array_agg(move_id) AS move_ids,
                    array_agg(line_id) AS line_ids,
                    sum(balance) AS balance
                    FROM aggregate'''
        self.env.cr.execute(query, params)
        return self.env.cr.dictfetchall()[0]

    def _compute_amounts(self, frequency, company):
        """
        Method used to compute all the business data of the new object.
        It will search for previous closings of the same frequency to infer the move from which
        account move lines should be fetched.
        @param {string} frequency: a valid value of the selection field on the object
                                    (daily, monthly, annually)
                                    frequencies are literal (daily means 24 hours and so on)
        @param {recordset} company: the company for which the closing is done
        @return {dict} containing {field: value} for each business field of the object
        """
        interval_dates = self._interval_dates(frequency, company)
        # Most recent closing of the same frequency for this company, if any.
        previous_closing = self.search([('frequency', '=', frequency),
                                        ('company_id', '=', company.id)],
                                       limit=1,
                                       order='sequence_number desc')
        first_move = self.env['account.move']
        date_start = interval_dates['interval_from']
        cumulative_total = 0
        if previous_closing:
            # Continue from where the last closing stopped so totals never overlap.
            first_move = previous_closing.last_move_id
            date_start = previous_closing.create_date
            cumulative_total += previous_closing.cumulative_total
        aml_aggregate = self._query_for_aml(
            company, first_move.l10n_fr_secure_sequence_number, date_start)
        # sum(balance) is NULL (None) when no line matched; coerce to 0.
        total_interval = aml_aggregate['balance'] or 0
        cumulative_total += total_interval

        # We keep the reference to avoid gaps (like daily object during the weekend)
        last_move = first_move
        if aml_aggregate['move_ids']:
            # move_ids is ordered DESC, so index 0 is the latest secured move.
            last_move = last_move.browse(aml_aggregate['move_ids'][0])

        return {
            'total_interval': total_interval,
            'cumulative_total': cumulative_total,
            'last_move_id': last_move.id,
            'last_move_hash': last_move.l10n_fr_hash,
            'date_closing_stop': interval_dates['date_stop'],
            'date_closing_start': date_start,
            # Only the date part (first 10 chars) of the stop datetime is shown.
            'name': interval_dates['name_interval'] + ' - ' +
            interval_dates['date_stop'][:10]
        }

    def _interval_dates(self, frequency, company):
        """
        Method used to compute the theoretical date from which account move lines should be fetched
        @param {string} frequency: a valid value of the selection field on the object
                                    (daily, monthly, annually)
                                    frequencies are literal (daily means 24 hours and so on)
        @param {recordset} company: the company for which the closing is done
        @return {dict} the theoretical date from which account move lines are fetched.
            date_stop date to which the move lines are fetched, always now()
            the dates are in their Swerp Database string representation
        """
        date_stop = datetime.utcnow()
        interval_from = None
        name_interval = ''
        if frequency == 'daily':
            interval_from = date_stop - timedelta(days=1)
            name_interval = _('Daily Closing')
        elif frequency == 'monthly':
            # Previous month, rolling the year back when currently in January.
            month_target = date_stop.month > 1 and date_stop.month - 1 or 12
            year_target = month_target < 12 and date_stop.year or date_stop.year - 1
            # NOTE(review): .replace() keeps the current day-of-month and raises
            # ValueError when it does not exist in the target month (e.g. running
            # on March 31 -> month 2); confirm whether the cron can hit this.
            interval_from = date_stop.replace(year=year_target,
                                              month=month_target)
            name_interval = _('Monthly Closing')
        elif frequency == 'annually':
            year_target = date_stop.year - 1
            # NOTE(review): same day-overflow hazard on Feb 29 of a leap year.
            interval_from = date_stop.replace(year=year_target)
            name_interval = _('Annual Closing')

        return {
            'interval_from': FieldDateTime.to_string(interval_from),
            'date_stop': FieldDateTime.to_string(date_stop),
            'name_interval': name_interval
        }

    @api.multi
    def write(self, vals):
        # Closings are immutable by design (legal inalterability requirement).
        raise UserError(
            _('Sale Closings are not meant to be written or deleted under any circumstances.'
              ))

    @api.multi
    def unlink(self):
        # Closings are immutable by design (legal inalterability requirement).
        raise UserError(
            _('Sale Closings are not meant to be written or deleted under any circumstances.'
              ))

    @api.model
    def _automated_closing(self, frequency='daily'):
        """To be executed by the CRON to create an object of the given frequency for each company that needs it
        @param {string} frequency: a valid value of the selection field on the object
                                    (daily, monthly, annually)
                                    frequencies are literal (daily means 24 hours and so on)
        @return {recordset} all the objects created for the given frequency
        """
        res_company = self.env['res.company'].search([])
        account_closings = self.env['account.sale.closing']
        # Only companies flagged as "accounting unalterable" get closings.
        for company in res_company.filtered(
                lambda c: c._is_accounting_unalterable()):
            new_sequence_number = company.l10n_fr_closing_sequence_id.next_by_id(
            )
            values = self._compute_amounts(frequency, company)
            values['frequency'] = frequency
            values['company_id'] = company.id
            values['sequence_number'] = new_sequence_number
            account_closings |= account_closings.create(values)

        return account_closings
class MailTracking(models.Model):
    """Old/new value pair recorded for one tracked-field change, attached to the
    mail.message that notified that change.

    Values are stored in one column per primitive type (integer, float, char,
    text, datetime); :meth:`create_tracking_values` maps each field type onto
    the right pair of columns and :meth:`get_display_value` reads them back.
    """
    _name = 'mail.tracking.value'
    _description = 'Mail Tracking Value'
    _rec_name = 'field'
    _order = 'track_sequence asc'

    # TDE CLEANME: why not a m2o to ir model field ?
    # Technical name of the field whose change is tracked.
    field = fields.Char('Changed Field', required=True, readonly=1)
    field_desc = fields.Char('Field Description', required=True, readonly=1)
    field_type = fields.Char('Field Type')

    old_value_integer = fields.Integer('Old Value Integer', readonly=1)
    old_value_float = fields.Float('Old Value Float', readonly=1)
    old_value_monetary = fields.Float('Old Value Monetary', readonly=1)
    old_value_char = fields.Char('Old Value Char', readonly=1)
    old_value_text = fields.Text('Old Value Text', readonly=1)
    old_value_datetime = fields.Datetime('Old Value DateTime', readonly=1)

    new_value_integer = fields.Integer('New Value Integer', readonly=1)
    new_value_float = fields.Float('New Value Float', readonly=1)
    new_value_monetary = fields.Float('New Value Monetary', readonly=1)
    new_value_char = fields.Char('New Value Char', readonly=1)
    new_value_text = fields.Text('New Value Text', readonly=1)
    new_value_datetime = fields.Datetime('New Value Datetime', readonly=1)

    mail_message_id = fields.Many2one('mail.message',
                                      'Message ID',
                                      required=True,
                                      index=True,
                                      ondelete='cascade')
    track_sequence = fields.Integer('Tracking field sequence',
                                    readonly=1,
                                    default=100)

    @api.model
    def create_tracking_values(self, initial_value, new_value, col_name,
                               col_info, track_sequence):
        """Build the values dict for one tracking record.

        :param initial_value: value before the change (raw field value; a
            recordset for many2one fields)
        :param new_value: value after the change (same shape as initial_value)
        :param col_name: technical name of the tracked field
        :param col_info: field description dict; 'string', 'type' and, for
            selection fields, 'selection' are read here
        :param track_sequence: ordering sequence of the tracked field
        :return: dict suitable for ``create()``, or ``{}`` when the field type
            is not trackable
        """
        tracked = True
        values = {
            'field': col_name,
            'field_desc': col_info['string'],
            'field_type': col_info['type'],
            'track_sequence': track_sequence
        }

        if col_info['type'] in [
                'integer', 'float', 'char', 'text', 'datetime', 'monetary'
        ]:
            # Primitive types map 1:1 onto the matching old/new columns.
            values.update({
                'old_value_%s' % col_info['type']: initial_value,
                'new_value_%s' % col_info['type']: new_value
            })
        elif col_info['type'] == 'date':
            # Dates are stored in the datetime columns, normalized to midnight.
            values.update({
                'old_value_datetime':
                initial_value and fields.Datetime.to_string(
                    datetime.combine(fields.Date.from_string(initial_value),
                                     datetime.min.time())) or False,
                'new_value_datetime':
                new_value and fields.Datetime.to_string(
                    datetime.combine(fields.Date.from_string(new_value),
                                     datetime.min.time())) or False,
            })
        elif col_info['type'] == 'boolean':
            # Booleans are stored as 0/1 in the integer columns.
            values.update({
                'old_value_integer': initial_value,
                'new_value_integer': new_value
            })
        elif col_info['type'] == 'selection':
            # Store the human-readable label, not the technical key.
            values.update({
                'old_value_char':
                initial_value
                and dict(col_info['selection'])[initial_value] or '',
                'new_value_char':
                new_value and dict(col_info['selection'])[new_value] or ''
            })
        elif col_info['type'] == 'many2one':
            # Keep both the id (integer columns) and the display name (char
            # columns); sudo() so the name is readable regardless of ACLs.
            values.update({
                'old_value_integer':
                initial_value and initial_value.id or 0,
                'new_value_integer':
                new_value and new_value.id or 0,
                'old_value_char':
                initial_value and initial_value.sudo().name_get()[0][1] or '',
                'new_value_char':
                new_value and new_value.sudo().name_get()[0][1] or ''
            })
        else:
            tracked = False

        if tracked:
            return values
        return {}

    @api.multi
    def get_display_value(self, type):
        """Return the displayable 'old' or 'new' value for each record.

        :param type: either 'new' or 'old', selecting which column set to read
        :return: list with one display value per record in ``self``
        """
        assert type in ('new', 'old')
        result = []
        for record in self:
            if record.field_type in [
                    'integer', 'float', 'char', 'text', 'monetary'
            ]:
                result.append(
                    getattr(record, '%s_value_%s' % (type, record.field_type)))
            elif record.field_type == 'datetime':
                if record['%s_value_datetime' % type]:
                    new_datetime = getattr(record, '%s_value_datetime' % type)
                    # Suffix 'Z' to mark the value as UTC for the client side.
                    result.append('%sZ' % new_datetime)
                else:
                    result.append(record['%s_value_datetime' % type])
            elif record.field_type == 'date':
                if record['%s_value_datetime' % type]:
                    new_date = record['%s_value_datetime' % type]
                    result.append(fields.Date.to_string(new_date))
                else:
                    result.append(record['%s_value_datetime' % type])
            elif record.field_type == 'boolean':
                result.append(bool(record['%s_value_integer' % type]))
            else:
                # Fallback (selection, many2one, ...): stored display string.
                result.append(record['%s_value_char' % type])
        return result

    @api.multi
    def get_old_display_value(self):
        """Convenience wrapper returning the displayable 'old' values."""
        # grep : # old_value_integer | old_value_datetime | old_value_char
        return self.get_display_value('old')

    @api.multi
    def get_new_display_value(self):
        """Convenience wrapper returning the displayable 'new' values."""
        # grep : # new_value_integer | new_value_datetime | new_value_char
        return self.get_display_value('new')
class StockProductionLot(models.Model):
    """Extend lots/serial numbers with product-expiry dates and the cron that
    raises an activity when a lot's alert date has been reached."""
    _inherit = 'stock.production.lot'

    life_date = fields.Datetime(
        string='End of Life Date',
        help=
        'This is the date on which the goods with this Serial Number may become dangerous and must not be consumed.'
    )
    use_date = fields.Datetime(
        string='Best before Date',
        help=
        'This is the date on which the goods with this Serial Number start deteriorating, without being dangerous yet.'
    )
    removal_date = fields.Datetime(
        string='Removal Date',
        help=
        'This is the date on which the goods with this Serial Number should be removed from the stock.'
    )
    alert_date = fields.Datetime(
        string='Alert Date',
        help=
        'Date to determine the expired lots and serial numbers using the filter "Expiration Alerts".'
    )
    product_expiry_alert = fields.Boolean(
        compute='_compute_product_expiry_alert',
        help="The Alert Date has been reached.")

    @api.depends('alert_date')
    def _compute_product_expiry_alert(self):
        """Flag the lots whose alert date is already in the past."""
        now = fields.Datetime.now()
        for record in self:
            if record.alert_date:
                record.product_expiry_alert = record.alert_date <= now

    def _get_dates(self, product_id=None):
        """Returns dates based on number of days configured in current lot's product."""
        field_to_delay = {
            'life_date': 'life_time',
            'use_date': 'use_time',
            'removal_date': 'removal_time',
            'alert_date': 'alert_time'
        }
        dates = dict.fromkeys(field_to_delay, False)
        # Fall back on the lot's own product when no explicit id is given
        # (browse(None) yields an empty, falsy recordset).
        product = self.env['product.product'].browse(
            product_id) or self.product_id
        if product:
            for date_field, delay_field in field_to_delay.items():
                delay = getattr(product, delay_field)
                if delay:
                    expiration = datetime.datetime.now() + datetime.timedelta(
                        days=delay)
                    dates[date_field] = fields.Datetime.to_string(expiration)
        return dates

    # Assign dates according to products data
    @api.model
    def create(self, vals):
        computed_dates = self._get_dates(
            vals.get('product_id')
            or self.env.context.get('default_product_id'))
        # Only fill in the dates the caller did not provide.
        for fname, value in computed_dates.items():
            if not vals.get(fname):
                vals[fname] = value
        return super(StockProductionLot, self).create(vals)

    @api.onchange('product_id')
    def _onchange_product(self):
        """Recompute every expiry date when the product changes."""
        for fname, value in self._get_dates().items():
            setattr(self, fname, value)

    @api.model
    def _alert_date_exceeded(self):
        # if the alert_date is in the past and the lot is linked to an internal quant
        # log a next activity on the next.production.lot
        expired_lots = self.env['stock.production.lot'].search([
            ('alert_date', '<=', fields.Date.today())
        ])
        activity_type_id = self.env.ref(
            'product_expiry.mail_activity_type_alert_date_reached').id
        internal_quants = self.env['stock.quant'].search([
            ('lot_id', 'in', expired_lots.ids), ('quantity', '>', 0)
        ]).filtered(lambda quant: quant.location_id.usage == 'internal')
        lots = internal_quants.mapped('lot_id')

        def _already_alerted(lot):
            # A lot was already alerted if it carries a pending alert activity,
            # or a message left by a completed alert activity.
            pending = self.env['mail.activity'].search_count([
                ('res_model', '=', 'stock.production.lot'),
                ('res_id', '=', lot.id),
                ('activity_type_id', '=', activity_type_id)
            ])
            done = self.env['mail.message'].search_count(
                [('model', '=', 'stock.production.lot'),
                 ('res_id', '=', lot.id),
                 ('subtype_id', '=', self.env.ref('mail.mt_activities').id),
                 ('mail_activity_type_id', '=', activity_type_id)])
            return pending or done

        for lot in lots.filtered(lambda lot: not _already_alerted(lot)):
            lot.activity_schedule(
                'product_expiry.mail_activity_type_alert_date_reached',
                user_id=lot.product_id.responsible_id.id,
                note=_(
                    "The alert date has been reached for this lot/serial number"
                ))
class StockScrap(models.Model):
    """Scrap order: moves a quantity of a product from an internal location to
    a scrap location through a dedicated stock.move."""
    _name = 'stock.scrap'
    _order = 'id desc'
    _description = 'Scrap'

    def _get_default_scrap_location_id(self):
        # First scrap location shared or belonging to the user's company.
        return self.env['stock.location'].search(
            [('scrap_location', '=', True),
             ('company_id', 'in', [self.env.user.company_id.id, False])],
            limit=1).id

    def _get_default_location_id(self):
        # Stock location of the first warehouse of the user's company, if any.
        company_user = self.env.user.company_id
        warehouse = self.env['stock.warehouse'].search(
            [('company_id', '=', company_user.id)], limit=1)
        if warehouse:
            return warehouse.lot_stock_id.id
        return None

    name = fields.Char('Reference',
                       default=lambda self: _('New'),
                       copy=False,
                       readonly=True,
                       required=True,
                       states={'done': [('readonly', True)]})
    origin = fields.Char(string='Source Document')
    product_id = fields.Many2one('product.product',
                                 'Product',
                                 domain=[('type', 'in', ['product', 'consu'])],
                                 required=True,
                                 states={'done': [('readonly', True)]})
    product_uom_id = fields.Many2one('uom.uom',
                                     'Unit of Measure',
                                     required=True,
                                     states={'done': [('readonly', True)]})
    # NOTE(review): the label is passed as the Selection's first positional
    # (selection) argument; the actual selection comes from the related field.
    tracking = fields.Selection('Product Tracking',
                                readonly=True,
                                related="product_id.tracking")
    lot_id = fields.Many2one('stock.production.lot',
                             'Lot',
                             states={'done': [('readonly', True)]},
                             domain="[('product_id', '=', product_id)]")
    package_id = fields.Many2one('stock.quant.package',
                                 'Package',
                                 states={'done': [('readonly', True)]})
    owner_id = fields.Many2one('res.partner',
                               'Owner',
                               states={'done': [('readonly', True)]})
    # Move generated by do_scrap(); set once the scrap is validated.
    move_id = fields.Many2one('stock.move', 'Scrap Move', readonly=True)
    picking_id = fields.Many2one('stock.picking',
                                 'Picking',
                                 states={'done': [('readonly', True)]})
    location_id = fields.Many2one('stock.location',
                                  'Location',
                                  domain="[('usage', '=', 'internal')]",
                                  required=True,
                                  states={'done': [('readonly', True)]},
                                  default=_get_default_location_id)
    scrap_location_id = fields.Many2one(
        'stock.location',
        'Scrap Location',
        default=_get_default_scrap_location_id,
        domain="[('scrap_location', '=', True)]",
        required=True,
        states={'done': [('readonly', True)]})
    scrap_qty = fields.Float('Quantity',
                             default=1.0,
                             required=True,
                             states={'done': [('readonly', True)]})
    state = fields.Selection([('draft', 'Draft'), ('done', 'Done')],
                             string='Status',
                             default="draft")
    date_expected = fields.Datetime('Expected Date',
                                    default=fields.Datetime.now)

    @api.onchange('picking_id')
    def _onchange_picking_id(self):
        # Scrap from the destination of a done picking, otherwise its source.
        if self.picking_id:
            self.location_id = (
                self.picking_id.state == 'done'
            ) and self.picking_id.location_dest_id.id or self.picking_id.location_id.id

    @api.onchange('product_id')
    def onchange_product_id(self):
        # Default the UoM to the product's own unit of measure.
        if self.product_id:
            self.product_uom_id = self.product_id.uom_id.id

    @api.model
    def create(self, vals):
        """Assign the next sequence reference when none was provided."""
        if 'name' not in vals or vals['name'] == _('New'):
            vals['name'] = self.env['ir.sequence'].next_by_code(
                'stock.scrap') or _('New')
        scrap = super(StockScrap, self).create(vals)
        return scrap

    def unlink(self):
        """Forbid deleting validated scraps."""
        if 'done' in self.mapped('state'):
            raise UserError(_('You cannot delete a scrap which is done.'))
        return super(StockScrap, self).unlink()

    def _get_origin_moves(self):
        # Moves of the source picking that concern the scrapped product.
        return self.picking_id and self.picking_id.move_lines.filtered(
            lambda x: x.product_id == self.product_id)

    def _prepare_move_values(self):
        """Return the values of the stock.move (with its single move line)
        that materializes this scrap."""
        self.ensure_one()
        return {
            'name': self.name,
            'origin': self.origin or self.picking_id.name or self.name,
            'product_id': self.product_id.id,
            'product_uom': self.product_uom_id.id,
            'product_uom_qty': self.scrap_qty,
            'location_id': self.location_id.id,
            'scrapped': True,
            'location_dest_id': self.scrap_location_id.id,
            # The move line is created already done (qty_done set).
            'move_line_ids': [(0, 0, {
                'product_id': self.product_id.id,
                'product_uom_id': self.product_uom_id.id,
                'qty_done': self.scrap_qty,
                'location_id': self.location_id.id,
                'location_dest_id': self.scrap_location_id.id,
                'package_id': self.package_id.id,
                'owner_id': self.owner_id.id,
                'lot_id': self.lot_id.id,
            })],
            #             'restrict_partner_id': self.owner_id.id,
            'picking_id': self.picking_id.id
        }

    @api.multi
    def do_scrap(self):
        """Create and process the scrap move, then mark the scrap as done."""
        for scrap in self:
            move = self.env['stock.move'].create(scrap._prepare_move_values())
            # master: replace context by cancel_backorder
            move.with_context(is_scrap=True)._action_done()
            scrap.write({'move_id': move.id, 'state': 'done'})
        return True

    def action_get_stock_picking(self):
        """Open the source picking of this scrap."""
        action = self.env.ref('stock.action_picking_tree_all').read([])[0]
        action['domain'] = [('id', '=', self.picking_id.id)]
        return action

    def action_get_stock_move_lines(self):
        """Open the move lines of the scrap move."""
        action = self.env.ref('stock.stock_move_line_action').read([])[0]
        action['domain'] = [('move_id', '=', self.move_id.id)]
        return action

    def action_validate(self):
        """Validate the scrap, checking stock availability for storable
        products; open a warning wizard when the quantity is insufficient."""
        self.ensure_one()
        if self.product_id.type != 'product':
            # Consumables are not quantity-tracked: scrap unconditionally.
            return self.do_scrap()
        precision = self.env['decimal.precision'].precision_get(
            'Product Unit of Measure')
        available_qty = sum(self.env['stock.quant']._gather(
            self.product_id,
            self.location_id,
            self.lot_id,
            self.package_id,
            self.owner_id,
            strict=True).mapped('quantity'))
        # Compare in the product's reference UoM.
        scrap_qty = self.product_uom_id._compute_quantity(
            self.scrap_qty, self.product_id.uom_id)
        if float_compare(available_qty, scrap_qty,
                         precision_digits=precision) >= 0:
            return self.do_scrap()
        else:
            return {
                'name': _('Insufficient Quantity'),
                'view_type': 'form',
                'view_mode': 'form',
                'res_model': 'stock.warn.insufficient.qty.scrap',
                'view_id':
                self.env.ref(
                    'stock.stock_warn_insufficient_qty_scrap_form_view').id,
                'type': 'ir.actions.act_window',
                'context': {
                    'default_product_id': self.product_id.id,
                    'default_location_id': self.location_id.id,
                    'default_scrap_id': self.id
                },
                'target': 'new'
            }
class StockMoveLine(models.Model): _name = "stock.move.line" _description = "Product Moves (Stock Move Line)" _rec_name = "product_id" _order = "result_package_id desc, id" picking_id = fields.Many2one( 'stock.picking', 'Stock Picking', auto_join=True, help='The stock operation where the packing has been made') move_id = fields.Many2one('stock.move', 'Stock Move', help="Change to a better name", index=True) product_id = fields.Many2one('product.product', 'Product', ondelete="cascade") product_uom_id = fields.Many2one('uom.uom', 'Unit of Measure', required=True) product_qty = fields.Float('Real Reserved Quantity', digits=0, compute='_compute_product_qty', inverse='_set_product_qty', store=True) product_uom_qty = fields.Float( 'Reserved', default=0.0, digits=dp.get_precision('Product Unit of Measure'), required=True) qty_done = fields.Float('Done', default=0.0, digits=dp.get_precision('Product Unit of Measure'), copy=False) package_id = fields.Many2one('stock.quant.package', 'Source Package', ondelete='restrict') package_level_id = fields.Many2one('stock.package_level', 'Package Level') lot_id = fields.Many2one('stock.production.lot', 'Lot/Serial Number') lot_name = fields.Char('Lot/Serial Number Name') result_package_id = fields.Many2one( 'stock.quant.package', 'Destination Package', ondelete='restrict', required=False, help="If set, the operations are packed into this package") date = fields.Datetime('Date', default=fields.Datetime.now, required=True) owner_id = fields.Many2one('res.partner', 'Owner', help="Owner of the quants") location_id = fields.Many2one('stock.location', 'From', required=True) location_dest_id = fields.Many2one('stock.location', 'To', required=True) lots_visible = fields.Boolean(compute='_compute_lots_visible') picking_type_use_create_lots = fields.Boolean( related='picking_id.picking_type_id.use_create_lots', readonly=True) picking_type_use_existing_lots = fields.Boolean( related='picking_id.picking_type_id.use_existing_lots', readonly=True) 
state = fields.Selection(related='move_id.state', store=True, related_sudo=False, readonly=False) is_initial_demand_editable = fields.Boolean( related='move_id.is_initial_demand_editable', readonly=False) is_locked = fields.Boolean(related='move_id.is_locked', default=True, readonly=True) consume_line_ids = fields.Many2many( 'stock.move.line', 'stock_move_line_consume_rel', 'consume_line_id', 'produce_line_id', help="Technical link to see who consumed what. ") produce_line_ids = fields.Many2many( 'stock.move.line', 'stock_move_line_consume_rel', 'produce_line_id', 'consume_line_id', help="Technical link to see which line was produced with this. ") reference = fields.Char(related='move_id.reference', store=True, related_sudo=False, readonly=False) tracking = fields.Selection(related='product_id.tracking', readonly=True) picking_type_entire_packs = fields.Boolean( related='picking_id.picking_type_id.show_entire_packs', readonly=True) @api.one @api.depends('picking_id.picking_type_id', 'product_id.tracking') def _compute_lots_visible(self): picking = self.picking_id if picking.picking_type_id and self.product_id.tracking != 'none': # TDE FIXME: not sure correctly migrated self.lots_visible = picking.picking_type_id.use_existing_lots or picking.picking_type_id.use_create_lots else: self.lots_visible = self.product_id.tracking != 'none' @api.one @api.depends('product_id', 'product_uom_id', 'product_uom_qty') def _compute_product_qty(self): self.product_qty = self.product_uom_id._compute_quantity( self.product_uom_qty, self.product_id.uom_id, rounding_method='HALF-UP') @api.constrains('lot_id', 'product_id') def _check_lot_product(self): for line in self: if line.lot_id and line.product_id != line.lot_id.sudo( ).product_id: raise ValidationError( _('This lot %s is incompatible with this product %s' % (line.lot_id.name, line.product_id.display_name))) @api.one def _set_product_qty(self): """ The meaning of product_qty field changed lately and is now a functional field 
computing the quantity in the default product UoM. This code has been added to raise an error if a write is made given a value for `product_qty`, where the same write should set the `product_uom_qty` field instead, in order to detect errors. """ raise UserError( _('The requested operation cannot be processed because of a programming error setting the `product_qty` field instead of the `product_uom_qty`.' )) @api.constrains('product_uom_qty') def check_reserved_done_quantity(self): for move_line in self: if move_line.state == 'done' and not float_is_zero( move_line.product_uom_qty, precision_digits=self.env['decimal.precision']. precision_get('Product Unit of Measure')): raise ValidationError( _('A done move line should never have a reserved quantity.' )) @api.onchange('product_id', 'product_uom_id') def onchange_product_id(self): if self.product_id: self.lots_visible = self.product_id.tracking != 'none' if not self.product_uom_id or self.product_uom_id.category_id != self.product_id.uom_id.category_id: if self.move_id.product_uom: self.product_uom_id = self.move_id.product_uom.id else: self.product_uom_id = self.product_id.uom_id.id res = { 'domain': { 'product_uom_id': [('category_id', '=', self.product_uom_id.category_id.id)] } } else: res = {'domain': {'product_uom_id': []}} return res @api.onchange('lot_name', 'lot_id') def onchange_serial_number(self): """ When the user is encoding a move line for a tracked product, we apply some logic to help him. 
This includes: - automatically switch `qty_done` to 1.0 - warn if he has already encoded `lot_name` in another move line """ res = {} if self.product_id.tracking == 'serial': if not self.qty_done: self.qty_done = 1 message = None if self.lot_name or self.lot_id: move_lines_to_check = self._get_similar_move_lines() - self if self.lot_name: counter = Counter( [line.lot_name for line in move_lines_to_check]) if counter.get( self.lot_name) and counter[self.lot_name] > 1: message = _( 'You cannot use the same serial number twice. Please correct the serial numbers encoded.' ) elif self.lot_id: counter = Counter( [line.lot_id.id for line in move_lines_to_check]) if counter.get( self.lot_id.id) and counter[self.lot_id.id] > 1: message = _( 'You cannot use the same serial number twice. Please correct the serial numbers encoded.' ) if message: res['warning'] = {'title': _('Warning'), 'message': message} return res @api.onchange('qty_done') def _onchange_qty_done(self): """ When the user is encoding a move line for a tracked product, we apply some logic to help him. This onchange will warn him if he set `qty_done` to a non-supported value. """ res = {} if self.qty_done and self.product_id.tracking == 'serial': if float_compare( self.qty_done, 1.0, precision_rounding=self.product_id.uom_id.rounding) != 0: message = _( 'You can only process 1.0 %s of products with unique serial number.' 
) % self.product_id.uom_id.name res['warning'] = {'title': _('Warning'), 'message': message} return res @api.constrains('qty_done') def _check_positive_qty_done(self): if any([ml.qty_done < 0 for ml in self]): raise ValidationError(_('You can not enter negative quantities.')) def _get_similar_move_lines(self): self.ensure_one() lines = self.env['stock.move.line'] picking_id = self.move_id.picking_id if self.move_id else self.picking_id if picking_id: lines |= picking_id.move_line_ids.filtered( lambda ml: ml.product_id == self.product_id and (ml.lot_id or ml.lot_name)) return lines @api.model_cr def init(self): if not tools.index_exists(self._cr, 'stock_move_line_free_reservation_index'): self._cr.execute(""" CREATE INDEX stock_move_line_free_reservation_index ON stock_move_line (id, product_id, lot_id, location_id, owner_id, package_id) WHERE (state IS NULL OR state NOT IN ('cancel', 'done')) AND product_qty > 0""" ) @api.model_create_multi def create(self, vals_list): for vals in vals_list: # If the move line is directly create on the picking view. # If this picking is already done we should generate an # associated done move. 
if 'picking_id' in vals and not vals.get('move_id'): picking = self.env['stock.picking'].browse(vals['picking_id']) if picking.state == 'done': product = self.env['product.product'].browse( vals['product_id']) new_move = self.env['stock.move'].create({ 'name': _('New Move:') + product.display_name, 'product_id': product.id, 'product_uom_qty': 'qty_done' in vals and vals['qty_done'] or 0, 'product_uom': vals['product_uom_id'], 'location_id': 'location_id' in vals and vals['location_id'] or picking.location_id.id, 'location_dest_id': 'location_dest_id' in vals and vals['location_dest_id'] or picking.location_dest_id.id, 'state': 'done', 'additional': True, 'picking_id': picking.id, }) vals['move_id'] = new_move.id mls = super(StockMoveLine, self).create(vals_list) for ml, vals in izip(mls, vals_list): if ml.state == 'done': if 'qty_done' in vals: ml.move_id.product_uom_qty = ml.move_id.quantity_done if ml.product_id.type == 'product': Quant = self.env['stock.quant'] quantity = ml.product_uom_id._compute_quantity( ml.qty_done, ml.move_id.product_id.uom_id, rounding_method='HALF-UP') in_date = None available_qty, in_date = Quant._update_available_quantity( ml.product_id, ml.location_id, -quantity, lot_id=ml.lot_id, package_id=ml.package_id, owner_id=ml.owner_id) if available_qty < 0 and ml.lot_id: # see if we can compensate the negative quants with some untracked quants untracked_qty = Quant._get_available_quantity( ml.product_id, ml.location_id, lot_id=False, package_id=ml.package_id, owner_id=ml.owner_id, strict=True) if untracked_qty: taken_from_untracked_qty = min( untracked_qty, abs(quantity)) Quant._update_available_quantity( ml.product_id, ml.location_id, -taken_from_untracked_qty, lot_id=False, package_id=ml.package_id, owner_id=ml.owner_id) Quant._update_available_quantity( ml.product_id, ml.location_id, taken_from_untracked_qty, lot_id=ml.lot_id, package_id=ml.package_id, owner_id=ml.owner_id) Quant._update_available_quantity( ml.product_id, 
ml.location_dest_id, quantity, lot_id=ml.lot_id, package_id=ml.result_package_id, owner_id=ml.owner_id, in_date=in_date) next_moves = ml.move_id.move_dest_ids.filtered( lambda move: move.state not in ('done', 'cancel')) next_moves._do_unreserve() next_moves._action_assign() return mls def write(self, vals): if self.env.context.get('bypass_reservation_update'): return super(StockMoveLine, self).write(vals) if 'product_id' in vals and any( vals.get('state', ml.state) != 'draft' and vals['product_id'] != ml.product_id.id for ml in self): raise UserError( _("Changing the product is only allowed in 'Draft' state.")) Quant = self.env['stock.quant'] precision = self.env['decimal.precision'].precision_get( 'Product Unit of Measure') triggers = [('location_id', 'stock.location'), ('location_dest_id', 'stock.location'), ('lot_id', 'stock.production.lot'), ('package_id', 'stock.quant.package'), ('result_package_id', 'stock.quant.package'), ('owner_id', 'res.partner')] updates = {} for key, model in triggers: if key in vals: updates[key] = self.env[model].browse(vals[key]) if 'result_package_id' in updates: for ml in self.filtered(lambda ml: ml.package_level_id): if updates.get('result_package_id'): ml.package_level_id.package_id = updates.get( 'result_package_id') else: # TODO: make package levels less of a pain and fix this package_level = ml.package_level_id ml.package_level_id = False package_level.unlink() # When we try to write on a reserved move line any fields from `triggers` or directly # `product_uom_qty` (the actual reserved quantity), we need to make sure the associated # quants are correctly updated in order to not make them out of sync (i.e. the sum of the # move lines `product_uom_qty` should always be equal to the sum of `reserved_quantity` on # the quants). If the new charateristics are not available on the quants, we chose to # reserve the maximum possible. 
        # --- write() continued: when the reserved characteristics (location /
        # lot / package / owner) or the reserved quantity of an assigned line
        # change, swap the old reservation for a new one on the quants. ---
        if updates or 'product_uom_qty' in vals:
            for ml in self.filtered(
                    lambda ml: ml.state in ['partially_available', 'assigned']
                    and ml.product_id.type == 'product'):
                if 'product_uom_qty' in vals:
                    # Convert the requested quantity into the product's UoM.
                    new_product_uom_qty = ml.product_uom_id._compute_quantity(
                        vals['product_uom_qty'],
                        ml.product_id.uom_id,
                        rounding_method='HALF-UP')
                    # Make sure `product_uom_qty` is not negative.
                    if float_compare(new_product_uom_qty,
                                     0,
                                     precision_rounding=ml.product_id.uom_id.
                                     rounding) < 0:
                        raise UserError(
                            _('Reserving a negative quantity is not allowed.'))
                else:
                    new_product_uom_qty = ml.product_qty

                # Unreserve the old charateristics of the move line.
                if not ml.location_id.should_bypass_reservation():
                    Quant._update_reserved_quantity(ml.product_id,
                                                    ml.location_id,
                                                    -ml.product_qty,
                                                    lot_id=ml.lot_id,
                                                    package_id=ml.package_id,
                                                    owner_id=ml.owner_id,
                                                    strict=True)

                # Reserve the maximum available of the new charateristics of the move line.
                if not updates.get('location_id',
                                   ml.location_id).should_bypass_reservation():
                    reserved_qty = 0
                    try:
                        q = Quant._update_reserved_quantity(
                            ml.product_id,
                            updates.get('location_id', ml.location_id),
                            new_product_uom_qty,
                            lot_id=updates.get('lot_id', ml.lot_id),
                            package_id=updates.get('package_id',
                                                   ml.package_id),
                            owner_id=updates.get('owner_id', ml.owner_id),
                            strict=True)
                        reserved_qty = sum([x[1] for x in q])
                    except UserError:
                        # Not enough stock to re-reserve: keep whatever
                        # quantity was actually obtained (possibly 0).
                        pass
                    if reserved_qty != new_product_uom_qty:
                        new_product_uom_qty = ml.product_id.uom_id._compute_quantity(
                            reserved_qty,
                            ml.product_uom_id,
                            rounding_method='HALF-UP')
                        # `bypass_reservation_update` prevents this assignment
                        # from re-entering this very override.
                        ml.with_context(bypass_reservation_update=True
                                        ).product_uom_qty = new_product_uom_qty

        # When editing a done move line, the reserved availability of a potential chained move is impacted. Take care of running again `_action_assign` on the concerned moves.
        next_moves = self.env['stock.move']
        if updates or 'qty_done' in vals:
            mls = self.filtered(lambda ml: ml.move_id.state == 'done' and ml.
                                product_id.type == 'product')
            if not updates:  # we can skip those where qty_done is already good up to UoM rounding
                mls = mls.filtered(lambda ml: not float_is_zero(
                    ml.qty_done - vals['qty_done'],
                    precision_rounding=ml.product_uom_id.rounding))
            for ml in mls:
                # undo the original move line: put the done quantity back in
                # the source location and remove it from the destination.
                qty_done_orig = ml.move_id.product_uom._compute_quantity(
                    ml.qty_done,
                    ml.move_id.product_id.uom_id,
                    rounding_method='HALF-UP')
                in_date = Quant._update_available_quantity(
                    ml.product_id,
                    ml.location_dest_id,
                    -qty_done_orig,
                    lot_id=ml.lot_id,
                    package_id=ml.result_package_id,
                    owner_id=ml.owner_id)[1]
                Quant._update_available_quantity(ml.product_id,
                                                 ml.location_id,
                                                 qty_done_orig,
                                                 lot_id=ml.lot_id,
                                                 package_id=ml.package_id,
                                                 owner_id=ml.owner_id,
                                                 in_date=in_date)

                # move what's been actually done: values from `vals`/`updates`
                # take precedence over the ones currently on the move line.
                product_id = ml.product_id
                location_id = updates.get('location_id', ml.location_id)
                location_dest_id = updates.get('location_dest_id',
                                               ml.location_dest_id)
                qty_done = vals.get('qty_done', ml.qty_done)
                lot_id = updates.get('lot_id', ml.lot_id)
                package_id = updates.get('package_id', ml.package_id)
                result_package_id = updates.get('result_package_id',
                                                ml.result_package_id)
                owner_id = updates.get('owner_id', ml.owner_id)
                quantity = ml.move_id.product_uom._compute_quantity(
                    qty_done,
                    ml.move_id.product_id.uom_id,
                    rounding_method='HALF-UP')
                if not location_id.should_bypass_reservation():
                    ml._free_reservation(product_id,
                                         location_id,
                                         quantity,
                                         lot_id=lot_id,
                                         package_id=package_id,
                                         owner_id=owner_id)
                if not float_is_zero(quantity, precision_digits=precision):
                    available_qty, in_date = Quant._update_available_quantity(
                        product_id,
                        location_id,
                        -quantity,
                        lot_id=lot_id,
                        package_id=package_id,
                        owner_id=owner_id)
                    if available_qty < 0 and lot_id:
                        # see if we can compensate the negative quants with some untracked quants
                        untracked_qty = Quant._get_available_quantity(
                            product_id,
                            location_id,
                            lot_id=False,
                            package_id=package_id,
                            owner_id=owner_id,
                            strict=True)
                        if untracked_qty:
                            taken_from_untracked_qty = min(
                                untracked_qty, abs(available_qty))
                            # Move the untracked quantity onto the lot: take it
                            # out lot-less, put it back under `lot_id`.
                            Quant._update_available_quantity(
                                product_id,
                                location_id,
                                -taken_from_untracked_qty,
                                lot_id=False,
                                package_id=package_id,
                                owner_id=owner_id)
                            Quant._update_available_quantity(
                                product_id,
                                location_id,
                                taken_from_untracked_qty,
                                lot_id=lot_id,
                                package_id=package_id,
                                owner_id=owner_id)
                            if not location_id.should_bypass_reservation():
                                # NOTE(review): frees `untracked_qty` rather
                                # than `taken_from_untracked_qty` — confirm
                                # this over-release is intended.
                                ml._free_reservation(ml.product_id,
                                                     location_id,
                                                     untracked_qty,
                                                     lot_id=False,
                                                     package_id=package_id,
                                                     owner_id=owner_id)
                    Quant._update_available_quantity(
                        product_id,
                        location_dest_id,
                        quantity,
                        lot_id=lot_id,
                        package_id=result_package_id,
                        owner_id=owner_id,
                        in_date=in_date)

                # Unreserve and reserve following move in order to have the real reserved quantity on move_line.
                next_moves |= ml.move_id.move_dest_ids.filtered(
                    lambda move: move.state not in ('done', 'cancel'))

                # Log a note
                if ml.picking_id:
                    ml._log_message(ml.picking_id, ml,
                                    'stock.track_move_template', vals)

        res = super(StockMoveLine, self).write(vals)

        # Update scrap object linked to move_lines to the new quantity.
        if 'qty_done' in vals:
            for move in self.mapped('move_id'):
                if move.scrapped:
                    move.scrap_ids.write({'scrap_qty': move.quantity_done})

        # As stock_account values according to a move's `product_uom_qty`, we consider that any
        # done stock move should have its `quantity_done` equals to its `product_uom_qty`, and
        # this is what move's `action_done` will do. So, we replicate the behavior here.
        if updates or 'qty_done' in vals:
            moves = self.filtered(
                lambda ml: ml.move_id.state == 'done').mapped('move_id')
            for move in moves:
                move.product_uom_qty = move.quantity_done

        next_moves._do_unreserve()
        next_moves._action_assign()
        return res

    def unlink(self):
        """Forbid deletion of done/cancelled lines, and release the quant
        reservation held by each deleted line before removing it.

        :raises UserError: if any line is in state 'done' or 'cancel'.
        """
        precision = self.env['decimal.precision'].precision_get(
            'Product Unit of Measure')
        for ml in self:
            if ml.state in ('done', 'cancel'):
                raise UserError(
                    _('You can not delete product moves if the picking is done. You can only correct the done quantities.'
                      ))
            # Unlinking a move line should unreserve.
            if ml.product_id.type == 'product' and not ml.location_id.should_bypass_reservation(
            ) and not float_is_zero(ml.product_qty,
                                    precision_digits=precision):
                self.env['stock.quant']._update_reserved_quantity(
                    ml.product_id,
                    ml.location_id,
                    -ml.product_qty,
                    lot_id=ml.lot_id,
                    package_id=ml.package_id,
                    owner_id=ml.owner_id,
                    strict=True)
        moves = self.mapped('move_id')
        res = super(StockMoveLine, self).unlink()
        if moves:
            # The move state is not computed from its lines: refresh it by
            # hand now that some lines are gone.
            moves._recompute_state()
        return res

    def _action_done(self):
        """ This method is called during a move's `action_done`. It'll actually move a quant from
        the source location to the destination location, and unreserve if needed in the source
        location.

        This method is intended to be called on all the move lines of a move. This method is not
        intended to be called when editing a `done` move (that's what the override of `write` here
        is for).
        """
        Quant = self.env['stock.quant']

        # First, we loop over all the move lines to do a preliminary check: `qty_done` should not
        # be negative and, according to the presence of a picking type or a linked inventory
        # adjustment, enforce some rules on the `lot_id` field. If `qty_done` is null, we unlink
        # the line. It is mandatory in order to free the reservation and correctly apply
        # `action_done` on the next move lines.
        ml_to_delete = self.env['stock.move.line']
        for ml in self:
            # Check here if `ml.qty_done` respects the rounding of `ml.product_uom_id`.
            uom_qty = float_round(
                ml.qty_done,
                precision_rounding=ml.product_uom_id.rounding,
                rounding_method='HALF-UP')
            precision_digits = self.env['decimal.precision'].precision_get(
                'Product Unit of Measure')
            qty_done = float_round(ml.qty_done,
                                   precision_digits=precision_digits,
                                   rounding_method='HALF-UP')
            if float_compare(
                    uom_qty, qty_done,
                    precision_digits=precision_digits) != 0:
                raise UserError(
                    _('The quantity done for the product "%s" doesn\'t respect the rounding precision \
defined on the unit of measure "%s". Please change the quantity done or the \
rounding precision of your unit of measure.') %
                    (ml.product_id.display_name, ml.product_uom_id.name))

            qty_done_float_compared = float_compare(
                ml.qty_done, 0, precision_rounding=ml.product_uom_id.rounding)
            if qty_done_float_compared > 0:
                if ml.product_id.tracking != 'none':
                    picking_type_id = ml.move_id.picking_type_id
                    if picking_type_id:
                        if picking_type_id.use_create_lots:
                            # If a picking type is linked, we may have to create a production lot on
                            # the fly before assigning it to the move line if the user checked both
                            # `use_create_lots` and `use_existing_lots`.
                            if ml.lot_name and not ml.lot_id:
                                lot = self.env['stock.production.lot'].create({
                                    'name': ml.lot_name,
                                    'product_id': ml.product_id.id
                                })
                                ml.write({'lot_id': lot.id})
                        elif not picking_type_id.use_create_lots and not picking_type_id.use_existing_lots:
                            # If the user disabled both `use_create_lots` and `use_existing_lots`
                            # checkboxes on the picking type, he's allowed to enter tracked
                            # products without a `lot_id`.
                            continue
                    elif ml.move_id.inventory_id:
                        # If an inventory adjustment is linked, the user is allowed to enter
                        # tracked products without a `lot_id`.
                        continue
                    if not ml.lot_id:
                        raise UserError(
                            _('You need to supply a Lot/Serial number for product %s.'
                              ) % ml.product_id.display_name)
            elif qty_done_float_compared < 0:
                raise UserError(_('No negative quantities allowed'))
            else:
                # Nothing was done on this line: queue it for deletion so its
                # reservation is freed before the remaining lines are moved.
                ml_to_delete |= ml
        ml_to_delete.unlink()

        # Now, we can actually move the quant.
        done_ml = self.env['stock.move.line']
        for ml in self - ml_to_delete:
            if ml.product_id.type == 'product':
                rounding = ml.product_uom_id.rounding

                # if this move line is force assigned, unreserve elsewhere if needed
                if not ml.location_id.should_bypass_reservation(
                ) and float_compare(ml.qty_done,
                                    ml.product_uom_qty,
                                    precision_rounding=rounding) > 0:
                    qty_done_product_uom = ml.product_uom_id._compute_quantity(
                        ml.qty_done,
                        ml.product_id.uom_id,
                        rounding_method='HALF-UP')
                    extra_qty = qty_done_product_uom - ml.product_qty
                    ml._free_reservation(ml.product_id,
                                         ml.location_id,
                                         extra_qty,
                                         lot_id=ml.lot_id,
                                         package_id=ml.package_id,
                                         owner_id=ml.owner_id,
                                         ml_to_ignore=done_ml)
                # unreserve what's been reserved
                if not ml.location_id.should_bypass_reservation(
                ) and ml.product_id.type == 'product' and ml.product_qty:
                    Quant._update_reserved_quantity(ml.product_id,
                                                    ml.location_id,
                                                    -ml.product_qty,
                                                    lot_id=ml.lot_id,
                                                    package_id=ml.package_id,
                                                    owner_id=ml.owner_id,
                                                    strict=True)

                # move what's been actually done
                quantity = ml.product_uom_id._compute_quantity(
                    ml.qty_done,
                    ml.move_id.product_id.uom_id,
                    rounding_method='HALF-UP')
                available_qty, in_date = Quant._update_available_quantity(
                    ml.product_id,
                    ml.location_id,
                    -quantity,
                    lot_id=ml.lot_id,
                    package_id=ml.package_id,
                    owner_id=ml.owner_id)
                if available_qty < 0 and ml.lot_id:
                    # see if we can compensate the negative quants with some untracked quants
                    untracked_qty = Quant._get_available_quantity(
                        ml.product_id,
                        ml.location_id,
                        lot_id=False,
                        package_id=ml.package_id,
                        owner_id=ml.owner_id,
                        strict=True)
                    if untracked_qty:
                        taken_from_untracked_qty = min(untracked_qty,
                                                       abs(quantity))
                        # Re-label part of the untracked stock under the lot.
                        Quant._update_available_quantity(
                            ml.product_id,
                            ml.location_id,
                            -taken_from_untracked_qty,
                            lot_id=False,
                            package_id=ml.package_id,
                            owner_id=ml.owner_id)
                        Quant._update_available_quantity(
                            ml.product_id,
                            ml.location_id,
                            taken_from_untracked_qty,
                            lot_id=ml.lot_id,
                            package_id=ml.package_id,
                            owner_id=ml.owner_id)
                Quant._update_available_quantity(
                    ml.product_id,
                    ml.location_dest_id,
                    quantity,
                    lot_id=ml.lot_id,
                    package_id=ml.result_package_id,
                    owner_id=ml.owner_id,
                    in_date=in_date)
            done_ml |= ml
        # Reset the reserved quantity as we just moved it to the destination location.
        (self - ml_to_delete).with_context(
            bypass_reservation_update=True).write({
                'product_uom_qty': 0.00,
                'date': fields.Datetime.now(),
            })

    def _log_message(self, record, move, template, vals):
        """Post a note on `record` (a picking) rendering `template` with the
        human-readable names of the changed relational values in `vals`.
        """
        data = vals.copy()
        if 'lot_id' in vals and vals['lot_id'] != move.lot_id.id:
            data['lot_name'] = self.env['stock.production.lot'].browse(
                vals.get('lot_id')).name
        if 'location_id' in vals:
            data['location_name'] = self.env['stock.location'].browse(
                vals.get('location_id')).name
        if 'location_dest_id' in vals:
            data['location_dest_name'] = self.env['stock.location'].browse(
                vals.get('location_dest_id')).name
        if 'package_id' in vals and vals['package_id'] != move.package_id.id:
            data['package_name'] = self.env['stock.quant.package'].browse(
                vals.get('package_id')).name
        # NOTE(review): the key tested here is 'package_result_id' but the
        # value read below is 'result_package_id' — looks inconsistent;
        # confirm which key callers actually pass.
        if 'package_result_id' in vals and vals[
                'package_result_id'] != move.package_result_id.id:
            data['result_package_name'] = self.env[
                'stock.quant.package'].browse(
                    vals.get('result_package_id')).name
        if 'owner_id' in vals and vals['owner_id'] != move.owner_id.id:
            data['owner_name'] = self.env['res.partner'].browse(
                vals.get('owner_id')).name
        record.message_post_with_view(
            template,
            values={
                'move': move,
                'vals': dict(vals, **data)
            },
            subtype_id=self.env.ref('mail.mt_note').id)

    def _free_reservation(self,
                          product_id,
                          location_id,
                          quantity,
                          lot_id=None,
                          package_id=None,
                          owner_id=None,
                          ml_to_ignore=None):
        """ When editing a done move line or validating one with some forced quantities, it is
        possible to impact quants that were not reserved. It is therefore necessary to edit or
        unlink the move lines that reserved a quantity now unavailable.

        :param ml_to_ignore: recordset of `stock.move.line` that should NOT be unreserved
        """
        self.ensure_one()

        if ml_to_ignore is None:
            ml_to_ignore = self.env['stock.move.line']
        ml_to_ignore |= self

        # Check the available quantity, with the `strict` kw set to `True`. If the available
        # quantity is greather than the quantity now unavailable, there is nothing to do.
        available_quantity = self.env['stock.quant']._get_available_quantity(
            product_id,
            location_id,
            lot_id=lot_id,
            package_id=package_id,
            owner_id=owner_id,
            strict=True)
        if quantity > available_quantity:
            # We now have to find the move lines that reserved our now unavailable quantity. We
            # take care to exclude ourselves and the move lines were work had already been done.
            outdated_move_lines_domain = [
                ('state', 'not in', ['done', 'cancel']),
                ('product_id', '=', product_id.id),
                ('lot_id', '=', lot_id.id if lot_id else False),
                ('location_id', '=', location_id.id),
                ('owner_id', '=', owner_id.id if owner_id else False),
                ('package_id', '=', package_id.id if package_id else False),
                ('product_qty', '>', 0.0),
                ('id', 'not in', ml_to_ignore.ids),
            ]
            # We take the current picking first, then the pickings with the latest scheduled date
            current_picking_first = lambda cand: (
                cand.picking_id != self.move_id.picking_id,
                -(cand.picking_id.scheduled_date or cand.move_id.date_expected)
                .timestamp() if cand.picking_id or cand.move_id else -cand.id,
            )
            outdated_candidates = self.env['stock.move.line'].search(
                outdated_move_lines_domain).sorted(current_picking_first)

            # As the move's state is not computed over the move lines, we'll have to manually
            # recompute the moves which we adapted their lines.
            move_to_recompute_state = self.env['stock.move']
            rounding = self.product_uom_id.rounding
            for candidate in outdated_candidates:
                if float_compare(candidate.product_qty,
                                 quantity,
                                 precision_rounding=rounding) <= 0:
                    # Candidate reserved no more than what we still need:
                    # consume it entirely.
                    quantity -= candidate.product_qty
                    move_to_recompute_state |= candidate.move_id
                    if candidate.qty_done:
                        # Keep the line (work was done on it); just drop its
                        # reservation.
                        candidate.product_uom_qty = 0.0
                    else:
                        candidate.unlink()
                    if float_is_zero(quantity, precision_rounding=rounding):
                        break
                else:
                    # split this move line and assign the new part to our extra move
                    quantity_split = float_round(
                        candidate.product_qty - quantity,
                        precision_rounding=self.product_uom_id.rounding,
                        rounding_method='UP')
                    candidate.product_uom_qty = self.product_id.uom_id._compute_quantity(
                        quantity_split,
                        candidate.product_uom_id,
                        rounding_method='HALF-UP')
                    move_to_recompute_state |= candidate.move_id
                    break
            move_to_recompute_state._recompute_state()
class ActivityReport(models.Model): """ CRM Lead Analysis """ _name = "crm.activity.report" _auto = False _description = "CRM Activity Analysis" _rec_name = 'id' date = fields.Datetime('Date', readonly=True) author_id = fields.Many2one('res.partner', 'Created By', readonly=True) user_id = fields.Many2one('res.users', 'Salesperson', readonly=True) team_id = fields.Many2one('crm.team', 'Sales Team', readonly=True) lead_id = fields.Many2one('crm.lead', "Lead", readonly=True) subject = fields.Char('Summary', readonly=True) subtype_id = fields.Many2one('mail.message.subtype', 'Subtype', readonly=True) mail_activity_type_id = fields.Many2one('mail.activity.type', 'Activity Type', readonly=True) country_id = fields.Many2one('res.country', 'Country', readonly=True) company_id = fields.Many2one('res.company', 'Company', readonly=True) stage_id = fields.Many2one('crm.stage', 'Stage', readonly=True) partner_id = fields.Many2one('res.partner', 'Partner/Customer', readonly=True) lead_type = fields.Char( string='Type', selection=[('lead', 'Lead'), ('opportunity', 'Opportunity')], help="Type is used to separate Leads and Opportunities") active = fields.Boolean('Active', readonly=True) probability = fields.Float('Probability', group_operator='avg', readonly=True) def _select(self): return """ SELECT m.id, m.subtype_id, m.mail_activity_type_id, m.author_id, m.date, m.subject, l.id as lead_id, l.user_id, l.team_id, l.country_id, l.company_id, l.stage_id, l.partner_id, l.type as lead_type, l.active, l.probability """ def _from(self): return """ FROM mail_message AS m """ def _join(self): return """ JOIN crm_lead AS l ON m.res_id = l.id """ def _where(self): return """ WHERE m.model = 'crm.lead' AND m.mail_activity_type_id IS NOT NULL """ @api.model_cr def init(self): tools.drop_view_if_exists(self._cr, self._table) self._cr.execute(""" CREATE OR REPLACE VIEW %s AS ( %s %s %s %s ) """ % (self._table, self._select(), self._from(), self._join(), self._where()))
class Inventory(models.Model):
    """Inventory adjustment: counts stock for a location (optionally narrowed
    by product / lot / pack / owner / category) and generates the stock moves
    that realign the theoretical quantities with the counted ones.
    State machine: draft -> confirm (In Progress) -> done, or cancel.
    """
    _name = "stock.inventory"
    _description = "Inventory"
    _order = "date desc, id desc"

    @api.model
    def _default_location_id(self):
        """Default to the stock location of the first warehouse of the user's
        company; raise if the company has no warehouse at all.
        """
        company_user = self.env.user.company_id
        warehouse = self.env['stock.warehouse'].search(
            [('company_id', '=', company_user.id)], limit=1)
        if warehouse:
            return warehouse.lot_stock_id.id
        else:
            raise UserError(
                _('You must define a warehouse for the company: %s.') %
                (company_user.name, ))

    name = fields.Char('Inventory Reference',
                       readonly=True,
                       required=True,
                       states={'draft': [('readonly', False)]})
    date = fields.Datetime(
        'Inventory Date',
        readonly=True,
        required=True,
        default=fields.Datetime.now,
        help=
        "If the inventory adjustment is not validated, date at which the theoritical quantities have been checked.\n"
        "If the inventory adjustment is validated, date at which the inventory adjustment has been validated."
    )
    line_ids = fields.One2many('stock.inventory.line',
                               'inventory_id',
                               string='Inventories',
                               copy=True,
                               readonly=False,
                               states={'done': [('readonly', True)]})
    move_ids = fields.One2many('stock.move',
                               'inventory_id',
                               string='Created Moves',
                               states={'done': [('readonly', True)]})
    state = fields.Selection(string='Status',
                             selection=[('draft', 'Draft'),
                                        ('cancel', 'Cancelled'),
                                        ('confirm', 'In Progress'),
                                        ('done', 'Validated')],
                             copy=False,
                             index=True,
                             readonly=True,
                             default='draft')
    company_id = fields.Many2one('res.company',
                                 'Company',
                                 readonly=True,
                                 index=True,
                                 required=True,
                                 states={'draft': [('readonly', False)]},
                                 default=lambda self: self.env['res.company'].
                                 _company_default_get('stock.inventory'))
    location_id = fields.Many2one('stock.location',
                                  'Inventoried Location',
                                  readonly=True,
                                  required=True,
                                  states={'draft': [('readonly', False)]},
                                  default=_default_location_id)
    product_id = fields.Many2one(
        'product.product',
        'Inventoried Product',
        readonly=True,
        states={'draft': [('readonly', False)]},
        help="Specify Product to focus your inventory on a particular Product."
    )
    package_id = fields.Many2one(
        'stock.quant.package',
        'Inventoried Pack',
        readonly=True,
        states={'draft': [('readonly', False)]},
        help="Specify Pack to focus your inventory on a particular Pack.")
    partner_id = fields.Many2one(
        'res.partner',
        'Inventoried Owner',
        readonly=True,
        states={'draft': [('readonly', False)]},
        help="Specify Owner to focus your inventory on a particular Owner.")
    lot_id = fields.Many2one(
        'stock.production.lot',
        'Inventoried Lot/Serial Number',
        copy=False,
        readonly=True,
        states={'draft': [('readonly', False)]},
        help=
        "Specify Lot/Serial Number to focus your inventory on a particular Lot/Serial Number."
    )
    filter = fields.Selection(
        string='Inventory of',
        selection='_selection_filter',
        required=True,
        default='none',
        help=
        "If you do an entire inventory, you can choose 'All Products' and it will prefill the inventory with the current stock. If you only do some products "
        "(e.g. Cycle Counting) you can choose 'Manual Selection of Products' and the system won't propose anything. You can also let the "
        "system propose for a single product / lot /... ")
    total_qty = fields.Float('Total Quantity', compute='_compute_total_qty')
    category_id = fields.Many2one(
        'product.category',
        'Product Category',
        readonly=True,
        states={'draft': [('readonly', False)]},
        help=
        "Specify Product Category to focus your inventory on a particular Category."
    )
    exhausted = fields.Boolean('Include Exhausted Products',
                               readonly=True,
                               states={'draft': [('readonly', False)]})

    @api.one
    @api.depends('product_id', 'line_ids.product_qty')
    def _compute_total_qty(self):
        """ For single product inventory, total quantity of the counted """
        if self.product_id:
            self.total_qty = sum(self.mapped('line_ids').mapped('product_qty'))
        else:
            # Summing across different products is meaningless: show 0.
            self.total_qty = 0

    @api.multi
    def unlink(self):
        """Forbid deleting validated inventories."""
        for inventory in self:
            if inventory.state == 'done':
                raise UserError(
                    _('You cannot delete a validated inventory adjustement.'))
        return super(Inventory, self).unlink()

    @api.model
    def _selection_filter(self):
        """ Get the list of filter allowed according to the options checked
        in 'Settings\\Warehouse'. """
        res_filter = [('none', _('All products')),
                      ('category', _('One product category')),
                      ('product', _('One product only')),
                      ('partial', _('Select products manually'))]
        # Extra filters only make sense when the matching feature groups are
        # enabled for the current user.
        if self.user_has_groups('stock.group_tracking_owner'):
            res_filter += [('owner', _('One owner only')),
                           ('product_owner',
                            _('One product for a specific owner'))]
        if self.user_has_groups('stock.group_production_lot'):
            res_filter.append(('lot', _('One Lot/Serial Number')))
        if self.user_has_groups('stock.group_tracking_lot'):
            res_filter.append(('pack', _('A Pack')))
        return res_filter

    @api.onchange('filter')
    def _onchange_filter(self):
        """Clear every narrowing field that the newly selected filter does
        not use, and pre-check 'exhausted' for single-product inventories.
        """
        if self.filter not in ('product', 'product_owner'):
            self.product_id = False
        if self.filter != 'lot':
            self.lot_id = False
        if self.filter not in ('owner', 'product_owner'):
            self.partner_id = False
        if self.filter != 'pack':
            self.package_id = False
        if self.filter != 'category':
            self.category_id = False
        if self.filter != 'product':
            self.exhausted = False
        if self.filter == 'product':
            self.exhausted = True
            if self.product_id:
                # Restrict the product picker to variants of the same template.
                return {
                    'domain': {
                        'product_id':
                        [('product_tmpl_id', '=',
                          self.product_id.product_tmpl_id.id)]
                    }
                }

    @api.onchange('location_id')
    def _onchange_location_id(self):
        # Keep the inventory's company aligned with the chosen location's.
        if self.location_id.company_id:
            self.company_id = self.location_id.company_id

    @api.one
    @api.constrains('filter', 'product_id', 'lot_id', 'partner_id',
                    'package_id')
    def _check_filter_product(self):
        """Reject narrowing fields that are inconsistent with the selected
        filter mode (e.g. a lot set while the filter is not 'lot')."""
        if self.filter == 'none' and self.product_id and self.location_id and self.lot_id:
            return
        if self.filter not in ('product', 'product_owner') and self.product_id:
            raise ValidationError(
                _('The selected product doesn\'t belong to that owner..'))
        if self.filter != 'lot' and self.lot_id:
            raise ValidationError(_('The selected lot number doesn\'t exist.'))
        if self.filter not in ('owner', 'product_owner') and self.partner_id:
            raise ValidationError(
                _('The selected owner doesn\'t have the proprietary of that product.'
                  ))
        if self.filter != 'pack' and self.package_id:
            raise ValidationError(
                _('The selected inventory options are not coherent, the package doesn\'t exist.'
                  ))

    def action_reset_product_qty(self):
        """Zero the counted quantity on every line (start counting afresh)."""
        self.mapped('line_ids').write({'product_qty': 0})
        return True

    def action_validate(self):
        """Validate the inventory, first routing through the tracking
        confirmation wizard when tracked products were counted without a
        lot/serial number.
        """
        # Tracked lines counted without a lot and differing from theory.
        inventory_lines = self.line_ids.filtered(
            lambda l: l.product_id.tracking in ['lot', 'serial'] and not l.
            prod_lot_id and l.theoretical_qty != l.product_qty)
        # Serial-tracked lines with a lot but more than one unit counted.
        lines = self.line_ids.filtered(lambda l: float_compare(
            l.product_qty, 1, precision_rounding=l.product_uom_id.rounding
        ) > 0 and l.product_id.tracking == 'serial' and l.prod_lot_id)
        if inventory_lines and not lines:
            wiz_lines = [(0, 0, {
                'product_id': product.id,
                'tracking': product.tracking
            }) for product in inventory_lines.mapped('product_id')]
            wiz = self.env['stock.track.confirmation'].create({
                'inventory_id':
                self.id,
                'tracking_line_ids':
                wiz_lines
            })
            return {
                'name': _('Tracked Products in Inventory Adjustment'),
                'type': 'ir.actions.act_window',
                'view_mode': 'form',
                'res_model': 'stock.track.confirmation',
                'target': 'new',
                'res_id': wiz.id,
            }
        else:
            self._action_done()

    def _action_done(self):
        """Finalize: forbid negative counted quantities, generate the
        adjustment moves, stamp state/date and post the moves."""
        negative = next(
            (line for line in self.mapped('line_ids') if line.product_qty < 0
             and line.product_qty != line.theoretical_qty), False)
        if negative:
            raise UserError(
                _('You cannot set a negative product quantity in an inventory line:\n\t%s - qty: %s'
                  ) % (negative.product_id.name, negative.product_qty))
        self.action_check()
        self.write({'state': 'done', 'date': fields.Datetime.now()})
        self.post_inventory()
        return True

    def post_inventory(self):
        # The inventory is posted as a single step which means quants cannot be moved from an internal location to another using an inventory
        # as they will be moved to inventory loss, and other quants will be created to the encoded quant location. This is a normal behavior
        # as quants cannot be reuse from inventory location (users can still manually move the products before/after the inventory if they want).
        self.mapped('move_ids').filtered(
            lambda move: move.state != 'done')._action_done()
        return True

    def action_check(self):
        """ Checks the inventory and computes the stock move to do """
        # tde todo: clean after _generate_moves
        for inventory in self.filtered(lambda x: x.state not in
                                       ('done', 'cancel')):
            # first remove the existing stock moves linked to this inventory
            inventory.with_context(
                prefetch_fields=False).mapped('move_ids').unlink()
            inventory.line_ids._generate_moves()

    def action_cancel_draft(self):
        """Cancel generated moves, wipe the lines and go back to draft."""
        self.mapped('move_ids')._action_cancel()
        self.write({'line_ids': [(5, )], 'state': 'draft'})

    def action_start(self):
        """Move to 'In Progress' and, unless the filter is 'partial',
        prefill the lines from the current quants."""
        for inventory in self.filtered(lambda x: x.state not in
                                       ('done', 'cancel')):
            vals = {'state': 'confirm', 'date': fields.Datetime.now()}
            if (inventory.filter != 'partial') and not inventory.line_ids:
                vals.update({
                    'line_ids': [(0, 0, line_values) for line_values in
                                 inventory._get_inventory_lines_values()]
                })
            inventory.write(vals)
        return True

    def action_inventory_line_tree(self):
        """Open the line list view pre-filled with this inventory's context."""
        action = self.env.ref('stock.action_inventory_line_tree').read()[0]
        action['context'] = {
            'default_location_id': self.location_id.id,
            'default_product_id': self.product_id.id,
            'default_prod_lot_id': self.lot_id.id,
            'default_package_id': self.package_id.id,
            'default_partner_id': self.partner_id.id,
            'default_inventory_id': self.id,
        }
        return action

    def _get_inventory_lines_values(self):
        """Build the value dicts for the inventory lines from the quants of
        the inventoried location(s), applying the active filter fields.
        Returns a list of dicts ready for a (0, 0, vals) One2many command.
        """
        # TDE CLEANME: is sql really necessary ? I don't think so
        locations = self.env['stock.location'].search([('id', 'child_of',
                                                        [self.location_id.id])
                                                       ])
        # WHERE fragment built from the filter fields; values are passed as
        # query parameters (args), never interpolated into the string.
        domain = ' location_id in %s AND quantity != 0 AND active = TRUE'
        args = (tuple(locations.ids), )

        vals = []
        Product = self.env['product.product']
        # Empty recordset of products available in stock_quants
        quant_products = self.env['product.product']
        # Empty recordset of products to filter
        products_to_filter = self.env['product.product']

        # case 0: Filter on company
        if self.company_id:
            domain += ' AND company_id = %s'
            args += (self.company_id.id, )

        #case 1: Filter on One owner only or One product for a specific owner
        if self.partner_id:
            domain += ' AND owner_id = %s'
            args += (self.partner_id.id, )
        #case 2: Filter on One Lot/Serial Number
        if self.lot_id:
            domain += ' AND lot_id = %s'
            args += (self.lot_id.id, )
        #case 3: Filter on One product
        if self.product_id:
            domain += ' AND product_id = %s'
            args += (self.product_id.id, )
            products_to_filter |= self.product_id
        #case 4: Filter on A Pack
        if self.package_id:
            domain += ' AND package_id = %s'
            args += (self.package_id.id, )
        #case 5: Filter on One product category + Exahausted Products
        if self.category_id:
            categ_products = Product.search([('categ_id', 'child_of',
                                              self.category_id.id)])
            domain += ' AND product_id = ANY (%s)'
            args += (categ_products.ids, )
            products_to_filter |= categ_products

        self.env.cr.execute(
            """SELECT product_id, sum(quantity) as product_qty, location_id, lot_id as prod_lot_id, package_id, owner_id as partner_id
            FROM stock_quant
            LEFT JOIN product_product
            ON product_product.id = stock_quant.product_id
            WHERE %s
            GROUP BY product_id, location_id, lot_id, package_id, partner_id """
            % domain, args)

        for product_data in self.env.cr.dictfetchall():
            # replace the None the dictionary by False, because falsy values are tested later on
            for void_field in [
                    item[0] for item in product_data.items() if item[1] is None
            ]:
                product_data[void_field] = False
            product_data['theoretical_qty'] = product_data['product_qty']
            if product_data['product_id']:
                product_data['product_uom_id'] = Product.browse(
                    product_data['product_id']).uom_id.id
                quant_products |= Product.browse(product_data['product_id'])
            vals.append(product_data)
        if self.exhausted:
            exhausted_vals = self._get_exhausted_inventory_line(
                products_to_filter, quant_products)
            vals.extend(exhausted_vals)
        return vals

    def _get_exhausted_inventory_line(self, products, quant_products):
        '''
        This function return inventory lines for exausted products
        :param products: products With Selected Filter.
        :param quant_products: products available in stock_quants
        '''
        vals = []
        exhausted_domain = [('type', 'not in', ('service', 'consu', 'digital'))
                            ]
        if products:
            exhausted_products = products - quant_products
            exhausted_domain += [('id', 'in', exhausted_products.ids)]
        else:
            exhausted_domain += [('id', 'not in', quant_products.ids)]
        exhausted_products = self.env['product.product'].search(
            exhausted_domain)
        for product in exhausted_products:
            # Zero-quantity line so the user can count products that currently
            # have no quants at all.
            vals.append({
                'inventory_id': self.id,
                'product_id': product.id,
                'location_id': self.location_id.id,
            })
        return vals
class PosSaleReport(models.Model):
    """Sales by Channel (All in One)

    Read-only reporting model backed by a SQL view (``_auto = False``):
    one row per confirmed sale order line, with amounts converted through
    the order's currency rate and quantities normalized to the product
    template's UoM.
    """
    _name = "report.all.channels.sales"
    _description = "Sales by Channel (All in One)"
    _auto = False  # table is the view created in init(), not an ORM table

    # Columns mirror the SELECT list below; all readonly since this is a view.
    name = fields.Char('Order Reference', readonly=True)
    partner_id = fields.Many2one('res.partner', 'Partner', readonly=True)
    product_id = fields.Many2one('product.product',
                                 string='Product',
                                 readonly=True)
    product_tmpl_id = fields.Many2one('product.template',
                                      'Product Template',
                                      readonly=True)
    date_order = fields.Datetime(string='Date Order', readonly=True)
    user_id = fields.Many2one('res.users', 'Salesperson', readonly=True)
    categ_id = fields.Many2one('product.category',
                               'Product Category',
                               readonly=True)
    company_id = fields.Many2one('res.company', 'Company', readonly=True)
    price_total = fields.Float('Total', readonly=True)
    pricelist_id = fields.Many2one('product.pricelist',
                                   'Pricelist',
                                   readonly=True)
    country_id = fields.Many2one('res.country',
                                 'Partner Country',
                                 readonly=True)
    price_subtotal = fields.Float(string='Price Subtotal', readonly=True)
    product_qty = fields.Float('Product Quantity', readonly=True)
    analytic_account_id = fields.Many2one('account.analytic.account',
                                          'Analytic Account',
                                          readonly=True)
    team_id = fields.Many2one('crm.team', 'Sales Team', readonly=True)

    def _so(self):
        """Inner SELECT over sale order lines.

        The CASE guards against a NULL/0 currency rate by falling back to a
        divisor of 1.0; quantities are rescaled from the line's UoM (u) to
        the template's UoM (u2).
        """
        so_str = """
            SELECT sol.id AS id,
                so.name AS name,
                so.partner_id AS partner_id,
                sol.product_id AS product_id,
                pro.product_tmpl_id AS product_tmpl_id,
                so.date_order AS date_order,
                so.user_id AS user_id,
                pt.categ_id AS categ_id,
                so.company_id AS company_id,
                sol.price_total / CASE COALESCE(so.currency_rate, 0) WHEN 0 THEN 1.0 ELSE so.currency_rate END AS price_total,
                so.pricelist_id AS pricelist_id,
                rp.country_id AS country_id,
                sol.price_subtotal / CASE COALESCE(so.currency_rate, 0) WHEN 0 THEN 1.0 ELSE so.currency_rate END AS price_subtotal,
                (sol.product_uom_qty / u.factor * u2.factor) as product_qty,
                so.analytic_account_id AS analytic_account_id,
                so.team_id AS team_id
            FROM sale_order_line sol
                JOIN sale_order so ON (sol.order_id = so.id)
                LEFT JOIN product_product pro ON (sol.product_id = pro.id)
                JOIN res_partner rp ON (so.partner_id = rp.id)
                LEFT JOIN product_template pt ON (pro.product_tmpl_id = pt.id)
                LEFT JOIN product_pricelist pp ON (so.pricelist_id = pp.id)
                LEFT JOIN uom_uom u on (u.id=sol.product_uom)
                LEFT JOIN uom_uom u2 on (u2.id=pt.uom_id)
            WHERE so.state in ('sale','done')
        """
        return so_str

    def _from(self):
        """Wrap the inner query so it can be used as a derived table."""
        return """(%s)""" % (self._so())

    def get_main_request(self):
        """Assemble the CREATE VIEW statement for this model's table."""
        request = """
            CREATE or REPLACE VIEW %s AS
                SELECT id AS id,
                    name, partner_id, product_id, product_tmpl_id, date_order,
                    user_id, categ_id, company_id, price_total, pricelist_id,
                    analytic_account_id, country_id, team_id, price_subtotal,
                    product_qty
                FROM %s
                AS foo""" % (self._table, self._from())
        return request

    @api.model_cr
    def init(self):
        """(Re)create the backing SQL view at module install/update time."""
        tools.drop_view_if_exists(self.env.cr, self._table)
        self.env.cr.execute(self.get_main_request())
class MaintenanceRequest(models.Model):
    """A maintenance request on a piece of equipment, tracked through
    kanban stages with chatter and scheduled activities.
    """
    _name = 'maintenance.request'
    _inherit = ['mail.thread', 'mail.activity.mixin']
    _description = 'Maintenance Request'
    _order = "id desc"

    @api.returns('self')
    def _default_stage(self):
        # First stage found (default _order of maintenance.stage applies).
        return self.env['maintenance.stage'].search([], limit=1)

    @api.multi
    def _track_subtype(self, init_values):
        """Pick the chatter subtype when the stage changes: 'created' for
        the first stage(s) (sequence <= 1), 'status' otherwise."""
        self.ensure_one()
        if 'stage_id' in init_values and self.stage_id.sequence <= 1:
            return 'maintenance.mt_req_created'
        elif 'stage_id' in init_values and self.stage_id.sequence > 1:
            return 'maintenance.mt_req_status'
        return super(MaintenanceRequest, self)._track_subtype(init_values)

    def _get_default_team_id(self):
        """Default team: prefer a team of the user's company, else any team."""
        MT = self.env['maintenance.team']
        team = MT.search([('company_id', '=', self.env.user.company_id.id)], limit=1)
        if not team:
            team = MT.search([], limit=1)
        return team.id

    name = fields.Char('Subjects', required=True)
    company_id = fields.Many2one('res.company', string='Company',
        default=lambda self: self.env.user.company_id)
    description = fields.Text('Description')
    request_date = fields.Date('Request Date', track_visibility='onchange', default=fields.Date.context_today,
                               help="Date requested for the maintenance to happen")
    owner_user_id = fields.Many2one('res.users', string='Created by User', default=lambda s: s.env.uid)
    category_id = fields.Many2one('maintenance.equipment.category', related='equipment_id.category_id', string='Category', store=True, readonly=True)
    equipment_id = fields.Many2one('maintenance.equipment', string='Equipment',
                                   ondelete='restrict', index=True)
    user_id = fields.Many2one('res.users', string='Technician', track_visibility='onchange', oldname='technician_user_id')
    stage_id = fields.Many2one('maintenance.stage', string='Stage', ondelete='restrict', track_visibility='onchange',
                               group_expand='_read_group_stage_ids', default=_default_stage, copy=False)
    priority = fields.Selection([('0', 'Very Low'), ('1', 'Low'), ('2', 'Normal'), ('3', 'High')], string='Priority')
    color = fields.Integer('Color Index')
    close_date = fields.Date('Close Date', help="Date the maintenance was finished. ")
    kanban_state = fields.Selection([('normal', 'In Progress'), ('blocked', 'Blocked'), ('done', 'Ready for next stage')],
                                    string='Kanban State', required=True, default='normal', track_visibility='onchange')
    # active = fields.Boolean(default=True, help="Set active to false to hide the maintenance request without deleting it.")
    # NOTE: this model uses an explicit 'archive' flag instead of the standard
    # 'active' field (see the commented-out line above).
    archive = fields.Boolean(default=False, help="Set archive to true to hide the maintenance request without deleting it.")
    maintenance_type = fields.Selection([('corrective', 'Corrective'), ('preventive', 'Preventive')], string='Maintenance Type', default="corrective")
    schedule_date = fields.Datetime('Scheduled Date', help="Date the maintenance team plans the maintenance. It should not differ much from the Request Date. ")
    maintenance_team_id = fields.Many2one('maintenance.team', string='Team', required=True, default=_get_default_team_id)
    duration = fields.Float(help="Duration in hours and minutes.")

    @api.multi
    def archive_equipment_request(self):
        """Hide the request(s) by setting the archive flag."""
        self.write({'archive': True})

    @api.multi
    def reset_equipment_request(self):
        """ Reinsert the maintenance request into the maintenance pipe in the first stage"""
        first_stage_obj = self.env['maintenance.stage'].search([], order="sequence asc", limit=1)
        # self.write({'active': True, 'stage_id': first_stage_obj.id})
        self.write({'archive': False, 'stage_id': first_stage_obj.id})

    @api.onchange('equipment_id')
    def onchange_equipment_id(self):
        """Propagate technician, category and team from the selected equipment."""
        if self.equipment_id:
            # Equipment technician wins; fall back to the category technician.
            self.user_id = self.equipment_id.technician_user_id if self.equipment_id.technician_user_id else self.equipment_id.category_id.technician_user_id
            self.category_id = self.equipment_id.category_id
            if self.equipment_id.maintenance_team_id:
                self.maintenance_team_id = self.equipment_id.maintenance_team_id.id

    @api.onchange('category_id')
    def onchange_category_id(self):
        # Only override the technician when none is set, or when the equipment
        # itself provides no technician of its own.
        if not self.user_id or not self.equipment_id or (self.user_id and not self.equipment_id.technician_user_id):
            self.user_id = self.category_id.technician_user_id

    @api.model
    def create(self, vals):
        # context: no_log, because subtype already handle this
        self = self.with_context(mail_create_nolog=True)
        request = super(MaintenanceRequest, self).create(vals)
        if request.owner_user_id or request.user_id:
            request._add_followers()
        if request.equipment_id and not request.maintenance_team_id:
            request.maintenance_team_id = request.equipment_id.maintenance_team_id
        request.activity_update()
        return request

    @api.multi
    def write(self, vals):
        # Overridden to reset the kanban_state to normal whenever
        # the stage (stage_id) of the Maintenance Request changes.
        if vals and 'kanban_state' not in vals and 'stage_id' in vals:
            vals['kanban_state'] = 'normal'
        res = super(MaintenanceRequest, self).write(vals)
        if vals.get('owner_user_id') or vals.get('user_id'):
            self._add_followers()
        if 'stage_id' in vals:
            # Stamp the close date on requests that reached a 'done' stage and
            # mark the maintenance activity as feedback-ed.
            self.filtered(lambda m: m.stage_id.done).write({'close_date': fields.Date.today()})
            self.activity_feedback(['maintenance.mail_act_maintenance_request'])
        if vals.get('user_id') or vals.get('schedule_date'):
            self.activity_update()
        if vals.get('equipment_id'):
            # need to change description of activity also so unlink old and create new activity
            self.activity_unlink(['maintenance.mail_act_maintenance_request'])
            self.activity_update()
        return res

    def activity_update(self):
        """ Update maintenance activities based on current record set state.
        It reschedule, unlink or create maintenance request activities. """
        # Requests without a scheduled date have no activity to keep.
        self.filtered(lambda request: not request.schedule_date).activity_unlink(['maintenance.mail_act_maintenance_request'])
        for request in self.filtered(lambda request: request.schedule_date):
            date_dl = fields.Datetime.from_string(request.schedule_date).date()
            updated = request.activity_reschedule(
                ['maintenance.mail_act_maintenance_request'],
                date_deadline=date_dl,
                new_user_id=request.user_id.id or request.owner_user_id.id or self.env.uid)
            if not updated:
                # No existing activity to reschedule: create a new one,
                # linking to the equipment in the note when available.
                if request.equipment_id:
                    note = _('Request planned for <a href="#" data-oe-model="%s" data-oe-id="%s">%s</a>') % (
                        request.equipment_id._name, request.equipment_id.id, request.equipment_id.display_name)
                else:
                    note = False
                request.activity_schedule(
                    'maintenance.mail_act_maintenance_request',
                    fields.Datetime.from_string(request.schedule_date).date(),
                    note=note,
                    user_id=request.user_id.id or request.owner_user_id.id or self.env.uid)

    def _add_followers(self):
        """Subscribe the owner and the technician as chatter followers."""
        for request in self:
            partner_ids = (request.owner_user_id.partner_id + request.user_id.partner_id).ids
            request.message_subscribe(partner_ids=partner_ids)

    @api.model
    def _read_group_stage_ids(self, stages, domain, order):
        """ Read group customization in order to display all the stages in the
            kanban view, even if they are empty """
        stage_ids = stages._search([], order=order, access_rights_uid=SUPERUSER_ID)
        return stages.browse(stage_ids)
class MrpWorkorder(models.Model):
    """A work order: one operation of a manufacturing order executed on a
    work center, with time tracking, lot/serial registration and
    produced-quantity bookkeeping.
    """
    _name = 'mrp.workorder'
    _description = 'Work Order'
    _inherit = ['mail.thread']

    name = fields.Char(
        'Work Order', required=True,
        states={'done': [('readonly', True)], 'cancel': [('readonly', True)]})
    workcenter_id = fields.Many2one(
        'mrp.workcenter', 'Work Center', required=True,
        states={'done': [('readonly', True)], 'cancel': [('readonly', True)]})
    working_state = fields.Selection(
        'Workcenter Status', related='workcenter_id.working_state', readonly=False,
        help='Technical: used in views only')
    production_id = fields.Many2one(
        'mrp.production', 'Manufacturing Order',
        index=True, ondelete='cascade', required=True, track_visibility='onchange',
        states={'done': [('readonly', True)], 'cancel': [('readonly', True)]})
    product_id = fields.Many2one(
        'product.product', 'Product',
        related='production_id.product_id', readonly=True,
        help='Technical: used in views only.', store=True)
    product_uom_id = fields.Many2one(
        'uom.uom', 'Unit of Measure', related='production_id.product_uom_id',
        readonly=True, help='Technical: used in views only.')
    production_availability = fields.Selection(
        'Stock Availability', readonly=True,
        related='production_id.availability', store=True,
        help='Technical: used in views and domains only.')
    production_state = fields.Selection(
        'Production State', readonly=True,
        related='production_id.state',
        help='Technical: used in views only.')
    product_tracking = fields.Selection(
        'Product Tracking', related='production_id.product_id.tracking', readonly=False,
        help='Technical: used in views only.')
    qty_production = fields.Float('Original Production Quantity', readonly=True, related='production_id.product_qty')
    qty_remaining = fields.Float('Quantity To Be Produced', compute='_compute_qty_remaining', digits=dp.get_precision('Product Unit of Measure'))
    qty_produced = fields.Float(
        'Quantity', default=0.0,
        readonly=True,
        digits=dp.get_precision('Product Unit of Measure'),
        help="The number of products already handled by this work order")
    qty_producing = fields.Float(
        'Currently Produced Quantity', default=1.0,
        digits=dp.get_precision('Product Unit of Measure'),
        states={'done': [('readonly', True)], 'cancel': [('readonly', True)]})
    is_produced = fields.Boolean(string="Has Been Produced", compute='_compute_is_produced')
    is_first_wo = fields.Boolean(string="Is the first WO to produce", compute='_compute_is_first_wo')
    state = fields.Selection([
        ('pending', 'Pending'),
        ('ready', 'Ready'),
        ('progress', 'In Progress'),
        ('done', 'Finished'),
        ('cancel', 'Cancelled')], string='Status',
        default='pending')
    date_planned_start = fields.Datetime(
        'Scheduled Date Start',
        states={'done': [('readonly', True)], 'cancel': [('readonly', True)]})
    date_planned_finished = fields.Datetime(
        'Scheduled Date Finished',
        states={'done': [('readonly', True)], 'cancel': [('readonly', True)]})
    date_start = fields.Datetime(
        'Effective Start Date',
        states={'done': [('readonly', True)], 'cancel': [('readonly', True)]})
    date_finished = fields.Datetime(
        'Effective End Date',
        states={'done': [('readonly', True)], 'cancel': [('readonly', True)]})
    duration_expected = fields.Float(
        'Expected Duration', digits=(16, 2),
        states={'done': [('readonly', True)], 'cancel': [('readonly', True)]},
        help="Expected duration (in minutes)")
    duration = fields.Float(
        'Real Duration', compute='_compute_duration',
        readonly=True, store=True)
    duration_unit = fields.Float(
        'Duration Per Unit', compute='_compute_duration',
        group_operator="avg", readonly=True, store=True)
    duration_percent = fields.Integer(
        'Duration Deviation (%)', compute='_compute_duration',
        group_operator="avg", readonly=True, store=True)
    operation_id = fields.Many2one(
        'mrp.routing.workcenter', 'Operation')  # Should be used differently as BoM can change in the meantime
    worksheet = fields.Binary(
        'Worksheet', related='operation_id.worksheet', readonly=True)
    move_raw_ids = fields.One2many(
        'stock.move', 'workorder_id', 'Moves')
    move_line_ids = fields.One2many(
        'stock.move.line', 'workorder_id', 'Moves to Track',
        domain=[('done_wo', '=', True)],
        help="Inventory moves for which you must scan a lot number at this work order")
    active_move_line_ids = fields.One2many(
        'stock.move.line', 'workorder_id',
        domain=[('done_wo', '=', False)])
    final_lot_id = fields.Many2one(
        'stock.production.lot', 'Lot/Serial Number', domain="[('product_id', '=', product_id)]",
        states={'done': [('readonly', True)], 'cancel': [('readonly', True)]})
    time_ids = fields.One2many(
        'mrp.workcenter.productivity', 'workorder_id')
    is_user_working = fields.Boolean(
        'Is the Current User Working', compute='_compute_working_users',
        help="Technical field indicating whether the current user is working. ")
    working_user_ids = fields.One2many('res.users', string='Working user on this work order.', compute='_compute_working_users')
    last_working_user_id = fields.One2many('res.users', string='Last user that worked on this work order.', compute='_compute_working_users')
    next_work_order_id = fields.Many2one('mrp.workorder', "Next Work Order")
    scrap_ids = fields.One2many('stock.scrap', 'workorder_id')
    scrap_count = fields.Integer(compute='_compute_scrap_move_count', string='Scrap Move')
    production_date = fields.Datetime('Production Date', related='production_id.date_planned_start', store=True, readonly=False)
    color = fields.Integer('Color', compute='_compute_color')
    capacity = fields.Float(
        'Capacity', default=1.0,
        help="Number of pieces that can be produced in parallel.")

    @api.multi
    def name_get(self):
        """Display as 'MO name - product name - workorder name'."""
        return [(wo.id, "%s - %s - %s" % (wo.production_id.sudo().name, wo.product_id.sudo().name, wo.name)) for wo in self]

    @api.one
    @api.depends('production_id.product_qty', 'qty_produced')
    def _compute_is_produced(self):
        # Produced when qty_produced reached the MO quantity (UoM-rounded compare).
        rounding = self.production_id.product_uom_id.rounding
        self.is_produced = float_compare(self.qty_produced, self.production_id.product_qty, precision_rounding=rounding) >= 0

    @api.multi
    def _compute_is_first_wo(self):
        # True for the first work order of the manufacturing order.
        for wo in self:
            wo.is_first_wo = (wo.production_id.workorder_ids[0] == wo)

    @api.one
    @api.depends('time_ids.duration', 'qty_produced')
    def _compute_duration(self):
        """Aggregate tracked time into total, per-unit and deviation figures."""
        self.duration = sum(self.time_ids.mapped('duration'))
        self.duration_unit = round(self.duration / max(self.qty_produced, 1), 2)  # rounding 2 because it is a time
        if self.duration_expected:
            self.duration_percent = 100 * (self.duration_expected - self.duration) / self.duration_expected
        else:
            self.duration_percent = 0

    def _compute_working_users(self):
        """ Checks whether the current user is working, all the users currently working and the last user that worked. """
        for order in self:
            # NOTE(review): the comprehension variable shadows the outer
            # 'order' — (4, order.id) links the mapped user records, not the
            # workorder. It works, but the naming is misleading.
            order.working_user_ids = [(4, order.id) for order in order.time_ids.filtered(lambda time: not time.date_end).sorted('date_start').mapped('user_id')]
            if order.working_user_ids:
                order.last_working_user_id = order.working_user_ids[-1]
            elif order.time_ids:
                order.last_working_user_id = order.time_ids.sorted('date_end')[-1].user_id
            # The current user is "working" if they have an open productive or
            # performance time line on this workorder.
            if order.time_ids.filtered(lambda x: (x.user_id.id == self.env.user.id) and (not x.date_end) and (x.loss_type in ('productive', 'performance'))):
                order.is_user_working = True
            else:
                order.is_user_working = False

    @api.multi
    def _compute_scrap_move_count(self):
        # One read_group for the whole recordset instead of one query per record.
        data = self.env['stock.scrap'].read_group([('workorder_id', 'in', self.ids)], ['workorder_id'], ['workorder_id'])
        count_data = dict((item['workorder_id'][0], item['workorder_id_count']) for item in data)
        for workorder in self:
            workorder.scrap_count = count_data.get(workorder.id, 0)

    @api.multi
    @api.depends('date_planned_finished', 'production_id.date_planned_finished')
    def _compute_color(self):
        # Kanban color: 4 (red-ish index) for workorders planned to finish
        # after the MO's planned finish date, 2 otherwise.
        late_orders = self.filtered(lambda x: x.production_id.date_planned_finished and x.date_planned_finished and x.date_planned_finished > x.production_id.date_planned_finished)
        for order in late_orders:
            order.color = 4
        for order in (self - late_orders):
            order.color = 2

    @api.onchange('date_planned_start', 'duration_expected')
    def _onchange_date_planned_finished(self):
        # Planned finish = planned start + expected duration (minutes).
        if self.date_planned_start and self.duration_expected:
            self.date_planned_finished = self.date_planned_start + relativedelta(minutes=self.duration_expected)

    @api.onchange('qty_producing')
    def _onchange_qty_producing(self):
        """ Update stock.move.lot records, according to the new qty currently produced. """
        # Only tracked raw components, excluding the finished product itself.
        moves = self.move_raw_ids.filtered(lambda move: move.state not in ('done', 'cancel') and move.product_id.tracking != 'none' and move.product_id.id != self.production_id.product_id.id)
        for move in moves:
            move_lots = self.active_move_line_ids.filtered(lambda move_lot: move_lot.move_id == move)
            if not move_lots:
                continue
            rounding = move.product_uom.rounding
            new_qty = float_round(move.unit_factor * self.qty_producing, precision_rounding=rounding)
            if move.product_id.tracking == 'lot':
                # Lot tracking: a single line carries the whole quantity.
                move_lots[0].product_qty = new_qty
                move_lots[0].qty_done = new_qty
            elif move.product_id.tracking == 'serial':
                # Create extra pseudo record
                qty_todo = float_round(new_qty - sum(move_lots.mapped('qty_done')), precision_rounding=rounding)
                if float_compare(qty_todo, 0.0, precision_rounding=rounding) > 0:
                    # Quantity increased: add one pseudo line per serial unit.
                    while float_compare(qty_todo, 0.0, precision_rounding=rounding) > 0:
                        self.active_move_line_ids += self.env['stock.move.line'].new({
                            'move_id': move.id,
                            'product_id': move.product_id.id,
                            'lot_id': False,
                            'product_uom_qty': 0.0,
                            'product_uom_id': move.product_uom.id,
                            'qty_done': min(1.0, qty_todo),
                            'workorder_id': self.id,
                            'done_wo': False,
                            'location_id': move.location_id.id,
                            'location_dest_id': move.location_dest_id.id,
                            'date': move.date,
                        })
                        qty_todo -= 1
                elif float_compare(qty_todo, 0.0, precision_rounding=rounding) < 0:
                    # Quantity decreased: remove/shrink lot-less lines first.
                    qty_todo = abs(qty_todo)
                    for move_lot in move_lots:
                        if float_compare(qty_todo, 0, precision_rounding=rounding) <= 0:
                            break
                        if not move_lot.lot_id and float_compare(qty_todo, move_lot.qty_done, precision_rounding=rounding) >= 0:
                            qty_todo = float_round(qty_todo - move_lot.qty_done, precision_rounding=rounding)
                            self.active_move_line_ids -= move_lot  # Difference operator
                        else:
                            #move_lot.product_qty = move_lot.product_qty - qty_todo
                            if float_compare(move_lot.qty_done - qty_todo, 0, precision_rounding=rounding) == 1:
                                move_lot.qty_done = move_lot.qty_done - qty_todo
                            else:
                                move_lot.qty_done = 0
                            qty_todo = 0

    @api.multi
    def write(self, values):
        # Finished workorders are frozen except for their time lines.
        if list(values.keys()) != ['time_ids'] and any(workorder.state == 'done' for workorder in self):
            raise UserError(_('You can not change the finished work order.'))
        return super(MrpWorkorder, self).write(values)

    def _generate_lot_ids(self):
        """ Generate stock move lines """
        self.ensure_one()
        MoveLine = self.env['stock.move.line']
        # Tracked BoM components, excluding the finished product.
        tracked_moves = self.move_raw_ids.filtered(
            lambda move: move.state not in ('done', 'cancel') and move.product_id.tracking != 'none' and move.product_id != self.production_id.product_id and move.bom_line_id)
        for move in tracked_moves:
            qty = move.unit_factor * self.qty_producing
            if move.product_id.tracking == 'serial':
                # One line per serial unit.
                while float_compare(qty, 0.0, precision_rounding=move.product_uom.rounding) > 0:
                    MoveLine.create({
                        'move_id': move.id,
                        'product_uom_qty': 0,
                        'product_uom_id': move.product_uom.id,
                        'qty_done': min(1, qty),
                        'production_id': self.production_id.id,
                        'workorder_id': self.id,
                        'product_id': move.product_id.id,
                        'done_wo': False,
                        'location_id': move.location_id.id,
                        'location_dest_id': move.location_dest_id.id,
                    })
                    qty -= 1
            else:
                # Lot tracking: one line for the full quantity.
                MoveLine.create({
                    'move_id': move.id,
                    'product_uom_qty': 0,
                    'product_uom_id': move.product_uom.id,
                    'qty_done': qty,
                    'product_id': move.product_id.id,
                    'production_id': self.production_id.id,
                    'workorder_id': self.id,
                    'done_wo': False,
                    'location_id': move.location_id.id,
                    'location_dest_id': move.location_dest_id.id,
                })

    def _assign_default_final_lot_id(self):
        # Reuse the lot that a previous workorder flagged for this one.
        self.final_lot_id = self.env['stock.production.lot'].search([('use_next_on_work_order_id', '=', self.id)],
                                                                    order='create_date, id', limit=1)

    def _get_byproduct_move_line(self, by_product_move, quantity):
        """Values for a stock.move.line registering `quantity` of a by-product."""
        return {
            'move_id': by_product_move.id,
            'product_id': by_product_move.product_id.id,
            'product_uom_qty': quantity,
            'product_uom_id': by_product_move.product_uom.id,
            'qty_done': quantity,
            'workorder_id': self.id,
            'location_id': by_product_move.location_id.id,
            'location_dest_id': by_product_move.location_dest_id.id,
        }

    def _link_to_quality_check(self, old_move_line, new_move_line):
        # Hook: overridden by quality modules to re-link checks when move
        # lines are merged in record_production().
        return True

    @api.multi
    def record_production(self):
        """Register the currently produced quantity: consume components,
        post lots, update produced quantities and chain the next workorder."""
        if not self:
            return True
        self.ensure_one()
        if self.qty_producing <= 0:
            raise UserError(_('Please set the quantity you are currently producing. It should be different from zero.'))
        if (self.production_id.product_id.tracking != 'none') and not self.final_lot_id and self.move_raw_ids:
            raise UserError(_('You should provide a lot/serial number for the final product.'))

        # Update quantities done on each raw material line
        # For each untracked component without any 'temporary' move lines,
        # (the new workorder tablet view allows registering consumed quantities for untracked components)
        # we assume that only the theoretical quantity was used
        for move in self.move_raw_ids:
            if move.has_tracking == 'none' and (move.state not in ('done', 'cancel')) and move.bom_line_id\
                    and move.unit_factor and not move.move_line_ids.filtered(lambda ml: not ml.done_wo):
                rounding = move.product_uom.rounding
                if self.product_id.tracking != 'none':
                    qty_to_add = float_round(self.qty_producing * move.unit_factor, precision_rounding=rounding)
                    move._generate_consumed_move_line(qty_to_add, self.final_lot_id)
                elif len(move._get_move_lines()) < 2:
                    move.quantity_done += float_round(self.qty_producing * move.unit_factor, precision_rounding=rounding)
                else:
                    move._set_quantity_done(move.quantity_done + float_round(self.qty_producing * move.unit_factor, precision_rounding=rounding))

        # Transfer quantities from temporary to final move lots or make them final
        for move_line in self.active_move_line_ids:
            # Check if move_line already exists
            if move_line.qty_done <= 0:  # rounding...
                move_line.sudo().unlink()
                continue
            if move_line.product_id.tracking != 'none' and not move_line.lot_id:
                raise UserError(_('You should provide a lot/serial number for a component.'))
            # Search other move_line where it could be added:
            lots = self.move_line_ids.filtered(lambda x: (x.lot_id.id == move_line.lot_id.id) and (not x.lot_produced_id) and (not x.done_move) and (x.product_id == move_line.product_id))
            if lots:
                # Merge into the existing final line and drop the temporary one.
                lots[0].qty_done += move_line.qty_done
                lots[0].lot_produced_id = self.final_lot_id.id
                self._link_to_quality_check(move_line, lots[0])
                move_line.sudo().unlink()
            else:
                # Promote the temporary line to a final one.
                move_line.lot_produced_id = self.final_lot_id.id
                move_line.done_wo = True

        # Stamp the produced lot on remaining unfinished lines.
        self.move_line_ids.filtered(
            lambda move_line: not move_line.done_move and not move_line.lot_produced_id and move_line.qty_done > 0
        ).write({
            'lot_produced_id': self.final_lot_id.id,
            'lot_produced_qty': self.qty_producing
        })

        # If last work order, then post lots used
        # TODO: should be same as checking if for every workorder something has been done?
        if not self.next_work_order_id:
            production_move = self.production_id.move_finished_ids.filtered(
                lambda x: (x.product_id.id == self.production_id.product_id.id) and (x.state not in ('done', 'cancel')))
            if production_move.product_id.tracking != 'none':
                move_line = production_move.move_line_ids.filtered(lambda x: x.lot_id.id == self.final_lot_id.id)
                if move_line:
                    move_line.product_uom_qty += self.qty_producing
                    move_line.qty_done += self.qty_producing
                else:
                    # Honour the putaway strategy for the destination location.
                    location_dest_id = production_move.location_dest_id.get_putaway_strategy(self.product_id).id or production_move.location_dest_id.id
                    move_line.create({'move_id': production_move.id,
                                      'product_id': production_move.product_id.id,
                                      'lot_id': self.final_lot_id.id,
                                      'product_uom_qty': self.qty_producing,
                                      'product_uom_id': production_move.product_uom.id,
                                      'qty_done': self.qty_producing,
                                      'workorder_id': self.id,
                                      'location_id': production_move.location_id.id,
                                      'location_dest_id': location_dest_id,
                                      })
            else:
                production_move._set_quantity_done(self.qty_producing)

        # Register by-products on the last work order.
        if not self.next_work_order_id:
            for by_product_move in self._get_byproduct_move_to_update():
                if by_product_move.has_tracking != 'serial':
                    values = self._get_byproduct_move_line(by_product_move, self.qty_producing * by_product_move.unit_factor)
                    self.env['stock.move.line'].create(values)
                elif by_product_move.has_tracking == 'serial':
                    qty_todo = by_product_move.product_uom._compute_quantity(self.qty_producing * by_product_move.unit_factor, by_product_move.product_id.uom_id)
                    for i in range(0, int(float_round(qty_todo, precision_digits=0))):
                        values = self._get_byproduct_move_line(by_product_move, 1)
                        self.env['stock.move.line'].create(values)

        # Update workorder quantity produced
        self.qty_produced += self.qty_producing

        if self.final_lot_id:
            # Hand the lot over to the next workorder, then clear it here.
            self.final_lot_id.use_next_on_work_order_id = self.next_work_order_id
            self.final_lot_id = False

        # One a piece is produced, you can launch the next work order
        self._start_nextworkorder()

        # Set a qty producing
        rounding = self.production_id.product_uom_id.rounding
        if float_compare(self.qty_produced, self.production_id.product_qty, precision_rounding=rounding) >= 0:
            self.qty_producing = 0
        elif self.production_id.product_id.tracking == 'serial':
            # Serial-tracked finished product: always produce one at a time.
            self._assign_default_final_lot_id()
            self.qty_producing = 1.0
            self._generate_lot_ids()
        else:
            self.qty_producing = float_round(self.production_id.product_qty - self.qty_produced, precision_rounding=rounding)
            self._generate_lot_ids()

        if self.next_work_order_id and self.next_work_order_id.state not in ['done', 'cancel'] and self.production_id.product_id.tracking != 'none':
            self.next_work_order_id._assign_default_final_lot_id()

        if float_compare(self.qty_produced, self.production_id.product_qty, precision_rounding=rounding) >= 0:
            self.button_finish()
        return True

    def _get_byproduct_move_to_update(self):
        # Finished moves other than the main product, still open.
        return self.production_id.move_finished_ids.filtered(lambda x: (x.product_id.id != self.production_id.product_id.id) and (x.state not in ('done', 'cancel')))

    @api.multi
    def _start_nextworkorder(self):
        """Flip the next workorder from 'pending' to 'ready' once enough
        pieces are produced (whole order, or the operation's batch size)."""
        rounding = self.product_id.uom_id.rounding
        if self.next_work_order_id.state == 'pending' and (
                (self.operation_id.batch == 'no' and
                 float_compare(self.qty_production, self.qty_produced, precision_rounding=rounding) <= 0) or
                (self.operation_id.batch == 'yes' and
                 float_compare(self.operation_id.batch_size, self.qty_produced, precision_rounding=rounding) <= 0)):
            self.next_work_order_id.state = 'ready'

    @api.multi
    def button_start(self):
        """Start (or resume) working: open a productivity time line and move
        the workorder and its MO into 'progress'."""
        self.ensure_one()
        # As button_start is automatically called in the new view
        if self.state in ('done', 'cancel'):
            return True
        # Need a loss in case of the real time exceeding the expected
        timeline = self.env['mrp.workcenter.productivity']
        if self.duration < self.duration_expected:
            loss_id = self.env['mrp.workcenter.productivity.loss'].search([('loss_type', '=', 'productive')], limit=1)
            if not len(loss_id):
                raise UserError(_("You need to define at least one productivity loss in the category 'Productivity'. Create one from the Manufacturing app, menu: Configuration / Productivity Losses."))
        else:
            loss_id = self.env['mrp.workcenter.productivity.loss'].search([('loss_type', '=', 'performance')], limit=1)
            if not len(loss_id):
                raise UserError(_("You need to define at least one productivity loss in the category 'Performance'. Create one from the Manufacturing app, menu: Configuration / Productivity Losses."))
        for workorder in self:
            if workorder.production_id.state != 'progress':
                workorder.production_id.write({
                    'state': 'progress',
                    'date_start': datetime.now(),
                })
            timeline.create({
                'workorder_id': workorder.id,
                'workcenter_id': workorder.workcenter_id.id,
                'description': _('Time Tracking: ') + self.env.user.name,
                'loss_id': loss_id[0].id,
                'date_start': datetime.now(),
                'user_id': self.env.user.id
            })
        return self.write({'state': 'progress',
                           'date_start': datetime.now(),
                           })

    @api.multi
    def button_finish(self):
        """Close all open time lines and mark the workorder as done."""
        self.ensure_one()
        self.end_all()
        return self.write({'state': 'done', 'date_finished': fields.Datetime.now()})

    @api.multi
    def end_previous(self, doall=False):
        """
        @param: doall:  This will close all open time lines on the open work orders when doall = True, otherwise
        only the one of the current user
        """
        # TDE CLEANME
        timeline_obj = self.env['mrp.workcenter.productivity']
        domain = [('workorder_id', 'in', self.ids), ('date_end', '=', False)]
        if not doall:
            domain.append(('user_id', '=', self.env.user.id))
        not_productive_timelines = timeline_obj.browse()
        for timeline in timeline_obj.search(domain, limit=None if doall else 1):
            wo = timeline.workorder_id
            if wo.duration_expected <= wo.duration:
                # Already over the expected duration: the whole open line is
                # (re)classified as non-productive below.
                if timeline.loss_type == 'productive':
                    not_productive_timelines += timeline
                timeline.write({'date_end': fields.Datetime.now()})
            else:
                # Split the line at the moment the expected duration is
                # exhausted; the overtime part becomes a 'performance' loss.
                maxdate = fields.Datetime.from_string(timeline.date_start) + relativedelta(minutes=wo.duration_expected - wo.duration)
                enddate = datetime.now()
                if maxdate > enddate:
                    timeline.write({'date_end': enddate})
                else:
                    timeline.write({'date_end': maxdate})
                    not_productive_timelines += timeline.copy({'date_start': maxdate, 'date_end': enddate})
        if not_productive_timelines:
            loss_id = self.env['mrp.workcenter.productivity.loss'].search([('loss_type', '=', 'performance')], limit=1)
            if not len(loss_id):
                raise UserError(_("You need to define at least one unactive productivity loss in the category 'Performance'. Create one from the Manufacturing app, menu: Configuration / Productivity Losses."))
            not_productive_timelines.write({'loss_id': loss_id.id})
        return True

    @api.multi
    def end_all(self):
        # Close open time lines of every user.
        return self.end_previous(doall=True)

    @api.multi
    def button_pending(self):
        # Pause: close only the current user's open time line.
        self.end_previous()
        return True

    @api.multi
    def button_unblock(self):
        for order in self:
            order.workcenter_id.unblock()
        return True

    @api.multi
    def action_cancel(self):
        return self.write({'state': 'cancel'})

    @api.multi
    def button_done(self):
        """Mark as done, closing every open time line first."""
        if any([x.state in ('done', 'cancel') for x in self]):
            raise UserError(_('A Manufacturing Order is already done or cancelled.'))
        self.end_all()
        return self.write({'state': 'done',
                           'date_finished': datetime.now()})

    @api.multi
    def button_scrap(self):
        """Open the scrap wizard pre-filled for this workorder."""
        self.ensure_one()
        return {
            'name': _('Scrap'),
            'view_type': 'form',
            'view_mode': 'form',
            'res_model': 'stock.scrap',
            'view_id': self.env.ref('stock.stock_scrap_form_view2').id,
            'type': 'ir.actions.act_window',
            'context': {'default_workorder_id': self.id,
                        'default_production_id': self.production_id.id,
                        'product_ids': (self.production_id.move_raw_ids.filtered(lambda x: x.state not in ('done', 'cancel')) | self.production_id.move_finished_ids.filtered(lambda x: x.state == 'done')).mapped('product_id').ids},
            # 'context': {'product_ids': self.move_raw_ids.filtered(lambda x: x.state not in ('done', 'cancel')).mapped('product_id').ids + [self.production_id.product_id.id]},
            'target': 'new',
        }

    @api.multi
    def action_see_move_scrap(self):
        """Open the list of scrap moves linked to this workorder."""
        self.ensure_one()
        action = self.env.ref('stock.action_stock_scrap').read()[0]
        action['domain'] = [('workorder_id', '=', self.id)]
        return action

    @api.multi
    @api.depends('qty_production', 'qty_produced')
    def _compute_qty_remaining(self):
        # Remaining = ordered - produced, rounded with the MO UoM precision.
        for wo in self:
            wo.qty_remaining = float_round(wo.qty_production - wo.qty_produced, precision_rounding=wo.production_id.product_uom_id.rounding)
class MailMailStats(models.Model):
    """Per-email statistics collected for mass mailing.

    Statistics live in their own model/table instead of extra columns on
    ``mail_mail``, which keeps that table lean and lets sent emails be
    deleted without losing the statistics gathered about them.
    """
    _name = 'mail.mail.statistics'
    _description = 'Email Statistics'
    _rec_name = 'message_id'
    _order = 'message_id'

    mail_mail_id = fields.Many2one('mail.mail', string='Mail', index=True)
    # Plain-integer mirror of mail_mail_id: survives deletion of the mail record.
    mail_mail_id_int = fields.Integer(
        string='Mail ID (tech)',
        help='ID of the related mail_mail. This field is an integer field because '
             'the related mail_mail can be deleted separately from its statistics. '
             'However the ID is needed for several action and controllers.',
        index=True,
    )
    message_id = fields.Char(string='Message-ID')
    model = fields.Char(string='Document model')
    res_id = fields.Integer(string='Document ID')
    # campaign / wave data
    mass_mailing_id = fields.Many2one('mail.mass_mailing', string='Mass Mailing', index=True)
    mass_mailing_campaign_id = fields.Many2one(
        related='mass_mailing_id.mass_mailing_campaign_id',
        string='Mass Mailing Campaign',
        store=True, readonly=True, index=True)
    # Bounce and tracking
    ignored = fields.Datetime(help='Date when the email has been invalidated. '
                                   'Invalid emails are blacklisted, opted-out or invalid email format')
    scheduled = fields.Datetime(help='Date when the email has been created', default=fields.Datetime.now)
    sent = fields.Datetime(help='Date when the email has been sent')
    exception = fields.Datetime(help='Date of technical error leading to the email not being sent')
    opened = fields.Datetime(help='Date when the email has been opened the first time')
    replied = fields.Datetime(help='Date when this email has been replied for the first time.')
    bounced = fields.Datetime(help='Date when this email has bounced.')
    # Link tracking
    links_click_ids = fields.One2many('link.tracker.click', 'mail_stat_id', string='Links click')
    clicked = fields.Datetime(help='Date when customer clicked on at least one tracked link')
    # Status
    state = fields.Selection(compute="_compute_state",
                             selection=[('outgoing', 'Outgoing'),
                                        ('exception', 'Exception'),
                                        ('sent', 'Sent'),
                                        ('opened', 'Opened'),
                                        ('replied', 'Replied'),
                                        ('bounced', 'Bounced'),
                                        ('ignored', 'Ignored')], store=True)
    state_update = fields.Datetime(compute="_compute_state", string='State Update',
                                   help='Last state update of the mail',
                                   store=True)
    email = fields.Char(string="Recipient email address")

    @api.depends('sent', 'opened', 'clicked', 'replied', 'bounced', 'exception', 'ignored')
    def _compute_state(self):
        """Derive the funnel state from the recorded event timestamps.

        Earlier entries in the precedence list win: an ignored email stays
        'ignored' even if it was also sent, a reply outranks an open, and
        so on down to the 'outgoing' fallback.
        """
        self.update({'state_update': fields.Datetime.now()})
        for stat in self:
            precedence = (
                ('ignored', stat.ignored),
                ('exception', stat.exception),
                ('replied', stat.replied),
                ('opened', stat.opened or stat.clicked),
                ('bounced', stat.bounced),
                ('sent', stat.sent),
            )
            stat.state = next((name for name, stamp in precedence if stamp), 'outgoing')

    @api.model
    def create(self, values):
        """Create a statistics record, mirroring the mail id into the
        technical integer column (see ``mail_mail_id_int``)."""
        if 'mail_mail_id' in values:
            values['mail_mail_id_int'] = values['mail_mail_id']
        return super(MailMailStats, self).create(values)

    def _get_records(self, mail_mail_ids=None, mail_message_ids=None, domain=None):
        """Return the statistics matching the given selectors.

        A non-empty recordset selects by its own ids; otherwise
        ``mail_mail_ids`` is tried, then ``mail_message_ids``. The optional
        extra ``domain`` is AND-ed with that base selection.
        """
        if self.ids:
            base_domain = [('id', 'in', self.ids)]
        elif mail_mail_ids:
            base_domain = [('mail_mail_id_int', 'in', mail_mail_ids)]
        elif mail_message_ids:
            base_domain = [('message_id', 'in', mail_message_ids)]
        else:
            # Empty recordset and no selector: matches nothing.
            base_domain = [('id', 'in', self.ids)]
        if domain:
            base_domain = ['&'] + domain + base_domain
        return self.search(base_domain)

    def set_opened(self, mail_mail_ids=None, mail_message_ids=None):
        """Stamp the first open; an open also clears a recorded bounce."""
        stats = self._get_records(mail_mail_ids, mail_message_ids, [('opened', '=', False)])
        stats.write({'opened': fields.Datetime.now(), 'bounced': False})
        return stats

    def set_clicked(self, mail_mail_ids=None, mail_message_ids=None):
        """Stamp the first click on a tracked link."""
        stats = self._get_records(mail_mail_ids, mail_message_ids, [('clicked', '=', False)])
        stats.write({'clicked': fields.Datetime.now()})
        return stats

    def set_replied(self, mail_mail_ids=None, mail_message_ids=None):
        """Stamp the first reply."""
        stats = self._get_records(mail_mail_ids, mail_message_ids, [('replied', '=', False)])
        stats.write({'replied': fields.Datetime.now()})
        return stats

    def set_bounced(self, mail_mail_ids=None, mail_message_ids=None):
        """Stamp a bounce, unless the email was already opened or bounced."""
        stats = self._get_records(
            mail_mail_ids, mail_message_ids,
            [('bounced', '=', False), ('opened', '=', False)])
        stats.write({'bounced': fields.Datetime.now()})
        return stats