class FleetVehicleLogFuel(models.Model):
    """Fuel log entry for a fleet vehicle.

    Delegates cost data to ``fleet.vehicle.cost`` through ``_inherits``,
    so every fuel log row is backed by a cost record (``cost_id``).
    """
    _name = 'fleet.vehicle.log.fuel'
    _description = 'Fuel log for vehicles'
    _inherits = {'fleet.vehicle.cost': 'cost_id'}

    @api.model
    def default_get(self, default_fields):
        # Preset the delegated cost record: today's date, the "refueling"
        # service type (when the data record is installed) and cost type
        # 'fuel'.
        res = super(FleetVehicleLogFuel, self).default_get(default_fields)
        service = self.env.ref('fleet.type_service_refueling',
                               raise_if_not_found=False)
        res.update({
            'date': fields.Date.context_today(self),
            'cost_subtype_id': service and service.id or False,
            'cost_type': 'fuel'
        })
        return res

    liter = fields.Float()
    price_per_liter = fields.Float()
    purchaser_id = fields.Many2one(
        'res.partner', 'Purchaser',
        domain="['|',('customer','=',True),('employee','=',True)]")
    inv_ref = fields.Char('Invoice Reference', size=64)
    vendor_id = fields.Many2one('res.partner', 'Vendor',
                                domain="[('supplier','=',True)]")
    notes = fields.Text()
    cost_id = fields.Many2one('fleet.vehicle.cost', 'Cost', required=True,
                              ondelete='cascade')
    # we need to keep this field as a related with store=True because the
    # graph view doesn't support
    # (1) to address fields from inherited table
    # (2) fields that aren't stored in database
    cost_amount = fields.Float(related='cost_id.amount', string='Amount',
                               store=True)

    @api.onchange('vehicle_id')
    def _onchange_vehicle(self):
        # Propagate the vehicle's odometer unit and default the purchaser
        # to the vehicle's current driver.
        if self.vehicle_id:
            self.odometer_unit = self.vehicle_id.odometer_unit
            self.purchaser_id = self.vehicle_id.driver_id.id

    @api.onchange('liter', 'price_per_liter', 'amount')
    def _onchange_liter_price_amount(self):
        # need to cast in float because the value receveid from web client
        # maybe an integer (Javascript and JSON do not make any difference
        # between 3.0 and 3). This cause a problem if you encode, for
        # example, 2 liters at 1.5 per liter => total is computed as 3.0,
        # then trigger an onchange that recomputes price_per_liter as
        # 3/2=1 (instead of 3.0/2=1.5)
        # If there is no change in the result, we return an empty dict to
        # prevent an infinite loop due to the 3 intertwine onchange. And in
        # order to verify that there is no change in the result, we have to
        # limit the precision of the computation to 2 decimal
        liter = float(self.liter)
        price_per_liter = float(self.price_per_liter)
        amount = float(self.amount)
        if liter > 0 and price_per_liter > 0 and round(liter * price_per_liter, 2) != amount:
            self.amount = round(liter * price_per_liter, 2)
        elif amount > 0 and liter > 0 and round(amount / liter, 2) != price_per_liter:
            self.price_per_liter = round(amount / liter, 2)
        elif amount > 0 and price_per_liter > 0 and round(
                amount / price_per_liter, 2) != liter:
            self.liter = round(amount / price_per_liter, 2)
class StockCycleCount(models.Model):
    """Planned cycle count of a stock location.

    A cycle count is scheduled in state 'draft'; confirming it creates one
    ``stock.inventory`` adjustment per record and moves it to 'open'.
    """
    _name = 'stock.cycle.count'
    _description = "Stock Cycle Counts"
    _inherit = 'mail.thread'
    _order = "id desc"

    @api.model
    def _default_company(self):
        """Return the default company for new cycle counts."""
        company_id = self.env['res.company']._company_default_get(self._name)
        return company_id

    name = fields.Char(string='Name', readonly=True)
    location_id = fields.Many2one(
        comodel_name='stock.location',
        string='Location',
        required=True,
        readonly=True,
        states={'draft': [('readonly', False)]},
    )
    responsible_id = fields.Many2one(
        comodel_name='res.users',
        string='Assigned to',
        readonly=True,
        states={'draft': [('readonly', False)]},
        track_visibility='onchange',
    )
    date_deadline = fields.Date(
        string='Required Date',
        readonly=True,
        states={'draft': [('readonly', False)]},
        track_visibility='onchange',
    )
    cycle_count_rule_id = fields.Many2one(
        comodel_name='stock.cycle.count.rule',
        string='Cycle count rule',
        required=True,
        readonly=True,
        states={'draft': [('readonly', False)]},
        track_visibility='onchange',
    )
    state = fields.Selection(
        selection=[('draft', 'Planned'),
                   ('open', 'Execution'),
                   ('cancelled', 'Cancelled'),
                   ('done', 'Done')],
        string='State',
        default='draft',
        track_visibility='onchange',
    )
    stock_adjustment_ids = fields.One2many(
        comodel_name='stock.inventory',
        inverse_name='cycle_count_id',
        string='Inventory Adjustment',
        track_visibility='onchange',
    )
    inventory_adj_count = fields.Integer(
        compute='_compute_inventory_adj_count',
    )
    company_id = fields.Many2one(
        comodel_name='res.company',
        string='Company',
        required=True,
        default=_default_company,
        readonly=True,
    )

    @api.depends('stock_adjustment_ids')
    @api.multi
    def _compute_inventory_adj_count(self):
        """Count the inventory adjustments linked to each cycle count."""
        for rec in self:
            rec.inventory_adj_count = len(rec.stock_adjustment_ids)

    @api.multi
    def do_cancel(self):
        """Cancel the selected cycle counts."""
        self.write({'state': 'cancelled'})

    @api.multi
    def _prepare_inventory_adjustment(self):
        """Return the values used to create the linked inventory adjustment."""
        self.ensure_one()
        return {
            'name': 'INV/{}'.format(self.name),
            'cycle_count_id': self.id,
            'location_id': self.location_id.id,
            'exclude_sublocation': True
        }

    @api.model
    def create(self, vals):
        """Assign the next sequence number as the record name on creation."""
        vals['name'] = self.env['ir.sequence'].next_by_code(
            'stock.cycle.count') or ''
        return super(StockCycleCount, self).create(vals)

    @api.multi
    def action_create_inventory_adjustment(self):
        """Confirm the cycle counts.

        Creates one inventory adjustment per record and moves every record
        to state 'open'.

        :raises UserError: if any record is not in state 'draft'.
        """
        # Generator expression instead of building a throwaway list in any()
        if any(s != 'draft' for s in self.mapped('state')):
            raise UserError(
                _("You can only confirm cycle counts in state 'Planned'."))
        for rec in self:
            data = rec._prepare_inventory_adjustment()
            self.env['stock.inventory'].create(data)
        self.write({'state': 'open'})
        return True

    @api.multi
    def action_view_inventory(self):
        """Open the inventory adjustment(s) linked to the cycle counts.

        Returns the list view when several adjustments exist, the form view
        when there is exactly one.
        """
        action = self.env.ref('stock.action_inventory_form')
        result = action.read()[0]
        result['context'] = {}
        adjustment_ids = self.mapped('stock_adjustment_ids').ids
        if len(adjustment_ids) > 1:
            result['domain'] = [('id', 'in', adjustment_ids)]
        elif len(adjustment_ids) == 1:
            res = self.env.ref('stock.view_inventory_form', False)
            result['views'] = [(res and res.id or False, 'form')]
            result['res_id'] = adjustment_ids and adjustment_ids[0] or False
        return result
class PlmDescription(models.Model):
    """Normalized building blocks used to compose PLM product descriptions."""
    _name = "plm.description"
    _description = "PLM Descriptions"

    # NOTE: field labels and help texts were previously wrapped in _().
    # That call runs at module import time, before any user language is
    # known, so it always returned the untranslated source string; Odoo
    # exports field strings/help for translation automatically, so the
    # wrappers are dropped (string contents unchanged).
    name = fields.Char('Note to Description', size=128)
    description = fields.Char('Standard Description', default='', size=128)
    description_en = fields.Char('Description English', size=128)
    umc1 = fields.Char(
        'UM / Feature 1', size=32,
        help="Allow to specify a unit measure or a label for the feature.")
    fmt1 = fields.Char(
        'Format Feature 1', size=32, default='',
        help="Allow to represent the measure: %s%s allow to build um and value, %s builds only value, none builds only value.")
    umc2 = fields.Char(
        'UM / Feature 2', size=32,
        help="Allow to specify a unit measure or a label for the feature.")
    fmt2 = fields.Char(
        'Format Feature 2', size=32, default='',
        help="Allow to represent the measure: %s%s allow to build um and value, %s builds only value, none builds only value.")
    umc3 = fields.Char(
        'UM / Feature 3', size=32,
        help="Allow to specify a unit measure or a label for the feature.")
    fmt3 = fields.Char(
        'Format Feature 3', size=32, default='',
        help="Allow to represent the measure: %s%s allow to build um and value, %s builds only value, none builds only value.")
    fmtend = fields.Char(
        'Format Feature Composed', size=32, default='',
        help="Allow to represent a normalized composition of technical features : %s%s allows to build chained values.")
    unitab = fields.Char(
        'Normative Rule', size=32, default='',
        help="Specify normative rule (UNI, ISO, DIN...). It will be queued to build the product description.")
    sequence = fields.Integer(
        'Sequence',
        help="Assign the sequence order when displaying a list of product categories.")
class AccountMove(models.Model):
    """French anti-fraud (CGI art. 286 I-3 bis) inalterability extension.

    Posted journal entries of "unalterable" companies get a gap-less secure
    sequence number and a SHA-256 hash chained on the previous posted entry,
    so any later modification of posted data can be detected.
    """
    _inherit = "account.move"

    # TO DO in master : refactor hashing algo to go into a mixin
    # NOTE(review): "Inalteralbility" typo in the label below — kept as-is,
    # it is a runtime/user-facing string.
    l10n_fr_secure_sequence_number = fields.Integer(
        string="Inalteralbility No Gap Sequence #", readonly=True, copy=False)
    l10n_fr_hash = fields.Char(string="Inalterability Hash", readonly=True,
                               copy=False)
    # Canonical serialization of the move's hashed fields; recomputed on the
    # fly (store=False) so integrity checks always reflect current data.
    l10n_fr_string_to_hash = fields.Char(compute='_compute_string_to_hash',
                                         readonly=True, store=False)

    def _get_new_hash(self, secure_seq_number):
        """ Returns the hash to write on journal entries when they get posted"""
        self.ensure_one()
        #get the only one exact previous move in the securisation sequence
        prev_move = self.search([('state', '=', 'posted'),
                                 ('company_id', '=', self.company_id.id),
                                 ('l10n_fr_secure_sequence_number', '!=', 0),
                                 ('l10n_fr_secure_sequence_number', '=',
                                  int(secure_seq_number) - 1)])
        if prev_move and len(prev_move) != 1:
            raise UserError(
                _('An error occured when computing the inalterability. Impossible to get the unique previous posted journal entry.'
                  ))
        #build and return the hash
        return self._compute_hash(prev_move.l10n_fr_hash if prev_move else u'')

    def _compute_hash(self, previous_hash):
        """ Computes the hash of the browse_record given as self, based on the hash
        of the previous record in the company's securisation sequence given as parameter"""
        self.ensure_one()
        hash_string = sha256(
            (previous_hash + self.l10n_fr_string_to_hash).encode('utf-8'))
        return hash_string.hexdigest()

    def _compute_string_to_hash(self):
        # Serialize MOVE_FIELDS of the move plus LINE_FIELDS of each line
        # into a canonical JSON string (sorted keys, no whitespace) so the
        # hash input is deterministic.
        def _getattrstring(obj, field_str):
            # many2one values are reduced to their database id
            field_value = obj[field_str]
            if obj._fields[field_str].type == 'many2one':
                field_value = field_value.id
            return str(field_value)

        for move in self:
            values = {}
            for field in MOVE_FIELDS:
                values[field] = _getattrstring(move, field)
            for line in move.line_ids:
                for field in LINE_FIELDS:
                    # key is unique per (line, field) pair
                    k = 'line_%d_%s' % (line.id, field)
                    values[k] = _getattrstring(line, field)
            #make the json serialization canonical
            # (https://tools.ietf.org/html/draft-staykov-hu-json-canonical-form-00)
            move.l10n_fr_string_to_hash = dumps(values,
                                                sort_keys=True,
                                                ensure_ascii=True,
                                                indent=None,
                                                separators=(',', ':'))

    @api.multi
    def write(self, vals):
        """Guard posted moves of unalterable companies and hash on posting.

        Raises UserError when a hashed field of an already-posted move is
        written, or when the hash/sequence fields would be overwritten.
        After the regular write, newly posted moves get their secure
        sequence number and chained hash.
        """
        has_been_posted = False
        for move in self:
            if move.company_id._is_accounting_unalterable():
                # write the hash and the secure_sequence_number when posting an account.move
                if vals.get('state') == 'posted':
                    has_been_posted = True

                # restrict the operation in case we are trying to write a forbidden field
                if (move.state == "posted"
                        and set(vals).intersection(MOVE_FIELDS)):
                    raise UserError(
                        _("According to the French law, you cannot modify a journal entry in order for its posted data to be updated or deleted. Unauthorized field: %s."
                          ) % ', '.join(MOVE_FIELDS))
                # restrict the operation in case we are trying to overwrite existing hash
                if (move.l10n_fr_hash and 'l10n_fr_hash' in vals) or (
                        move.l10n_fr_secure_sequence_number
                        and 'l10n_fr_secure_sequence_number' in vals):
                    raise UserError(
                        _('You cannot overwrite the values ensuring the inalterability of the accounting.'
                          ))
        res = super(AccountMove, self).write(vals)
        # write the hash and the secure_sequence_number when posting an account.move
        if has_been_posted:
            for move in self.filtered(
                    lambda m: m.company_id._is_accounting_unalterable() and
                    not (m.l10n_fr_secure_sequence_number or m.l10n_fr_hash)):
                new_number = move.company_id.l10n_fr_secure_sequence_id.next_by_id(
                )
                vals_hashing = {
                    'l10n_fr_secure_sequence_number': new_number,
                    'l10n_fr_hash': move._get_new_hash(new_number)
                }
                res |= super(AccountMove, move).write(vals_hashing)
        return res

    @api.multi
    def button_cancel(self):
        #by-pass the normal behavior/message that tells people can cancel a posted journal entry
        #if the journal allows it.
        # NOTE(review): reads self.company_id directly although the method is
        # @api.multi — presumably always called on a single move; confirm.
        if self.company_id._is_accounting_unalterable():
            raise UserError(
                _('You cannot modify a posted journal entry. This ensures its inalterability.'
                  ))
        super(AccountMove, self).button_cancel()

    @api.model
    def _check_hash_integrity(self, company_id):
        """Checks that all posted moves have still the same data as when they were posted
        and raises an error with the result.
        """
        def build_move_info(move):
            entry_reference = _('(ref.: %s)')
            move_reference_string = move.ref and entry_reference % move.ref or ''
            return [move.name, move_reference_string]

        # walk the secure sequence in order so the hash chain can be replayed
        moves = self.search([('state', '=', 'posted'),
                             ('company_id', '=', company_id),
                             ('l10n_fr_secure_sequence_number', '!=', 0)],
                            order="l10n_fr_secure_sequence_number ASC")
        if not moves:
            raise UserError(
                _('There isn\'t any journal entry flagged for data inalterability yet for the company %s. This mechanism only runs for journal entries generated after the installation of the module France - Certification CGI 286 I-3 bis.'
                  ) % self.env.user.company_id.name)
        previous_hash = u''
        start_move_info = []
        for move in moves:
            if move.l10n_fr_hash != move._compute_hash(
                    previous_hash=previous_hash):
                raise UserError(
                    _('Corrupted data on journal entry with id %s.') % move.id)
            if not previous_hash:
                #save the date and sequence number of the first move hashed
                start_move_info = build_move_info(move)
            previous_hash = move.l10n_fr_hash
        end_move_info = build_move_info(move)

        report_dict = {
            'start_move_name': start_move_info[0],
            'start_move_ref': start_move_info[1],
            'end_move_name': end_move_info[0],
            'end_move_ref': end_move_info[1]
        }

        # Raise on success
        raise UserError(
            _('''Successful test !

                 The journal entries are guaranteed to be in their original and inalterable state
                 From: %(start_move_name)s %(start_move_ref)s
                 To: %(end_move_name)s %(end_move_ref)s

                 For this report to be legally meaningful, please
                 download your certification from your customer
                 account on Flectrahq.com (Only for Flectra Enterprise users).'''
              ) % report_dict)
class MailComposeMessage(models.TransientModel):
    """Add concept of mass mailing campaign to the mail.compose.message wizard
    """
    _inherit = 'mail.compose.message'

    mass_mailing_campaign_id = fields.Many2one('mail.mass_mailing.campaign',
                                               string='Mass Mailing Campaign')
    mass_mailing_id = fields.Many2one('mail.mass_mailing',
                                      string='Mass Mailing',
                                      ondelete='cascade')
    mass_mailing_name = fields.Char(string='Mass Mailing')
    mailing_list_ids = fields.Many2many('mail.mass_mailing.list',
                                        string='Mailing List')

    @api.multi
    def get_mail_values(self, res_ids):
        """ Override method that generated the mail content by creating the
        mail.mail.statistics values in the o2m of mail_mail, when doing pure
        email mass mailing.

        :param res_ids: ids of the records the composer renders mails for
        :return: dict mapping each res_id to its mail values (as super),
            augmented with mailing/statistics links when mass mailing applies
        """
        self.ensure_one()
        res = super(MailComposeMessage, self).get_mail_values(res_ids)
        # use only for allowed models in mass mailing
        if self.composition_mode == 'mass_mail' and \
                (self.mass_mailing_name or self.mass_mailing_id) and \
                self.env['ir.model'].sudo().search([('model', '=', self.model), ('is_mail_thread', '=', True)], limit=1):
            mass_mailing = self.mass_mailing_id
            # no existing mailing: create a one-shot mailing already 'done'
            if not mass_mailing:
                reply_to_mode = 'email' if self.no_auto_thread else 'thread'
                reply_to = self.reply_to if self.no_auto_thread else False
                mass_mailing = self.env['mail.mass_mailing'].create({
                    'mass_mailing_campaign_id': self.mass_mailing_campaign_id.id,
                    'name': self.mass_mailing_name,
                    'template_id': self.template_id.id,
                    'state': 'done',
                    'reply_to_mode': reply_to_mode,
                    'reply_to': reply_to,
                    'sent_date': fields.Datetime.now(),
                    'body_html': self.body,
                    'mailing_model_id': self.env['ir.model']._get(self.model).id,
                    'mailing_domain': self.active_domain,
                })
            # Preprocess res.partners to batch-fetch from db
            # if recipient_ids is present, it means they are partners
            # (the only object to fill get_default_recipient this way)
            recipient_partners_ids = []
            read_partners = {}
            for res_id in res_ids:
                mail_values = res[res_id]
                if mail_values.get('recipient_ids'):
                    # recipient_ids is a list of x2m command tuples at this
                    # point; [0][1] extracts the partner id from the first one
                    recipient_partners_ids.append(
                        mail_values.get('recipient_ids')[0][1])
            read_partners = self.env['res.partner'].browse(
                recipient_partners_ids)
            partners_email = {p.id: p.email for p in read_partners}
            # optional opt-out / dedup sets passed through the context
            blacklist = self._context.get('mass_mailing_blacklist')
            seen_list = self._context.get('mass_mailing_seen_list')
            for res_id in res_ids:
                mail_values = res[res_id]
                if mail_values.get('email_to'):
                    recips = tools.email_split(mail_values['email_to'])
                else:
                    recips = tools.email_split(partners_email.get(res_id))
                mail_to = recips[0].lower() if recips else False
                if (blacklist and mail_to in blacklist) or (
                        seen_list and mail_to in seen_list):
                    # prevent sending to blocked addresses that were included by mistake
                    mail_values['state'] = 'cancel'
                elif seen_list is not None:
                    # remember this address so later batches skip duplicates
                    seen_list.add(mail_to)
                stat_vals = {
                    'model': self.model,
                    'res_id': res_id,
                    'mass_mailing_id': mass_mailing.id
                }
                # propagate exception state to stat when still-born
                if mail_values.get('state') == 'cancel':
                    stat_vals['exception'] = fields.Datetime.now()
                mail_values.update({
                    'mailing_id': mass_mailing.id,
                    'statistics_ids': [(0, 0, stat_vals)],
                    # email-mode: keep original message for routing
                    'notification': mass_mailing.reply_to_mode == 'thread',
                    'auto_delete': not mass_mailing.keep_archives,
                })
        return res
class EventEvent(models.Model):
    """Event"""
    _name = 'event.event'
    _description = 'Event'
    _inherit = ['mail.thread']
    _order = 'date_begin'

    name = fields.Char(string='Event Name',
                       translate=True,
                       required=True,
                       readonly=False,
                       states={'done': [('readonly', True)]})
    active = fields.Boolean(default=True)
    user_id = fields.Many2one('res.users',
                              string='Responsible',
                              default=lambda self: self.env.user,
                              track_visibility="onchange",
                              readonly=False,
                              states={'done': [('readonly', True)]})
    company_id = fields.Many2one('res.company',
                                 string='Company',
                                 change_default=True,
                                 default=lambda self: self.env['res.company'].
                                 _company_default_get('event.event'),
                                 required=False,
                                 readonly=False,
                                 states={'done': [('readonly', True)]})
    organizer_id = fields.Many2one(
        'res.partner',
        string='Organizer',
        track_visibility="onchange",
        default=lambda self: self.env.user.company_id.partner_id)
    event_type_id = fields.Many2one('event.type',
                                    string='Category',
                                    readonly=False,
                                    states={'done': [('readonly', True)]},
                                    oldname='type')
    color = fields.Integer('Kanban Color Index')
    event_mail_ids = fields.One2many('event.mail',
                                     'event_id',
                                     string='Mail Schedule',
                                     copy=True)

    # Seats and computation
    seats_max = fields.Integer(
        string='Maximum Attendees Number',
        oldname='register_max',
        readonly=True,
        states={
            'draft': [('readonly', False)],
            'confirm': [('readonly', False)]
        },
        help=
        "For each event you can define a maximum registration of seats(number of attendees), above this numbers the registrations are not accepted."
    )
    seats_availability = fields.Selection([('limited', 'Limited'),
                                           ('unlimited', 'Unlimited')],
                                          'Maximum Attendees',
                                          required=True,
                                          default='unlimited')
    seats_min = fields.Integer(
        string='Minimum Attendees',
        oldname='register_min',
        help=
        "For each event you can define a minimum reserved seats (number of attendees), if it does not reach the mentioned registrations the event can not be confirmed (keep 0 to ignore this rule)"
    )
    seats_reserved = fields.Integer(oldname='register_current',
                                    string='Reserved Seats',
                                    store=True,
                                    readonly=True,
                                    compute='_compute_seats')
    seats_available = fields.Integer(oldname='register_avail',
                                     string='Available Seats',
                                     store=True,
                                     readonly=True,
                                     compute='_compute_seats')
    seats_unconfirmed = fields.Integer(oldname='register_prospect',
                                       string='Unconfirmed Seat Reservations',
                                       store=True,
                                       readonly=True,
                                       compute='_compute_seats')
    seats_used = fields.Integer(oldname='register_attended',
                                string='Number of Participants',
                                store=True,
                                readonly=True,
                                compute='_compute_seats')
    seats_expected = fields.Integer(string='Number of Expected Attendees',
                                    readonly=True,
                                    compute='_compute_seats')

    # Registration fields
    registration_ids = fields.One2many('event.registration',
                                       'event_id',
                                       string='Attendees',
                                       readonly=False,
                                       states={'done': [('readonly', True)]})
    # Date fields
    date_tz = fields.Selection('_tz_get',
                               string='Timezone',
                               required=True,
                               default=lambda self: self.env.user.tz or 'UTC')
    date_begin = fields.Datetime(string='Start Date',
                                 required=True,
                                 track_visibility='onchange',
                                 states={'done': [('readonly', True)]})
    date_end = fields.Datetime(string='End Date',
                               required=True,
                               track_visibility='onchange',
                               states={'done': [('readonly', True)]})
    date_begin_located = fields.Char(string='Start Date Located',
                                     compute='_compute_date_begin_tz')
    date_end_located = fields.Char(string='End Date Located',
                                   compute='_compute_date_end_tz')

    state = fields.Selection(
        [('draft', 'Unconfirmed'), ('cancel', 'Cancelled'),
         ('confirm', 'Confirmed'), ('done', 'Done')],
        string='Status',
        default='draft',
        readonly=True,
        required=True,
        copy=False,
        help=
        "If event is created, the status is 'Draft'. If event is confirmed for the particular dates the status is set to 'Confirmed'. If the event is over, the status is set to 'Done'. If event is cancelled the status is set to 'Cancelled'."
    )
    auto_confirm = fields.Boolean(string='Autoconfirm Registrations')
    is_online = fields.Boolean('Online Event')
    address_id = fields.Many2one(
        'res.partner',
        string='Location',
        default=lambda self: self.env.user.company_id.partner_id,
        readonly=False,
        states={'done': [('readonly', True)]},
        track_visibility="onchange")
    country_id = fields.Many2one('res.country',
                                 'Country',
                                 related='address_id.country_id',
                                 store=True)
    twitter_hashtag = fields.Char('Twitter Hashtag')
    description = fields.Html(string='Description',
                              oldname='note',
                              translate=html_translate,
                              sanitize_attributes=False,
                              readonly=False,
                              states={'done': [('readonly', True)]})
    # badge fields
    badge_front = fields.Html(string='Badge Front')
    badge_back = fields.Html(string='Badge Back')
    badge_innerleft = fields.Html(string='Badge Inner Left')
    badge_innerright = fields.Html(string='Badge Inner Right')
    event_logo = fields.Html(string='Event Logo')

    @api.multi
    @api.depends('seats_max', 'registration_ids.state')
    def _compute_seats(self):
        """ Determine reserved, available, reserved but unconfirmed and used seats. """
        # initialize fields to 0
        for event in self:
            event.seats_unconfirmed = event.seats_reserved = event.seats_used = event.seats_available = 0
        # aggregate registrations by event and by state
        if self.ids:
            # maps registration state -> seat counter field to increment
            state_field = {
                'draft': 'seats_unconfirmed',
                'open': 'seats_reserved',
                'done': 'seats_used',
            }
            # single grouped SQL query instead of per-event ORM reads
            query = """ SELECT event_id, state, count(event_id)
                        FROM event_registration
                        WHERE event_id IN %s AND state IN ('draft', 'open', 'done')
                        GROUP BY event_id, state
                    """
            self._cr.execute(query, (tuple(self.ids), ))
            for event_id, state, num in self._cr.fetchall():
                event = self.browse(event_id)
                event[state_field[state]] += num
        # compute seats_available
        for event in self:
            if event.seats_max > 0:
                event.seats_available = event.seats_max - (
                    event.seats_reserved + event.seats_used)
            event.seats_expected = event.seats_unconfirmed + event.seats_reserved + event.seats_used

    @api.model
    def _tz_get(self):
        # selection values for date_tz: every timezone known to pytz
        return [(x, x) for x in pytz.all_timezones]

    @api.one
    @api.depends('date_tz', 'date_begin')
    def _compute_date_begin_tz(self):
        # human-readable start date localized in the event's timezone
        if self.date_begin:
            self.date_begin_located = format_tz(
                self.with_context(use_babel=True).env,
                self.date_begin,
                tz=self.date_tz)
        else:
            self.date_begin_located = False

    @api.one
    @api.depends('date_tz', 'date_end')
    def _compute_date_end_tz(self):
        # human-readable end date localized in the event's timezone
        if self.date_end:
            self.date_end_located = format_tz(
                self.with_context(use_babel=True).env,
                self.date_end,
                tz=self.date_tz)
        else:
            self.date_end_located = False

    @api.onchange('event_type_id')
    def _onchange_type(self):
        # copy the category's defaults (seats, confirmation policy, hashtag,
        # timezone, online flag, mail schedule) onto the event
        if self.event_type_id:
            self.seats_min = self.event_type_id.default_registration_min
            self.seats_max = self.event_type_id.default_registration_max
            if self.event_type_id.default_registration_max:
                self.seats_availability = 'limited'

            if self.event_type_id.auto_confirm:
                self.auto_confirm = self.event_type_id.auto_confirm

            if self.event_type_id.use_hashtag:
                self.twitter_hashtag = self.event_type_id.default_hashtag

            if self.event_type_id.use_timezone:
                self.date_tz = self.event_type_id.default_timezone

            self.is_online = self.event_type_id.is_online

            if self.event_type_id.event_type_mail_ids:
                # NOTE(review): mixes a (5,0,0) unlink-all command with plain
                # dicts, and passes line.template_id (a record, not an id) —
                # presumably relied on by the onchange client protocol; verify.
                self.event_mail_ids = [
                    (5, 0, 0)
                ] + [{
                    'template_id': line.template_id,
                    'interval_nbr': line.interval_nbr,
                    'interval_unit': line.interval_unit,
                    'interval_type': line.interval_type
                } for line in self.event_type_id.event_type_mail_ids]

    @api.constrains('seats_min', 'seats_max', 'seats_availability')
    def _check_seats_min_max(self):
        # a limited event cannot require more minimum seats than its maximum
        if any(event.seats_availability == 'limited'
               and event.seats_min > event.seats_max for event in self):
            raise ValidationError(
                _('Maximum attendees number should be greater than minimum attendees number.'
                  ))

    @api.constrains('seats_max', 'seats_available')
    def _check_seats_limit(self):
        # reject registrations that would overbook a limited event
        if any(event.seats_availability == 'limited' and event.seats_max
               and event.seats_available < 0 for event in self):
            raise ValidationError(_('No more available seats.'))

    @api.one
    @api.constrains('date_begin', 'date_end')
    def _check_closing_date(self):
        if self.date_end < self.date_begin:
            raise ValidationError(
                _('Closing Date cannot be set before Beginning Date.'))

    @api.multi
    @api.depends('name', 'date_begin', 'date_end')
    def name_get(self):
        # display name is "<name> (<begin date> - <end date>)", dates
        # localized to the user's timezone; a same-day event shows one date
        result = []
        for event in self:
            date_begin = fields.Datetime.from_string(event.date_begin)
            date_end = fields.Datetime.from_string(event.date_end)
            dates = [
                fields.Date.to_string(
                    fields.Datetime.context_timestamp(event, dt))
                for dt in [date_begin, date_end] if dt
            ]
            dates = sorted(set(dates))
            result.append(
                (event.id, '%s (%s)' % (event.name, ' - '.join(dates))))
        return result

    @api.model
    def create(self, vals):
        # subscribe the organizer to the event's chatter and auto-confirm
        # when requested
        res = super(EventEvent, self).create(vals)
        if res.organizer_id:
            res.message_subscribe([res.organizer_id.id])
        if res.auto_confirm:
            res.button_confirm()
        return res

    @api.multi
    def write(self, vals):
        # keep a newly set organizer subscribed to the chatter
        res = super(EventEvent, self).write(vals)
        if vals.get('organizer_id'):
            self.message_subscribe([vals['organizer_id']])
        return res

    @api.multi
    def copy(self, default=None):
        # suffix the duplicated event's name with "(copy)"
        self.ensure_one()
        default = dict(default or {}, name=_("%s (copy)") % (self.name))
        return super(EventEvent, self).copy(default)

    @api.one
    def button_draft(self):
        self.state = 'draft'

    @api.multi
    def button_cancel(self):
        # cancelling is forbidden once attendees actually attended; otherwise
        # cancel all registrations along with the event
        if any('done' in event.mapped('registration_ids.state')
               for event in self):
            raise UserError(
                _("There are already attendees who attended this event. Please reset it to draft if you want to cancel this event."
                  ))
        self.registration_ids.write({'state': 'cancel'})
        self.state = 'cancel'

    @api.one
    def button_done(self):
        self.state = 'done'

    @api.one
    def button_confirm(self):
        self.state = 'confirm'

    @api.one
    def mail_attendees(self,
                       template_id,
                       force_send=False,
                       filter_func=lambda self: self.state != 'cancel'):
        # send the given mail template to every non-cancelled attendee
        # (or to those matching a custom filter_func)
        for attendee in self.registration_ids.filtered(filter_func):
            self.env['mail.template'].browse(template_id).send_mail(
                attendee.id, force_send=force_send)

    @api.multi
    def _is_event_registrable(self):
        # extension hook: override to restrict when registration is allowed
        return True
class AccountFrFec(models.TransientModel):
    """Wizard generating the French FEC (Fichier des Ecritures Comptables)
    export: a pipe-delimited CSV of initial balances and journal items for
    a given date range.
    """
    _name = 'account.fr.fec'
    _description = 'Ficher Echange Informatise'

    date_from = fields.Date(string='Start Date', required=True)
    date_to = fields.Date(string='End Date', required=True)
    fec_data = fields.Binary('FEC File', readonly=True)
    filename = fields.Char(string='Filename', size=256, readonly=True)
    export_type = fields.Selection([
        ('official', 'Official FEC report (posted entries only)'),
        ('nonofficial',
         'Non-official FEC report (posted and unposted entries)'),
    ],
                                   string='Export Type',
                                   required=True,
                                   default='official')

    def do_query_unaffected_earnings(self):
        ''' Compute the sum of ending balances for all accounts that are of a type that does not bring forward the balance in new fiscal years.
            This is needed because we have to display only one line for the initial balance of all expense/revenue accounts in the FEC.
        '''
        sql_query = '''
        SELECT
        'OUV' AS JournalCode,
        'Balance initiale' AS JournalLib,
        'OUVERTURE/' || %s AS EcritureNum,
        %s AS EcritureDate,
        '120/129' AS CompteNum,
        'Benefice (perte) reporte(e)' AS CompteLib,
        '' AS CompAuxNum,
        '' AS CompAuxLib,
        '-' AS PieceRef,
        %s AS PieceDate,
        '/' AS EcritureLib,
        replace(CASE WHEN COALESCE(sum(aml.balance), 0) <= 0 THEN '0,00' ELSE to_char(SUM(aml.balance), '999999999999999D99') END, '.', ',') AS Debit,
        replace(CASE WHEN COALESCE(sum(aml.balance), 0) >= 0 THEN '0,00' ELSE to_char(-SUM(aml.balance), '999999999999999D99') END, '.', ',') AS Credit,
        '' AS EcritureLet,
        '' AS DateLet,
        %s AS ValidDate,
        '' AS Montantdevise,
        '' AS Idevise
        FROM
        account_move_line aml
        LEFT JOIN account_move am ON am.id=aml.move_id
        JOIN account_account aa ON aa.id = aml.account_id
        LEFT JOIN account_account_type aat ON aa.user_type_id = aat.id
        WHERE
        am.date < %s
        AND am.company_id = %s
        AND aat.include_initial_balance = 'f'
        AND (aml.debit != 0 OR aml.credit != 0)
        '''
        # For official report: only use posted entries
        if self.export_type == "official":
            sql_query += '''
        AND am.state = 'posted'
        '''
        company = self.env.user.company_id
        formatted_date_from = self.date_from.replace('-', '')
        date_from = datetime.strptime(self.date_from,
                                      DEFAULT_SERVER_DATE_FORMAT)
        formatted_date_year = date_from.year
        self._cr.execute(
            sql_query, (formatted_date_year, formatted_date_from,
                        formatted_date_from, formatted_date_from,
                        self.date_from, company.id))
        listrow = []
        row = self._cr.fetchone()
        listrow = list(row)
        return listrow

    @api.multi
    def generate_fec(self):
        """Build the FEC CSV, store it on the wizard and return a download
        action. The file contains the initial balances (with last year's
        benefit/loss folded into the unaffected earnings account) followed
        by every journal item in the period.
        """
        self.ensure_one()
        # We choose to implement the flat file instead of the XML
        # file for 2 reasons :
        # 1) the XSD file impose to have the label on the account.move
        # but Flectra has the label on the account.move.line, so that's a
        # problem !
        # 2) CSV files are easier to read/use for a regular accountant.
        # So it will be easier for the accountant to check the file before
        # sending it to the fiscal administration
        header = [
            u'JournalCode',  # 0
            u'JournalLib',  # 1
            u'EcritureNum',  # 2
            u'EcritureDate',  # 3
            u'CompteNum',  # 4
            u'CompteLib',  # 5
            u'CompAuxNum',  # 6  We use partner.id
            u'CompAuxLib',  # 7
            u'PieceRef',  # 8
            u'PieceDate',  # 9
            u'EcritureLib',  # 10
            u'Debit',  # 11
            u'Credit',  # 12
            u'EcritureLet',  # 13
            u'DateLet',  # 14
            u'ValidDate',  # 15
            u'Montantdevise',  # 16
            u'Idevise',  # 17
        ]

        company = self.env.user.company_id
        if not company.vat:
            raise Warning(
                _("Missing VAT number for company %s") % company.name)
        if company.vat[0:2] != 'FR':
            raise Warning(_("FEC is for French companies only !"))

        fecfile = io.BytesIO()
        w = pycompat.csv_writer(fecfile, delimiter='|')
        w.writerow(header)

        # INITIAL BALANCE
        unaffected_earnings_xml_ref = self.env.ref(
            'account.data_unaffected_earnings')
        # used to make sure that we add the unaffected earning initial balance only once
        unaffected_earnings_line = True
        if unaffected_earnings_xml_ref:
            #compute the benefit/loss of last year to add in the initial balance of the current year earnings account
            unaffected_earnings_results = self.do_query_unaffected_earnings()
            unaffected_earnings_line = False

        sql_query = '''
        SELECT
        'OUV' AS JournalCode,
        'Balance initiale' AS JournalLib,
        'OUVERTURE/' || %s AS EcritureNum,
        %s AS EcritureDate,
        MIN(aa.code) AS CompteNum,
        replace(replace(MIN(aa.name), '|', '/'), '\t', '') AS CompteLib,
        '' AS CompAuxNum,
        '' AS CompAuxLib,
        '-' AS PieceRef,
        %s AS PieceDate,
        '/' AS EcritureLib,
        replace(CASE WHEN sum(aml.balance) <= 0 THEN '0,00' ELSE to_char(SUM(aml.balance), '999999999999999D99') END, '.', ',') AS Debit,
        replace(CASE WHEN sum(aml.balance) >= 0 THEN '0,00' ELSE to_char(-SUM(aml.balance), '999999999999999D99') END, '.', ',') AS Credit,
        '' AS EcritureLet,
        '' AS DateLet,
        %s AS ValidDate,
        '' AS Montantdevise,
        '' AS Idevise,
        MIN(aa.id) AS CompteID
        FROM
        account_move_line aml
        LEFT JOIN account_move am ON am.id=aml.move_id
        JOIN account_account aa ON aa.id = aml.account_id
        LEFT JOIN account_account_type aat ON aa.user_type_id = aat.id
        WHERE
        am.date < %s
        AND am.company_id = %s
        AND aat.include_initial_balance = 't'
        AND (aml.debit != 0 OR aml.credit != 0)
        '''
        # For official report: only use posted entries
        if self.export_type == "official":
            sql_query += '''
        AND am.state = 'posted'
        '''
        sql_query += '''
        GROUP BY aml.account_id, aat.type
        HAVING sum(aml.balance) != 0
        AND aat.type not in ('receivable', 'payable')
        '''
        formatted_date_from = self.date_from.replace('-', '')
        date_from = datetime.strptime(self.date_from,
                                      DEFAULT_SERVER_DATE_FORMAT)
        formatted_date_year = date_from.year
        self._cr.execute(
            sql_query, (formatted_date_year, formatted_date_from,
                        formatted_date_from, formatted_date_from,
                        self.date_from, company.id))

        for row in self._cr.fetchall():
            listrow = list(row)
            account_id = listrow.pop()
            if not unaffected_earnings_line:
                account = self.env['account.account'].browse(account_id)
                if account.user_type_id.id == self.env.ref(
                        'account.data_unaffected_earnings').id:
                    #add the benefit/loss of previous fiscal year to the first unaffected earnings account found.
                    unaffected_earnings_line = True
                    current_amount = float(listrow[11].replace(
                        ',', '.')) - float(listrow[12].replace(',', '.'))
                    unaffected_earnings_amount = float(
                        unaffected_earnings_results[11].replace(
                            ',', '.')) - float(
                                unaffected_earnings_results[12].replace(
                                    ',', '.'))
                    listrow_amount = current_amount + unaffected_earnings_amount
                    if listrow_amount > 0:
                        listrow[11] = str(listrow_amount).replace('.', ',')
                        listrow[12] = '0,00'
                    else:
                        listrow[11] = '0,00'
                        listrow[12] = str(-listrow_amount).replace('.', ',')
            w.writerow(listrow)
        #if the unaffected earnings account wasn't in the selection yet: add it manually
        if (not unaffected_earnings_line and unaffected_earnings_results
                and (unaffected_earnings_results[11] != '0,00'
                     or unaffected_earnings_results[12] != '0,00')):
            #search an unaffected earnings account
            unaffected_earnings_account = self.env['account.account'].search(
                [('user_type_id', '=',
                  self.env.ref('account.data_unaffected_earnings').id)],
                limit=1)
            if unaffected_earnings_account:
                unaffected_earnings_results[
                    4] = unaffected_earnings_account.code
                unaffected_earnings_results[
                    5] = unaffected_earnings_account.name
            w.writerow(unaffected_earnings_results)

        # INITIAL BALANCE - receivable/payable
        sql_query = '''
        SELECT
        'OUV' AS JournalCode,
        'Balance initiale' AS JournalLib,
        'OUVERTURE/' || %s AS EcritureNum,
        %s AS EcritureDate,
        MIN(aa.code) AS CompteNum,
        replace(MIN(aa.name), '|', '/') AS CompteLib,
        CASE WHEN rp.ref IS null OR rp.ref = ''
        THEN COALESCE('ID ' || rp.id, '')
        ELSE rp.ref
        END
        AS CompAuxNum,
        COALESCE(replace(rp.name, '|', '/'), '') AS CompAuxLib,
        '-' AS PieceRef,
        %s AS PieceDate,
        '/' AS EcritureLib,
        replace(CASE WHEN sum(aml.balance) <= 0 THEN '0,00' ELSE to_char(SUM(aml.balance), '999999999999999D99') END, '.', ',') AS Debit,
        replace(CASE WHEN sum(aml.balance) >= 0 THEN '0,00' ELSE to_char(-SUM(aml.balance), '999999999999999D99') END, '.', ',') AS Credit,
        '' AS EcritureLet,
        '' AS DateLet,
        %s AS ValidDate,
        '' AS Montantdevise,
        '' AS Idevise,
        MIN(aa.id) AS CompteID
        FROM
        account_move_line aml
        LEFT JOIN account_move am ON am.id=aml.move_id
        LEFT JOIN res_partner rp ON rp.id=aml.partner_id
        JOIN account_account aa ON aa.id = aml.account_id
        LEFT JOIN account_account_type aat ON aa.user_type_id = aat.id
        WHERE
        am.date < %s
        AND am.company_id = %s
        AND aat.include_initial_balance = 't'
        AND (aml.debit != 0 OR aml.credit != 0)
        '''
        # For official report: only use posted entries
        if self.export_type == "official":
            sql_query += '''
        AND am.state = 'posted'
        '''
        sql_query += '''
        GROUP BY aml.account_id, aat.type, rp.ref, rp.id
        HAVING sum(aml.balance) != 0
        AND aat.type in ('receivable', 'payable')
        '''
        self._cr.execute(
            sql_query, (formatted_date_year, formatted_date_from,
                        formatted_date_from, formatted_date_from,
                        self.date_from, company.id))

        for row in self._cr.fetchall():
            listrow = list(row)
            account_id = listrow.pop()
            # NOTE(review): this branch encodes each cell to bytes while the
            # other writerow calls pass str — presumably pycompat.csv_writer
            # accepts both; verify before changing.
            w.writerow([s.encode("utf-8") for s in listrow])

        # LINES
        sql_query = '''
        SELECT
        replace(replace(aj.code, '|', '/'), '\t', '') AS JournalCode,
        replace(replace(aj.name, '|', '/'), '\t', '') AS JournalLib,
        replace(replace(am.name, '|', '/'), '\t', '') AS EcritureNum,
        TO_CHAR(am.date, 'YYYYMMDD') AS EcritureDate,
        aa.code AS CompteNum,
        replace(replace(aa.name, '|', '/'), '\t', '') AS CompteLib,
        CASE WHEN rp.ref IS null OR rp.ref = ''
        THEN COALESCE('ID ' || rp.id, '')
        ELSE rp.ref
        END
        AS CompAuxNum,
        COALESCE(replace(replace(rp.name, '|', '/'), '\t', ''), '') AS CompAuxLib,
        CASE WHEN am.ref IS null OR am.ref = ''
        THEN '-'
        ELSE replace(replace(am.ref, '|', '/'), '\t', '')
        END
        AS PieceRef,
        TO_CHAR(am.date, 'YYYYMMDD') AS PieceDate,
        CASE WHEN aml.name IS NULL THEN '/' ELSE replace(replace(aml.name, '|', '/'), '\t', '') END AS EcritureLib,
        replace(CASE WHEN aml.debit = 0 THEN '0,00' ELSE to_char(aml.debit, '999999999999999D99') END, '.', ',') AS Debit,
        replace(CASE WHEN aml.credit = 0 THEN '0,00' ELSE to_char(aml.credit, '999999999999999D99') END, '.', ',') AS Credit,
        CASE WHEN rec.name IS NULL THEN '' ELSE rec.name END AS EcritureLet,
        CASE WHEN aml.full_reconcile_id IS NULL THEN '' ELSE TO_CHAR(rec.create_date, 'YYYYMMDD') END AS DateLet,
        TO_CHAR(am.date, 'YYYYMMDD') AS ValidDate,
        CASE
            WHEN aml.amount_currency IS NULL OR aml.amount_currency = 0 THEN ''
            ELSE replace(to_char(aml.amount_currency, '999999999999999D99'), '.', ',')
        END AS Montantdevise,
        CASE WHEN aml.currency_id IS NULL THEN '' ELSE rc.name END AS Idevise
        FROM
        account_move_line aml
        LEFT JOIN account_move am ON am.id=aml.move_id
        LEFT JOIN res_partner rp ON rp.id=aml.partner_id
        JOIN account_journal aj ON aj.id = am.journal_id
        JOIN account_account aa ON aa.id = aml.account_id
        LEFT JOIN res_currency rc ON rc.id = aml.currency_id
        LEFT JOIN account_full_reconcile rec ON rec.id = aml.full_reconcile_id
        WHERE
        am.date >= %s
        AND am.date <= %s
        AND am.company_id = %s
        AND (aml.debit != 0 OR aml.credit != 0)
        '''
        # For official report: only use posted entries
        if self.export_type == "official":
            sql_query += '''
        AND am.state = 'posted'
        '''
        sql_query += '''
        ORDER BY
        am.date,
        am.name,
        aml.id
        '''
        self._cr.execute(sql_query,
                         (self.date_from, self.date_to, company.id))

        for row in self._cr.fetchall():
            w.writerow(list(row))

        siren = company.vat[4:13]
        end_date = self.date_to.replace('-', '')
        suffix = ''
        if self.export_type == "nonofficial":
            suffix = '-NONOFFICIAL'
        fecvalue = fecfile.getvalue()
        self.write({
            # base64.encodestring() was deprecated since Python 3.1 and
            # removed in 3.9; encodebytes() produces the same output.
            'fec_data': base64.encodebytes(fecvalue),
            # Filename = <siren>FECYYYYMMDD where YYYYMMDD is the closing date
            'filename': '%sFEC%s%s.csv' % (siren, end_date, suffix),
        })
        fecfile.close()

        action = {
            'name': 'FEC',
            'type': 'ir.actions.act_url',
            'url': "web/content/?model=account.fr.fec&id=" + str(self.id) +
            "&filename_field=filename&field=fec_data&download=true&filename=" +
            self.filename,
            'target': 'self',
        }
        return action
class Contract(models.Model):
    """Employment contract of an employee.

    Tracks the contractual data (dates, wage, working schedule) and drives a
    small state machine: draft -> open -> close (or cancel).  ``kanban_state``
    refines ``draft``/``open`` (see the note above the field definition).
    """
    _name = 'hr.contract'
    _description = 'Contract'
    _inherit = ['mail.thread', 'mail.activity.mixin']

    name = fields.Char('Contract Reference', required=True)
    active = fields.Boolean(default=True)
    structure_type_id = fields.Many2one('hr.payroll.structure.type', string="Salary Structure Type")
    # Employee/department/job are restricted to the contract's company (or no company).
    employee_id = fields.Many2one('hr.employee', string='Employee', tracking=True,
        domain="['|', ('company_id', '=', False), ('company_id', '=', company_id)]")
    department_id = fields.Many2one('hr.department', compute='_compute_employee_contract', store=True, readonly=False,
        domain="['|', ('company_id', '=', False), ('company_id', '=', company_id)]",
        string="Department")
    job_id = fields.Many2one('hr.job', compute='_compute_employee_contract', store=True, readonly=False,
        domain="['|', ('company_id', '=', False), ('company_id', '=', company_id)]",
        string='Job Position')
    date_start = fields.Date('Start Date', required=True, default=fields.Date.today, tracking=True,
        help="Start date of the contract.")
    date_end = fields.Date('End Date', tracking=True,
        help="End date of the contract (if it's a fixed-term contract).")
    trial_date_end = fields.Date('End of Trial Period',
        help="End date of the trial period (if there is one).")
    resource_calendar_id = fields.Many2one(
        'resource.calendar', 'Working Schedule',
        compute='_compute_employee_contract', store=True, readonly=False,
        default=lambda self: self.env.company.resource_calendar_id.id, copy=False,
        domain="['|', ('company_id', '=', False), ('company_id', '=', company_id)]")
    wage = fields.Monetary('Wage', required=True, tracking=True, help="Employee's monthly gross wage.")
    notes = fields.Text('Notes')
    state = fields.Selection([
        ('draft', 'New'),
        ('open', 'Running'),
        ('close', 'Expired'),
        ('cancel', 'Cancelled')
    ], string='Status', group_expand='_expand_states', copy=False,
        tracking=True, help='Status of the contract', default='draft')
    company_id = fields.Many2one('res.company', compute='_compute_employee_contract', store=True, readonly=False,
        default=lambda self: self.env.company, required=True)
    company_country_id = fields.Many2one('res.country', string="Company country", related='company_id.country_id', readonly=True)

    """
        kanban_state:
        * draft + green = "Incoming" state (will be set as Open once the contract has started)
        * open + red = "Pending" state (will be set as Closed once the contract has ended)
        * red = Shows a warning on the employees kanban view
    """
    kanban_state = fields.Selection([
        ('normal', 'Grey'),
        ('done', 'Green'),
        ('blocked', 'Red')
    ], string='Kanban State', default='normal', tracking=True, copy=False)
    currency_id = fields.Many2one(string="Currency", related='company_id.currency_id', readonly=True)
    # Work-permit data is stored on the employee; exposed here as editable related fields.
    permit_no = fields.Char('Work Permit No', related="employee_id.permit_no", readonly=False)
    visa_no = fields.Char('Visa No', related="employee_id.visa_no", readonly=False)
    visa_expire = fields.Date('Visa Expire Date', related="employee_id.visa_expire", readonly=False)
    hr_responsible_id = fields.Many2one('res.users', 'HR Responsible', tracking=True,
        help='Person responsible for validating the employee\'s contracts.')
    calendar_mismatch = fields.Boolean(compute='_compute_calendar_mismatch')
    first_contract_date = fields.Date(related='employee_id.first_contract_date')

    @api.depends('employee_id.resource_calendar_id', 'resource_calendar_id')
    def _compute_calendar_mismatch(self):
        """Flag contracts whose working schedule differs from the employee's."""
        for contract in self:
            contract.calendar_mismatch = contract.resource_calendar_id != contract.employee_id.resource_calendar_id

    def _expand_states(self, states, domain, order):
        # Show every state column in grouped (kanban/list) views, even empty ones.
        return [key for key, val in type(self).state.selection]

    @api.depends('employee_id')
    def _compute_employee_contract(self):
        """Default department/job/calendar/company from the selected employee."""
        for contract in self.filtered('employee_id'):
            contract.job_id = contract.employee_id.job_id
            contract.department_id = contract.employee_id.department_id
            contract.resource_calendar_id = contract.employee_id.resource_calendar_id
            contract.company_id = contract.employee_id.company_id

    @api.onchange('company_id')
    def _onchange_company_id(self):
        # Pick a salary structure type matching the company's country (or a
        # country-agnostic one).  NOTE(review): the elif only runs when the
        # search is empty, so it always resets structure_type_id to False then.
        if self.company_id:
            structure_types = self.env['hr.payroll.structure.type'].search([
                '|',
                ('country_id', '=', self.company_id.country_id.id),
                ('country_id', '=', False)])
            if structure_types:
                self.structure_type_id = structure_types[0]
            elif self.structure_type_id not in structure_types:
                self.structure_type_id = False

    @api.onchange('structure_type_id')
    def _onchange_structure_type_id(self):
        # Apply the structure type's default working schedule, if any.
        if self.structure_type_id.default_resource_calendar_id:
            self.resource_calendar_id = self.structure_type_id.default_resource_calendar_id

    @api.constrains('employee_id', 'state', 'kanban_state', 'date_start', 'date_end')
    def _check_current_contract(self):
        """ Two contracts in state [incoming | open | close] cannot overlap """
        # "Incoming" is encoded as state == 'draft' with kanban_state == 'done'.
        for contract in self.filtered(lambda c: (c.state not in ['draft', 'cancel'] or c.state == 'draft' and c.kanban_state == 'done') and c.employee_id):
            domain = [
                ('id', '!=', contract.id),
                ('employee_id', '=', contract.employee_id.id),
                '|',
                    ('state', 'in', ['open', 'close']),
                    '&',
                        ('state', '=', 'draft'),
                        ('kanban_state', '=', 'done')  # replaces incoming
            ]
            if not contract.date_end:
                # Open-ended contract: overlaps anything ending after its start
                # (or also open-ended).
                start_domain = []
                end_domain = ['|', ('date_end', '>=', contract.date_start), ('date_end', '=', False)]
            else:
                start_domain = [('date_start', '<=', contract.date_end)]
                end_domain = ['|', ('date_end', '>', contract.date_start), ('date_end', '=', False)]
            domain = expression.AND([domain, start_domain, end_domain])
            if self.search_count(domain):
                raise ValidationError(_('An employee can only have one contract at the same time. (Excluding Draft and Cancelled contracts)'))

    @api.constrains('date_start', 'date_end')
    def _check_dates(self):
        if self.filtered(lambda c: c.date_end and c.date_start > c.date_end):
            raise ValidationError(_('Contract start date must be earlier than contract end date.'))

    @api.model
    def update_state(self):
        """Cron entry point: warn about expiring contracts/visas, close ended
        contracts, open incoming ones, and backfill missing end dates."""
        # 1) Running contracts ending within 7 days, or with a visa expiring
        #    within 60 days: schedule a to-do and mark them red ("blocked").
        contracts = self.search([
            ('state', '=', 'open'), ('kanban_state', '!=', 'blocked'),
            '|',
            '&',
            ('date_end', '<=', fields.Date.to_string(date.today() + relativedelta(days=7))),
            ('date_end', '>=', fields.Date.to_string(date.today() + relativedelta(days=1))),
            '&',
            ('visa_expire', '<=', fields.Date.to_string(date.today() + relativedelta(days=60))),
            ('visa_expire', '>=', fields.Date.to_string(date.today() + relativedelta(days=1))),
        ])
        for contract in contracts:
            contract.activity_schedule(
                'mail.mail_activity_data_todo', contract.date_end,
                _("The contract of %s is about to expire.", contract.employee_id.name),
                user_id=contract.hr_responsible_id.id or self.env.uid)
        contracts.write({'kanban_state': 'blocked'})
        # 2) Close running contracts whose end date (or visa) is past.
        self.search([
            ('state', '=', 'open'),
            '|',
            ('date_end', '<=', fields.Date.to_string(date.today() + relativedelta(days=1))),
            ('visa_expire', '<=', fields.Date.to_string(date.today() + relativedelta(days=1))),
        ]).write({
            'state': 'close'
        })
        # 3) Open "incoming" contracts whose start date has been reached.
        self.search([('state', '=', 'draft'), ('kanban_state', '=', 'done'), ('date_start', '<=', fields.Date.to_string(date.today())),]).write({
            'state': 'open'
        })
        contract_ids = self.search([('date_end', '=', False), ('state', '=', 'close'), ('employee_id', '!=', False)])
        # Ensure all closed contract followed by a new contract have a end date.
        # If closed contract has no closed date, the work entries will be generated for an unlimited period.
        for contract in contract_ids:
            # Prefer the next non-cancelled contract; fall back to any later one.
            next_contract = self.search([
                ('employee_id', '=', contract.employee_id.id),
                ('state', 'not in', ['cancel', 'new']),
                ('date_start', '>', contract.date_start)
            ], order="date_start asc", limit=1)
            if next_contract:
                contract.date_end = next_contract.date_start - relativedelta(days=1)
                continue
            next_contract = self.search([
                ('employee_id', '=', contract.employee_id.id),
                ('date_start', '>', contract.date_start)
            ], order="date_start asc", limit=1)
            if next_contract:
                contract.date_end = next_contract.date_start - relativedelta(days=1)
        return True

    def _assign_open_contract(self):
        # Make each record the employee's current contract (sudo: regular HR
        # users may lack write access on the employee record).
        for contract in self:
            contract.employee_id.sudo().write({'contract_id': contract.id})

    def _get_contract_wage(self):
        """Return the wage value used by payroll (field chosen by
        :meth:`_get_contract_wage_field`, overridable in localizations)."""
        self.ensure_one()
        return self[self._get_contract_wage_field()]

    def _get_contract_wage_field(self):
        self.ensure_one()
        return 'wage'

    def write(self, vals):
        res = super(Contract, self).write(vals)
        if vals.get('state') == 'open':
            self._assign_open_contract()
        if vals.get('state') == 'close':
            # A closing contract must have an end date for work-entry generation.
            for contract in self.filtered(lambda c: not c.date_end):
                contract.date_end = max(date.today(), contract.date_start)
        calendar = vals.get('resource_calendar_id')
        if calendar:
            # Propagate the new schedule to employees of running/incoming contracts.
            self.filtered(lambda c: c.state == 'open' or (c.state == 'draft' and c.kanban_state == 'done')).mapped('employee_id').write({'resource_calendar_id': calendar})
        if 'state' in vals and 'kanban_state' not in vals:
            # Any explicit state change resets the kanban color to grey.
            self.write({'kanban_state': 'normal'})
        return res

    @api.model
    def create(self, vals):
        contracts = super(Contract, self).create(vals)
        if vals.get('state') == 'open':
            contracts._assign_open_contract()
        open_contracts = contracts.filtered(lambda c: c.state == 'open' or c.state == 'draft' and c.kanban_state == 'done')
        # sync contract calendar -> calendar employee
        for contract in open_contracts.filtered(lambda c: c.employee_id and c.resource_calendar_id):
            contract.employee_id.resource_calendar_id = contract.resource_calendar_id
        return contracts

    def _track_subtype(self, init_values):
        # Choose the mail subtype for chatter notifications on state changes.
        self.ensure_one()
        if 'state' in init_values and self.state == 'open' and 'kanban_state' in init_values and self.kanban_state == 'blocked':
            return self.env.ref('hr_contract.mt_contract_pending')
        elif 'state' in init_values and self.state == 'close':
            return self.env.ref('hr_contract.mt_contract_close')
        return super(Contract, self)._track_subtype(init_values)
class SaleOrder(models.Model):
    """Sale order extension adding the product-variant matrix configurator.

    The matrix (one cell per attribute-value combination) is exchanged with
    the client as JSON through the transient ``grid`` fields below.
    """
    _inherit = 'sale.order'

    report_grids = fields.Boolean(
        string="Print Variant Grids", default=True,
        help="If set, the matrix of the products configurable by matrix will be shown on the report of the order.")

    """ Matrix loading and update: fields and methods :

    NOTE: The matrix functionality was done in python, server side, to avoid js
        restriction.  Indeed, the js framework only loads the x first lines displayed
        in the client, which means in case of big matrices and lots of so_lines,
        the js doesn't have access to the 41nth and following lines.

        To force the loading, a 'hack' of the js framework would have been needed...
    """
    # store=False: these three fields are a client<->server scratch area only.
    grid_product_tmpl_id = fields.Many2one(
        'product.template', store=False,
        help="Technical field for product_matrix functionalities.")
    grid_update = fields.Boolean(
        default=False, store=False,
        help="Whether the grid field contains a new matrix to apply or not.")
    grid = fields.Char(
        "Matrix local storage", store=False,
        help="Technical local storage of grid. \nIf grid_update, will be loaded on the SO. \nIf not, represents the matrix to open.")

    @api.onchange('grid_product_tmpl_id')
    def _set_grid_up(self):
        """Save locally the matrix of the given product.template, to be used by the matrix configurator."""
        if self.grid_product_tmpl_id:
            self.grid_update = False
            self.grid = json.dumps(self._get_matrix(self.grid_product_tmpl_id))

    @api.onchange('grid')
    def _apply_grid(self):
        """Apply the given list of changed matrix cells to the current SO."""
        if self.grid and self.grid_update:
            grid = json.loads(self.grid)
            product_template = self.env['product.template'].browse(
                grid['product_template_id'])
            product_ids = set()
            # Only cells whose quantity changed are sent by the client.
            dirty_cells = grid['changes']
            Attrib = self.env['product.template.attribute.value']
            default_so_line_vals = {}
            new_lines = []
            for cell in dirty_cells:
                combination = Attrib.browse(cell['ptav_ids'])
                # "no_variant" attribute values live on the SO line, not on the
                # product variant itself.
                no_variant_attribute_values = combination - combination._without_no_variant_attributes()

                # create or find product variant from combination
                product = product_template._create_product_variant(combination)
                order_lines = self.order_line.filtered(
                    lambda line: line.product_id.id == product.id and line.
                    product_no_variant_attribute_value_ids.ids == no_variant_attribute_values.ids)

                # if product variant already exist in order lines
                old_qty = sum(order_lines.mapped('product_uom_qty'))
                qty = cell['qty']
                diff = qty - old_qty

                if not diff:
                    continue

                product_ids.add(product.id)

                # TODO keep qty check? cannot be 0 because we only get cell changes ...
                if order_lines:
                    if qty == 0:
                        if self.state in ['draft', 'sent']:
                            # Remove lines if qty was set to 0 in matrix
                            # only if SO state = draft/sent
                            self.order_line -= order_lines
                        else:
                            order_lines.update({'product_uom_qty': 0.0})
                    else:
                        """
                        When there are multiple lines for same product and its quantity was changed in the matrix,
                        An error is raised.

                        A 'good' strategy would be to:
                            * Sets the quantity of the first found line to the cell value
                            * Remove the other lines.

                        But this would remove all business logic linked to the other lines...
                        Therefore, it only raises an Error for now.
                        """
                        if len(order_lines) > 1:
                            raise ValidationError(
                                _("You cannot change the quantity of a product present in multiple sale lines."))
                        else:
                            order_lines[0].product_uom_qty = qty
                            # If we want to support multiple lines edition:
                            # removal of other lines.
                            # For now, an error is raised instead
                            # if len(order_lines) > 1:
                            #     # Remove 1+ lines
                            #     self.order_line -= order_lines[1:]
                else:
                    # New line needed: fetch SO-line defaults once, reuse for
                    # every created cell.
                    if not default_so_line_vals:
                        OrderLine = self.env['sale.order.line']
                        default_so_line_vals = OrderLine.default_get(
                            OrderLine._fields.keys())
                    last_sequence = self.order_line[-1:].sequence
                    if last_sequence:
                        default_so_line_vals['sequence'] = last_sequence
                    new_lines.append(
                        (0, 0, dict(
                            default_so_line_vals,
                            product_id=product.id,
                            product_uom_qty=qty,
                            product_no_variant_attribute_value_ids=no_variant_attribute_values.ids)))
            if product_ids:
                res = False
                if new_lines:
                    # Add new SO lines
                    self.update(dict(order_line=new_lines))
                # Recompute prices for new/modified lines
                for line in self.order_line.filtered(
                        lambda line: line.product_id.id in product_ids):
                    res = line.product_id_change() or res
                    line._onchange_discount()
                    line._onchange_product_id_set_customer_lead()
                return res

    def _get_matrix(self, product_template):
        """Return the matrix of the given product, updated with current SOLines quantities.

        :param product.template product_template:
        :return: matrix to display
        :rtype dict:
        """
        def has_ptavs(line, sorted_attr_ids):
            # TODO instead of sorting on ids, use flectra-defined order for matrix ?
            # A line matches a cell when the union of its variant and
            # no-variant attribute value ids equals the cell's (sorted) ids.
            ptav = line.product_template_attribute_value_ids.ids
            pnav = line.product_no_variant_attribute_value_ids.ids
            pav = pnav + ptav
            pav.sort()
            return pav == sorted_attr_ids
        matrix = product_template._get_template_matrix(
            company_id=self.company_id,
            currency_id=self.currency_id,
            display_extra_price=True)
        if self.order_line:
            lines = matrix['matrix']
            order_lines = self.order_line.filtered(
                lambda line: line.product_template_id == product_template)
            for line in lines:
                for cell in line:
                    # Cells with a 'name' are row/column headers, not quantities.
                    if not cell.get('name', False):
                        line = order_lines.filtered(
                            lambda line: has_ptavs(line, cell['ptav_ids']))
                        if line:
                            cell.update(
                                {'qty': sum(line.mapped('product_uom_qty'))})
        return matrix

    def get_report_matrixes(self):
        """Reporting method.

        :return: array of matrices to display in the report
        :rtype: list
        """
        matrixes = []
        if self.report_grids:
            grid_configured_templates = self.order_line.filtered(
                'is_configurable_product').product_template_id.filtered(
                    lambda ptmpl: ptmpl.product_add_mode == 'matrix')
            for template in grid_configured_templates:
                # Only render a matrix when the template spans several lines.
                if len(
                        self.order_line.filtered(
                            lambda line: line.product_template_id == template)
                ) > 1:
                    # TODO do we really want the whole matrix even if there isn't a lot of lines ??
                    matrixes.append(self._get_matrix(template))
        return matrixes
class IrSequence(models.Model):
    """ Sequence model.

    The sequence model allows to define and use so-called sequence objects.
    Such objects are used to generate unique identifiers in a transaction-safe
    way.

    Two implementations exist: 'standard' is backed by a native PostgreSQL
    sequence (fast, but may leave gaps on rollback), 'no_gap' stores
    ``number_next`` on the row and locks it (slower, gapless).
    """
    _name = 'ir.sequence'
    _order = 'name'

    def _get_number_next_actual(self):
        '''Return number from ir_sequence row when no_gap implementation,
        and number from postgres sequence when standard implementation.'''
        for seq in self:
            if seq.implementation != 'standard':
                seq.number_next_actual = seq.number_next
            else:
                # Standard implementation: peek at the PG sequence without
                # consuming a value.
                seq_id = "%03d" % seq.id
                seq.number_next_actual = _predict_nextval(self, seq_id)

    def _set_number_next_actual(self):
        # Inverse of the compute above; 0/False falls back to 1.
        for seq in self:
            seq.write({'number_next': seq.number_next_actual or 1})

    @api.model
    def _get_current_sequence(self):
        '''Returns the object on which we can find the number_next to consider for the sequence.
        It could be an ir.sequence or an ir.sequence.date_range depending if use_date_range is checked
        or not. This function will also create the ir.sequence.date_range if none exists yet for today
        '''
        if not self.use_date_range:
            return self
        now = fields.Date.today()
        seq_date = self.env['ir.sequence.date_range'].search(
            [('sequence_id', '=', self.id), ('date_from', '<=', now),
             ('date_to', '>=', now)], limit=1)
        if seq_date:
            return seq_date[0]
        # no date_range sequence was found, we create a new one
        return self._create_date_range_seq(now)

    name = fields.Char(required=True)
    code = fields.Char(string='Sequence Code')
    implementation = fields.Selection(
        [('standard', 'Standard'), ('no_gap', 'No gap')],
        string='Implementation', required=True, default='standard',
        help="Two sequence object implementations are offered: Standard "
        "and 'No gap'. The later is slower than the former but forbids any "
        "gap in the sequence (while they are possible in the former).")
    active = fields.Boolean(default=True)
    prefix = fields.Char(help="Prefix value of the record for the sequence")
    suffix = fields.Char(help="Suffix value of the record for the sequence")
    number_next = fields.Integer(string='Next Number', required=True,
                                 default=1,
                                 help="Next number of this sequence")
    number_next_actual = fields.Integer(
        compute='_get_number_next_actual', inverse='_set_number_next_actual',
        string='Next Number',
        help="Next number that will be used. This number can be incremented "
        "frequently so the displayed value might already be obsolete")
    number_increment = fields.Integer(
        string='Step', required=True, default=1,
        help="The next number of the sequence will be incremented by this number")
    padding = fields.Integer(
        string='Sequence Size', required=True, default=0,
        help="Flectra will automatically adds some '0' on the left of the "
        "'Next Number' to get the required padding size.")
    company_id = fields.Many2one('res.company', string='Company',
                                 default=lambda s: s.env['res.company'].
                                 _company_default_get('ir.sequence'))
    use_date_range = fields.Boolean(string='Use subsequences per date_range')
    date_range_ids = fields.One2many('ir.sequence.date_range', 'sequence_id',
                                     string='Subsequences')

    @api.model
    def create(self, values):
        """ Create a sequence, in implementation == standard a fast gaps-allowed PostgreSQL sequence is used.
        """
        seq = super(IrSequence, self).create(values)
        if values.get('implementation', 'standard') == 'standard':
            _create_sequence(self._cr, "ir_sequence_%03d" % seq.id,
                             values.get('number_increment', 1),
                             values.get('number_next', 1))
        return seq

    @api.multi
    def unlink(self):
        # Drop the backing PG sequences before deleting the rows.
        _drop_sequences(self._cr, ["ir_sequence_%03d" % x.id for x in self])
        return super(IrSequence, self).unlink()

    @api.multi
    def write(self, values):
        """Write, keeping the backing PostgreSQL sequences in sync.

        4 cases depending on (previous implementation, new implementation):
        standard->standard alters the PG sequence, standard->no_gap drops it,
        no_gap->standard creates it, no_gap->no_gap needs no DDL.
        """
        new_implementation = values.get('implementation')
        for seq in self:
            i = values.get('number_increment', seq.number_increment)
            n = values.get('number_next', seq.number_next)
            if seq.implementation == 'standard':
                if new_implementation in ('standard', None):
                    # Implementation has NOT changed.
                    # Only change sequence if really requested.
                    if values.get('number_next'):
                        _alter_sequence(self._cr, "ir_sequence_%03d" % seq.id,
                                        number_next=n)
                    if seq.number_increment != i:
                        _alter_sequence(self._cr, "ir_sequence_%03d" % seq.id,
                                        number_increment=i)
                        seq.date_range_ids._alter_sequence(number_increment=i)
                else:
                    # Switching to no_gap: the PG sequences are now useless.
                    _drop_sequences(self._cr, ["ir_sequence_%03d" % seq.id])
                    for sub_seq in seq.date_range_ids:
                        _drop_sequences(
                            self._cr,
                            ["ir_sequence_%03d_%03d" % (seq.id, sub_seq.id)])
            else:
                if new_implementation in ('no_gap', None):
                    pass
                else:
                    # Switching to standard: create the backing PG sequences.
                    _create_sequence(self._cr, "ir_sequence_%03d" % seq.id, i, n)
                    for sub_seq in seq.date_range_ids:
                        _create_sequence(
                            self._cr,
                            "ir_sequence_%03d_%03d" % (seq.id, sub_seq.id), i, n)
        return super(IrSequence, self).write(values)

    def _next_do(self):
        # Consume the next number from the appropriate backend and format it.
        if self.implementation == 'standard':
            number_next = _select_nextval(self._cr,
                                          'ir_sequence_%03d' % self.id)
        else:
            number_next = _update_nogap(self, self.number_increment)
        return self.get_next_char(number_next)

    def _get_prefix_suffix(self):
        """Interpolate the date placeholders (%(year)s, %(range_month)s, ...)
        in prefix and suffix and return the resulting pair of strings.

        :raises UserError: if the prefix/suffix contains an invalid
            %-placeholder.
        """
        def _interpolate(s, d):
            return (s % d) if s else ''

        def _interpolation_dict():
            # 'now' is the generation instant (in the user's tz); the
            # effective/range dates may be forced through the context.
            now = range_date = effective_date = datetime.now(
                pytz.timezone(self._context.get('tz') or 'UTC'))
            if self._context.get('ir_sequence_date'):
                effective_date = datetime.strptime(
                    self._context.get('ir_sequence_date'), '%Y-%m-%d')
            if self._context.get('ir_sequence_date_range'):
                range_date = datetime.strptime(
                    self._context.get('ir_sequence_date_range'), '%Y-%m-%d')
            sequences = {
                'year': '%Y', 'month': '%m', 'day': '%d', 'y': '%y',
                'doy': '%j', 'woy': '%W', 'weekday': '%w', 'h24': '%H',
                'h12': '%I', 'min': '%M', 'sec': '%S'
            }
            res = {}
            for key, format in sequences.items():
                res[key] = effective_date.strftime(format)
                res['range_' + key] = range_date.strftime(format)
                res['current_' + key] = now.strftime(format)
            return res

        d = _interpolation_dict()
        try:
            interpolated_prefix = _interpolate(self.prefix, d)
            interpolated_suffix = _interpolate(self.suffix, d)
        except ValueError:
            # FIX: was `self.get('name')` -- recordsets have no .get(), so a
            # bad placeholder raised AttributeError instead of this UserError.
            raise UserError(
                _('Invalid prefix or suffix for sequence \'%s\'') %
                self.name)
        return interpolated_prefix, interpolated_suffix

    def get_next_char(self, number_next):
        # Zero-pad the number to `padding` digits between prefix and suffix.
        interpolated_prefix, interpolated_suffix = self._get_prefix_suffix()
        return interpolated_prefix + '%%0%sd' % self.padding % number_next + interpolated_suffix

    def _create_date_range_seq(self, date):
        """Create (and return) the ir.sequence.date_range covering `date`.

        The new range defaults to the calendar year of `date`, shrunk so it
        does not overlap any pre-existing range of this sequence.
        """
        year = fields.Date.from_string(date).strftime('%Y')
        date_from = '{}-01-01'.format(year)
        date_to = '{}-12-31'.format(year)
        # Clip the end just before the next existing range, if any.
        date_range = self.env['ir.sequence.date_range'].search(
            [('sequence_id', '=', self.id), ('date_from', '>=', date),
             ('date_from', '<=', date_to)], order='date_from desc', limit=1)
        if date_range:
            date_to = datetime.strptime(date_range.date_from,
                                        '%Y-%m-%d') + timedelta(days=-1)
            date_to = date_to.strftime('%Y-%m-%d')
        # Clip the start just after the previous existing range, if any.
        date_range = self.env['ir.sequence.date_range'].search(
            [('sequence_id', '=', self.id), ('date_to', '>=', date_from),
             ('date_to', '<=', date)], order='date_to desc', limit=1)
        if date_range:
            date_from = datetime.strptime(date_range.date_to,
                                          '%Y-%m-%d') + timedelta(days=1)
            date_from = date_from.strftime('%Y-%m-%d')
        seq_date_range = self.env['ir.sequence.date_range'].sudo().create({
            'date_from': date_from,
            'date_to': date_to,
            'sequence_id': self.id,
        })
        return seq_date_range

    def _next(self):
        """ Returns the next number in the preferred sequence in all the ones given in self."""
        if not self.use_date_range:
            return self._next_do()
        # date mode
        dt = fields.Date.today()
        if self._context.get('ir_sequence_date'):
            dt = self._context.get('ir_sequence_date')
        seq_date = self.env['ir.sequence.date_range'].search(
            [('sequence_id', '=', self.id), ('date_from', '<=', dt),
             ('date_to', '>=', dt)], limit=1)
        if not seq_date:
            seq_date = self._create_date_range_seq(dt)
        # Delegate to the sub-sequence, exposing its start date for %(range_*)s.
        return seq_date.with_context(
            ir_sequence_date_range=seq_date.date_from)._next()

    @api.multi
    def next_by_id(self):
        """ Draw an interpolated string using the specified sequence."""
        self.check_access_rights('read')
        return self._next()

    @api.model
    def next_by_code(self, sequence_code):
        """ Draw an interpolated string using a sequence with the requested code.
            If several sequences with the correct code are available to the user
            (multi-company cases), the one from the user's current company will
            be used.

            :param dict context: context dictionary may contain a
                ``force_company`` key with the ID of the company to
                use instead of the user's current company for the
                sequence selection. A matching sequence for that
                specific company will get higher priority.
        """
        self.check_access_rights('read')
        force_company = self._context.get('force_company')
        if not force_company:
            force_company = self.env.user.company_id.id
        # order='company_id' puts company-specific sequences before shared ones.
        seq_ids = self.search([('code', '=', sequence_code),
                               ('company_id', 'in', [force_company, False])],
                              order='company_id')
        if not seq_ids:
            _logger.debug(
                "No ir.sequence has been found for code '%s'. Please make sure a sequence is set for current company."
                % sequence_code)
            return False
        seq_id = seq_ids[0]
        return seq_id._next()

    @api.model
    def get_id(self, sequence_code_or_id, code_or_id='id'):
        """ Draw an interpolated string using the specified sequence.

        The sequence to use is specified by the ``sequence_code_or_id``
        argument, which can be a code or an id (as controlled by the
        ``code_or_id`` argument. This method is deprecated.
        """
        _logger.warning(
            "ir_sequence.get() and ir_sequence.get_id() are deprecated. "
            "Please use ir_sequence.next_by_code() or ir_sequence.next_by_id().")
        if code_or_id == 'id':
            return self.browse(sequence_code_or_id).next_by_id()
        else:
            return self.next_by_code(sequence_code_or_id)

    @api.model
    def get(self, code):
        """ Draw an interpolated string using the specified sequence.

        The sequence to use is specified by its code. This method is
        deprecated.
        """
        return self.get_id(code, 'code')
class AccountPaymentRegister(models.TransientModel):
    """Payment registration wizard.

    Collects receivable/payable journal items (through ``line_ids``), groups
    them into batches per partner/account/currency/bank/direction, and turns
    them into one or several 'account.payment' records.
    """
    _name = 'account.payment.register'
    _description = 'Register Payment'

    # == Business fields ==
    payment_date = fields.Date(string="Payment Date", required=True,
        default=fields.Date.context_today)
    amount = fields.Monetary(currency_field='currency_id', store=True, readonly=False,
        compute='_compute_amount')
    communication = fields.Char(string="Memo", store=True, readonly=False,
        compute='_compute_communication')
    group_payment = fields.Boolean(string="Group Payments", store=True, readonly=False,
        compute='_compute_group_payment',
        help="Only one payment will be created by partner (bank)/ currency.")
    currency_id = fields.Many2one('res.currency', string='Currency', store=True, readonly=False,
        compute='_compute_currency_id',
        help="The payment's currency.")
    journal_id = fields.Many2one('account.journal', store=True, readonly=False,
        compute='_compute_journal_id',
        domain="[('company_id', '=', company_id), ('type', 'in', ('bank', 'cash'))]")
    partner_bank_id = fields.Many2one('res.partner.bank', string="Recipient Bank Account",
        readonly=False, store=True,
        compute='_compute_partner_bank_id',
        domain="['|', ('company_id', '=', False), ('company_id', '=', company_id), ('partner_id', '=', partner_id)]")
    company_currency_id = fields.Many2one('res.currency', string="Company Currency",
        related='company_id.currency_id')

    # == Fields given through the context ==
    line_ids = fields.Many2many(
        'account.move.line',
        'account_payment_register_move_line_rel',
        'wizard_id',
        'line_id',
        string="Journal items",
        readonly=True,
        copy=False,
    )
    payment_type = fields.Selection([
        ('outbound', 'Send Money'),
        ('inbound', 'Receive Money'),
    ], string='Payment Type', store=True, copy=False, compute='_compute_from_lines')
    partner_type = fields.Selection([
        ('customer', 'Customer'),
        ('supplier', 'Vendor'),
    ], store=True, copy=False, compute='_compute_from_lines')
    source_amount = fields.Monetary(string="Amount to Pay (company currency)", store=True, copy=False,
        currency_field='company_currency_id',
        compute='_compute_from_lines')
    source_amount_currency = fields.Monetary(string="Amount to Pay (foreign currency)", store=True, copy=False,
        currency_field='source_currency_id',
        compute='_compute_from_lines')
    source_currency_id = fields.Many2one('res.currency', string='Source Currency', store=True, copy=False,
        compute='_compute_from_lines',
        help="The payment's currency.")
    can_edit_wizard = fields.Boolean(store=True, copy=False,
        compute='_compute_from_lines',
        help="Technical field used to indicate the user can edit the wizard content such as the amount.")
    can_group_payments = fields.Boolean(store=True, copy=False,
        compute='_compute_from_lines',
        help="Technical field used to indicate the user can see the 'group_payments' box.")
    company_id = fields.Many2one('res.company', store=True, copy=False,
        compute='_compute_from_lines')
    partner_id = fields.Many2one('res.partner', string="Customer/Vendor", store=True, copy=False,
        ondelete='restrict',
        compute='_compute_from_lines')

    # == Payment methods fields ==
    payment_method_id = fields.Many2one('account.payment.method', string='Payment Method',
        readonly=False, store=True,
        compute='_compute_payment_method_id',
        domain="[('id', 'in', available_payment_method_ids)]",
        help="Manual: Get paid by cash, check or any other method outside of Flectra.\n"\
        "Electronic: Get paid automatically through a payment acquirer by requesting a transaction on a card saved by the customer when buying or subscribing online (payment token).\n"\
        "Check: Pay bill by check and print it from Flectra.\n"\
        "Batch Deposit: Encase several customer checks at once by generating a batch deposit to submit to your bank. When encoding the bank statement in Flectra, you are suggested to reconcile the transaction with the batch deposit.To enable batch deposit, module account_batch_payment must be installed.\n"\
        "SEPA Credit Transfer: Pay bill from a SEPA Credit Transfer file you submit to your bank. To enable sepa credit transfer, module account_sepa must be installed ")
    available_payment_method_ids = fields.Many2many('account.payment.method',
        compute='_compute_payment_method_fields')
    hide_payment_method = fields.Boolean(
        compute='_compute_payment_method_fields',
        help="Technical field used to hide the payment method if the selected journal has only one available which is 'manual'")

    # == Payment difference fields ==
    payment_difference = fields.Monetary(compute='_compute_payment_difference')
    payment_difference_handling = fields.Selection([
        ('open', 'Keep open'),
        ('reconcile', 'Mark as fully paid'),
    ], default='open', string="Payment Difference Handling")
    writeoff_account_id = fields.Many2one('account.account', string="Difference Account", copy=False,
        domain="[('deprecated', '=', False), ('company_id', '=', company_id)]")
    writeoff_label = fields.Char(string='Journal Item Label', default='Write-Off',
        help='Change label of the counterpart that will hold the payment difference')

    # == Display purpose fields ==
    show_partner_bank_account = fields.Boolean(
        compute='_compute_show_require_partner_bank',
        help="Technical field used to know whether the field `partner_bank_id` needs to be displayed or not in the payments form views")
    require_partner_bank_account = fields.Boolean(
        compute='_compute_show_require_partner_bank',
        help="Technical field used to know whether the field `partner_bank_id` needs to be required or not in the payments form views")
    country_code = fields.Char(related='company_id.country_id.code', readonly=True)

    # -------------------------------------------------------------------------
    # HELPERS
    # -------------------------------------------------------------------------

    @api.model
    def _get_batch_communication(self, batch_result):
        ''' Helper to compute the communication based on the batch.
        :param batch_result: A batch returned by '_get_batches'.
        :return: A string representing a communication to be set on payment.
        '''
        # Use a set to deduplicate labels; sort for a deterministic memo.
        labels = set(line.name or line.move_id.ref or line.move_id.name for line in batch_result['lines'])
        return ' '.join(sorted(labels))

    @api.model
    def _get_line_batch_key(self, line):
        ''' Turn the line passed as parameter to a dictionary defining on which way the lines
        will be grouped together.
        :param line: An account.move.line record.
        :return: A python dictionary.
        '''
        return {
            'partner_id': line.partner_id.id,
            'account_id': line.account_id.id,
            'currency_id': (line.currency_id or line.company_currency_id).id,
            'partner_bank_id': line.move_id.partner_bank_id.id,
            'partner_type': 'customer' if line.account_internal_type == 'receivable' else 'supplier',
            'payment_type': 'inbound' if line.balance > 0.0 else 'outbound',
        }

    def _get_batches(self):
        ''' Group the account.move.line linked to the wizard together.
        :return: A list of batches, each one containing:
            * key_values:   The key as a dictionary used to group the journal items together.
            * moves:        An account.move recordset.
        :raise UserError: If the lines span several companies or if there is no line at all.
        '''
        self.ensure_one()

        lines = self.line_ids._origin

        if len(lines.company_id) > 1:
            raise UserError(_("You can't create payments for entries belonging to different companies."))
        if not lines:
            raise UserError(_("You can't open the register payment wizard without at least one receivable/payable line."))

        batches = {}
        for line in lines:
            batch_key = self._get_line_batch_key(line)

            # The dict key is a string so batches stay hashable/stable.
            serialized_key = '-'.join(str(v) for v in batch_key.values())
            batches.setdefault(serialized_key, {
                'key_values': batch_key,
                'lines': self.env['account.move.line'],
            })
            batches[serialized_key]['lines'] += line
        return list(batches.values())

    @api.model
    def _get_wizard_values_from_batch(self, batch_result):
        ''' Extract values from the batch passed as parameter (see '_get_batches')
        to be mounted in the wizard view.
        :param batch_result: A batch returned by '_get_batches'.
        :return: A dictionary containing valid fields
        '''
        key_values = batch_result['key_values']
        lines = batch_result['lines']
        company = lines[0].company_id

        source_amount = abs(sum(lines.mapped('amount_residual')))
        if key_values['currency_id'] == company.currency_id.id:
            source_amount_currency = source_amount
        else:
            source_amount_currency = abs(sum(lines.mapped('amount_residual_currency')))

        return {
            'company_id': company.id,
            'partner_id': key_values['partner_id'],
            'partner_type': key_values['partner_type'],
            'payment_type': key_values['payment_type'],
            'source_currency_id': key_values['currency_id'],
            'source_amount': source_amount,
            'source_amount_currency': source_amount_currency,
        }

    # -------------------------------------------------------------------------
    # COMPUTE METHODS
    # -------------------------------------------------------------------------

    @api.depends('line_ids')
    def _compute_from_lines(self):
        ''' Load initial values from the account.moves passed through the context. '''
        for wizard in self:
            batches = wizard._get_batches()
            batch_result = batches[0]
            wizard_values_from_batch = wizard._get_wizard_values_from_batch(batch_result)

            if len(batches) == 1:
                # == Single batch to be mounted on the view ==
                wizard.update(wizard_values_from_batch)

                wizard.can_edit_wizard = True
                wizard.can_group_payments = len(batch_result['lines']) != 1
            else:
                # == Multiple batches: The wizard is not editable ==
                wizard.update({
                    'company_id': batches[0]['lines'][0].company_id.id,
                    'partner_id': False,
                    'partner_type': False,
                    'payment_type': wizard_values_from_batch['payment_type'],
                    'source_currency_id': False,
                    'source_amount': False,
                    'source_amount_currency': False,
                })

                wizard.can_edit_wizard = False
                wizard.can_group_payments = any(len(batch_result['lines']) != 1 for batch_result in batches)

    @api.depends('can_edit_wizard')
    def _compute_communication(self):
        # The communication can't be computed in '_compute_from_lines' because
        # it's a compute editable field and then, should be computed in a separated method.
        for wizard in self:
            if wizard.can_edit_wizard:
                # Fixed: was 'self._get_batches()', which calls ensure_one() and
                # therefore crashed when the compute ran on a multi-record set.
                batches = wizard._get_batches()
                wizard.communication = wizard._get_batch_communication(batches[0])
            else:
                wizard.communication = False

    @api.depends('can_edit_wizard')
    def _compute_group_payment(self):
        for wizard in self:
            if wizard.can_edit_wizard:
                batches = wizard._get_batches()
                # Group by default when the single batch covers a single move.
                wizard.group_payment = len(batches[0]['lines'].move_id) == 1
            else:
                wizard.group_payment = False

    @api.depends('company_id', 'source_currency_id')
    def _compute_journal_id(self):
        for wizard in self:
            domain = [
                ('type', 'in', ('bank', 'cash')),
                ('company_id', '=', wizard.company_id.id),
            ]
            journal = None
            if wizard.source_currency_id:
                # Prefer a journal already using the source currency.
                journal = self.env['account.journal'].search(domain + [('currency_id', '=', wizard.source_currency_id.id)], limit=1)
            if not journal:
                journal = self.env['account.journal'].search(domain, limit=1)
            wizard.journal_id = journal

    @api.depends('journal_id')
    def _compute_currency_id(self):
        for wizard in self:
            # Journal currency wins; fall back to the source currency, then the
            # company currency.
            wizard.currency_id = wizard.journal_id.currency_id or wizard.source_currency_id or wizard.company_id.currency_id

    @api.depends('partner_id')
    def _compute_partner_bank_id(self):
        ''' The default partner_bank_id will be the first available on the partner. '''
        for wizard in self:
            available_partner_bank_accounts = wizard.partner_id.bank_ids.filtered(lambda x: x.company_id in (False, wizard.company_id))
            if available_partner_bank_accounts:
                wizard.partner_bank_id = available_partner_bank_accounts[0]._origin
            else:
                wizard.partner_bank_id = False

    # NOTE: a duplicate definition of '_compute_payment_method_id' (depending
    # only on 'journal_id' and recomputing the batches) used to sit here; it was
    # dead code since the later definition of the same name shadowed it, so it
    # has been removed.

    @api.depends('payment_type',
                 'journal_id.inbound_payment_method_ids',
                 'journal_id.outbound_payment_method_ids')
    def _compute_payment_method_fields(self):
        for wizard in self:
            if wizard.payment_type == 'inbound':
                wizard.available_payment_method_ids = wizard.journal_id.inbound_payment_method_ids
            else:
                wizard.available_payment_method_ids = wizard.journal_id.outbound_payment_method_ids

            wizard.hide_payment_method = len(wizard.available_payment_method_ids) == 1 and wizard.available_payment_method_ids.code == 'manual'

    @api.depends('payment_type',
                 'journal_id.inbound_payment_method_ids',
                 'journal_id.outbound_payment_method_ids')
    def _compute_payment_method_id(self):
        for wizard in self:
            if wizard.payment_type == 'inbound':
                available_payment_methods = wizard.journal_id.inbound_payment_method_ids
            else:
                available_payment_methods = wizard.journal_id.outbound_payment_method_ids

            # Select the first available one by default.
            if available_payment_methods:
                wizard.payment_method_id = available_payment_methods[0]._origin
            else:
                wizard.payment_method_id = False

    @api.depends('payment_method_id')
    def _compute_show_require_partner_bank(self):
        """ Computes if the destination bank account must be displayed in the payment form view. By default, it
        won't be displayed but some modules might change that, depending on the payment type."""
        for wizard in self:
            wizard.show_partner_bank_account = wizard.payment_method_id.code in self.env['account.payment']._get_method_codes_using_bank_account()
            wizard.require_partner_bank_account = wizard.payment_method_id.code in self.env['account.payment']._get_method_codes_needing_bank_account()

    @api.depends('source_amount', 'source_amount_currency', 'source_currency_id', 'company_id', 'currency_id', 'payment_date')
    def _compute_amount(self):
        for wizard in self:
            if wizard.source_currency_id == wizard.currency_id:
                # Same currency.
                wizard.amount = wizard.source_amount_currency
            elif wizard.currency_id == wizard.company_id.currency_id:
                # Payment expressed on the company's currency.
                wizard.amount = wizard.source_amount
            else:
                # Foreign currency on payment different than the one set on the journal entries.
                amount_payment_currency = wizard.company_id.currency_id._convert(wizard.source_amount, wizard.currency_id, wizard.company_id, wizard.payment_date)
                wizard.amount = amount_payment_currency

    @api.depends('amount')
    def _compute_payment_difference(self):
        for wizard in self:
            if wizard.source_currency_id == wizard.currency_id:
                # Same currency.
                wizard.payment_difference = wizard.source_amount_currency - wizard.amount
            elif wizard.currency_id == wizard.company_id.currency_id:
                # Payment expressed on the company's currency.
                wizard.payment_difference = wizard.source_amount - wizard.amount
            else:
                # Foreign currency on payment different than the one set on the journal entries.
                amount_payment_currency = wizard.company_id.currency_id._convert(wizard.source_amount, wizard.currency_id, wizard.company_id, wizard.payment_date)
                wizard.payment_difference = amount_payment_currency - wizard.amount

    # -------------------------------------------------------------------------
    # LOW-LEVEL METHODS
    # -------------------------------------------------------------------------

    @api.model
    def default_get(self, fields_list):
        # OVERRIDE: feed 'line_ids' from the active_model/active_ids context,
        # keeping only posted receivable/payable lines with a residual amount.
        res = super().default_get(fields_list)

        if 'line_ids' in fields_list and 'line_ids' not in res:

            # Retrieve moves to pay from the context.

            if self._context.get('active_model') == 'account.move':
                lines = self.env['account.move'].browse(self._context.get('active_ids', [])).line_ids
            elif self._context.get('active_model') == 'account.move.line':
                lines = self.env['account.move.line'].browse(self._context.get('active_ids', []))
            else:
                raise UserError(_("The register payment wizard should only be called on account.move or account.move.line records."))

            # Keep lines having a residual amount to pay.
            available_lines = self.env['account.move.line']
            for line in lines:
                if line.move_id.state != 'posted':
                    raise UserError(_("You can only register payment for posted journal entries."))

                if line.account_internal_type not in ('receivable', 'payable'):
                    continue
                if line.currency_id:
                    if line.currency_id.is_zero(line.amount_residual_currency):
                        continue
                else:
                    if line.company_currency_id.is_zero(line.amount_residual):
                        continue
                available_lines |= line

            # Check.
            if not available_lines:
                raise UserError(_("You can't register a payment because there is nothing left to pay on the selected journal items."))
            if len(lines.company_id) > 1:
                raise UserError(_("You can't create payments for entries belonging to different companies."))
            if len(set(available_lines.mapped('account_internal_type'))) > 1:
                raise UserError(_("You can't register payments for journal items being either all inbound, either all outbound."))

            res['line_ids'] = [(6, 0, available_lines.ids)]

        return res

    # -------------------------------------------------------------------------
    # BUSINESS METHODS
    # -------------------------------------------------------------------------

    def _create_payment_vals_from_wizard(self):
        """ Build the 'account.payment' create values from the (editable) wizard state. """
        payment_vals = {
            'date': self.payment_date,
            'amount': self.amount,
            'payment_type': self.payment_type,
            'partner_type': self.partner_type,
            'ref': self.communication,
            'journal_id': self.journal_id.id,
            'currency_id': self.currency_id.id,
            'partner_id': self.partner_id.id,
            'partner_bank_id': self.partner_bank_id.id,
            'payment_method_id': self.payment_method_id.id,
            'destination_account_id': self.line_ids[0].account_id.id
        }

        if not self.currency_id.is_zero(self.payment_difference) and self.payment_difference_handling == 'reconcile':
            payment_vals['write_off_line_vals'] = {
                'name': self.writeoff_label,
                'amount': self.payment_difference,
                'account_id': self.writeoff_account_id.id,
            }
        return payment_vals

    def _create_payment_vals_from_batch(self, batch_result):
        """ Build the 'account.payment' create values for one batch (non-editable mode). """
        batch_values = self._get_wizard_values_from_batch(batch_result)
        return {
            'date': self.payment_date,
            'amount': batch_values['source_amount_currency'],
            'payment_type': batch_values['payment_type'],
            'partner_type': batch_values['partner_type'],
            'ref': self._get_batch_communication(batch_result),
            'journal_id': self.journal_id.id,
            'currency_id': batch_values['source_currency_id'],
            'partner_id': batch_values['partner_id'],
            'partner_bank_id': batch_result['key_values']['partner_bank_id'],
            'payment_method_id': self.payment_method_id.id,
            'destination_account_id': batch_result['lines'][0].account_id.id
        }

    def _create_payments(self):
        """ Create, post and reconcile the payments for the wizard's batches.
        :return: An account.payment recordset.
        """
        self.ensure_one()
        batches = self._get_batches()
        edit_mode = self.can_edit_wizard and (len(batches[0]['lines']) == 1 or self.group_payment)

        to_reconcile = []
        if edit_mode:
            payment_vals = self._create_payment_vals_from_wizard()
            payment_vals_list = [payment_vals]
            to_reconcile.append(batches[0]['lines'])
        else:
            # Don't group payments: Create one batch per move.
            if not self.group_payment:
                new_batches = []
                for batch_result in batches:
                    for line in batch_result['lines']:
                        new_batches.append({
                            **batch_result,
                            'lines': line,
                        })
                batches = new_batches

            payment_vals_list = []
            for batch_result in batches:
                payment_vals_list.append(self._create_payment_vals_from_batch(batch_result))
                to_reconcile.append(batch_result['lines'])

        payments = self.env['account.payment'].create(payment_vals_list)

        # If payments are made using a currency different than the source one, ensure the balance match exactly in
        # order to fully paid the source journal items.
        # For example, suppose a new currency B having a rate 100:1 regarding the company currency A.
        # If you try to pay 12.15A using 0.12B, the computed balance will be 12.00A for the payment instead of 12.15A.
        if edit_mode:
            for payment, lines in zip(payments, to_reconcile):
                # Batches are made using the same currency so making 'lines.currency_id' is ok.
                if payment.currency_id != lines.currency_id:
                    liquidity_lines, counterpart_lines, writeoff_lines = payment._seek_for_lines()
                    source_balance = abs(sum(lines.mapped('amount_residual')))
                    payment_rate = liquidity_lines[0].amount_currency / liquidity_lines[0].balance
                    source_balance_converted = abs(source_balance) * payment_rate

                    # Translate the balance into the payment currency is order to be able to compare them.
                    # In case in both have the same value (12.15 * 0.01 ~= 0.12 in our example), it means the user
                    # attempt to fully paid the source lines and then, we need to manually fix them to get a perfect
                    # match.
                    payment_balance = abs(sum(counterpart_lines.mapped('balance')))
                    payment_amount_currency = abs(sum(counterpart_lines.mapped('amount_currency')))
                    if not payment.currency_id.is_zero(source_balance_converted - payment_amount_currency):
                        continue

                    delta_balance = source_balance - payment_balance

                    # Balance are already the same.
                    if self.company_currency_id.is_zero(delta_balance):
                        continue

                    # Fix the balance but make sure to peek the liquidity and counterpart lines first.
                    debit_lines = (liquidity_lines + counterpart_lines).filtered('debit')
                    credit_lines = (liquidity_lines + counterpart_lines).filtered('credit')

                    payment.move_id.write({'line_ids': [
                        (1, debit_lines[0].id, {'debit': debit_lines[0].debit + delta_balance}),
                        (1, credit_lines[0].id, {'credit': credit_lines[0].credit + delta_balance}),
                    ]})

        payments.action_post()

        domain = [('account_internal_type', 'in', ('receivable', 'payable')), ('reconciled', '=', False)]
        for payment, lines in zip(payments, to_reconcile):

            # When using the payment tokens, the payment could not be posted at this point (e.g. the transaction failed)
            # and then, we can't perform the reconciliation.
            if payment.state != 'posted':
                continue

            payment_lines = payment.line_ids.filtered_domain(domain)
            for account in payment_lines.account_id:
                (payment_lines + lines)\
                    .filtered_domain([('account_id', '=', account.id), ('reconciled', '=', False)])\
                    .reconcile()

        return payments

    def action_create_payments(self):
        """ Button entry point: create the payments, then redirect to them
        (unless 'dont_redirect_to_payments' is set in the context). """
        payments = self._create_payments()

        if self._context.get('dont_redirect_to_payments'):
            return True

        action = {
            'name': _('Payments'),
            'type': 'ir.actions.act_window',
            'res_model': 'account.payment',
            'context': {'create': False},
        }
        if len(payments) == 1:
            action.update({
                'view_mode': 'form',
                'res_id': payments.id,
            })
        else:
            action.update({
                'view_mode': 'tree,form',
                'domain': [('id', 'in', payments.ids)],
            })
        return action
class IrUiMenu(models.Model):
    """Menu items of the web client, organized as a tree via parent_id."""
    _name = 'ir.ui.menu'
    _order = "sequence,id"
    _parent_store = True

    def __init__(self, *args, **kwargs):
        super(IrUiMenu, self).__init__(*args, **kwargs)
        # Menu visibility depends on model access rights: flush the menu caches
        # whenever ir.model.access invalidates its own caches.
        self.pool['ir.model.access'].register_cache_clearing_method(self._name, 'clear_caches')

    name = fields.Char(string='Menu', required=True, translate=True)
    active = fields.Boolean(default=True)
    sequence = fields.Integer(default=10)
    child_id = fields.One2many('ir.ui.menu', 'parent_id', string='Child IDs')
    parent_id = fields.Many2one('ir.ui.menu', string='Parent Menu', index=True, ondelete="restrict")
    # parent_left/parent_right: nested-set columns maintained by _parent_store.
    parent_left = fields.Integer(index=True)
    parent_right = fields.Integer(index=True)
    groups_id = fields.Many2many('res.groups', 'ir_ui_menu_group_rel', 'menu_id', 'gid', string='Groups',
                                 help="If you have groups, the visibility of this menu will be based on these groups. "\
                                      "If this field is empty, Flectra will compute visibility based on the related object's read access.")
    complete_name = fields.Char(compute='_compute_complete_name', string='Full Path')
    web_icon = fields.Char(string='Web Icon File')
    action = fields.Reference(selection=[('ir.actions.report', 'ir.actions.report'),
                                         ('ir.actions.act_window', 'ir.actions.act_window'),
                                         ('ir.actions.act_url', 'ir.actions.act_url'),
                                         ('ir.actions.server', 'ir.actions.server'),
                                         ('ir.actions.client', 'ir.actions.client')])
    web_icon_data = fields.Binary(string='Web Icon Image', attachment=True)
    bookmark_icon = fields.Binary(string='Bookmark Icon Image', attachment=True, compute='_compute_bookmark_icon')
    bookmark_label = fields.Char(string='Bookmark Label', compute='_compute_bookmark_label')

    def _compute_bookmark_label(self):
        # Join the ancestor chain of names with '<br/>' for display purposes.
        for menu in self:
            label_parts = menu._get_parent_bookmark_label()
            menu.bookmark_label = '<br/>'.join(label_parts)

    def _get_parent_bookmark_label(self):
        """ Return the list of menu names from the root down to ``self``,
        skipping a name identical to the one just above it. """
        self.ensure_one()
        if self.parent_id:
            label_parts = self.parent_id._get_parent_bookmark_label()
            if label_parts and label_parts[-1] != self.name:
                label_parts.append(self.name)
        else:
            label_parts = [self.name]
        return label_parts

    def _compute_bookmark_icon(self):
        for menu in self:
            menu.bookmark_icon = menu._get_parent_icon()

    def _get_parent_icon(self):
        """ Return the first ``web_icon_data`` found walking up the tree. """
        self.ensure_one()
        if self.web_icon_data:
            return self.web_icon_data
        # NOTE(review): if no ancestor has web_icon_data, this recurses onto an
        # empty parent_id recordset and ensure_one() will raise — confirm every
        # root menu is expected to carry an icon.
        return self.parent_id._get_parent_icon()

    @api.depends('name', 'parent_id.complete_name')
    def _compute_complete_name(self):
        for menu in self:
            menu.complete_name = menu._get_full_name()

    def _get_full_name(self, level=6):
        """ Return the full name of ``self`` (up to a certain level).

        :param level: maximum number of ancestors to include; deeper paths are
            abbreviated with '...'.
        """
        if level <= 0:
            return '...'
        if self.parent_id:
            return self.parent_id._get_full_name(level - 1) + MENU_ITEM_SEPARATOR + (self.name or "")
        else:
            return self.name

    def read_image(self, path):
        """ Return the base64-encoded content of a 'module,path' icon
        reference, or False when missing. """
        if not path:
            return False
        path_info = path.split(',')
        icon_path = get_module_resource(path_info[0], path_info[1])
        icon_image = False
        if icon_path:
            with tools.file_open(icon_path, 'rb') as icon_file:
                # NOTE(review): base64.encodestring is a deprecated alias of
                # encodebytes (removed in Python 3.9).
                icon_image = base64.encodestring(icon_file.read())
        return icon_image

    @api.constrains('parent_id')
    def _check_parent_id(self):
        if not self._check_recursion():
            raise ValidationError(_('Error! You cannot create recursive menus.'))

    @api.model
    @tools.ormcache('frozenset(self.env.user.groups_id.ids)', 'debug')
    def _visible_menu_ids(self, debug=False):
        """ Return the ids of the menu items visible to the user.

        Cached per (user groups, debug) combination via ormcache.
        """
        # retrieve all menus, and determine which ones are visible
        context = {'ir.ui.menu.full_list': True}
        menus = self.with_context(context).search([])

        groups = self.env.user.groups_id
        if not debug:
            # Outside debug mode, ignore the 'technical features' group.
            groups = groups - self.env.ref('base.group_no_one')
        # first discard all menus with groups the user does not have
        menus = menus.filtered(
            lambda menu: not menu.groups_id or menu.groups_id & groups)

        # take apart menus that have an action
        action_menus = menus.filtered(lambda m: m.action and m.action.exists())
        folder_menus = menus - action_menus
        visible = self.browse()

        # process action menus, check whether their action is allowed
        access = self.env['ir.model.access']
        MODEL_GETTER = {
            'ir.actions.act_window': lambda action: action.res_model,
            'ir.actions.report': lambda action: action.model,
            'ir.actions.server': lambda action: action.model_id.model,
        }
        for menu in action_menus:
            get_model = MODEL_GETTER.get(menu.action._name)
            if not get_model or not get_model(menu.action) or \
                    access.check(get_model(menu.action), 'read', False):
                # make menu visible, and its folder ancestors, too
                visible += menu
                menu = menu.parent_id
                while menu and menu in folder_menus and menu not in visible:
                    visible += menu
                    menu = menu.parent_id
        return set(visible.ids)

    @api.multi
    @api.returns('self')
    def _filter_visible_menus(self):
        """ Filter `self` to only keep the menu items that should be visible in
            the menu hierarchy of the current user.
            Uses a cache for speeding up the computation.
        """
        visible_ids = self._visible_menu_ids(request.debug if request else False)
        return self.filtered(lambda menu: menu.id in visible_ids)

    @api.model
    def search(self, args, offset=0, limit=None, order=None, count=False):
        # Fetch the unsliced result from super first: visibility filtering must
        # happen before offset/limit are applied in Python below.
        menus = super(IrUiMenu, self).search(args, offset=0, limit=None, order=order, count=False)
        if menus:
            # menu filtering is done only on main menu tree, not other menu lists
            if not self._context.get('ir.ui.menu.full_list'):
                menus = menus._filter_visible_menus()
            if offset:
                menus = menus[offset:]
            if limit:
                menus = menus[:limit]
        return len(menus) if count else menus

    @api.multi
    def name_get(self):
        return [(menu.id, menu._get_full_name()) for menu in self]

    @api.model
    def create(self, values):
        # Visibility caches become stale as soon as the menu tree changes.
        self.clear_caches()
        if 'web_icon' in values:
            values['web_icon_data'] = self._compute_web_icon_data(values.get('web_icon'))
        return super(IrUiMenu, self).create(values)

    @api.multi
    def write(self, values):
        self.clear_caches()
        if 'web_icon' in values:
            values['web_icon_data'] = self._compute_web_icon_data(values.get('web_icon'))
        return super(IrUiMenu, self).write(values)

    def _compute_web_icon_data(self, web_icon):
        """ Returns the image associated to `web_icon`.
            `web_icon` can either be:
              - an image icon [module, path]
              - a built icon [icon_class, icon_color, background_color]
            and it only has to call `read_image` if it's an image.
        """
        if web_icon and len(web_icon.split(',')) == 2:
            return self.read_image(web_icon)

    @api.multi
    def unlink(self):
        # Detach children and promote them to top-level, because it would be unwise to
        # cascade-delete submenus blindly. We also can't use ondelete=set null because
        # that is not supported when _parent_store is used (would silently corrupt it).
        # TODO: ideally we should move them under a generic "Orphans" menu somewhere?
        extra = {'ir.ui.menu.full_list': True, 'active_test': False}
        direct_children = self.with_context(**extra).search([('parent_id', 'in', self.ids)])
        direct_children.write({'parent_id': False})

        self.clear_caches()
        return super(IrUiMenu, self).unlink()

    @api.multi
    def copy(self, default=None):
        record = super(IrUiMenu, self).copy(default=default)
        # Suffix the copy's name with '(n)', incrementing an existing counter.
        match = NUMBER_PARENS.search(record.name)
        if match:
            next_num = int(match.group(1)) + 1
            record.name = NUMBER_PARENS.sub('(%d)' % next_num, record.name)
        else:
            record.name = record.name + '(1)'
        return record

    @api.model
    @api.returns('self')
    def get_user_roots(self):
        """ Return all root menu ids visible for the user.

        :return: the root menu ids
        :rtype: list(int)
        """
        # NOTE(review): despite the docstring, this returns a recordset (the
        # @api.returns('self') decorator converts it to ids for old-API callers).
        return self.search([('parent_id', '=', False)])

    @api.model
    @tools.ormcache_context('self._uid', keys=('lang', ))
    def load_menus_root(self):
        """ Load only the root menu items (cached per user and language). """
        fields = ['name', 'sequence', 'parent_id', 'action', 'web_icon_data']
        menu_roots = self.get_user_roots()
        menu_roots_data = menu_roots.read(fields) if menu_roots else []
        menu_root = {
            'id': False,
            'name': 'root',
            'parent_id': [-1, ''],
            'children': menu_roots_data,
            'all_menu_ids': menu_roots.ids,
        }

        menu_roots._set_menuitems_xmlids(menu_root)
        return menu_root

    @api.model
    @tools.ormcache_context('self._uid', 'debug', keys=('lang', ))
    def load_menus(self, debug):
        """ Loads all menu items (all applications and their sub-menus).

        :return: the menu root
        :rtype: dict('children': menu_nodes)
        """
        fields = ['name', 'sequence', 'parent_id', 'action', 'web_icon', 'web_icon_data']
        menu_roots = self.get_user_roots()
        menu_roots_data = menu_roots.read(fields) if menu_roots else []
        menu_root = {
            'id': False,
            'name': 'root',
            'parent_id': [-1, ''],
            'children': menu_roots_data,
            'all_menu_ids': menu_roots.ids,
        }
        if not menu_roots_data:
            return menu_root

        # menus are loaded fully unlike a regular tree view, cause there are a
        # limited number of items (752 when all 6.1 addons are installed)
        menus = self.search([('id', 'child_of', menu_roots.ids)])
        menu_items = menus.read(fields)

        # add roots at the end of the sequence, so that they will overwrite
        # equivalent menu items from full menu read when put into id:item
        # mapping, resulting in children being correctly set on the roots.
        menu_items.extend(menu_roots_data)
        menu_root['all_menu_ids'] = menus.ids  # includes menu_roots!

        # make a tree using parent_id
        menu_items_map = {menu_item["id"]: menu_item for menu_item in menu_items}
        for menu_item in menu_items:
            parent = menu_item['parent_id'] and menu_item['parent_id'][0]
            if parent in menu_items_map:
                menu_items_map[parent].setdefault('children', []).append(menu_item)

        # sort by sequence a tree using parent_id
        for menu_item in menu_items:
            menu_item.setdefault('children', []).sort(key=operator.itemgetter('sequence'))

        (menu_roots + menus)._set_menuitems_xmlids(menu_root)
        return menu_root

    def _set_menuitems_xmlids(self, menu_root):
        """ Annotate every node of the ``menu_root`` tree with its XML id
        (empty string when the menu has no ir.model.data entry). """
        menuitems = self.env['ir.model.data'].sudo().search([
            ('res_id', 'in', self.ids),
            ('model', '=', 'ir.ui.menu')
        ])

        xmlids = {
            menu.res_id: menu.complete_name
            for menu in menuitems
        }

        def _set_xmlids(tree, xmlids):
            tree['xmlid'] = xmlids.get(tree['id'], '')
            if 'children' in tree:
                for child in tree['children']:
                    _set_xmlids(child, xmlids)

        _set_xmlids(menu_root, xmlids)
class ResConfigSettings(models.TransientModel):
    """General settings panel (base_setup).

    Groups company-wide toggles, optional-module installers
    (``module_*`` fields install their addon on save) and a few flags
    persisted in ``ir.config_parameter``.
    """
    _inherit = 'res.config.settings'

    group_multi_company = fields.Boolean(
        "Manage multiple companies",
        implied_group='base.group_multi_company')
    company_id = fields.Many2one(
        'res.company', string='Company', required=True,
        default=lambda self: self.env.user.company_id)
    default_user_rights = fields.Boolean("Default Access Rights")
    default_external_email_server = fields.Boolean("External Email Servers")
    module_base_import = fields.Boolean(
        "Allow users to import data from CSV/XLS/XLSX/ODS files")
    module_google_calendar = fields.Boolean(
        string='Allow the users to synchronize their calendar with Google Calendar')
    module_google_drive = fields.Boolean(
        "Attach Google documents to any record")
    module_google_spreadsheet = fields.Boolean("Google Spreadsheet")
    module_auth_oauth = fields.Boolean(
        "Use external authentication providers (OAuth)")
    module_auth_ldap = fields.Boolean("LDAP Authentication")
    module_base_gengo = fields.Boolean("Translate Your Website with Gengo")
    module_pad = fields.Boolean("Collaborative Pads")
    module_voip = fields.Boolean("Asterisk (VoIP)")
    company_share_partner = fields.Boolean(
        string='Share partners to all companies',
        help="Share your partners to all companies defined in your instance.\n"
             " * Checked : Partners are visible for every companies, even if a company is defined on the partner.\n"
             " * Unchecked : Each company can see only its partner (partners where company is defined). Partners not related to a company are visible for all companies.")
    default_custom_report_footer = fields.Boolean("Custom Report Footer")
    report_footer = fields.Text(
        related="company_id.report_footer",
        string='Custom Report Footer',
        help="Footer text displayed at the bottom of all reports.")
    group_multi_currency = fields.Boolean(
        string='Multi-Currencies',
        implied_group='base.group_multi_currency',
        help="Allows to work in a multi currency environment")
    paperformat_id = fields.Many2one(
        related="company_id.paperformat_id", string='Paper format')
    external_report_layout = fields.Selection(
        related="company_id.external_report_layout")
    send_statistics = fields.Boolean("Send Statistics")
    activator_key = fields.Binary('Upload Activation Key')
    contract_id = fields.Char('Contract ID')

    @api.model
    def get_values(self):
        """Populate the settings form from ir.config_parameter and from the
        multi-company partner rule's active flag."""
        res = super(ResConfigSettings, self).get_values()
        params = self.env['ir.config_parameter'].sudo()
        default_external_email_server = params.get_param(
            'base_setup.default_external_email_server', default=False)
        # _get_param returns None when the key is missing (unlike get_param,
        # which would yield False), so an unset key defaults to opted-in.
        send_statistics = params._get_param('base_setup.send_statistics')
        if send_statistics is None:
            send_statistics = 'true'
        if send_statistics in ['true', 'false']:
            # stored as the JSON literals 'true'/'false' -> python bool
            send_statistics = json.loads(send_statistics)
        default_user_rights = params.get_param(
            'base_setup.default_user_rights', default=False)
        default_custom_report_footer = params.get_param(
            'base_setup.default_custom_report_footer', default=False)
        res.update(
            default_external_email_server=default_external_email_server,
            default_user_rights=default_user_rights,
            send_statistics=send_statistics,
            default_custom_report_footer=default_custom_report_footer,
            # partner sharing is "on" exactly when the company rule is off
            company_share_partner=not self.env.ref(
                'base.res_partner_rule').active,
        )
        return res

    @api.multi
    def set_values(self):
        """Persist the settings form back to ir.config_parameter, toggle the
        multi-company partner rule, and validate any uploaded key."""
        super(ResConfigSettings, self).set_values()
        set_param = self.env['ir.config_parameter'].sudo().set_param
        set_param("base_setup.default_external_email_server",
                  self.default_external_email_server)
        set_param("base_setup.default_user_rights", self.default_user_rights)
        set_param("base_setup.default_custom_report_footer",
                  self.default_custom_report_footer)
        # stored as JSON literals, mirroring get_values()
        send_statistics = 'true' if self.send_statistics else 'false'
        set_param("base_setup.send_statistics", send_statistics)
        self.env.ref('base.res_partner_rule').write(
            {'active': not self.company_share_partner})
        if self.activator_key:
            self._check_authorization()

    @api.multi
    def open_company(self):
        """Open the current user's company form."""
        return {
            'type': 'ir.actions.act_window',
            'name': 'My Company',
            'view_type': 'form',
            'view_mode': 'form',
            'res_model': 'res.company',
            'res_id': self.env.user.company_id.id,
            'target': 'current',
        }

    @api.multi
    def open_default_user(self):
        """Open the template user whose rights seed newly created users."""
        action = self.env.ref('base.action_res_users').read()[0]
        action['res_id'] = self.env.ref('base.default_user').id
        action['views'] = [[self.env.ref('base.view_users_form').id, 'form']]
        return action

    @api.model
    def _prepare_report_view_action(self, template):
        """Build an act_window opening the ir.ui.view named by the external
        id *template* in form mode."""
        template_id = self.env.ref(template)
        return {
            'type': 'ir.actions.act_window',
            'res_model': 'ir.ui.view',
            'view_type': 'form',
            'view_mode': 'form',
            'res_id': template_id.id,
        }

    @api.multi
    def edit_external_header(self):
        """Open the QWeb view of the selected external report layout, or
        return False when no layout is selected."""
        if not self.external_report_layout:
            return False
        return self._prepare_report_view_action(
            'web.external_layout_' + self.external_report_layout)

    @api.multi
    def change_report_template(self):
        """Open the document-layout chooser wizard on the user's company."""
        self.ensure_one()
        template = self.env.ref('base.view_company_report_form')
        return {
            'name': _('Choose Your Document Layout'),
            'type': 'ir.actions.act_window',
            'view_type': 'form',
            'view_mode': 'form',
            'res_id': self.env.user.company_id.id,
            'res_model': 'res.company',
            'views': [(template.id, 'form')],
            'view_id': template.id,
            'target': 'new',
        }

    def _check_authorization(self):
        """Decode the uploaded activation key with the contract id and, on
        success, store the database expiration date and contract validity.

        :raises UserError: when the key cannot be decoded/validated.
        """
        if self.activator_key and self.contract_id:
            try:
                set_param = self.env['ir.config_parameter'].sudo().set_param
                # base64.decodebytes/encodebytes replace the deprecated
                # decodestring/encodestring aliases (removed in Python 3.9);
                # they are byte-for-byte equivalent.
                binary = json.loads(
                    base64.decodebytes(
                        self.activator_key).decode('utf-8')).encode('ascii')
                binary = base64.decodebytes(binary)
                # decrypt/encrypt come from elsewhere in this module —
                # presumably the licensing helpers; confirm at call site.
                enc = json.dumps(decrypt(binary, self.contract_id))
                if enc:
                    dt = datetime.datetime.strptime(json.loads(enc),
                                                    '"%Y-%m-%d %H:%M:%S"')
                    set_param('database.expiration_date', dt)
                    set_param(
                        'contract.validity',
                        base64.encodebytes(
                            encrypt(json.dumps(str(dt)), str(dt))))
            except Exception:
                _logger.info(_('Please double-check your Contract Key!'),
                             exc_info=True)
                raise UserError(
                    _('Authorization error!') + ' ' +
                    _('Please double-check your Contract Key!'))
class FleetVehicleCost(models.Model):
    """Base model for any cost attached to a vehicle.

    Contract, service and fuel logs all delegate to this model (via
    ``_inherits``); ``cost_type`` discriminates the category.
    """
    _name = 'fleet.vehicle.cost'
    _description = 'Cost related to a vehicle'
    _order = 'date desc, vehicle_id asc'

    name = fields.Char(related='vehicle_id.name', string='Name', store=True)
    vehicle_id = fields.Many2one('fleet.vehicle', 'Vehicle', required=True,
                                 help='Vehicle concerned by this log')
    cost_subtype_id = fields.Many2one(
        'fleet.service.type', 'Type',
        help='Cost type purchased with this cost')
    amount = fields.Float('Total Price')
    cost_type = fields.Selection([('contract', 'Contract'),
                                  ('services', 'Services'), ('fuel', 'Fuel'),
                                  ('other', 'Other')],
                                 'Category of the cost',
                                 default="other",
                                 help='For internal purpose only',
                                 required=True)
    parent_id = fields.Many2one('fleet.vehicle.cost', 'Parent',
                                help='Parent cost to this current cost')
    cost_ids = fields.One2many('fleet.vehicle.cost', 'parent_id',
                               'Included Services', copy=True)
    odometer_id = fields.Many2one(
        'fleet.vehicle.odometer', 'Odometer',
        help='Odometer measure of the vehicle at the moment of this log')
    odometer = fields.Float(
        compute="_get_odometer",
        inverse='_set_odometer',
        string='Odometer Value',
        help='Odometer measure of the vehicle at the moment of this log')
    odometer_unit = fields.Selection(related='vehicle_id.odometer_unit',
                                     string="Unit", readonly=True)
    date = fields.Date(help='Date when the cost has been executed')
    contract_id = fields.Many2one('fleet.vehicle.log.contract', 'Contract',
                                  help='Contract attached to this cost')
    auto_generated = fields.Boolean('Automatically Generated', readonly=True)
    description = fields.Char("Cost Description")

    def _get_odometer(self):
        """Expose the linked odometer log's value on the cost itself."""
        for record in self:
            if record.odometer_id:
                record.odometer = record.odometer_id.value

    def _set_odometer(self):
        """Inverse of ``_get_odometer``: writing a value creates a fresh
        odometer log per record and links it.

        :raises UserError: when the written value is falsy (blanking the
            odometer is not allowed).
        """
        for record in self:
            if not record.odometer:
                raise UserError(
                    _('Emptying the odometer value of a vehicle is not allowed.'
                      ))
            odometer = self.env['fleet.vehicle.odometer'].create({
                'value': record.odometer,
                'date': record.date or fields.Date.context_today(record),
                'vehicle_id': record.vehicle_id.id
            })
            # Fix: link the new log to the record being iterated, not to the
            # whole recordset. The previous `self.odometer_id = odometer`
            # broke (or mislinked) when writing on several records at once.
            record.odometer_id = odometer

    @api.model
    def create(self, data):
        """Create a cost, forcing consistency with any given parent cost or
        contract, and dropping a zero odometer value.

        :param dict data: creation values; may be adjusted in place.
        :return: the created fleet.vehicle.cost record
        """
        # make sure that the data are consistent with values of parent and
        # contract records given
        if 'parent_id' in data and data['parent_id']:
            parent = self.browse(data['parent_id'])
            data['vehicle_id'] = parent.vehicle_id.id
            data['date'] = parent.date
            data['cost_type'] = parent.cost_type
        if 'contract_id' in data and data['contract_id']:
            contract = self.env['fleet.vehicle.log.contract'].browse(
                data['contract_id'])
            data['vehicle_id'] = contract.vehicle_id.id
            data['cost_subtype_id'] = contract.cost_subtype_id.id
            data['cost_type'] = contract.cost_type
        if 'odometer' in data and not data['odometer']:
            # if received value for odometer is 0, then remove it from the
            # data as it would result to the creation of a
            # odometer log with 0, which is to be avoided
            del data['odometer']
        return super(FleetVehicleCost, self).create(data)
class EventType(models.Model):
    """Event category: a reusable template carrying default seat limits,
    timezone/hashtag defaults and a default mail schedule for events."""
    _name = 'event.type'
    _description = 'Event Category'

    @api.model
    def _get_default_event_type_mail_ids(self):
        """Default mail-scheduler lines for a new category: confirmation on
        subscription, plus reminders 1 day and 10 days before the event."""
        subscription_template = self.env.ref('event.event_subscription')
        reminder_template = self.env.ref('event.event_reminder')
        lines = [
            {
                'interval_unit': 'now',
                'interval_type': 'after_sub',
                'template_id': subscription_template,
            },
            {
                'interval_nbr': 1,
                'interval_unit': 'days',
                'interval_type': 'before_event',
                'template_id': reminder_template,
            },
            {
                'interval_nbr': 10,
                'interval_unit': 'days',
                'interval_type': 'before_event',
                'template_id': reminder_template,
            },
        ]
        return [(0, 0, vals) for vals in lines]

    name = fields.Char('Event Category', required=True, translate=True)
    # registration
    has_seats_limitation = fields.Boolean('Limited Seats', default=False)
    default_registration_min = fields.Integer(
        'Minimum Registrations',
        default=0,
        help=
        "It will select this default minimum value when you choose this event")
    default_registration_max = fields.Integer(
        'Maximum Registrations',
        default=0,
        help=
        "It will select this default maximum value when you choose this event")
    auto_confirm = fields.Boolean(
        'Automatically Confirm Registrations',
        default=True,
        help="Events and registrations will automatically be confirmed "
        "upon creation, easing the flow for simple events.")
    # location
    is_online = fields.Boolean(
        'Online Event',
        help=
        'Online events like webinars do not require a specific location and are hosted online.'
    )
    use_timezone = fields.Boolean('Use Default Timezone')
    default_timezone = fields.Selection('_tz_get',
                                        string='Timezone',
                                        default=lambda self: self.env.user.tz)
    # communication
    use_hashtag = fields.Boolean('Use Default Hashtag')
    default_hashtag = fields.Char('Twitter Hashtag')
    use_mail_schedule = fields.Boolean('Automatically Send Emails',
                                       default=True)
    event_type_mail_ids = fields.One2many(
        'event.type.mail',
        'event_type_id',
        string='Mail Schedule',
        copy=False,
        default=_get_default_event_type_mail_ids)

    @api.onchange('has_seats_limitation')
    def _onchange_has_seats_limitation(self):
        """Zero out the min/max defaults as soon as the limit is disabled."""
        if self.has_seats_limitation:
            return
        self.default_registration_min = 0
        self.default_registration_max = 0

    @api.model
    def _tz_get(self):
        """Selection provider: every pytz timezone as a (value, label) pair."""
        return [(tz_name, tz_name) for tz_name in pytz.all_timezones]
class Task(models.Model):
    """A project task, with kanban workflow, mail-gateway integration,
    portal access and deadline computation from the project's priority
    day-counts."""
    _name = "project.task"
    _description = "Task"
    _date_name = "date_start"
    _inherit = ['mail.thread', 'mail.activity.mixin', 'portal.mixin', 'ir.branch.company.mixin']
    _mail_post_access = 'read'
    _order = "priority desc, sequence, date_start, name, id"

    def _get_default_partner(self):
        """Default customer: the partner of the project given in context,
        if any (returns None otherwise, which the field treats as empty)."""
        if 'default_project_id' in self.env.context:
            default_project_id = self.env['project.project'].browse(self.env.context['default_project_id'])
            return default_project_id.exists().partner_id

    def _get_default_stage_id(self):
        """ Gives default stage_id """
        project_id = self.env.context.get('default_project_id')
        if not project_id:
            return False
        return self.stage_find(project_id, [('fold', '=', False)])

    @api.model
    def _read_group_stage_ids(self, stages, domain, order):
        """group_expand hook: always show the current stages, plus every
        stage of the context project, so empty kanban columns appear."""
        search_domain = [('id', 'in', stages.ids)]
        if 'default_project_id' in self.env.context:
            search_domain = ['|', ('project_ids', '=', self.env.context['default_project_id'])] + search_domain
        # bypass record rules so stages are visible regardless of the user
        stage_ids = stages._search(search_domain, order=order, access_rights_uid=SUPERUSER_ID)
        return stages.browse(stage_ids)

    @api.one
    @api.depends('stage_id')
    def calculate_actual_end_date(self):
        """Stamp actual_end_date when the task reaches the stage literally
        named 'Done'. NOTE(review): matches on the (translatable) stage name,
        not on `fold` — verify this is intended."""
        if self.stage_id.name == 'Done':
            self.actual_end_date = date.today()

    active = fields.Boolean(default=True)
    name = fields.Char(string='Task Title', track_visibility='always', required=True, index=True)
    description = fields.Html(string='Description')
    sequence = fields.Integer(string='Sequence', index=True, default=10, help="Gives the sequence order when displaying a list of tasks.")
    stage_id = fields.Many2one('project.task.type', string='Stage', track_visibility='onchange', index=True,
        default=_get_default_stage_id, group_expand='_read_group_stage_ids',
        domain="[('project_ids', '=', project_id)]", copy=False)
    tag_ids = fields.Many2many('project.tags', string='Tags', oldname='categ_ids')
    kanban_state = fields.Selection([
        ('normal', 'Grey'),
        ('done', 'Green'),
        ('blocked', 'Red')], string='Kanban State',
        copy=False, default='normal', required=True,
        help="A task's kanban state indicates special situations affecting it:\n"
             " * Grey is the default situation\n"
             " * Red indicates something is preventing the progress of this task\n"
             " * Green indicates the task is ready to be pulled to the next stage")
    kanban_state_label = fields.Char(compute='_compute_kanban_state_label', string='Kanban State', track_visibility='onchange')
    create_date = fields.Datetime(index=True)
    write_date = fields.Datetime(index=True)  #not displayed in the view but it might be useful with base_automation module (and it needs to be defined first for that)
    date_start = fields.Datetime(string='Starting Date',
        default=fields.Datetime.now,
        index=True, copy=False)
    date_end = fields.Datetime(string='Ending Date', index=True, copy=False)
    date_assign = fields.Datetime(string='Assigning Date', index=True, copy=False, readonly=True)
    date_deadline = fields.Date(string='Deadline', index=True, copy=False)
    date_last_stage_update = fields.Datetime(string='Last Stage Update',
        default=fields.Datetime.now,
        index=True,
        copy=False,
        readonly=True)
    project_id = fields.Many2one('project.project',
        string='Project',
        default=lambda self: self.env.context.get('default_project_id'),
        index=True,
        track_visibility='onchange',
        change_default=True)
    notes = fields.Text(string='Notes')
    planned_hours = fields.Float(string='Initially Planned Hours', help='Estimated time to do the task, usually set by the project manager when the task is in draft state.')
    remaining_hours = fields.Float(string='Remaining Hours', digits=(16,2), help="Total remaining time, can be re-estimated periodically by the assignee of the task.")
    user_id = fields.Many2one('res.users',
        string='Assigned to',
        default=lambda self: self.env.uid,
        index=True, track_visibility='always')
    partner_id = fields.Many2one('res.partner', string='Customer', default=_get_default_partner)
    manager_id = fields.Many2one('res.users', string='Project Manager', related='project_id.user_id', readonly=True)
    company_id = fields.Many2one('res.company', string='Company', default=lambda self: self.env['res.company']._company_default_get())
    color = fields.Integer(string='Color Index')
    user_email = fields.Char(related='user_id.email', string='User Email', readonly=True)
    attachment_ids = fields.One2many('ir.attachment', compute='_compute_attachment_ids', string="Main Attachments", help="Attachment that don't come from message.")
    # In the domain of displayed_image_id, we couln't use attachment_ids because a one2many is represented as a list of commands so we used res_model & res_id
    displayed_image_id = fields.Many2one('ir.attachment', domain="[('res_model', '=', 'project.task'), ('res_id', '=', id), ('mimetype', 'ilike', 'image')]", string='Cover Image')
    legend_blocked = fields.Char(related='stage_id.legend_blocked', string='Kanban Blocked Explanation', readonly=True)
    legend_done = fields.Char(related='stage_id.legend_done', string='Kanban Valid Explanation', readonly=True)
    legend_normal = fields.Char(related='stage_id.legend_normal', string='Kanban Ongoing Explanation', readonly=True)
    parent_id = fields.Many2one('project.task', string='Parent Task')
    child_ids = fields.One2many('project.task', 'parent_id', string="Sub-tasks")
    subtask_project_id = fields.Many2one('project.project', related="project_id.subtask_project_id", string='Sub-task Project', readonly=True)
    subtask_count = fields.Integer(compute='_compute_subtask_count', type='integer', string="Sub-task count")
    email_from = fields.Char(string='Email', help="These people will receive email.", index=True)
    email_cc = fields.Char(string='Watchers Emails', help="""These email addresses will be added to the CC field of all inbound and outbound emails for this record before being sent. Separate multiple email addresses with a comma""")
    # Computed field about working time elapsed between record creation and assignation/closing.
    working_hours_open = fields.Float(compute='_compute_elapsed', string='Working hours to assign', store=True, group_operator="avg")
    working_hours_close = fields.Float(compute='_compute_elapsed', string='Working hours to close', store=True, group_operator="avg")
    working_days_open = fields.Float(compute='_compute_elapsed', string='Working days to assign', store=True, group_operator="avg")
    working_days_close = fields.Float(compute='_compute_elapsed', string='Working days to close', store=True, group_operator="avg")
    task_seq = fields.Char(
        string="Reference",
        track_visibility='onchange',
        default=lambda self: self.env['ir.sequence'].next_by_code(
            'project.task') or '/')
    priority = fields.Selection([
        ('l', 'Low'),
        ('m', 'Medium'),
        ('h', 'High'),
    ], string="Priority", default='l')
    start_date = fields.Date(string="Start Date", track_visibility='onchange')
    end_date = fields.Date(string="End Date", track_visibility='onchange')
    actual_end_date = fields.Date(
        compute='calculate_actual_end_date',
        string="Actual End Date",
        store=True,
        track_visibility='onchange'
    )

    @api.onchange('priority', 'project_id')
    def task_deadline(self):
        """Recompute the deadline as today + the project's day-count for the
        selected priority (project.low/medium/high — presumably integer-ish
        char/number fields on project.project; confirm there)."""
        if self.project_id and self.priority:
            days = 0
            if self.priority == "l":
                days = int(self.project_id.low)
            elif self.priority == "m":
                days = int(self.project_id.medium)
            else:
                days = int(self.project_id.high)
            self.update({
                'date_deadline': date.today() + timedelta(days)
            })
        else:
            # no project or priority: deadline falls back to today
            self.update({
                'date_deadline': date.today()
            })

    def _compute_attachment_ids(self):
        """Main attachments = attachments on the task minus those that came
        in through chatter messages."""
        for task in self:
            attachment_ids = self.env['ir.attachment'].search([('res_id', '=', task.id), ('res_model', '=', 'project.task')]).ids
            message_attachment_ids = self.mapped('message_ids.attachment_ids').ids  # from mail_thread
            task.attachment_ids = list(set(attachment_ids) - set(message_attachment_ids))

    @api.multi
    @api.depends('create_date', 'date_end', 'date_assign')
    def _compute_elapsed(self):
        """Working hours/days between creation and assignment/closing, using
        the project's resource calendar; tasks without a calendar (or not yet
        created) get 0.0 everywhere."""
        task_linked_to_calendar = self.filtered(
            lambda task: task.project_id.resource_calendar_id and task.create_date
        )
        for task in task_linked_to_calendar:
            dt_create_date = fields.Datetime.from_string(task.create_date)
            if task.date_assign:
                dt_date_assign = fields.Datetime.from_string(task.date_assign)
                task.working_hours_open = task.project_id.resource_calendar_id.get_work_hours_count(
                    dt_create_date, dt_date_assign, False, compute_leaves=True)
                # NOTE(review): days derived as hours/24, not hours-per-workday
                task.working_days_open = task.working_hours_open / 24.0
            if task.date_end:
                dt_date_end = fields.Datetime.from_string(task.date_end)
                task.working_hours_close = task.project_id.resource_calendar_id.get_work_hours_count(
                    dt_create_date, dt_date_end, False, compute_leaves=True)
                task.working_days_close = task.working_hours_close / 24.0
        (self - task_linked_to_calendar).update(dict.fromkeys(
            ['working_hours_open', 'working_hours_close', 'working_days_open', 'working_days_close'], 0.0))

    @api.depends('stage_id', 'kanban_state')
    def _compute_kanban_state_label(self):
        """Human label for the kanban state, taken from the stage legends."""
        for task in self:
            if task.kanban_state == 'normal':
                task.kanban_state_label = task.legend_normal
            elif task.kanban_state == 'blocked':
                task.kanban_state_label = task.legend_blocked
            else:
                task.kanban_state_label = task.legend_done

    def _compute_portal_url(self):
        """portal.mixin hook: tasks live under /my/task/<id>."""
        super(Task, self)._compute_portal_url()
        for task in self:
            task.portal_url = '/my/task/%s' % task.id

    @api.onchange('partner_id')
    def _onchange_partner_id(self):
        """Mirror the customer's email into email_from."""
        self.email_from = self.partner_id.email

    @api.onchange('project_id')
    def _onchange_project(self):
        """Sync customer and stage with the newly selected project; clearing
        the project resets both (customer falls back to the context default)."""
        default_partner_id = self.env.context.get('default_partner_id')
        default_partner = self.env['res.partner'].browse(default_partner_id) if default_partner_id else self.env['res.partner']
        if self.project_id:
            self.partner_id = self.project_id.partner_id or default_partner
            if self.project_id not in self.stage_id.project_ids:
                self.stage_id = self.stage_find(self.project_id.id, [('fold', '=', False)])
        else:
            self.partner_id = default_partner
            self.stage_id = False

    @api.onchange('user_id')
    def _onchange_user(self):
        """Assigning someone (re)starts the task now."""
        if self.user_id:
            self.date_start = fields.Datetime.now()

    @api.multi
    def copy(self, default=None):
        """Duplicate a task: suffix the name with '(copy)' and reset
        remaining hours to the planned estimate unless overridden."""
        if default is None:
            default = {}
        if not default.get('name'):
            default['name'] = _("%s (copy)") % self.name
        if 'remaining_hours' not in default:
            default['remaining_hours'] = self.planned_hours
        return super(Task, self).copy(default)

    @api.multi
    def _compute_subtask_count(self):
        """Count all descendants (child_of minus the task itself)."""
        for task in self:
            task.subtask_count = self.search_count([('id', 'child_of', task.id), ('id', '!=', task.id)])

    @api.constrains('parent_id')
    def _check_subtask_project(self):
        """A sub-task must live in its parent project's configured
        sub-task project."""
        for task in self:
            if task.parent_id.project_id and task.project_id != task.parent_id.project_id.subtask_project_id:
                raise UserError(_("You can't define a parent task if its project is not correctly configured. The sub-task's project of the parent task's project should be this task's project"))

    # Override view according to the company definition
    @api.model
    def fields_view_get(self, view_id=None, view_type='form', toolbar=False, submenu=False):
        """When the company's time unit is not hours, swap float_time widgets
        for plain floats and rewrite 'Hours' in field labels."""
        # read uom as admin to avoid access rights issues, e.g. for portal/share users,
        # this should be safe (no context passed to avoid side-effects)
        obj_tm = self.env.user.company_id.project_time_mode_id
        # NOTE(review): `tm` is computed but never used below
        tm = obj_tm and obj_tm.name or 'Hours'
        res = super(Task, self).fields_view_get(view_id=view_id, view_type=view_type, toolbar=toolbar, submenu=submenu)
        # read uom as admin to avoid access rights issues, e.g. for portal/share users,
        # this should be safe (no context passed to avoid side-effects)
        # NOTE(review): obj_tm is re-read here — duplicate of the read above
        obj_tm = self.env.user.company_id.project_time_mode_id
        # using get_object to get translation value
        uom_hour = self.env.ref('product.product_uom_hour', False)
        if not obj_tm or not uom_hour or obj_tm.id == uom_hour.id:
            return res
        eview = etree.fromstring(res['arch'])
        # if the project_time_mode_id is not in hours (so in days), display it as a float field
        def _check_rec(eview):
            # recursively downgrade float_time widgets to plain float
            if eview.attrib.get('widget', '') == 'float_time':
                eview.set('widget', 'float')
            for child in eview:
                _check_rec(child)
            return True
        _check_rec(eview)
        res['arch'] = etree.tostring(eview, encoding='unicode')
        # replace reference of 'Hours' to 'Day(s)'
        for f in res['fields']:
            # TODO this NOT work in different language than english
            # the field 'Initially Planned Hours' should be replaced by 'Initially Planned Days'
            # but string 'Initially Planned Days' is not available in translation
            if 'Hours' in res['fields'][f]['string']:
                res['fields'][f]['string'] = res['fields'][f]['string'].replace('Hours', obj_tm.name)
        return res

    @api.model
    def get_empty_list_help(self, help):
        """Customize the empty-list helper to point at the context project."""
        self = self.with_context(
            empty_list_help_id=self.env.context.get('default_project_id'),
            empty_list_help_model='project.project',
            empty_list_help_document_name=_("tasks")
        )
        return super(Task, self).get_empty_list_help(help)

    # ----------------------------------------
    # Case management
    # ----------------------------------------

    def stage_find(self, section_id, domain=[], order='sequence'):
        """ Override of the base.stage method
            Parameter of the stage search taken from the lead:
            - section_id: if set, stages must belong to this section or
              be a default stage; if not set, stages must be default
              stages
        """
        # collect all section_ids
        section_ids = []
        if section_id:
            section_ids.append(section_id)
        section_ids.extend(self.mapped('project_id').ids)
        search_domain = []
        if section_ids:
            # OR together one project_ids clause per collected section
            search_domain = [('|')] * (len(section_ids) - 1)
            for section_id in section_ids:
                search_domain.append(('project_ids', '=', section_id))
        search_domain += list(domain)
        # perform search, return the first found
        return self.env['project.task.type'].search(search_domain, order=order, limit=1).id

    # ------------------------------------------------
    # CRUD overrides
    # ------------------------------------------------

    @api.model
    def create(self, vals):
        """Create a task, propagating the project into context for stage
        defaulting, stamping date_assign, and closing if created folded."""
        # context: no_log, because subtype already handle this
        context = dict(self.env.context, mail_create_nolog=True)

        # for default stage
        if vals.get('project_id') and not context.get('default_project_id'):
            context['default_project_id'] = vals.get('project_id')
        # user_id change: update date_assign
        if vals.get('user_id'):
            vals['date_assign'] = fields.Datetime.now()
        # Stage change: Update date_end if folded stage
        if vals.get('stage_id'):
            vals.update(self.update_date_end(vals['stage_id']))
        task = super(Task, self.with_context(context)).create(vals)
        return task

    @api.multi
    def write(self, vals):
        """Write, maintaining the stage-change/assignment timestamps and
        resetting the kanban state on stage moves."""
        now = fields.Datetime.now()
        # stage change: update date_last_stage_update
        if 'stage_id' in vals:
            vals.update(self.update_date_end(vals['stage_id']))
            vals['date_last_stage_update'] = now
            # reset kanban state when changing stage
            if 'kanban_state' not in vals:
                vals['kanban_state'] = 'normal'
        # user_id change: update date_assign
        if vals.get('user_id') and 'date_assign' not in vals:
            vals['date_assign'] = now
        result = super(Task, self).write(vals)
        return result

    def update_date_end(self, stage_id):
        """Return the date_end vals for a move into *stage_id*: now when the
        stage is folded (closed), cleared otherwise."""
        project_task_type = self.env['project.task.type'].browse(stage_id)
        if project_task_type.fold:
            return {'date_end': fields.Datetime.now()}
        return {'date_end': False}

    @api.multi
    def get_access_action(self, access_uid=None):
        """ Instead of the classic form view, redirect to website for portal users
        that can read the task. """
        self.ensure_one()
        user, record = self.env.user, self
        if access_uid:
            user = self.env['res.users'].sudo().browse(access_uid)
            record = self.sudo(user)
        if user.share:
            try:
                record.check_access_rule('read')
            except AccessError:
                # no read access: fall through to the default action
                pass
            else:
                return {
                    'type': 'ir.actions.act_url',
                    'url': '/my/task/%s' % self.id,
                    'target': 'self',
                    'res_id': self.id,
                }
        return super(Task, self).get_access_action(access_uid)

    # ---------------------------------------------------
    # Mail gateway
    # ---------------------------------------------------

    @api.multi
    def _track_template(self, tracking):
        """On stage change, send the stage's configured mail template."""
        res = super(Task, self)._track_template(tracking)
        test_task = self[0]
        changes, tracking_value_ids = tracking[test_task.id]
        if 'stage_id' in changes and test_task.stage_id.mail_template_id:
            res['stage_id'] = (test_task.stage_id.mail_template_id, {'composition_mode': 'mass_mail'})
        return res

    @api.multi
    def _track_subtype(self, init_values):
        """Map tracked changes to the matching project.* message subtype."""
        self.ensure_one()
        if 'kanban_state_label' in init_values and self.kanban_state == 'blocked':
            return 'project.mt_task_blocked'
        elif 'kanban_state_label' in init_values and self.kanban_state == 'done':
            return 'project.mt_task_ready'
        elif 'user_id' in init_values and self.user_id:  # assigned -> new
            return 'project.mt_task_new'
        elif 'stage_id' in init_values and self.stage_id and self.stage_id.sequence <= 1:  # start stage -> new
            return 'project.mt_task_new'
        elif 'stage_id' in init_values:
            return 'project.mt_task_stage'
        return super(Task, self)._track_subtype(init_values)

    @api.multi
    def _notification_recipients(self, message, groups):
        """ Handle project users and managers recipients that can convert assign
        tasks and create new one directly from notification emails. """
        groups = super(Task, self)._notification_recipients(message, groups)
        self.ensure_one()
        if not self.user_id:
            take_action = self._notification_link_helper('assign')
            project_actions = [{'url': take_action, 'title': _('I take it')}]
        else:
            project_actions = []
        new_group = (
            'group_project_user',
            lambda partner: bool(partner.user_ids) and any(user.has_group('project.group_project_user') for user in partner.user_ids), {
                'actions': project_actions,
            })
        groups = [new_group] + groups
        for group_name, group_method, group_data in groups:
            if group_name in ['customer', 'portal']:
                continue
            group_data['has_button_access'] = True
        return groups

    @api.model
    def message_get_reply_to(self, res_ids, default=None):
        """ Override to get the reply_to of the parent project. """
        tasks = self.sudo().browse(res_ids)
        project_ids = tasks.mapped('project_id').ids
        aliases = self.env['project.project'].message_get_reply_to(project_ids, default=default)
        return {task.id: aliases.get(task.project_id.id, False) for task in tasks}

    @api.multi
    def email_split(self, msg):
        """Split To/Cc of *msg* into addresses, dropping those whose local
        part is one of our project aliases (they are us, not recipients)."""
        email_list = tools.email_split((msg.get('to') or '') + ',' + (msg.get('cc') or ''))
        # check left-part is not already an alias
        aliases = self.mapped('project_id.alias_name')
        return [x for x in email_list if x.split('@')[0] not in aliases]

    @api.model
    def message_new(self, msg, custom_values=None):
        """ Overrides mail_thread message_new that is called by the mailgateway
            through message_process.
            This override updates the document according to the email.
        """
        # remove default author when going through the mail gateway. Indeed we
        # do not want to explicitly set user_id to False; however we do not
        # want the gateway user to be responsible if no other responsible is
        # found.
        create_context = dict(self.env.context or {})
        create_context['default_user_id'] = False
        if custom_values is None:
            custom_values = {}
        defaults = {
            'name': msg.get('subject') or _("No Subject"),
            'email_from': msg.get('from'),
            'email_cc': msg.get('cc'),
            'planned_hours': 0.0,
            'partner_id': msg.get('author_id')
        }
        defaults.update(custom_values)
        task = super(Task, self.with_context(create_context)).message_new(msg, custom_values=defaults)
        email_list = task.email_split(msg)
        partner_ids = [p for p in task._find_partner_from_emails(email_list, force_create=False) if p]
        task.message_subscribe(partner_ids)
        return task

    @api.multi
    def message_update(self, msg, update_vals=None):
        """ Override to update the task according to the email. """
        if update_vals is None:
            update_vals = {}
        # "cost" commands in the mail body update planned_hours
        maps = {
            'cost': 'planned_hours',
        }
        for line in msg['body'].split('\n'):
            line = line.strip()
            res = tools.command_re.match(line)
            if res:
                match = res.group(1).lower()
                field = maps.get(match)
                if field:
                    try:
                        update_vals[field] = float(res.group(2).lower())
                    except (ValueError, TypeError):
                        # unparsable value: ignore the command line
                        pass
        email_list = self.email_split(msg)
        partner_ids = [p for p in self._find_partner_from_emails(email_list, force_create=False) if p]
        self.message_subscribe(partner_ids)
        return super(Task, self).message_update(msg, update_vals=update_vals)

    @api.multi
    def message_get_suggested_recipients(self):
        """Suggest the customer (or its raw email) as a chatter recipient."""
        recipients = super(Task, self).message_get_suggested_recipients()
        for task in self.filtered('partner_id'):
            reason = _('Customer Email') if task.partner_id.email else _('Customer')
            if task.partner_id:
                task._message_add_suggested_recipient(recipients, partner=task.partner_id, reason=reason)
            elif task.email_from:
                # NOTE(review): unreachable — self is filtered on partner_id
                task._message_add_suggested_recipient(recipients, partner=task.email_from, reason=reason)
        return recipients

    @api.multi
    def message_get_email_values(self, notif_mail=None):
        """Inject project/tag threading headers into outgoing mails."""
        res = super(Task, self).message_get_email_values(notif_mail=notif_mail)
        headers = {}
        if res.get('headers'):
            try:
                headers.update(safe_eval(res['headers']))
            except Exception:
                # malformed existing headers: start from scratch
                pass
        if self.project_id:
            current_objects = [h for h in headers.get('X-Flectra-Objects', '').split(',') if h]
            current_objects.insert(0, 'project.project-%s, ' % self.project_id.id)
            headers['X-Flectra-Objects'] = ','.join(current_objects)
        if self.tag_ids:
            headers['X-Flectra-Tags'] = ','.join(self.tag_ids.mapped('name'))
        res['headers'] = repr(headers)
        return res

    def _message_post_after_hook(self, message):
        """After a chatter post, backfill partner_id on open tasks that only
        had a raw email matching a newly resolved recipient."""
        if self.email_from and not self.partner_id:
            # we consider that posting a message with a specified recipient (not a follower, a specific one)
            # on a document without customer means that it was created through the chatter using
            # suggested recipients. This heuristic allows to avoid ugly hacks in JS.
            new_partner = message.partner_ids.filtered(lambda partner: partner.email == self.email_from)
            if new_partner:
                self.search([
                    ('partner_id', '=', False),
                    ('email_from', '=', new_partner.email),
                    ('stage_id.fold', '=', False)]).write({'partner_id': new_partner.id})
        return super(Task, self)._message_post_after_hook(message)

    def action_assign_to_me(self):
        """Assign the task(s) to the current user."""
        self.write({'user_id': self.env.user.id})

    def action_open_parent_task(self):
        """Open this task's parent in a form view."""
        return {
            'name': _('Parent Task'),
            'view_type': 'form',
            'view_mode': 'form',
            'res_model': 'project.task',
            'res_id': self.parent_id.id,
            'type': 'ir.actions.act_window'
        }
class EventRegistration(models.Model):
    """An attendee registered to an event (one record per seat taken)."""
    _name = 'event.registration'
    _description = 'Attendee'
    _inherit = ['mail.thread']
    _order = 'name, create_date desc'

    # Reference of the business document (e.g. sales order) that created
    # this registration.
    origin = fields.Char(
        string='Source Document', readonly=True,
        help="Reference of the document that created the registration, for example a sales order")
    event_id = fields.Many2one(
        'event.event', string='Event', required=True, readonly=True,
        states={'draft': [('readonly', False)]})
    partner_id = fields.Many2one(
        'res.partner', string='Contact',
        states={'done': [('readonly', True)]})
    date_open = fields.Datetime(
        string='Registration Date', readonly=True,
        default=lambda self: fields.datetime.now())  # weird crash is directly now
    date_closed = fields.Datetime(string='Attended Date', readonly=True)
    # Related event dates, exposed for kanban/summary displays.
    event_begin_date = fields.Datetime(string="Event Start Date", related='event_id.date_begin', readonly=True)
    event_end_date = fields.Datetime(string="Event End Date", related='event_id.date_end', readonly=True)
    company_id = fields.Many2one(
        'res.company', string='Company', related='event_id.company_id',
        store=True, readonly=True, states={'draft': [('readonly', False)]})
    state = fields.Selection(
        [('draft', 'Unconfirmed'), ('cancel', 'Cancelled'),
         ('open', 'Confirmed'), ('done', 'Attended')],
        string='Status', default='draft', readonly=True, copy=False,
        track_visibility='onchange')
    email = fields.Char(string='Email')
    phone = fields.Char(string='Phone')
    name = fields.Char(string='Attendee Name', index=True)

    @api.one
    @api.constrains('event_id', 'state')
    def _check_seats_limit(self):
        """Block registrations once a seat-limited event is full.

        A draft registration requires at least one free seat; other states
        only require the availability not to be negative.
        """
        if self.event_id.seats_availability == 'limited' and self.event_id.seats_max and self.event_id.seats_available < (1 if self.state == 'draft' else 0):
            raise ValidationError(_('No more seats available for this event.'))

    @api.multi
    def _check_auto_confirmation(self):
        """Return True when all registrations in self may be auto-confirmed.

        Denied when forced to draft via context, or when any event is not
        confirmed, not auto-confirming, or out of (limited) seats.
        """
        if self._context.get('registration_force_draft'):
            return False
        if any(registration.event_id.state != 'confirm' or
               not registration.event_id.auto_confirm or
               (not registration.event_id.seats_available and
                registration.event_id.seats_availability == 'limited')
               for registration in self):
            return False
        return True

    @api.model
    def create(self, vals):
        """Create the registration and auto-confirm it when allowed."""
        registration = super(EventRegistration, self).create(vals)
        if registration._check_auto_confirmation():
            # sudo: confirmation may be triggered by a portal/public user
            # without write access on the registration.
            registration.sudo().confirm_registration()
        return registration

    @api.model
    def _prepare_attendee_values(self, registration):
        """ Method preparing the values to create new attendees based on a
        sales order line. It takes some registration data (dict-based) that are
        optional values coming from an external input like a web page. This method
        is meant to be inherited in various addons that sell events. """
        partner_id = registration.pop('partner_id', self.env.user.partner_id)
        event_id = registration.pop('event_id', False)
        data = {
            'name': registration.get('name', partner_id.name),
            'phone': registration.get('phone', partner_id.phone),
            'email': registration.get('email', partner_id.email),
            'partner_id': partner_id.id,
            'event_id': event_id and event_id.id or False,
        }
        # Keep only extra keys that actually exist on the model.
        data.update({key: value for key, value in registration.items() if key in self._fields})
        return data

    @api.one
    def do_draft(self):
        """Reset the registration to the Unconfirmed state."""
        self.state = 'draft'

    @api.one
    def confirm_registration(self):
        """Confirm the registration and trigger 'after subscription' mails."""
        self.state = 'open'
        # auto-trigger after_sub (on subscribe) mail schedulers, if needed
        onsubscribe_schedulers = self.event_id.event_mail_ids.filtered(
            lambda s: s.interval_type == 'after_sub')
        onsubscribe_schedulers.execute()

    @api.one
    def button_reg_close(self):
        """ Close Registration """
        today = fields.Datetime.now()
        if self.event_id.date_begin <= today and self.event_id.state == 'confirm':
            self.write({'state': 'done', 'date_closed': today})
        elif self.event_id.state == 'draft':
            raise UserError(_("You must wait the event confirmation before doing this action."))
        else:
            raise UserError(_("You must wait the event starting day before doing this action."))

    @api.one
    def button_reg_cancel(self):
        """Cancel the registration."""
        self.state = 'cancel'

    @api.onchange('partner_id')
    def _onchange_partner(self):
        """Prefill name/email/phone from the partner's contact address,
        keeping any value already typed in."""
        if self.partner_id:
            contact_id = self.partner_id.address_get().get('contact', False)
            if contact_id:
                contact = self.env['res.partner'].browse(contact_id)
                self.name = contact.name or self.name
                self.email = contact.email or self.email
                self.phone = contact.phone or self.phone

    @api.multi
    def message_get_suggested_recipients(self):
        """Suggest the attendee's partner (or raw email) as recipient,
        skipping partners that belong to the public user."""
        recipients = super(EventRegistration, self).message_get_suggested_recipients()
        public_users = self.env['res.users'].sudo()
        public_groups = self.env.ref("base.group_public", raise_if_not_found=False)
        if public_groups:
            public_users = public_groups.sudo().with_context(active_test=False).mapped("users")
        try:
            for attendee in self:
                is_public = attendee.sudo().with_context(active_test=False).partner_id.user_ids in public_users if public_users else False
                if attendee.partner_id and not is_public:
                    attendee._message_add_suggested_recipient(recipients, partner=attendee.partner_id, reason=_('Customer'))
                elif attendee.email:
                    attendee._message_add_suggested_recipient(recipients, email=attendee.email, reason=_('Customer Email'))
        except AccessError:  # no read access rights -> ignore suggested recipients
            pass
        return recipients

    def _message_post_after_hook(self, message):
        """After posting, bind a newly-created partner to all partner-less,
        non-cancelled registrations sharing the same email."""
        if self.email and not self.partner_id:
            # we consider that posting a message with a specified recipient (not a follower, a specific one)
            # on a document without customer means that it was created through the chatter using
            # suggested recipients. This heuristic allows to avoid ugly hacks in JS.
            new_partner = message.partner_ids.filtered(
                lambda partner: partner.email == self.email)
            if new_partner:
                self.search([
                    ('partner_id', '=', False),
                    ('email', '=', new_partner.email),
                    ('state', 'not in', ['cancel']),
                ]).write({'partner_id': new_partner.id})
        return super(EventRegistration, self)._message_post_after_hook(message)

    @api.multi
    def action_send_badge_email(self):
        """ Open a window to compose an email, with the template - 'event_badge'
            message loaded by default
        """
        self.ensure_one()
        template = self.env.ref('event.event_registration_mail_template_badge')
        compose_form = self.env.ref('mail.email_compose_message_wizard_form')
        ctx = dict(
            default_model='event.registration',
            default_res_id=self.id,
            default_use_template=bool(template),
            default_template_id=template.id,
            default_composition_mode='comment',
        )
        return {
            'name': _('Compose Email'),
            'type': 'ir.actions.act_window',
            'view_type': 'form',
            'view_mode': 'form',
            'res_model': 'mail.compose.message',
            'views': [(compose_form.id, 'form')],
            'view_id': compose_form.id,
            'target': 'new',
            'context': ctx,
        }

    @api.multi
    def get_date_range_str(self):
        """Return a human-friendly, translated string describing how far in
        the future the event starts ('today', 'tomorrow', 'in N days', ...)."""
        self.ensure_one()
        today = fields.Datetime.from_string(fields.Datetime.now())
        event_date = fields.Datetime.from_string(self.event_begin_date)
        diff = (event_date.date() - today.date())
        if diff.days <= 0:
            return _('today')
        elif diff.days == 1:
            return _('tomorrow')
        elif (diff.days < 7):
            return _('in %d days') % (diff.days, )
        elif (diff.days < 14):
            return _('next week')
        elif event_date.month == (today + relativedelta(months=+1)).month:
            return _('next month')
        else:
            # Fall back to the localized date, rendered in the event timezone.
            return _('on ') + format_tz(self.with_context({
                'use_babel': True
            }).env, self.event_begin_date, tz=self.event_id.date_tz or 'UTC')

    @api.multi
    def summary(self):
        """Hook returning extra information lines for the barcode interface;
        empty by default, meant to be extended."""
        self.ensure_one()
        return {'information': []}
class Project(models.Model):
    """A project: a container of tasks backed by an analytic account."""
    _name = "project.project"
    _description = "Project"
    _inherit = ['mail.alias.mixin', 'mail.thread', 'portal.mixin', 'ir.branch.company.mixin']
    _inherits = {'account.analytic.account': "analytic_account_id"}
    _order = "sequence, name, id"
    _period_number = 5

    def get_alias_model_name(self, vals):
        """Model that records created from the project's mail alias use."""
        return vals.get('alias_model', 'project.task')

    def get_alias_values(self):
        """Make the mail alias create its records inside this project."""
        values = super(Project, self).get_alias_values()
        values['alias_defaults'] = {'project_id': self.id}
        return values

    @api.multi
    def unlink(self):
        """Forbid deleting projects that still contain tasks; also drop the
        backing analytic account when it carries no analytic lines."""
        analytic_accounts_to_delete = self.env['account.analytic.account']
        for project in self:
            if project.tasks:
                raise UserError(_('You cannot delete a project containing tasks. You can either delete all the project\'s tasks and then delete the project or simply deactivate the project.'))
            if project.analytic_account_id and not project.analytic_account_id.line_ids:
                analytic_accounts_to_delete |= project.analytic_account_id
        res = super(Project, self).unlink()
        # Delete the accounts only after the projects are gone (FK order).
        analytic_accounts_to_delete.unlink()
        return res

    def _compute_attached_docs_count(self):
        """Count attachments linked to the project or to any of its tasks."""
        Attachment = self.env['ir.attachment']
        for project in self:
            project.doc_count = Attachment.search_count([
                '|',
                '&',
                ('res_model', '=', 'project.project'), ('res_id', '=', project.id),
                '&',
                ('res_model', '=', 'project.task'), ('res_id', 'in', project.task_ids.ids)
            ])

    def _compute_task_count(self):
        """Count open tasks (unfolded stage or no stage) per project."""
        task_data = self.env['project.task'].read_group([('project_id', 'in', self.ids), '|', ('stage_id.fold', '=', False), ('stage_id', '=', False)], ['project_id'], ['project_id'])
        result = dict((data['project_id'][0], data['project_id_count']) for data in task_data)
        for project in self:
            project.task_count = result.get(project.id, 0)

    def _compute_task_needaction_count(self):
        """Count tasks with pending chatter notifications per project."""
        projects_data = self.env['project.task'].read_group([
            ('project_id', 'in', self.ids),
            ('message_needaction', '=', True)
        ], ['project_id'], ['project_id'])
        mapped_data = {project_data['project_id'][0]: int(project_data['project_id_count'])
                       for project_data in projects_data}
        for project in self:
            project.task_needaction_count = mapped_data.get(project.id, 0)

    @api.multi
    def attachment_tree_view(self):
        """Return an action listing attachments of the project and its tasks."""
        self.ensure_one()
        domain = [
            '|',
            '&',
            ('res_model', '=', 'project.project'), ('res_id', 'in', self.ids),
            '&',
            ('res_model', '=', 'project.task'), ('res_id', 'in', self.task_ids.ids)]
        return {
            'name': _('Attachments'),
            'domain': domain,
            'res_model': 'ir.attachment',
            'type': 'ir.actions.act_window',
            'view_id': False,
            'view_mode': 'kanban,tree,form',
            'view_type': 'form',
            'help': _('''<p class="oe_view_nocontent_create">
                        Documents are attached to the tasks and issues of your project.</p><p>
                        Send messages or log internal notes with attachments to link
                        documents to your project.
                    </p>'''),
            'limit': 80,
            'context': "{'default_res_model': '%s','default_res_id': %d}" % (self._name, self.id)
        }

    @api.model
    def activate_sample_project(self):
        """ Unarchives the sample project 'project.project_project_data' and
            reloads the project dashboard """
        # Unarchive sample project
        project = self.env.ref('project.project_project_data', False)
        if project:
            project.write({'active': True})
        cover_image = self.env.ref('project.msg_task_data_14_attach', False)
        cover_task = self.env.ref('project.project_task_data_14', False)
        if cover_image and cover_task:
            cover_task.write({'displayed_image_id': cover_image.id})
        # Change the help message on the action (no more activate project)
        action = self.env.ref('project.open_view_project_all', False)
        action_data = None
        if action:
            action.sudo().write({
                "help": _('''<p class="oe_view_nocontent_create">Click to create a new project.</p>''')
            })
            action_data = action.read()[0]
        # Reload the dashboard
        return action_data

    def _compute_is_favorite(self):
        """A project is a favorite when the current user is in its members."""
        for project in self:
            project.is_favorite = self.env.user in project.favorite_user_ids

    def _inverse_is_favorite(self):
        """Toggle the current user's membership in favorite_user_ids.

        NOTE(review): this toggles rather than applying the written value
        (mirrors ``toggle_favorite`` below) — upstream behaves the same.
        """
        favorite_projects = not_fav_projects = self.env['project.project'].sudo()
        for project in self:
            if self.env.user in project.favorite_user_ids:
                favorite_projects |= project
            else:
                not_fav_projects |= project
        # Project User has no write access for project.
        not_fav_projects.write({'favorite_user_ids': [(4, self.env.uid)]})
        favorite_projects.write({'favorite_user_ids': [(3, self.env.uid)]})

    def _get_default_favorite_user_ids(self):
        """New projects start as a favorite of their creator."""
        return [(6, 0, [self.env.uid])]

    active = fields.Boolean(default=True,
        help="If the active field is set to False, it will allow you to hide the project without removing it.")
    sequence = fields.Integer(default=10, help="Gives the sequence order when displaying a list of Projects.")
    # Analytic account backing the project (delegation via _inherits).
    analytic_account_id = fields.Many2one(
        'account.analytic.account', string='Contract/Analytic',
        help="Link this project to an analytic account if you need financial management on projects. "
             "It enables you to connect projects with budgets, planning, cost and revenue analysis, timesheets on projects, etc.",
        ondelete="cascade", required=True, auto_join=True)
    favorite_user_ids = fields.Many2many(
        'res.users', 'project_favorite_user_rel', 'project_id', 'user_id',
        default=_get_default_favorite_user_ids,
        string='Members')
    is_favorite = fields.Boolean(compute='_compute_is_favorite', inverse='_inverse_is_favorite', string='Show Project on dashboard',
        help="Whether this project should be displayed on the dashboard or not")
    label_tasks = fields.Char(string='Use Tasks as', default='Tasks', help="Gives label to tasks on project's kanban view.")
    tasks = fields.One2many('project.task', 'project_id', string="Task Activities")
    resource_calendar_id = fields.Many2one(
        'resource.calendar', string='Working Time',
        default=lambda self: self.env.user.company_id.resource_calendar_id.id,
        help="Timetable working hours to adjust the gantt diagram report")
    type_ids = fields.Many2many('project.task.type', 'project_task_type_rel', 'project_id', 'type_id', string='Tasks Stages')
    task_count = fields.Integer(compute='_compute_task_count', string="Tasks")
    task_needaction_count = fields.Integer(compute='_compute_task_needaction_count', string="Tasks")
    # Only open tasks (unfolded stage or no stage).
    task_ids = fields.One2many('project.task', 'project_id', string='Tasks',
                               domain=['|', ('stage_id.fold', '=', False), ('stage_id', '=', False)])
    color = fields.Integer(string='Color Index')
    user_id = fields.Many2one('res.users', string='Project Manager', default=lambda self: self.env.user, track_visibility="onchange")
    alias_id = fields.Many2one('mail.alias', string='Alias', ondelete="restrict", required=True,
        help="Internal email associated with this project. Incoming emails are automatically synchronized "
             "with Tasks (or optionally Issues if the Issue Tracker module is installed).")
    privacy_visibility = fields.Selection([
            ('followers', _('On invitation only')),
            ('employees', _('Visible by all employees')),
            ('portal', _('Visible by following customers')),
        ],
        string='Privacy', required=True,
        default='employees',
        help="Holds visibility of the tasks or issues that belong to the current project:\n"
             "- On invitation only: Employees may only see the followed project, tasks or issues\n"
             "- Visible by all employees: Employees may see all project, tasks or issues\n"
             "- Visible by following customers: employees see everything;\n"
             "   if website is activated, portal users may see project, tasks or issues followed by\n"
             "   them or by someone of their company\n")
    doc_count = fields.Integer(compute='_compute_attached_docs_count', string="Number of documents attached")
    date_start = fields.Date(string='Start Date')
    date = fields.Date(string='Expiration Date', index=True, track_visibility='onchange')
    subtask_project_id = fields.Many2one('project.project', string='Sub-task Project', ondelete="restrict",
        help="Choosing a sub-tasks project will both enable sub-tasks and set their default project (possibly the project itself)")
    low = fields.Integer("No of Days for Low priority")
    medium = fields.Integer("No of Days for Medium priority")
    high = fields.Integer("No of Days for High priority")

    _sql_constraints = [
        ('project_date_greater', 'check(date >= date_start)',
         'Error! project start-date must be lower than project end-date.')
    ]

    def _compute_portal_url(self):
        """Portal URL for the project's customer-facing page."""
        super(Project, self)._compute_portal_url()
        for project in self:
            project.portal_url = '/my/project/%s' % project.id

    @api.multi
    def map_tasks(self, new_project_id):
        """ copy and map tasks from old to new project """
        tasks = self.env['project.task']
        for task in self.tasks:
            # preserve task name and stage, normally altered during copy
            defaults = {'stage_id': task.stage_id.id, 'name': task.name}
            tasks += task.copy(defaults)
        return self.browse(new_project_id).write({'tasks': [(6, 0, tasks.ids)]})

    @api.multi
    def copy(self, default=None):
        """Duplicate the project, its followers and (unless overridden via
        ``default['tasks']``) copies of its tasks."""
        if default is None:
            default = {}
        # active_test=False: copy archived tasks/followers as well.
        self = self.with_context(active_test=False)
        if not default.get('name'):
            default['name'] = _("%s (copy)") % (self.name)
        project = super(Project, self).copy(default)
        for follower in self.message_follower_ids:
            project.message_subscribe(partner_ids=follower.partner_id.ids, subtype_ids=follower.subtype_ids.ids)
        if 'tasks' not in default:
            self.map_tasks(project.id)
        return project

    @api.model
    def create(self, vals):
        """Create a project; defaults its own sub-task project to itself and
        subscribes the customer on portal-visible projects."""
        # Prevent double project creation
        self = self.with_context(project_creation_in_progress=True, mail_create_nosubscribe=True)
        project = super(Project, self).create(vals)
        if not vals.get('subtask_project_id'):
            project.subtask_project_id = project.id
        if project.privacy_visibility == 'portal' and project.partner_id:
            project.message_subscribe(project.partner_id.ids)
        return project

    @api.multi
    def write(self, vals):
        """Propagate (un)archiving to tasks and the analytic account, and
        (re)subscribe the customer on portal-visible projects."""
        res = super(Project, self).write(vals)
        if 'active' in vals:
            # archiving/unarchiving a project does it on its tasks, too
            self.with_context(active_test=False).mapped('tasks').write({'active': vals['active']})
            # archiving/unarchiving a project implies that we don't want to use the analytic account anymore
            self.with_context(active_test=False).mapped('analytic_account_id').write({'active': vals['active']})
        if vals.get('partner_id') or vals.get('privacy_visibility'):
            for project in self.filtered(lambda project: project.privacy_visibility == 'portal'):
                project.message_subscribe(project.partner_id.ids)
        return res

    @api.multi
    def get_access_action(self, access_uid=None):
        """ Instead of the classic form view, redirect to website for portal users
        that can read the project. """
        self.ensure_one()
        user, record = self.env.user, self
        if access_uid:
            user = self.env['res.users'].sudo().browse(access_uid)
            record = self.sudo(user)
        if user.share:
            try:
                record.check_access_rule('read')
            except AccessError:
                # No read access: fall through to the default action below.
                pass
            else:
                return {
                    'type': 'ir.actions.act_url',
                    'url': '/my/project/%s' % self.id,
                    'target': 'self',
                    'res_id': self.id,
                }
        return super(Project, self).get_access_action(access_uid)

    @api.multi
    def message_subscribe(self, partner_ids=None, channel_ids=None, subtype_ids=None, force=True):
        """ Subscribe to all existing active tasks when subscribing to a project """
        res = super(Project, self).message_subscribe(partner_ids=partner_ids, channel_ids=channel_ids, subtype_ids=subtype_ids, force=force)
        if not subtype_ids or any(subtype.parent_id.res_model == 'project.task' for subtype in self.env['mail.message.subtype'].browse(subtype_ids)):
            for partner_id in partner_ids or []:
                self.mapped('tasks').filtered(
                    lambda task: not task.stage_id.fold and partner_id not in task.message_partner_ids.ids
                ).message_subscribe(partner_ids=[partner_id], channel_ids=None, subtype_ids=None, force=False)
            for channel_id in channel_ids or []:
                self.mapped('tasks').filtered(
                    lambda task: not task.stage_id.fold and channel_id not in task.message_channel_ids.ids
                ).message_subscribe(partner_ids=None, channel_ids=[channel_id], subtype_ids=None, force=False)
        return res

    @api.multi
    def message_unsubscribe(self, partner_ids=None, channel_ids=None):
        """ Unsubscribe from all tasks when unsubscribing from a project """
        self.mapped('tasks').message_unsubscribe(partner_ids=partner_ids, channel_ids=channel_ids)
        return super(Project, self).message_unsubscribe(partner_ids=partner_ids, channel_ids=channel_ids)

    @api.multi
    def _notification_recipients(self, message, groups):
        """Give internal notification groups a direct-access button;
        customer/portal groups keep the default behaviour."""
        groups = super(Project, self)._notification_recipients(message, groups)
        for group_name, group_method, group_data in groups:
            if group_name in ['customer', 'portal']:
                continue
            group_data['has_button_access'] = True
        return groups

    @api.multi
    def toggle_favorite(self):
        """Toggle the current user's favorite flag on each project."""
        favorite_projects = not_fav_projects = self.env['project.project'].sudo()
        for project in self:
            if self.env.user in project.favorite_user_ids:
                favorite_projects |= project
            else:
                not_fav_projects |= project
        # Project User has no write access for project.
        not_fav_projects.write({'favorite_user_ids': [(4, self.env.uid)]})
        favorite_projects.write({'favorite_user_ids': [(3, self.env.uid)]})

    @api.multi
    def close_dialog(self):
        """Close the currently open dialog window."""
        return {'type': 'ir.actions.act_window_close'}

    @api.multi
    def edit_dialog(self):
        """Open the full project form inline, replacing the simplified dialog."""
        form_view = self.env.ref('project.edit_project')
        return {
            'name': _('Project'),
            'res_model': 'project.project',
            'res_id': self.id,
            'views': [(form_view.id, 'form'),],
            'type': 'ir.actions.act_window',
            'target': 'inline'
        }
class AccountEdiDocument(models.Model):
    """Tracks the EDI lifecycle (send/cancel) of one move for one EDI format."""
    _name = 'account.edi.document'
    _description = 'Electronic Document for an account.move'

    # == Stored fields ==
    move_id = fields.Many2one('account.move', required=True, ondelete='cascade')
    edi_format_id = fields.Many2one('account.edi.format', required=True)
    attachment_id = fields.Many2one(
        'ir.attachment',
        help='The file generated by edi_format_id when the invoice is posted (and this document is processed).')
    state = fields.Selection([('to_send', 'To Send'), ('sent', 'Sent'),
                              ('to_cancel', 'To Cancel'),
                              ('cancelled', 'Cancelled')])
    error = fields.Html(
        help='The text of the last error that happened during Electronic Invoice operation.')

    # == Not stored fields ==
    name = fields.Char(related='attachment_id.name')
    edi_format_name = fields.Char(string='Format Name', related='edi_format_id.name')

    _sql_constraints = [
        (
            'unique_edi_document_by_move_by_format',
            'UNIQUE(edi_format_id, move_id)',
            'Only one edi document by move by format',
        ),
    ]

    def write(self, vals):
        ''' If account_edi_extended is not installed, a default behaviour is used instead. '''
        # 'blocking_level' only exists when account_edi_extended is installed;
        # silently drop it otherwise so callers can write it unconditionally.
        if 'blocking_level' in vals and 'blocking_level' not in self.env['account.edi.document']._fields:
            vals.pop('blocking_level')
        return super().write(vals)

    def _prepare_jobs(self):
        """Creates a list of jobs to be performed by '_process_job' for the documents in self.
        Each document represent a job, BUT if multiple documents have the same state, edi_format_id,
        doc_type (invoice or payment) and company_id AND the edi_format_id supports batching, they are grouped
        into a single job.

        :returns:         A list of tuples (documents, doc_type)
        * documents:      The documents related to this job. If edi_format_id does not support batch, length is one
        * doc_type:       Are the moves of this job invoice or payments ?
        """
        # Classify jobs by (edi_format, edi_doc.state, doc_type, move.company_id, custom_key)
        to_process = {}
        if 'blocking_level' in self.env['account.edi.document']._fields:
            # account_edi_extended installed: skip documents blocked on error.
            documents = self.filtered(lambda d: d.state in ('to_send', 'to_cancel') and d.blocking_level != 'error')
        else:
            documents = self.filtered(lambda d: d.state in ('to_send', 'to_cancel'))
        for edi_doc in documents:
            move = edi_doc.move_id
            edi_format = edi_doc.edi_format_id
            if move.is_invoice(include_receipts=True):
                doc_type = 'invoice'
            elif move.payment_id or move.statement_line_id:
                doc_type = 'payment'
            else:
                # Neither invoice nor payment: nothing to process.
                continue
            custom_key = edi_format._get_batch_key(edi_doc.move_id, edi_doc.state)
            key = (edi_format, edi_doc.state, doc_type, move.company_id, custom_key)
            to_process.setdefault(key, self.env['account.edi.document'])
            to_process[key] |= edi_doc

        # Order payments/invoice and create batches.
        invoices = []
        payments = []
        for key, documents in to_process.items():
            edi_format, state, doc_type, company_id, custom_key = key
            target = invoices if doc_type == 'invoice' else payments
            batch = self.env['account.edi.document']
            for doc in documents:
                if edi_format._support_batching(move=doc.move_id, state=state, company=company_id):
                    batch |= doc
                else:
                    # Unbatchable documents each become a one-document job.
                    target.append((doc, doc_type))
            if batch:
                target.append((batch, doc_type))
        # Invoices are always processed before payments.
        return invoices + payments

    @api.model
    def _convert_to_old_jobs_format(self, jobs):
        """ See '_prepare_jobs' :

        Old format : ((edi_format, state, doc_type, company_id), documents)
        Since edi_format, state and company_id can be deduced from documents, this is redundant and more
        prone to unexpected behaviours.
        New format : (doc_type, documents).

        However, for backward compatibility of 'process_jobs', we need a way to convert back to the old format.
        """
        return [((documents.edi_format_id, documents[0].state, doc_type, documents.move_id.company_id), documents)
                for documents, doc_type in jobs]

    @api.model
    def _process_jobs(self, to_process):
        """ Deprecated, use _process_job instead.

        :param to_process: A list of tuples (key, documents)
        * key:             A tuple (edi_format_id, state, doc_type, company_id)
        ** edi_format_id:  The format to perform the operation with
        ** state:          The state of the documents of this job
        ** doc_type:       Are the moves of this job invoice or payments ?
        ** company_id:     The company the moves belong to
        * documents:       The documents related to this job. If edi_format_id does not support batch, length is one
        """
        for key, documents in to_process:
            edi_format, state, doc_type, company_id = key
            self._process_job(documents, doc_type)

    @api.model
    def _process_job(self, documents, doc_type):
        """Post or cancel move_id (invoice or payment) by calling the related methods on edi_format_id.
        Invoices are processed before payments.

        :param documents: The documents related to this job. If edi_format_id does not support batch, length is one
        :param doc_type:  Are the moves of this job invoice or payments ?
        """

        def _postprocess_post_edi_results(documents, edi_result):
            # Apply per-move results of a 'send' operation: store the produced
            # attachment, record errors, and mark successful documents 'sent'.
            attachments_to_unlink = self.env['ir.attachment']
            for document in documents:
                move = document.move_id
                move_result = edi_result.get(move, {})
                if move_result.get('attachment'):
                    old_attachment = document.attachment_id
                    values = {
                        'attachment_id': move_result['attachment'].id,
                        'error': move_result.get('error', False),
                        'blocking_level': move_result.get('blocking_level', DEFAULT_BLOCKING_LEVEL) if 'error' in move_result else False,
                    }
                    if not values.get('error'):
                        values.update({'state': 'sent'})
                    document.write(values)
                    if not old_attachment.res_model or not old_attachment.res_id:
                        attachments_to_unlink |= old_attachment
                else:
                    document.write({
                        'error': move_result.get('error', False),
                        'blocking_level': move_result.get('blocking_level', DEFAULT_BLOCKING_LEVEL) if 'error' in move_result else False,
                    })

            # Attachments that are not explicitly linked to a business model could be removed because they are not
            # supposed to have any traceability from the user.
            attachments_to_unlink.unlink()

        def _postprocess_cancel_edi_results(documents, edi_result):
            # Apply per-move results of a 'cancel' operation: reset successful
            # documents, cancel their posted invoices, record errors otherwise.
            invoice_ids_to_cancel = set()  # Avoid duplicates
            attachments_to_unlink = self.env['ir.attachment']
            for document in documents:
                move = document.move_id
                move_result = edi_result.get(move, {})
                if move_result.get('success') is True:
                    old_attachment = document.attachment_id
                    document.write({
                        'state': 'cancelled',
                        'error': False,
                        'attachment_id': False,
                        'blocking_level': False,
                    })

                    if move.is_invoice(include_receipts=True) and move.state == 'posted':
                        # The user requested a cancellation of the EDI and it has been approved. Then, the invoice
                        # can be safely cancelled.
                        invoice_ids_to_cancel.add(move.id)

                    if not old_attachment.res_model or not old_attachment.res_id:
                        attachments_to_unlink |= old_attachment

                elif not move_result.get('success'):
                    document.write({
                        'error': move_result.get('error', False),
                        'blocking_level': move_result.get('blocking_level', DEFAULT_BLOCKING_LEVEL) if move_result.get('error') else False,
                    })

            if invoice_ids_to_cancel:
                invoices = self.env['account.move'].browse(list(invoice_ids_to_cancel))
                invoices.button_draft()
                invoices.button_cancel()

            # Attachments that are not explicitly linked to a business model could be removed because they are not
            # supposed to have any traceability from the user.
            attachments_to_unlink.unlink()

        test_mode = self._context.get('edi_test_mode', False)

        documents.edi_format_id.ensure_one()  # All account.edi.document of a job should have the same edi_format_id
        documents.move_id.company_id.ensure_one()  # All account.edi.document of a job should be from the same company
        if len(set(doc.state for doc in documents)) != 1:
            raise ValueError('All account.edi.document of a job should have the same state')

        edi_format = documents.edi_format_id
        state = documents[0].state
        if doc_type == 'invoice':
            if state == 'to_send':
                edi_result = edi_format._post_invoice_edi(documents.move_id, test_mode=test_mode)
                _postprocess_post_edi_results(documents, edi_result)
            elif state == 'to_cancel':
                edi_result = edi_format._cancel_invoice_edi(documents.move_id, test_mode=test_mode)
                _postprocess_cancel_edi_results(documents, edi_result)
        elif doc_type == 'payment':
            if state == 'to_send':
                edi_result = edi_format._post_payment_edi(documents.move_id, test_mode=test_mode)
                _postprocess_post_edi_results(documents, edi_result)
            elif state == 'to_cancel':
                edi_result = edi_format._cancel_payment_edi(documents.move_id, test_mode=test_mode)
                _postprocess_cancel_edi_results(documents, edi_result)

    def _process_documents_no_web_services(self):
        """ Post and cancel all the documents that don't need a web service. """
        jobs = self.filtered(lambda d: not d.edi_format_id._needs_web_services())._prepare_jobs()
        self._process_jobs(self._convert_to_old_jobs_format(jobs))

    def _process_documents_web_services(self, job_count=None, with_commit=True):
        """ Post and cancel all the documents that need a web service. This is called by CRON.

        :param job_count: Limit to the number of jobs to process among the ones that are available for treatment.
        """
        jobs = self.filtered(lambda d: d.edi_format_id._needs_web_services())._prepare_jobs()
        jobs = jobs[0:job_count or len(jobs)]
        for documents, doc_type in jobs:
            move_to_cancel = documents.filtered(lambda doc: doc.attachment_id
                                                and doc.state == 'to_cancel'
                                                and doc.move_id.is_invoice(include_receipts=True)
                                                and doc.edi_format_id._is_required_for_invoice(doc.move_id)).move_id
            attachments_potential_unlink = documents.attachment_id.filtered(lambda a: not a.res_model and not a.res_id)
            try:
                with self.env.cr.savepoint():
                    # Lock the job's documents so concurrent CRON workers
                    # cannot process them twice (NOWAIT: fail fast instead
                    # of waiting on the other transaction).
                    self._cr.execute('SELECT * FROM account_edi_document WHERE id IN %s FOR UPDATE NOWAIT', [tuple(documents.ids)])
                    # Locks the move that will be cancelled.
                    if move_to_cancel:
                        self._cr.execute('SELECT * FROM account_move WHERE id IN %s FOR UPDATE NOWAIT', [tuple(move_to_cancel.ids)])
                    # Locks the attachments that might be unlinked
                    if attachments_potential_unlink:
                        self._cr.execute('SELECT * FROM ir_attachment WHERE id IN %s FOR UPDATE NOWAIT', [tuple(attachments_potential_unlink.ids)])
                    self._process_job(documents, doc_type)
            except OperationalError as e:
                # 55P03 = PostgreSQL lock_not_available (rows locked elsewhere).
                if e.pgcode == '55P03':
                    _logger.debug('Another transaction already locked documents rows. Cannot process documents.')
                else:
                    raise e
            else:
                # Commit between jobs so a later failure does not roll back
                # work already confirmed with the external web service.
                if with_commit and len(jobs) > 1:
                    self.env.cr.commit()
class MrpRoutingWorkcenter(models.Model):
    """One operation of a routing: a work-center usage with its expected duration."""
    _name = 'mrp.routing.workcenter'
    _description = 'Work Center Usage'
    _order = 'sequence, id'

    name = fields.Char('Operation', required=True)
    workcenter_id = fields.Many2one('mrp.workcenter', 'Work Center', required=True)
    sequence = fields.Integer(
        'Sequence', default=100,
        help="Gives the sequence order when displaying a list of routing Work Centers.")
    routing_id = fields.Many2one(
        'mrp.routing', 'Parent Routing', index=True, ondelete='cascade', required=True,
        help="The routing contains all the Work Centers used and for how long. This will create work orders afterwards "
             "which alters the execution of the manufacturing order.")
    note = fields.Text('Description')
    company_id = fields.Many2one(
        'res.company', 'Company', readonly=True,
        related='routing_id.company_id', store=True)
    worksheet = fields.Binary('worksheet')
    time_mode = fields.Selection(
        [('auto', 'Compute based on real time'),
         ('manual', 'Set duration manually')],
        string='Duration Computation', default='auto')
    # Number of past (done) work orders sampled when time_mode == 'auto'.
    time_mode_batch = fields.Integer('Based on', default=10)
    time_cycle_manual = fields.Float(
        'Manual Duration', default=60,
        help="Time in minutes. Is the time used in manual mode, or the first time supposed in real time when there are not any work orders yet.")
    time_cycle = fields.Float('Duration', compute="_compute_time_cycle")
    workorder_count = fields.Integer("# Work Orders", compute="_compute_workorder_count")
    batch = fields.Selection(
        [('no', 'Once all products are processed'),
         ('yes', 'Once a minimum number of products is processed')],
        string='Next Operation',
        help="Set 'no' to schedule the next work order after the previous one. "
             "Set 'yes' to produce after the quantity set in 'Quantity To Process' has been produced.",
        default='no', required=True)
    batch_size = fields.Float('Quantity to Process', default=1.0)
    workorder_ids = fields.One2many('mrp.workorder', 'operation_id', string="Work Orders")

    @api.multi
    @api.depends('time_cycle_manual', 'time_mode', 'workorder_ids')
    def _compute_time_cycle(self):
        """Expected duration of the operation, in minutes.

        Manual mode copies ``time_cycle_manual``; automatic mode averages
        duration per produced unit over the last ``time_mode_batch`` done
        work orders, falling back to the manual value when there is no
        usable history (no records, or zero quantity produced).
        """
        Workorder = self.env['mrp.workorder']
        for op in self:
            if op.time_mode == 'manual':
                op.time_cycle = op.time_cycle_manual
                continue
            grouped = Workorder.read_group(
                [('operation_id', '=', op.id), ('state', '=', 'done')],
                ['operation_id', 'duration', 'qty_produced'],
                ['operation_id'],
                limit=op.time_mode_batch)
            totals = {row['operation_id'][0]: (row['duration'], row['qty_produced'])
                      for row in grouped}
            duration, qty_produced = totals.get(op.id, (0.0, 0.0))
            # Guard against division by zero when nothing was produced yet.
            op.time_cycle = duration / qty_produced if qty_produced else op.time_cycle_manual

    @api.multi
    def _compute_workorder_count(self):
        """Number of finished work orders linked to each operation."""
        grouped = self.env['mrp.workorder'].read_group(
            [('operation_id', 'in', self.ids), ('state', '=', 'done')],
            ['operation_id'], ['operation_id'])
        counts = {row['operation_id'][0]: row['operation_id_count'] for row in grouped}
        for op in self:
            op.workorder_count = counts.get(op.id, 0)
class AccountMoveLineReconcileWriteoff(models.TransientModel):
    """Write-off wizard used while reconciling journal items.

    Lets the user choose the journal, account and analytic account used to
    write off the residual amount of the selected move lines.
    """
    _name = 'account.move.line.reconcile.writeoff'
    _description = 'Account move line reconcile (writeoff)'

    journal_id = fields.Many2one('account.journal', string='Write-Off Journal', required=True)
    writeoff_acc_id = fields.Many2one(
        'account.account', string='Write-Off account', required=True,
        domain=[('deprecated', '=', False)])
    date_p = fields.Date(string='Date', default=fields.Date.context_today)
    comment = fields.Char(required=True, default='Write-off')
    analytic_id = fields.Many2one('account.analytic.account', string='Analytic Account')

    @api.multi
    def trans_rec_addendum(self):
        """Open this wizard's form view in a new dialog."""
        form_view = self.env.ref('account.account_move_line_reconcile_writeoff')
        action = {
            'name': _('Reconcile Writeoff'),
            'context': self._context,
            'view_type': 'form',
            'view_mode': 'form',
            'res_model': 'account.move.line.reconcile.writeoff',
            'views': [(form_view.id, 'form')],
            'type': 'ir.actions.act_window',
            'target': 'new',
        }
        return action

    @api.multi
    def trans_rec_reconcile_partial(self):
        """Partially reconcile the selected move lines, then close the dialog."""
        active_ids = (self._context or {}).get('active_ids', [])
        self.env['account.move.line'].browse(active_ids).reconcile()
        return {'type': 'ir.actions.act_window_close'}

    @api.multi
    def trans_rec_reconcile(self):
        """Fully reconcile the selected move lines, writing off any residual.

        The reconciliation is done in two passes: first in company currency
        only, then the amounts in secondary currency are considered for the
        lines that are still not fully reconciled.
        """
        ctx = dict(self._context or {})
        ctx['date_p'] = self.date_p
        ctx['comment'] = self.comment
        if self.analytic_id:
            ctx['analytic_id'] = self.analytic_id.id

        lines = self.env['account.move.line'].browse(
            self._context.get('active_ids', []))
        # Entries that are already reconciled must not be considered.
        unreconciled = lines.filtered(lambda aml: not aml.reconciled)

        # Because this is a full reconciliation in batch, handle the use
        # cases of test test_manual_reconcile_wizard_opw678153: force the
        # first pass in company currency only.
        ctx['skip_full_reconcile_check'] = 'amount_currency_excluded'
        writeoff = unreconciled.with_context(ctx).reconcile(
            self.writeoff_acc_id, self.journal_id)
        # Second pass: include the write-off lines (if any were created) and
        # finish the reconciliation in secondary currency.
        if not isinstance(writeoff, bool):
            lines += writeoff
        lines.force_full_reconcile()
        return {'type': 'ir.actions.act_window_close'}
class IrMailServer(models.Model):
    """Represents an SMTP server, able to send outgoing emails, with SSL and TLS capabilities."""
    _name = "ir.mail_server"

    # Assertion message raised by send_email() when no recipient was found.
    NO_VALID_RECIPIENT = ("At least one valid recipient address should be "
                          "specified for outgoing emails (To/Cc/Bcc)")

    name = fields.Char(string='Description', required=True, index=True)
    smtp_host = fields.Char(string='SMTP Server', required=True, help="Hostname or IP of SMTP server")
    smtp_port = fields.Integer(
        string='SMTP Port', size=5, required=True, default=25,
        help="SMTP Port. Usually 465 for SSL, and 25 or 587 for other cases.")
    smtp_user = fields.Char(string='Username', help="Optional username for SMTP authentication")
    smtp_pass = fields.Char(string='Password', help="Optional password for SMTP authentication")
    smtp_encryption = fields.Selection(
        [('none', 'None'),
         ('starttls', 'TLS (STARTTLS)'),
         ('ssl', 'SSL/TLS')],
        string='Connection Security', required=True, default='none',
        help="Choose the connection encryption scheme:\n"
             "- None: SMTP sessions are done in cleartext.\n"
             "- TLS (STARTTLS): TLS encryption is requested at start of SMTP session (Recommended)\n"
             "- SSL/TLS: SMTP sessions are encrypted with SSL/TLS through a dedicated port (default: 465)")
    smtp_debug = fields.Boolean(
        string='Debugging',
        help="If enabled, the full output of SMTP sessions will "
             "be written to the server log at DEBUG level"
             "(this is very verbose and may include confidential info!)")
    # Lowest sequence wins when no specific server is requested.
    sequence = fields.Integer(
        string='Priority', default=10,
        help="When no specific mail server is requested for a mail, the highest priority one "
             "is used. Default priority is 10 (smaller number = higher priority)")
    active = fields.Boolean(default=True)

    @api.multi
    def name_get(self):
        # Display name is the description wrapped in parentheses.
        return [(server.id, "(%s)" % server.name) for server in self]

    @api.multi
    def test_smtp_connection(self):
        """Try to open (and close) an SMTP connection for each server.

        Raises a UserError in both cases: with the failure details on error,
        or with a success message — the exception is (ab)used here to show a
        dialog to the user from a button action.
        """
        for server in self:
            smtp = False
            try:
                smtp = self.connect(mail_server_id=server.id)
            except Exception as e:
                raise UserError(
                    _("Connection Test Failed! Here is what we got instead:\n %s") % ustr(e))
            finally:
                try:
                    if smtp:
                        smtp.quit()
                except Exception:
                    # ignored, just a consequence of the previous exception
                    pass
        raise UserError(
            _("Connection Test Succeeded! Everything seems properly set up!"))

    def connect(self, host=None, port=None, user=None, password=None,
                encryption=None, smtp_debug=False, mail_server_id=None):
        """Returns a new SMTP connection to the given SMTP server.
        When running in test mode, this method does nothing and returns `None`.

        :param host: host or IP of SMTP server to connect to, if mail_server_id not passed
        :param int port: SMTP port to connect to
        :param user: optional username to authenticate with
        :param password: optional password to authenticate with
        :param string encryption: optional, ``'ssl'`` | ``'starttls'``
        :param bool smtp_debug: toggle debugging of SMTP sessions (all i/o
                                will be output in logs)
        :param mail_server_id: ID of specific mail server to use (overrides other parameters)
        """
        # Do not actually connect while running in test mode
        if getattr(threading.currentThread(), 'testing', False):
            return None

        mail_server = smtp_encryption = None
        if mail_server_id:
            mail_server = self.sudo().browse(mail_server_id)
        elif not host:
            # Nothing requested explicitly: fall back on the default
            # (highest-priority) configured mail server, if any.
            mail_server = self.sudo().search([], order='sequence', limit=1)

        if mail_server:
            smtp_server = mail_server.smtp_host
            smtp_port = mail_server.smtp_port
            smtp_user = mail_server.smtp_user
            smtp_password = mail_server.smtp_pass
            smtp_encryption = mail_server.smtp_encryption
            smtp_debug = smtp_debug or mail_server.smtp_debug
        else:
            # we were passed individual smtp parameters or nothing and there is no default server
            smtp_server = host or tools.config.get('smtp_server')
            smtp_port = tools.config.get('smtp_port', 25) if port is None else port
            smtp_user = user or tools.config.get('smtp_user')
            smtp_password = password or tools.config.get('smtp_password')
            smtp_encryption = encryption
            if smtp_encryption is None and tools.config.get('smtp_ssl'):
                smtp_encryption = 'starttls'  # smtp_ssl => STARTTLS as of v7

        if not smtp_server:
            raise UserError(
                (_("Missing SMTP Server") + "\n" +
                 _("Please define at least one SMTP server, "
                   "or provide the SMTP parameters explicitly.")))

        if smtp_encryption == 'ssl':
            if 'SMTP_SSL' not in smtplib.__all__:
                raise UserError(
                    _("Your Flectra Server does not support SMTP-over-SSL. "
                      "You could use STARTTLS instead."
                      "If SSL is needed, an upgrade to Python 2.6 on the server-side "
                      "should do the trick."))
            connection = smtplib.SMTP_SSL(smtp_server, smtp_port)
        else:
            connection = smtplib.SMTP(smtp_server, smtp_port)
        connection.set_debuglevel(smtp_debug)
        if smtp_encryption == 'starttls':
            # starttls() will perform ehlo() if needed first
            # and will discard the previous list of services
            # after successfully performing STARTTLS command,
            # (as per RFC 3207) so for example any AUTH
            # capability that appears only on encrypted channels
            # will be correctly detected for next step
            connection.starttls()

        if smtp_user:
            # Attempt authentication - will raise if AUTH service not supported
            # The user/password must be converted to bytestrings in order to be usable for
            # certain hashing schemes, like HMAC.
            # See also bug #597143 and python issue #5285
            smtp_user = pycompat.to_native(ustr(smtp_user))
            smtp_password = pycompat.to_native(ustr(smtp_password))
            connection.login(smtp_user, smtp_password)
        return connection

    def build_email(self, email_from, email_to, subject, body, email_cc=None,
                    email_bcc=None, reply_to=False, attachments=None,
                    message_id=None, references=None, object_id=False,
                    subtype='plain', headers=None, body_alternative=None,
                    subtype_alternative='plain'):
        """Constructs an RFC2822 email.message.Message object based on the
        keyword arguments passed, and returns it.

        :param string email_from: sender email address
        :param list email_to: list of recipient addresses (to be joined with commas)
        :param string subject: email subject (no pre-encoding/quoting necessary)
        :param string body: email body, of the type ``subtype`` (by default,
                            plaintext).  If html subtype is used, the message
                            will be automatically converted to plaintext and
                            wrapped in multipart/alternative, unless an
                            explicit ``body_alternative`` version is passed.
        :param string body_alternative: optional alternative body, of the
                                        type specified in ``subtype_alternative``
        :param string reply_to: optional value of Reply-To header
        :param string object_id: optional tracking identifier, to be included
                                 in the message-id for recognizing replies.
                                 Suggested format for object-id is
                                 "res_id-model", e.g. "12345-crm.lead".
        :param string subtype: optional mime subtype for the text body
                               (usually 'plain' or 'html'), must match the
                               format of the ``body`` parameter.  Default is
                               'plain', making the content part of the mail
                               "text/plain".
        :param string subtype_alternative: optional mime subtype of
                                           ``body_alternative`` (usually
                                           'plain' or 'html').  Default is 'plain'.
        :param list attachments: list of (filename, filecontents, mimetype)
                                 triples, where filecontents is a string
                                 containing the bytes of the attachment
        :param list email_cc: optional list of string values for CC header
                              (to be joined with commas)
        :param list email_bcc: optional list of string values for BCC header
                               (to be joined with commas)
        :param dict headers: optional map of headers to set on the outgoing
                             mail (may override the other headers, including
                             Subject, Reply-To, Message-Id, etc.)
        :rtype: email.message.Message (usually MIMEMultipart)
        :return: the new RFC2822 email message
        """
        email_from = email_from or tools.config.get('email_from')
        assert email_from, "You must either provide a sender address explicitly or configure "\
                           "a global sender address in the server configuration or with the "\
                           "--email-from startup parameter."

        # Note: we must force all strings to to 8-bit utf-8 when crafting message,
        # or use encode_header() for headers, which does it automatically.

        headers = headers or {}  # need valid dict later
        email_cc = email_cc or []
        email_bcc = email_bcc or []
        body = body or u''

        email_body = ustr(body)
        email_text_part = MIMEText(email_body, _subtype=subtype, _charset='utf-8')
        msg = MIMEMultipart()

        if not message_id:
            if object_id:
                message_id = tools.generate_tracking_message_id(object_id)
            else:
                message_id = make_msgid()
        msg['Message-Id'] = encode_header(message_id)
        if references:
            msg['references'] = encode_header(references)
        msg['Subject'] = encode_header(subject)
        msg['From'] = encode_rfc2822_address_header(email_from)
        # Reply-To defaults to the sender when not provided.
        del msg['Reply-To']
        if reply_to:
            msg['Reply-To'] = encode_rfc2822_address_header(reply_to)
        else:
            msg['Reply-To'] = msg['From']
        msg['To'] = encode_rfc2822_address_header(COMMASPACE.join(email_to))
        if email_cc:
            msg['Cc'] = encode_rfc2822_address_header(COMMASPACE.join(email_cc))
        if email_bcc:
            msg['Bcc'] = encode_rfc2822_address_header(COMMASPACE.join(email_bcc))
        msg['Date'] = formatdate()
        # Custom headers may override normal headers or provide additional ones
        for key, value in headers.items():
            msg[pycompat.to_native(ustr(key))] = encode_header(value)

        if subtype == 'html' and not body_alternative:
            # Always provide alternative text body ourselves if possible.
            text = html2text.html2text(email_body)
            alternative_part = MIMEMultipart(_subtype="alternative")
            alternative_part.attach(
                MIMEText(text, _charset='utf-8', _subtype='plain'))
            alternative_part.attach(email_text_part)
            msg.attach(alternative_part)
        elif body_alternative:
            # Include both alternatives, as specified, within a multipart/alternative part
            alternative_part = MIMEMultipart(_subtype="alternative")
            body_alternative_ = ustr(body_alternative)
            alternative_body_part = MIMEText(body_alternative_,
                                             _subtype=subtype_alternative,
                                             _charset='utf-8')
            alternative_part.attach(alternative_body_part)
            alternative_part.attach(email_text_part)
            msg.attach(alternative_part)
        else:
            msg.attach(email_text_part)

        if attachments:
            for (fname, fcontent, mime) in attachments:
                filename_rfc2047 = encode_header_param(fname)
                if mime and '/' in mime:
                    maintype, subtype = mime.split('/', 1)
                    part = MIMEBase(maintype, subtype)
                else:
                    part = MIMEBase('application', "octet-stream")

                # The default RFC2231 encoding of Message.add_header() works in Thunderbird but not GMail
                # so we fix it by using RFC2047 encoding for the filename instead.
                part.set_param('name', filename_rfc2047)
                part.add_header('Content-Disposition', 'attachment',
                                filename=filename_rfc2047)

                part.set_payload(fcontent)
                encoders.encode_base64(part)
                msg.attach(part)
        return msg

    @api.model
    def _get_default_bounce_address(self):
        '''Compute the default bounce address.

        The default bounce address is used to set the envelop address if no
        envelop address is provided in the message.  It is formed by properly
        joining the parameters "mail.bounce.alias" and "mail.catchall.domain".

        If "mail.bounce.alias" is not set it defaults to "postmaster-flectra".

        If "mail.catchall.domain" is not set, return None.
        '''
        get_param = self.env['ir.config_parameter'].sudo().get_param
        postmaster = get_param('mail.bounce.alias', default='postmaster-flectra')
        domain = get_param('mail.catchall.domain')
        if postmaster and domain:
            return '%s@%s' % (postmaster, domain)

    @api.model
    def send_email(self, message, mail_server_id=None, smtp_server=None,
                   smtp_port=None, smtp_user=None, smtp_password=None,
                   smtp_encryption=None, smtp_debug=False, smtp_session=None):
        """Sends an email directly (no queuing).
        No retries are done, the caller should handle MailDeliveryException
        in order to ensure that the mail is never lost.

        If the mail_server_id is provided, sends using this mail server,
        ignoring other smtp_* arguments.
        If mail_server_id is None and smtp_server is None, use the default
        mail server (highest priority).
        If mail_server_id is None and smtp_server is not None, use the
        provided smtp_* arguments.
        If both mail_server_id and smtp_server are None, look for an
        'smtp_server' value in server config, and fails if not found.

        :param message: the email.message.Message to send. The envelope sender
                        will be extracted from the ``Return-Path`` (if
                        present), or will be set to the default bounce
                        address.  The envelope recipients will be extracted
                        from the combined list of ``To``, ``CC`` and ``BCC``
                        headers.
        :param smtp_session: optional pre-established SMTP session.  When
                             provided, overrides `mail_server_id` and all the
                             `smtp_*` parameters.  Passing the matching
                             `mail_server_id` may yield better debugging/log
                             messages.  The caller is in charge of
                             disconnecting the session.
        :param mail_server_id: optional id of ir.mail_server to use for
                               sending.  overrides other smtp_* arguments.
        :param smtp_server: optional hostname of SMTP server to use
        :param smtp_encryption: optional TLS mode, one of 'none', 'starttls'
                                or 'ssl' (see ir.mail_server fields for
                                explanation)
        :param smtp_port: optional SMTP port, if mail_server_id is not passed
        :param smtp_user: optional SMTP user, if mail_server_id is not passed
        :param smtp_password: optional SMTP password to use, if
                              mail_server_id is not passed
        :param smtp_debug: optional SMTP debug flag, if mail_server_id is not passed
        :return: the Message-ID of the message that was just sent, if
                 successfully sent, otherwise raises MailDeliveryException
                 and logs root cause.
        """
        # Use the default bounce address **only if** no Return-Path was
        # provided by caller.  Caller may be using Variable Envelope Return
        # Path (VERP) to detect no-longer valid email addresses.
        smtp_from = message['Return-Path'] or self._get_default_bounce_address() or message['From']
        assert smtp_from, "The Return-Path or From header is required for any outbound email"

        # The email's "Envelope From" (Return-Path), and all recipient addresses must only contain ASCII characters.
        from_rfc2822 = extract_rfc2822_addresses(smtp_from)
        assert from_rfc2822, ("Malformed 'Return-Path' or 'From' address: %r - "
                              "It should contain one valid plain ASCII email") % smtp_from
        # use last extracted email, to support rarities like 'Support@MyComp <*****@*****.**>'
        smtp_from = from_rfc2822[-1]
        email_to = message['To']
        email_cc = message['Cc']
        email_bcc = message['Bcc']

        smtp_to_list = [
            address
            for base in [email_to, email_cc, email_bcc]
            for address in extract_rfc2822_addresses(base)
            if address
        ]
        assert smtp_to_list, self.NO_VALID_RECIPIENT

        x_forge_to = message['X-Forge-To']
        if x_forge_to:
            # `To:` header forged, e.g. for posting on mail.channels, to avoid confusion
            del message['X-Forge-To']
            del message['To']  # avoid multiple To: headers!
            message['To'] = x_forge_to

        # Do not actually send emails in testing mode!
        if getattr(threading.currentThread(), 'testing', False):
            _test_logger.info("skip sending email in test mode")
            return message['Message-Id']

        try:
            message_id = message['Message-Id']

            smtp = smtp_session
            try:
                smtp = smtp or self.connect(
                    smtp_server, smtp_port, smtp_user, smtp_password,
                    smtp_encryption, smtp_debug, mail_server_id=mail_server_id)
                smtp.sendmail(smtp_from, smtp_to_list, message.as_string())
            finally:
                # do not quit() a pre-established smtp_session
                if smtp is not None and not smtp_session:
                    smtp.quit()
        except Exception as e:
            params = (ustr(smtp_server), e.__class__.__name__, ustr(e))
            msg = _("Mail delivery failed via SMTP server '%s'.\n%s: %s") % params
            _logger.info(msg)
            raise MailDeliveryException(_("Mail Delivery Failed"), msg)
        return message_id

    @api.onchange('smtp_encryption')
    def _onchange_encryption(self):
        """Adjust the default port to the chosen encryption scheme and warn
        when SSL is selected but unsupported by the Python runtime."""
        result = {}
        if self.smtp_encryption == 'ssl':
            self.smtp_port = 465
            if not 'SMTP_SSL' in smtplib.__all__:
                result['warning'] = {
                    'title': _('Warning'),
                    'message': _('Your server does not seem to support SSL, you may want to try STARTTLS instead'),
                }
        else:
            self.smtp_port = 25
        return result
class Users(models.Model):
    """Extends res.users with TOTP-based two-factor authentication."""
    _inherit = 'res.users'

    # Base32-encoded shared TOTP secret; NO_ACCESS hides it from the regular
    # ORM access layer, and it is never copied when duplicating a user.
    totp_secret = fields.Char(copy=False, groups=fields.NO_ACCESS)
    totp_enabled = fields.Boolean(string="Two-factor authentication", compute='_compute_totp_enabled')
    # API keys bearing the trusted-device scope act as "remember this
    # device" tokens and are listed separately from regular API keys.
    totp_trusted_device_ids = fields.One2many('res.users.apikeys', 'user_id',
        string="Trusted Devices", domain=[('scope', '=', TRUSTED_DEVICE_SCOPE)])
    api_key_ids = fields.One2many(domain=[('scope', '!=', TRUSTED_DEVICE_SCOPE)])

    def __init__(self, pool, cr):
        # Make the 2FA fields readable from the user's own preferences form.
        init_res = super().__init__(pool, cr)
        type(self).SELF_READABLE_FIELDS = self.SELF_READABLE_FIELDS + ['totp_enabled', 'totp_trusted_device_ids']
        return init_res

    def _mfa_type(self):
        """Return the MFA mechanism for this user ('totp' when enabled, or
        whatever a parent module reports), else None."""
        r = super()._mfa_type()
        if r is not None:
            return r
        if self.totp_enabled:
            return 'totp'

    def _mfa_url(self):
        """Return the URL of the second-factor login page, if applicable."""
        r = super()._mfa_url()
        if r is not None:
            return r
        if self._mfa_type() == 'totp':
            return '/web/login/totp'

    @api.depends('totp_secret')
    def _compute_totp_enabled(self):
        # Read the secret through sudo: the field is NO_ACCESS for regular users.
        for r, v in zip(self, self.sudo()):
            r.totp_enabled = bool(v.totp_secret)

    def _rpc_api_keys_only(self):
        # 2FA enabled means we can't allow password-based RPC
        self.ensure_one()
        return self.totp_enabled or super()._rpc_api_keys_only()

    def _get_session_token_fields(self):
        # Including the secret means enabling/disabling 2FA invalidates
        # existing session tokens.
        return super()._get_session_token_fields() | {'totp_secret'}

    def _totp_check(self, code):
        """Validate ``code`` against the user's TOTP secret.

        Raises AccessDenied when the code does not match; logs the outcome
        either way.
        """
        sudo = self.sudo()
        key = base64.b32decode(sudo.totp_secret)
        match = TOTP(key).match(code)
        if match is None:
            _logger.info("2FA check: FAIL for %s %r", self, self.login)
            raise AccessDenied()
        _logger.info("2FA check: SUCCESS for %s %r", self, self.login)

    def _totp_try_setting(self, secret, code):
        """Enable TOTP with ``secret`` if ``code`` proves possession of it.

        :return: True on success; False when 2FA is already enabled, the
                 caller is not the user themselves, or the code is wrong.
        """
        if self.totp_enabled or self != self.env.user:
            _logger.info("2FA enable: REJECT for %s %r", self, self.login)
            return False

        secret = compress(secret).upper()
        match = TOTP(base64.b32decode(secret)).match(code)
        if match is None:
            _logger.info("2FA enable: REJECT CODE for %s %r", self, self.login)
            return False

        self.sudo().totp_secret = secret
        if request:
            self.flush()
            # update session token so the user does not get logged out (cache cleared by change)
            new_token = self.env.user._compute_session_token(request.session.sid)
            request.session.session_token = new_token
        _logger.info("2FA enable: SUCCESS for %s %r", self, self.login)
        return True

    @check_identity
    def totp_disable(self):
        """Disable 2FA for these users and revoke their trusted devices.

        Only the user themselves, an admin, or superuser mode may disable
        it; returns False on rejection, otherwise a client notification
        action.
        """
        logins = ', '.join(map(repr, self.mapped('login')))
        if not (self == self.env.user or self.env.user._is_admin() or self.env.su):
            _logger.info("2FA disable: REJECT for %s (%s) by uid #%s", self, logins, self.env.user.id)
            return False

        self.revoke_all_devices()
        self.sudo().write({'totp_secret': False})
        if request and self == self.env.user:
            self.flush()
            # update session token so the user does not get logged out (cache cleared by change)
            new_token = self.env.user._compute_session_token(request.session.sid)
            request.session.session_token = new_token
        _logger.info("2FA disable: SUCCESS for %s (%s) by uid #%s", self, logins, self.env.user.id)
        return {
            'type': 'ir.actions.client',
            'tag': 'display_notification',
            'params': {
                'type': 'warning',
                'message': _("Two-factor authentication disabled for user(s) %s", logins),
                'next': {'type': 'ir.actions.act_window_close'},
            }
        }

    @check_identity
    def totp_enable_wizard(self):
        """Open the TOTP enrollment wizard with a freshly generated secret.

        Only the current user may enroll themselves, and only once.
        """
        if self.env.user != self:
            raise UserError(_("Two-factor authentication can only be enabled for yourself"))

        if self.totp_enabled:
            raise UserError(_("Two-factor authentication already enabled"))

        secret_bytes_count = TOTP_SECRET_SIZE // 8
        secret = base64.b32encode(os.urandom(secret_bytes_count)).decode()
        # format secret in groups of 4 characters for readability
        secret = ' '.join(map(''.join, zip(*[iter(secret)]*4)))
        w = self.env['auth_totp.wizard'].create({
            'user_id': self.id,
            'secret': secret,
        })
        return {
            'type': 'ir.actions.act_window',
            'target': 'new',
            'res_model': 'auth_totp.wizard',
            'name': _("Enable Two-Factor Authentication"),
            'res_id': w.id,
            'views': [(False, 'form')],
        }

    @check_identity
    def revoke_all_devices(self):
        """Identity-checked entry point to drop all trusted devices."""
        self._revoke_all_devices()

    def _revoke_all_devices(self):
        # Remove every trusted-device API key for these users.
        self.totp_trusted_device_ids._remove()

    @api.model
    def change_password(self, old_passwd, new_passwd):
        # Changing the password voids every trusted device of the current user.
        self.env.user._revoke_all_devices()
        return super().change_password(old_passwd, new_passwd)
class StockPicking(models.Model):
    """Extends stock pickings with delivery-carrier integration:
    weights, packages, shipping cost and tracking."""
    _inherit = 'stock.picking'

    def _default_uom(self):
        """Return the kg unit of measure, falling back on the factor-1 UoM
        of the kg category when the kg reference record is missing."""
        weight_uom_id = self.env.ref('product.product_uom_kgm', raise_if_not_found=False)
        if not weight_uom_id:
            uom_categ_id = self.env.ref('product.product_uom_categ_kgm').id
            weight_uom_id = self.env['product.uom'].search(
                [('category_id', '=', uom_categ_id), ('factor', '=', 1)], limit=1)
        return weight_uom_id

    @api.one
    @api.depends('move_line_ids')
    def _compute_packages(self):
        # Distinct destination packages referenced by the picking's move lines.
        self.ensure_one()
        packs = set()
        for move_line in self.move_line_ids:
            if move_line.result_package_id:
                packs.add(move_line.result_package_id.id)
        self.package_ids = list(packs)

    @api.one
    @api.depends('move_line_ids')
    def _compute_bulk_weight(self):
        # Weight of all goods NOT placed in a package, converting each done
        # quantity to the product's own UoM before multiplying by unit weight.
        weight = 0.0
        for move_line in self.move_line_ids:
            if move_line.product_id and not move_line.result_package_id:
                weight += move_line.product_uom_id._compute_quantity(
                    move_line.qty_done, move_line.product_id.uom_id) * move_line.product_id.weight
        self.weight_bulk = weight

    @api.one
    @api.depends('package_ids', 'weight_bulk')
    def _compute_shipping_weight(self):
        # Total weight declared to the carrier: bulk goods plus all packages.
        self.shipping_weight = self.weight_bulk + sum(
            [pack.shipping_weight for pack in self.package_ids])

    carrier_price = fields.Float(string="Shipping Cost")
    delivery_type = fields.Selection(related='carrier_id.delivery_type', readonly=True)
    carrier_id = fields.Many2one("delivery.carrier", string="Carrier")
    volume = fields.Float(copy=False)
    weight = fields.Float(compute='_cal_weight', digits=dp.get_precision('Stock Weight'), store=True)
    carrier_tracking_ref = fields.Char(string='Tracking Reference', copy=False)
    carrier_tracking_url = fields.Char(string='Tracking URL', compute='_compute_carrier_tracking_url')
    number_of_packages = fields.Integer(string='Number of Packages', copy=False)
    weight_uom_id = fields.Many2one(
        'product.uom', string='Unit of Measure', required=True, readonly="1",
        help="Unit of measurement for Weight", default=_default_uom)
    package_ids = fields.Many2many('stock.quant.package', compute='_compute_packages', string='Packages')
    weight_bulk = fields.Float('Bulk Weight', compute='_compute_bulk_weight')
    shipping_weight = fields.Float("Weight for Shipping", compute='_compute_shipping_weight')

    @api.depends('carrier_id', 'carrier_tracking_ref')
    def _compute_carrier_tracking_url(self):
        # Only meaningful when both a carrier and a tracking ref exist.
        for picking in self:
            picking.carrier_tracking_url = picking.carrier_id.get_tracking_link(
                picking) if picking.carrier_id and picking.carrier_tracking_ref else False

    # NOTE(review): 'product_id' does not look like a field of stock.picking;
    # confirm whether this depends entry is intentional.
    @api.depends('product_id', 'move_lines')
    def _cal_weight(self):
        # Sum the weight of all non-cancelled moves.
        for picking in self:
            picking.weight = sum(move.weight for move in picking.move_lines if move.state != 'cancel')

    @api.multi
    def do_transfer(self):
        """Transfer the picking, then notify the carrier and, if configured,
        add the delivery cost to the sale order."""
        # TDE FIXME: should work in batch
        self.ensure_one()
        res = super(StockPicking, self).do_transfer()

        if self.carrier_id and self.carrier_id.integration_level == 'rate_and_ship':
            self.send_to_shipper()
        if self.carrier_id:
            self._add_delivery_cost_to_so()

        return res

    @api.multi
    def put_in_pack(self):
        """Put products in a pack; for carrier-specific delivery types, open
        the package-details wizard first."""
        if self.carrier_id and self.carrier_id.delivery_type not in [
                'base_on_rule', 'fixed']:
            view_id = self.env.ref('delivery.choose_delivery_package_view_form').id
            return {
                'name': _('Package Details'),
                'type': 'ir.actions.act_window',
                'view_mode': 'form',
                'res_model': 'choose.delivery.package',
                'view_id': view_id,
                'views': [(view_id, 'form')],
                'target': 'new',
                'context': {
                    'current_package_carrier_type': self.carrier_id.delivery_type,
                }
            }
        else:
            return self._put_in_pack()

    @api.multi
    def action_send_confirmation_email(self):
        """Open the mail composer pre-loaded with the delivery confirmation template."""
        self.ensure_one()
        delivery_template_id = self.env.ref(
            'delivery.mail_template_data_delivery_confirmation').id
        compose_form_id = self.env.ref('mail.email_compose_message_wizard_form').id
        ctx = dict(
            default_composition_mode='comment',
            default_res_id=self.id,
            default_model='stock.picking',
            default_use_template=bool(delivery_template_id),
            default_template_id=delivery_template_id,
            custom_layout='delivery.mail_template_data_delivery_notification')
        return {
            'type': 'ir.actions.act_window',
            'view_type': 'form',
            'view_mode': 'form',
            'res_model': 'mail.compose.message',
            'view_id': compose_form_id,
            'target': 'new',
            'context': ctx,
        }

    @api.multi
    def send_to_shipper(self):
        """Request a shipment from the carrier, storing its price and
        tracking number and logging a message on the picking."""
        self.ensure_one()
        res = self.carrier_id.send_shipping(self)[0]
        self.carrier_price = res['exact_price']
        self.carrier_tracking_ref = res['tracking_number']
        order_currency = self.sale_id.currency_id or self.company_id.currency_id
        msg = _("Shipment sent to carrier %s for shipping with tracking number %s<br/>Cost: %.2f %s") % (
            self.carrier_id.name, self.carrier_tracking_ref, self.carrier_price, order_currency.name)
        self.message_post(body=msg)

    @api.multi
    def _add_delivery_cost_to_so(self):
        # Invoice the shipping on delivery: add a delivery line on the SO.
        self.ensure_one()
        sale_order = self.sale_id
        if sale_order.invoice_shipping_on_delivery:
            sale_order._create_delivery_line(self.carrier_id, self.carrier_price)

    @api.multi
    def open_website_url(self):
        """Open the carrier's tracking page for this picking in a new tab."""
        self.ensure_one()
        if not self.carrier_tracking_url:
            raise UserError(
                _("Your delivery method has no redirect on courier provider's website to track this order."))
        client_action = {
            'type': 'ir.actions.act_url',
            'name': "Shipment Tracking Page",
            'target': 'new',
            'url': self.carrier_tracking_url,
        }
        return client_action

    @api.one
    def cancel_shipment(self):
        """Cancel the shipment with the carrier and clear the tracking ref."""
        self.carrier_id.cancel_shipment(self)
        msg = "Shipment %s cancelled" % self.carrier_tracking_ref
        self.message_post(body=msg)
        self.carrier_tracking_ref = False

    @api.multi
    def check_packages_are_identical(self):
        '''Some shippers require identical packages in the same shipment. This utility checks it.'''
        self.ensure_one()
        if self.package_ids:
            packages = [p.packaging_id for p in self.package_ids]
            if len(set(packages)) != 1:
                package_names = ', '.join([str(p.name) for p in packages])
                # NOTE(review): the %-interpolation happens inside _(), which
                # defeats translation lookup of the format string — confirm
                # whether this should be _('...%s') % package_names instead.
                raise UserError(
                    _('You are shipping different packaging types in the same shipment.\nPackaging Types: %s' % package_names))
        return True
class Inventory(models.Model):
    """Inventory adjustment: counts stock on hand and generates the
    corrective stock moves when validated."""
    _name = "stock.inventory"
    _description = "Inventory"
    _order = "date desc, id desc"
    _inherit = ['mail.thread', 'mail.activity.mixin']

    name = fields.Char('Inventory Reference',
                       default="Inventory",
                       readonly=True,
                       required=True,
                       states={'draft': [('readonly', False)]})
    date = fields.Datetime(
        'Inventory Date',
        readonly=True,
        required=True,
        default=fields.Datetime.now,
        help=
        "If the inventory adjustment is not validated, date at which the theoritical quantities have been checked.\n"
        "If the inventory adjustment is validated, date at which the inventory adjustment has been validated."
    )
    line_ids = fields.One2many('stock.inventory.line',
                               'inventory_id',
                               string='Inventories',
                               copy=False,
                               readonly=False,
                               states={'done': [('readonly', True)]})
    move_ids = fields.One2many('stock.move',
                               'inventory_id',
                               string='Created Moves',
                               states={'done': [('readonly', True)]})
    state = fields.Selection(string='Status',
                             selection=[('draft', 'Draft'),
                                        ('cancel', 'Cancelled'),
                                        ('confirm', 'In Progress'),
                                        ('done', 'Validated')],
                             copy=False,
                             index=True,
                             readonly=True,
                             tracking=True,
                             default='draft')
    company_id = fields.Many2one('res.company',
                                 'Company',
                                 readonly=True,
                                 index=True,
                                 required=True,
                                 states={'draft': [('readonly', False)]},
                                 default=lambda self: self.env.company)
    location_ids = fields.Many2many(
        'stock.location',
        string='Locations',
        readonly=True,
        check_company=True,
        states={'draft': [('readonly', False)]},
        domain=
        "[('company_id', '=', company_id), ('usage', 'in', ['internal', 'transit'])]"
    )
    product_ids = fields.Many2many(
        'product.product',
        string='Products',
        check_company=True,
        domain=
        "[('type', '=', 'product'), '|', ('company_id', '=', False), ('company_id', '=', company_id)]",
        readonly=True,
        states={'draft': [('readonly', False)]},
        help="Specify Products to focus your inventory on particular Products."
    )
    start_empty = fields.Boolean(
        'Empty Inventory', help="Allows to start with an empty inventory.")
    prefill_counted_quantity = fields.Selection(
        string='Counted Quantities',
        help=
        "Allows to start with a pre-filled counted quantity for each lines or "
        "with all counted quantities set to zero.",
        default='counted',
        selection=[('counted', 'Default to stock on hand'),
                   ('zero', 'Default to zero')])
    exhausted = fields.Boolean('Include Exhausted Products',
                               readonly=True,
                               states={'draft': [('readonly', False)]},
                               help="Include also products with quantity of 0")

    @api.onchange('company_id')
    def _onchange_company_id(self):
        # If the multilocation group is not active, default the location to the one of the main
        # warehouse.
        if not self.user_has_groups('stock.group_stock_multi_locations'):
            warehouse = self.env['stock.warehouse'].search(
                [('company_id', '=', self.company_id.id)], limit=1)
            if warehouse:
                self.location_ids = warehouse.lot_stock_id

    def copy_data(self, default=None):
        """Suffix the reference with "(copy)" when duplicating."""
        name = _("%s (copy)") % (self.name)
        default = dict(default or {}, name=name)
        return super(Inventory, self).copy_data(default)

    def unlink(self):
        """Forbid deleting a started/validated adjustment, except while
        the module is being uninstalled.

        :raises UserError: if any record is not draft/cancelled
        """
        for inventory in self:
            if (inventory.state not in ('draft', 'cancel')
                    and not self.env.context.get(MODULE_UNINSTALL_FLAG,
                                                 False)):
                raise UserError(
                    _('You can only delete a draft inventory adjustment. If the inventory adjustment is not done, you can cancel it.'
                      ))
        return super(Inventory, self).unlink()

    def action_validate(self):
        """Validate the adjustment, or open the tracking-confirmation
        wizard when tracked products miss lot/serial information.

        :return: True on success, or an act_window dict for the wizard
        :raises UserError: if the user is not a stock manager or the
            adjustment is not in the 'confirm' state
        """
        if not self.exists():
            return
        self.ensure_one()
        if not self.user_has_groups('stock.group_stock_manager'):
            raise UserError(
                _("Only a stock manager can validate an inventory adjustment.")
            )
        if self.state != 'confirm':
            raise UserError(
                _(
                    "You can't validate the inventory '%s', maybe this inventory "
                    "has been already validated or isn't ready.", self.name))
        # Lines on lot/serial-tracked products with no lot and a changed
        # quantity need explicit user confirmation.
        inventory_lines = self.line_ids.filtered(
            lambda l: l.product_id.tracking in ['lot', 'serial'] and not l.
            prod_lot_id and l.theoretical_qty != l.product_qty)
        # Serial-tracked lines with a lot AND qty > 1 are invalid; their
        # presence suppresses the confirmation wizard below.
        lines = self.line_ids.filtered(lambda l: float_compare(
            l.product_qty, 1, precision_rounding=l.product_uom_id.rounding
        ) > 0 and l.product_id.tracking == 'serial' and l.prod_lot_id)
        if inventory_lines and not lines:
            wiz_lines = [(0, 0, {
                'product_id': product.id,
                'tracking': product.tracking
            }) for product in inventory_lines.mapped('product_id')]
            wiz = self.env['stock.track.confirmation'].create({
                'inventory_id': self.id,
                'tracking_line_ids': wiz_lines
            })
            return {
                'name': _('Tracked Products in Inventory Adjustment'),
                'type': 'ir.actions.act_window',
                'view_mode': 'form',
                'views': [(False, 'form')],
                'res_model': 'stock.track.confirmation',
                'target': 'new',
                'res_id': wiz.id,
            }
        self._action_done()
        self.line_ids._check_company()
        self._check_company()
        return True

    def _action_done(self):
        """Reject negative counted quantities (unless equal to the
        theoretical one), then generate and post the inventory moves."""
        negative = next(
            (line for line in self.mapped('line_ids') if line.product_qty < 0
             and line.product_qty != line.theoretical_qty), False)
        if negative:
            raise UserError(
                _(
                    'You cannot set a negative product quantity in an inventory line:\n\t%s - qty: %s',
                    negative.product_id.display_name, negative.product_qty))
        self.action_check()
        self.write({'state': 'done', 'date': fields.Datetime.now()})
        self.post_inventory()
        return True

    def post_inventory(self):
        # The inventory is posted as a single step which means quants cannot be moved from an internal location to another using an inventory
        # as they will be moved to inventory loss, and other quants will be created to the encoded quant location. This is a normal behavior
        # as quants cannot be reuse from inventory location (users can still manually move the products before/after the inventory if they want).
        self.mapped('move_ids').filtered(
            lambda move: move.state != 'done')._action_done()
        return True

    def action_check(self):
        """ Checks the inventory and computes the stock move to do """
        # tde todo: clean after _generate_moves
        for inventory in self.filtered(lambda x: x.state not in
                                       ('done', 'cancel')):
            # first remove the existing stock moves linked to this inventory
            inventory.with_context(
                prefetch_fields=False).mapped('move_ids').unlink()
            inventory.line_ids._generate_moves()

    def action_cancel_draft(self):
        """Cancel the generated moves, drop all lines and reset to draft."""
        self.mapped('move_ids')._action_cancel()
        self.line_ids.unlink()
        self.write({'state': 'draft'})

    def action_start(self):
        """Confirm the adjustment and open its line editor."""
        self.ensure_one()
        self._action_start()
        self._check_company()
        return self.action_open_inventory_lines()

    def _action_start(self):
        """ Confirms the Inventory Adjustment and generates its inventory lines
        if its state is draft and don't have already inventory lines (can happen
        with demo data or tests).
        """
        for inventory in self:
            if inventory.state != 'draft':
                continue
            vals = {'state': 'confirm', 'date': fields.Datetime.now()}
            if not inventory.line_ids and not inventory.start_empty:
                self.env['stock.inventory.line'].create(
                    inventory._get_inventory_lines_values())
            inventory.write(vals)

    def action_open_inventory_lines(self):
        """Return the act_window showing this adjustment's lines, with
        defaults and domain restricted to its locations/products."""
        self.ensure_one()
        action = {
            'type': 'ir.actions.act_window',
            'view_mode': 'tree',
            'name': _('Inventory Lines'),
            'res_model': 'stock.inventory.line',
        }
        context = {
            'default_is_editable': True,
            'default_inventory_id': self.id,
            'default_company_id': self.company_id.id,
        }
        # Define domains and context
        domain = [('inventory_id', '=', self.id),
                  ('location_id.usage', 'in', ['internal', 'transit'])]
        if self.location_ids:
            context['default_location_id'] = self.location_ids[0].id
            if len(self.location_ids) == 1:
                if not self.location_ids[0].child_ids:
                    context['readonly_location_id'] = True
        if self.product_ids:
            # no_create on product_id field
            action['view_id'] = self.env.ref(
                'stock.stock_inventory_line_tree_no_product_create').id
            if len(self.product_ids) == 1:
                context['default_product_id'] = self.product_ids[0].id
        else:
            # no product_ids => we're allowed to create new products in tree
            action['view_id'] = self.env.ref(
                'stock.stock_inventory_line_tree').id
        action['context'] = context
        action['domain'] = domain
        return action

    def action_view_related_move_lines(self):
        """Return an act_window listing the move lines created by this
        adjustment's moves."""
        self.ensure_one()
        domain = [('move_id', 'in', self.move_ids.ids)]
        action = {
            'name': _('Product Moves'),
            'type': 'ir.actions.act_window',
            'res_model': 'stock.move.line',
            'view_type': 'list',
            'view_mode': 'list,form',
            'domain': domain,
        }
        return action

    def action_print(self):
        """Print the inventory report for this adjustment."""
        return self.env.ref('stock.action_report_inventory').report_action(
            self)

    def _get_quantities(self):
        """Return quantities group by product_id, location_id, lot_id, package_id and owner_id

        :return: a dict with keys as tuple of group by and quantity as value
        :rtype: dict
        """
        self.ensure_one()
        if self.location_ids:
            domain_loc = [('id', 'child_of', self.location_ids.ids)]
        else:
            domain_loc = [('company_id', '=', self.company_id.id),
                          ('usage', 'in', ['internal', 'transit'])]
        locations_ids = [
            l['id'] for l in self.env['stock.location'].search_read(
                domain_loc, ['id'])
        ]

        domain = [('company_id', '=', self.company_id.id),
                  ('quantity', '!=', '0'),
                  ('location_id', 'in', locations_ids)]
        if self.prefill_counted_quantity == 'zero':
            domain.append(('product_id.active', '=', True))

        if self.product_ids:
            domain = expression.AND(
                [domain, [('product_id', 'in', self.product_ids.ids)]])

        # NOTE: this local `fields` list shadows the odoo `fields` module
        # for the rest of the method body.
        fields = [
            'product_id', 'location_id', 'lot_id', 'package_id', 'owner_id',
            'quantity:sum'
        ]
        group_by = [
            'product_id', 'location_id', 'lot_id', 'package_id', 'owner_id'
        ]

        quants = self.env['stock.quant'].read_group(domain,
                                                    fields,
                                                    group_by,
                                                    lazy=False)
        # read_group returns (id, display_name) pairs for relational
        # fields; keep only the id (or False) in the key tuples.
        return {(quant['product_id'] and quant['product_id'][0] or False,
                 quant['location_id'] and quant['location_id'][0] or False,
                 quant['lot_id'] and quant['lot_id'][0] or False,
                 quant['package_id'] and quant['package_id'][0] or False,
                 quant['owner_id'] and quant['owner_id'][0] or False):
                quant['quantity']
                for quant in quants}

    def _get_exhausted_inventory_lines_vals(self, non_exhausted_set):
        """Return the values of the inventory lines to create if the user
        wants to include exhausted products. Exhausted products are products
        without quantities or quantity equal to 0.

        :param non_exhausted_set: set of tuple (product_id, location_id) of
            non exhausted product-location
        :return: a list containing the `stock.inventory.line` values to create
        :rtype: list
        """
        self.ensure_one()
        if self.product_ids:
            product_ids = self.product_ids.ids
        else:
            product_ids = self.env['product.product'].search_read([
                '|', ('company_id', '=', self.company_id.id),
                ('company_id', '=', False), ('type', '=', 'product'),
                ('active', '=', True)
            ], ['id'])
            product_ids = [p['id'] for p in product_ids]

        if self.location_ids:
            location_ids = self.location_ids.ids
        else:
            location_ids = self.env['stock.warehouse'].search([
                ('company_id', '=', self.company_id.id)
            ]).lot_stock_id.ids

        vals = []
        for product_id in product_ids:
            for location_id in location_ids:
                if ((product_id, location_id) not in non_exhausted_set):
                    vals.append({
                        'inventory_id': self.id,
                        'product_id': product_id,
                        'location_id': location_id,
                        'theoretical_qty': 0
                    })
        return vals

    def _get_inventory_lines_values(self):
        """Return the values of the inventory lines to create for this inventory.

        :return: a list containing the `stock.inventory.line` values to create
        :rtype: list
        """
        self.ensure_one()
        quants_groups = self._get_quantities()
        vals = []
        product_ids = OrderedSet()
        for (product_id, location_id, lot_id, package_id,
             owner_id), quantity in quants_groups.items():
            line_values = {
                'inventory_id':
                self.id,
                'product_qty':
                0 if self.prefill_counted_quantity == "zero" else quantity,
                'theoretical_qty':
                quantity,
                'prod_lot_id':
                lot_id,
                'partner_id':
                owner_id,
                'product_id':
                product_id,
                'location_id':
                location_id,
                'package_id':
                package_id
            }
            product_ids.add(product_id)
            vals.append(line_values)
        # Resolve each product's UoM with a single batched browse instead
        # of one lookup per line.
        product_id_to_product = dict(
            zip(product_ids, self.env['product.product'].browse(product_ids)))
        for val in vals:
            val['product_uom_id'] = product_id_to_product[
                val['product_id']].product_tmpl_id.uom_id.id
        if self.exhausted:
            vals += self._get_exhausted_inventory_lines_vals({
                (l['product_id'], l['location_id'])
                for l in vals
            })
        return vals
class Import(models.TransientModel):
    """Transient wizard holding an uploaded file and the logic to parse
    it, preview it, match its columns to model fields and import it."""
    _name = 'base_import.import'

    # allow imports to survive for 12h in case user is slow
    _transient_max_hours = 12.0

    res_model = fields.Char('Model')
    file = fields.Binary(
        'File', help="File to check and/or import, raw binary (not base64)")
    file_name = fields.Char('File Name')
    file_type = fields.Char('File Type')

    @api.model
    def get_fields(self, model, depth=FIELDS_RECURSION_LIMIT):
        """ Recursively get fields for the provided model (through
        fields_get) and filter them according to importability

        The output format is a list of ``Field``, with ``Field`` defined as:

        .. class:: Field

            .. attribute:: id (str)

                A non-unique identifier for the field, used to compute
                the span of the ``required`` attribute: if multiple
                ``required`` fields have the same id, only one of them
                is necessary.

            .. attribute:: name (str)

                The field's logical (Flectra) name within the scope of
                its parent.

            .. attribute:: string (str)

                The field's human-readable name (``@string``)

            .. attribute:: required (bool)

                Whether the field is marked as required in the
                model. Clients must provide non-empty import values
                for all required fields or the import will error out.

            .. attribute:: fields (list(Field))

                The current field's subfields. The database and
                external identifiers for m2o and m2m fields; a
                filtered and transformed fields_get for o2m fields (to
                a variable depth defined by ``depth``).

                Fields with no sub-fields will have an empty list of
                sub-fields.

        :param str model: name of the model to get fields form
        :param int depth: depth of recursion into o2m fields
        """
        Model = self.env[model]
        importable_fields = [{
            'id': 'id',
            'name': 'id',
            'string': _("External ID"),
            'required': False,
            'fields': [],
            'type': 'id',
        }]
        model_fields = Model.fields_get()
        blacklist = models.MAGIC_COLUMNS + [Model.CONCURRENCY_CHECK_FIELD]
        for name, field in model_fields.items():
            if name in blacklist:
                continue
            # an empty string means the field is deprecated, @deprecated must
            # be absent or False to mean not-deprecated
            if field.get('deprecated', False) is not False:
                continue
            if field.get('readonly'):
                # readonly fields are only importable when some state makes
                # them writable
                states = field.get('states')
                if not states:
                    continue
                # states = {state: [(attr, value), (attr2, value2)], state2:...}
                if not any(attr == 'readonly' and value is False
                           for attr, value in itertools.chain.from_iterable(
                               states.values())):
                    continue
            field_value = {
                'id': name,
                'name': name,
                'string': field['string'],
                # Y U NO ALWAYS HAS REQUIRED
                'required': bool(field.get('required')),
                'fields': [],
                'type': field['type'],
            }

            if field['type'] in ('many2many', 'many2one'):
                field_value['fields'] = [
                    dict(field_value,
                         name='id',
                         string=_("External ID"),
                         type='id'),
                    dict(field_value,
                         name='.id',
                         string=_("Database ID"),
                         type='id'),
                ]
            elif field['type'] == 'one2many' and depth:
                field_value['fields'] = self.get_fields(field['relation'],
                                                        depth=depth - 1)
                if self.user_has_groups('base.group_no_one'):
                    field_value['fields'].append({
                        'id': '.id',
                        'name': '.id',
                        'string': _("Database ID"),
                        'required': False,
                        'fields': [],
                        'type': 'id'
                    })

            importable_fields.append(field_value)

        # TODO: cache on model?
        return importable_fields

    @api.multi
    def _read_file(self, options):
        """ Dispatch to specific method to read file content, according to its mimetype or file type

        :param options : dict of reading options (quoting, separator, ...)
        :raises ImportError: if the format needs a missing Python module
        :raises ValueError: if the format is not supported at all
        """
        self.ensure_one()
        # guess mimetype from file content
        mimetype = guess_mimetype(self.file)
        (file_extension, handler,
         req) = FILE_TYPE_DICT.get(mimetype, (None, None, None))
        if handler:
            try:
                return getattr(self, '_read_' + file_extension)(options)
            except Exception:
                # logging.warn is a deprecated alias; use warning
                _logger.warning(
                    "Failed to read file '%s' (transient id %d) using guessed mimetype %s",
                    self.file_name or '<unknown>', self.id, mimetype)

        # try reading with user-provided mimetype
        (file_extension, handler,
         req) = FILE_TYPE_DICT.get(self.file_type, (None, None, None))
        if handler:
            try:
                return getattr(self, '_read_' + file_extension)(options)
            except Exception:
                _logger.warning(
                    "Failed to read file '%s' (transient id %d) using user-provided mimetype %s",
                    self.file_name or '<unknown>', self.id, self.file_type)

        # fallback on file extensions as mime types can be unreliable (e.g.
        # software setting incorrect mime types, or non-installed software
        # leading to browser not sending mime types)
        if self.file_name:
            p, ext = os.path.splitext(self.file_name)
            if ext in EXTENSIONS:
                try:
                    return getattr(self, '_read_' + ext[1:])(options)
                except Exception:
                    _logger.warning(
                        "Failed to read file '%s' (transient id %s) using file extension",
                        self.file_name, self.id)

        if req:
            raise ImportError(
                _("Unable to load \"{extension}\" file: requires Python module \"{modname}\""
                  ).format(extension=file_extension, modname=req))
        raise ValueError(
            _("Unsupported file format \"{}\", import only supports CSV, ODS, XLS and XLSX"
              ).format(self.file_type))

    @api.multi
    def _read_xls(self, options):
        """ Read file content, using xlrd lib """
        book = xlrd.open_workbook(file_contents=self.file)
        return self._read_xls_book(book)

    def _read_xls_book(self, book):
        """Yield the first sheet's rows as lists of text values, skipping
        rows whose cells are all blank."""
        sheet = book.sheet_by_index(0)
        # emulate Sheet.get_rows for pre-0.9.4
        for row in pycompat.imap(sheet.row, range(sheet.nrows)):
            values = []
            for cell in row:
                if cell.ctype is xlrd.XL_CELL_NUMBER:
                    # render integral numbers without a trailing '.0'
                    is_float = cell.value % 1 != 0.0
                    values.append(
                        pycompat.text_type(cell.value) if is_float else
                        pycompat.text_type(int(cell.value)))
                elif cell.ctype is xlrd.XL_CELL_DATE:
                    is_datetime = cell.value % 1 != 0.0
                    # emulate xldate_as_datetime for pre-0.9.3
                    dt = datetime.datetime(*xlrd.xldate.xldate_as_tuple(
                        cell.value, book.datemode))
                    values.append(
                        dt.strftime(DEFAULT_SERVER_DATETIME_FORMAT)
                        if is_datetime else
                        dt.strftime(DEFAULT_SERVER_DATE_FORMAT))
                elif cell.ctype is xlrd.XL_CELL_BOOLEAN:
                    values.append(u'True' if cell.value else u'False')
                elif cell.ctype is xlrd.XL_CELL_ERROR:
                    raise ValueError(
                        _("Error cell found while reading XLS/XLSX file: %s")
                        % xlrd.error_text_from_code.get(
                            cell.value, "unknown error code %s" % cell.value))
                else:
                    values.append(cell.value)
            if any(x for x in values if x.strip()):
                yield values

    # use the same method for xlsx and xls files
    _read_xlsx = _read_xls

    @api.multi
    def _read_ods(self, options):
        """ Read file content using ODSReader custom lib """
        doc = odf_ods_reader.ODSReader(file=io.BytesIO(self.file))
        return (row for row in doc.getFirstSheet()
                if any(x for x in row if x.strip()))

    @api.multi
    def _read_csv(self, options):
        """ Returns a CSV-parsed iterator of all non-empty lines in the file

        :throws csv.Error: if an error is detected during CSV parsing
        :throws UnicodeDecodeError: if ``options.encoding`` is incorrect
        """
        csv_data = self.file

        # TODO: guess encoding with chardet? Or https://github.com/aadsm/jschardet
        encoding = options.get('encoding', 'utf-8')
        if encoding != 'utf-8':
            # csv module expect utf-8, see http://docs.python.org/2/library/csv.html
            csv_data = csv_data.decode(encoding).encode('utf-8')

        csv_iterator = pycompat.csv_reader(io.BytesIO(csv_data),
                                           quotechar=str(options['quoting']),
                                           delimiter=str(options['separator']))

        return (row for row in csv_iterator
                if any(x for x in row if x.strip()))

    @api.model
    def _try_match_column(self, preview_values, options):
        """ Returns the potential field types, based on the preview values, using heuristics

        :param preview_values : list of value for the column to determine
        :param options : parsing options
        """
        # If all values are empty in preview than can be any field
        if all([v == '' for v in preview_values]):
            return ['all']

        # If all values starts with __export__ this is probably an id
        if all(v.startswith('__export__') for v in preview_values):
            return ['id', 'many2many', 'many2one', 'one2many']

        # If all values can be cast to int type is either id, float or monetary
        # Exception: if we only have 1 and 0, it can also be a boolean
        try:
            field_type = [
                'id', 'integer', 'char', 'float', 'monetary', 'many2one',
                'many2many', 'one2many'
            ]
            res = set(int(v) for v in preview_values if v)
            if {0, 1}.issuperset(res):
                field_type.append('boolean')
            return field_type
        except ValueError:
            pass

        # If all values are either True or False, type is boolean
        if all(val.lower() in ('true', 'false', 't', 'f', '')
               for val in preview_values):
            return ['boolean']

        # If all values can be cast to float, type is either float or monetary
        # Or a date/datetime if it matches the pattern
        results = []
        try:
            thousand_separator = decimal_separator = False
            for val in preview_values:
                val = val.strip()
                if not val:
                    continue
                # value might have the currency symbol left or right from the value
                val = self._remove_currency_symbol(val)
                if val:
                    if options.get('float_thousand_separator') and options.get(
                            'float_decimal_separator'):
                        val = val.replace(
                            options['float_thousand_separator'],
                            '').replace(options['float_decimal_separator'],
                                        '.')
                    # We are now sure that this is a float, but we still need to find the
                    # thousand and decimal separator
                    else:
                        if val.count('.') > 1:
                            options['float_thousand_separator'] = '.'
                            options['float_decimal_separator'] = ','
                        elif val.count(',') > 1:
                            options['float_thousand_separator'] = ','
                            options['float_decimal_separator'] = '.'
                        elif val.find('.') > val.find(','):
                            thousand_separator = ','
                            decimal_separator = '.'
                        elif val.find(',') > val.find('.'):
                            thousand_separator = '.'
                            decimal_separator = ','
                else:
                    # This is not a float so exit this try
                    float('a')
            if thousand_separator and not options.get(
                    'float_decimal_separator'):
                options['float_thousand_separator'] = thousand_separator
                options['float_decimal_separator'] = decimal_separator
            results = ['float', 'monetary']
        except ValueError:
            pass

        # Try to see if all values are a date or datetime
        dt = datetime.datetime
        separator = [' ', '/', '-']
        date_format = ['%mr%dr%Y', '%dr%mr%Y', '%Yr%mr%d', '%Yr%dr%m']
        date_patterns = [options['date_format']
                         ] if options.get('date_format') else []
        if not date_patterns:
            # 'r' in the templates above is a placeholder for the separator
            date_patterns = [
                pattern.replace('r', sep) for sep in separator
                for pattern in date_format
            ]
            date_patterns.extend([p.replace('Y', 'y') for p in date_patterns])
        datetime_patterns = [options['datetime_format']
                             ] if options.get('datetime_format') else []
        if not datetime_patterns:
            datetime_patterns = [
                pattern + ' %H:%M:%S' for pattern in date_patterns
            ]

        current_date_pattern = False
        current_datetime_pattern = False

        def check_patterns(patterns, preview_values):
            # Return the first pattern that parses every non-empty value,
            # or False when none does.
            for pattern in patterns:
                match = True
                for val in preview_values:
                    if not val:
                        continue
                    try:
                        dt.strptime(val, pattern)
                    except ValueError:
                        match = False
                        break
                if match:
                    return pattern
            return False

        current_date_pattern = check_patterns(date_patterns, preview_values)
        if current_date_pattern:
            options['date_format'] = current_date_pattern
            results += ['date']

        current_datetime_pattern = check_patterns(datetime_patterns,
                                                  preview_values)
        if current_datetime_pattern:
            options['datetime_format'] = current_datetime_pattern
            results += ['datetime']

        if results:
            return results

        return [
            'id', 'text', 'char', 'datetime', 'selection', 'many2one',
            'one2many', 'many2many', 'html'
        ]

    @api.model
    def _find_type_from_preview(self, options, preview):
        """Guess the candidate field types of every preview column."""
        type_fields = []
        if preview:
            for column in range(0, len(preview[0])):
                preview_values = [value[column].strip() for value in preview]
                type_field = self._try_match_column(preview_values, options)
                type_fields.append(type_field)
        return type_fields

    def _match_header(self, header, fields, options):
        """ Attempts to match a given header to a field of the
        imported model.

        :param str header: header name from the CSV file
        :param fields:
        :param dict options:
        :returns: an empty list if the header couldn't be matched, or
                  all the fields to traverse
        :rtype: list(Field)
        """
        string_match = None
        for field in fields:
            # FIXME: should match all translations & original
            # TODO: use string distance (levenshtein? hamming?)
            if header.lower() == field['name'].lower():
                return [field]
            if header.lower() == field['string'].lower():
                # matching string are not reliable way because
                # strings have no unique constraint
                string_match = field
        if string_match:
            # this behavior is only applied if there is no matching field['name']
            return [string_match]

        if '/' not in header:
            return []

        # relational field path
        traversal = []
        subfields = fields
        # Iteratively dive into fields tree
        for section in header.split('/'):
            # Strip section in case spaces are added around '/' for
            # readability of paths
            match = self._match_header(section.strip(), subfields, options)
            # Any match failure, exit
            if not match:
                return []
            # prep subfields for next iteration within match[0]
            field = match[0]
            subfields = field['fields']
            traversal.append(field)
        return traversal

    def _match_headers(self, rows, fields, options):
        """ Attempts to match the imported model's fields to the
        titles of the parsed CSV file, if the file is supposed to have
        headers.

        Will consume the first line of the ``rows`` iterator.

        Returns a pair of ``([], {})`` if headers were not requested,
        or the list of headers and a dict mapping cell indices
        to key paths in the ``fields`` tree.

        :param Iterator rows:
        :param dict fields:
        :param dict options:
        :rtype: (list, dict) | (list(str), dict(int: list(str)))
        """
        if not options.get('headers'):
            return [], {}

        headers = next(rows)
        return headers, {
            index: [
                field['name']
                for field in self._match_header(header, fields, options)
            ] or None
            for index, header in enumerate(headers)
        }

    @api.multi
    def parse_preview(self, options, count=10):
        """ Generates a preview of the uploaded files, and performs
        fields-matching between the import's file data and the model's
        columns.

        If the headers are not requested (not options.headers),
        ``matches`` and ``headers`` are both ``False``.

        :param int count: number of preview lines to generate
        :param options: format-specific options.
                        CSV: {encoding, quoting, separator, headers}
        :type options: {str, str, str, bool}
        :returns: {fields, matches, headers, preview} | {error, preview}
        :rtype: {dict(str: dict(...)), dict(int, list(str)), list(str), list(list(str))} | {str, str}
        """
        self.ensure_one()
        fields = self.get_fields(self.res_model)
        try:
            rows = self._read_file(options)
            headers, matches = self._match_headers(rows, fields, options)
            # Match should have consumed the first row (iif headers), get
            # the ``count`` next rows for preview
            preview = list(itertools.islice(rows, count))
            assert preview, "CSV file seems to have no content"
            header_types = self._find_type_from_preview(options, preview)
            if options.get('keep_matches', False) and len(
                    options.get('fields', [])):
                # re-use the column mapping the user already confirmed
                matches = {}
                for index, match in enumerate(options.get('fields')):
                    if match:
                        matches[index] = match.split('/')

            return {
                'fields': fields,
                'matches': matches or False,
                'headers': headers or False,
                'headers_type': header_types or False,
                'preview': preview,
                'options': options,
                'advanced_mode': any([
                    len(models.fix_import_export_id_paths(col)) > 1
                    for col in headers or []
                ]),
                'debug': self.user_has_groups('base.group_no_one'),
            }
        except Exception as error:
            # Due to lazy generators, UnicodeDecodeError (for
            # instance) may only be raised when serializing the
            # preview to a list in the return.
            _logger.debug("Error during parsing preview", exc_info=True)
            preview = None
            if self.file_type == 'text/csv':
                preview = self.file[:ERROR_PREVIEW_BYTES].decode('iso-8859-1')
            return {
                'error': str(error),
                # iso-8859-1 ensures decoding will always succeed,
                # even if it yields non-printable characters. This is
                # in case of UnicodeDecodeError (or csv.Error
                # compounded with UnicodeDecodeError)
                'preview': preview,
            }

    @api.model
    def _convert_import_data(self, fields, options):
        """ Extracts the input BaseModel and fields list (with
        ``False``-y placeholders for fields to *not* import) into a
        format Model.import_data can use: a fields list without holes
        and the precisely matching data matrix

        :param list(str|bool): fields
        :returns: (data, fields)
        :rtype: (list(list(str)), list(str))
        :raises ValueError: in case the import data could not be converted
        """
        # Get indices for non-empty fields
        indices = [index for index, field in enumerate(fields) if field]
        if not indices:
            raise ValueError(
                _("You must configure at least one field to import"))
        # If only one index, itemgetter will return an atom rather
        # than a 1-tuple
        if len(indices) == 1:
            mapper = lambda row: [row[indices[0]]]
        else:
            mapper = operator.itemgetter(*indices)
        # Get only list of actually imported fields
        import_fields = [f for f in fields if f]

        rows_to_import = self._read_file(options)
        if options.get('headers'):
            # skip the header row
            rows_to_import = itertools.islice(rows_to_import, 1, None)
        data = [
            list(row) for row in pycompat.imap(mapper, rows_to_import)
            # don't try inserting completely empty rows (e.g. from
            # filtering out o2m fields)
            if any(row)
        ]

        return data, import_fields

    @api.model
    def _remove_currency_symbol(self, value):
        """Strip a known currency symbol (and ``()``-style negatives) from
        ``value``.

        :returns: the bare numeric string, or False when ``value`` does
                  not look like a single float (with optional currency)
        """
        value = value.strip()
        negative = False
        # Careful that some countries use () for negative so replace it by - sign
        if value.startswith('(') and value.endswith(')'):
            value = value[1:-1]
            negative = True
        float_regex = re.compile(r'([-]?[0-9.,]+)')
        split_value = [g for g in float_regex.split(value) if g]
        if len(split_value) > 2:
            # This is probably not a float
            return False
        if len(split_value) == 1:
            if float_regex.search(split_value[0]) is not None:
                return split_value[0] if not negative else '-' + split_value[0]
            return False
        else:
            # String has been split in 2, locate which index contains the float and which does not
            currency_index = 0
            if float_regex.search(split_value[0]) is not None:
                currency_index = 1
            # Check that currency exists
            currency = self.env['res.currency'].search([
                ('symbol', '=', split_value[currency_index].strip())
            ])
            if len(currency):
                return split_value[(currency_index + 1) %
                                   2] if not negative else '-' + split_value[
                                       (currency_index + 1) % 2]
            # Otherwise it is not a float with a currency symbol
            return False

    @api.model
    def _parse_float_from_data(self, data, index, name, options):
        """Normalize the float column ``index`` of ``data`` in place:
        strip separators and currency symbols so values parse as floats.

        :raises ValueError: when a value cannot be normalized
        """
        thousand_separator = options.get('float_thousand_separator', ' ')
        decimal_separator = options.get('float_decimal_separator', '.')
        for line in data:
            line[index] = line[index].strip()
            if not line[index]:
                continue
            line[index] = line[index].replace(thousand_separator, '').replace(
                decimal_separator, '.')
            old_value = line[index]
            line[index] = self._remove_currency_symbol(line[index])
            if line[index] is False:
                raise ValueError(
                    _("Column %s contains incorrect values (value: %s)" %
                      (name, old_value)))

    @api.multi
    def _parse_import_data(self, data, import_fields, options):
        """ Launch the first call to _parse_import_data_recursive with an
        empty prefix. _parse_import_data_recursive will be run
        recursively for each relational field.
        """
        return self._parse_import_data_recursive(self.res_model, '', data,
                                                 import_fields, options)

    @api.multi
    def _parse_import_data_recursive(self, model, prefix, data, import_fields,
                                     options):
        """Normalize date/datetime/float columns of ``data`` in place,
        recursing into relational sub-columns (``parent/child`` headers).

        :returns: the (mutated) ``data`` matrix
        """
        # Get fields of type date/datetime
        all_fields = self.env[model].fields_get()
        for name, field in all_fields.items():
            name = prefix + name
            if field['type'] in ('date', 'datetime') and name in import_fields:
                # Parse date
                index = import_fields.index(name)
                dt = datetime.datetime
                server_format = DEFAULT_SERVER_DATE_FORMAT if field[
                    'type'] == 'date' else DEFAULT_SERVER_DATETIME_FORMAT

                if options.get('%s_format' % field['type'],
                               server_format) != server_format:
                    # datetime.str[fp]time takes *native strings* in both
                    # versions, for both data and pattern
                    user_format = pycompat.to_native(
                        options.get('%s_format' % field['type']))
                    for num, line in enumerate(data):
                        if line[index]:
                            line[index] = line[index].strip()
                        if line[index]:
                            try:
                                line[index] = dt.strftime(
                                    dt.strptime(
                                        pycompat.to_native(line[index]),
                                        user_format), server_format)
                            except ValueError as e:
                                raise ValueError(
                                    _("Column %s contains incorrect values. Error in line %d: %s"
                                      ) % (name, num + 1, e))
                            except Exception as e:
                                raise ValueError(
                                    _("Error Parsing Date [%s:L%d]: %s") %
                                    (name, num + 1, e))
            # Check if the field is in import_field and is a relational (followed by /)
            # Also verify that the field name exactly match the import_field at the correct level.
            elif any(name + '/' in import_field and name ==
                     import_field.split('/')[prefix.count('/')]
                     for import_field in import_fields):
                # Recursive call with the relational as new model and add the field name to the prefix
                self._parse_import_data_recursive(field['relation'],
                                                  name + '/', data,
                                                  import_fields, options)
            elif field['type'] in ('float',
                                   'monetary') and name in import_fields:
                # Parse float, sometimes float values from file have currency symbol or () to denote a negative value
                # We should be able to manage both case
                index = import_fields.index(name)
                self._parse_float_from_data(data, index, name, options)
        return data

    @api.multi
    def do(self, fields, options, dryrun=False):
        """ Actual execution of the import

        :param fields: import mapping: maps each column to a field,
                       ``False`` for the columns to ignore
        :type fields: list(str|bool)
        :param dict options:
        :param bool dryrun: performs all import operations (and
                            validations) but rollbacks writes, allows
                            getting as much errors as possible without
                            the risk of clobbering the database.
        :returns: A list of errors. If the list is empty the import
                  executed fully and correctly. If the list is
                  non-empty it contains dicts with 3 keys ``type`` the
                  type of error (``error|warning``); ``message`` the
                  error message associated with the error (a string)
                  and ``record`` the data which failed to import (or
                  ``false`` if that data isn't available or provided)
        :rtype: list({type, message, record})
        """
        self.ensure_one()
        self._cr.execute('SAVEPOINT import')

        try:
            data, import_fields = self._convert_import_data(fields, options)
            # Parse date and float field
            data = self._parse_import_data(data, import_fields, options)
        except ValueError as error:
            return [{
                'type': 'error',
                'message': pycompat.text_type(error),
                'record': False,
            }]

        _logger.info('importing %d rows...', len(data))

        model = self.env[self.res_model].with_context(import_file=True)
        defer_parent_store = self.env.context.get(
            'defer_parent_store_computation', True)
        if defer_parent_store and model._parent_store:
            model = model.with_context(defer_parent_store_computation=True)

        import_result = model.load(import_fields, data)
        _logger.info('done')

        # If transaction aborted, RELEASE SAVEPOINT is going to raise
        # an InternalError (ROLLBACK should work, maybe). Ignore that.
        # TODO: to handle multiple errors, create savepoint around
        #       write and release it in case of write error (after
        #       adding error to errors array) => can keep on trying to
        #       import stuff, and rollback at the end if there is any
        #       error in the results.
        try:
            if dryrun:
                self._cr.execute('ROLLBACK TO SAVEPOINT import')
            else:
                self._cr.execute('RELEASE SAVEPOINT import')
        except psycopg2.InternalError:
            pass

        return import_result['messages']
# Argentinian localization (l10n_ar) partner extensions: CUIT (VAT AR) helper
# fields, AFIP responsibility / gross income data and identification validation.
class ResPartner(models.Model):
    _inherit = 'res.partner'

    # Compact CUIT (digits only) when the identification type is CUIT, else False.
    l10n_ar_vat = fields.Char(
        compute='_compute_l10n_ar_vat', string="VAT",
        help='Computed field that returns VAT or nothing if this one'
        ' is not set for the partner')
    # Dash-separated presentation form of the CUIT.
    l10n_ar_formatted_vat = fields.Char(
        compute='_compute_l10n_ar_formatted_vat', string="Formatted VAT",
        help='Computed field that will convert the'
        ' given VAT number to the format {person_category:2}-{number:10}-{validation_number:1}')
    l10n_ar_gross_income_number = fields.Char('Gross Income Number')
    l10n_ar_gross_income_type = fields.Selection(
        [('multilateral', 'Multilateral'), ('local', 'Local'), ('exempt', 'Exempt')],
        'Gross Income Type', help='Type of gross income: exempt, local, multilateral')
    l10n_ar_afip_responsibility_type_id = fields.Many2one(
        'l10n_ar.afip.responsibility.type', string='AFIP Responsibility Type', index=True,
        help='Defined by AFIP to'
        ' identify the type of responsibilities that a person or a legal entity could have and that impacts in the'
        ' type of operations and requirements they need.')
    l10n_ar_special_purchase_document_type_ids = fields.Many2many(
        'l10n_latam.document.type', 'res_partner_document_type_rel', 'partner_id', 'document_type_id',
        string='Other Purchase Documents',
        help='Set here if this partner can issue other documents further than'
        ' invoices, credit notes and debit notes')

    @api.depends('l10n_ar_vat')
    def _compute_l10n_ar_formatted_vat(self):
        """ This will add some dash to the CUIT number (VAT AR) in order to show in his
        natural format: {person_category}-{number}-{validation_number} """
        recs_ar_vat = self.filtered('l10n_ar_vat')
        for rec in recs_ar_vat:
            try:
                rec.l10n_ar_formatted_vat = stdnum.ar.cuit.format(rec.l10n_ar_vat)
            except Exception as error:
                # Best effort: fall back to the raw value if formatting fails.
                rec.l10n_ar_formatted_vat = rec.l10n_ar_vat
                # NOTE(review): 'runbot' is a non-standard logging level,
                # presumably registered elsewhere in this codebase -- confirm.
                _logger.runbot("Argentinian VAT was not formatted: %s", repr(error))
        # Partners without an AR VAT get an empty formatted value.
        remaining = self - recs_ar_vat
        remaining.l10n_ar_formatted_vat = False

    @api.depends('vat', 'l10n_latam_identification_type_id')
    def _compute_l10n_ar_vat(self):
        """ We add this computed field that returns cuit (VAT AR) or nothing if this one is not set for the partner.
        This Validation can be also done by calling ensure_vat() method that returns the cuit (VAT AR) or error if this
        one is not found """
        # AFIP code '80' identifies the CUIT identification type.
        recs_ar_vat = self.filtered(
            lambda x: x.l10n_latam_identification_type_id.l10n_ar_afip_code == '80' and x.vat)
        for rec in recs_ar_vat:
            # compact() strips separators, leaving only the digits.
            rec.l10n_ar_vat = stdnum.ar.cuit.compact(rec.vat)
        remaining = self - recs_ar_vat
        remaining.l10n_ar_vat = False

    @api.constrains('vat', 'l10n_latam_identification_type_id')
    def check_vat(self):
        """ Since we validate more documents than the vat for Argentinian partners (CUIT - VAT AR, CUIL, DNI) we
        extend this method in order to process it. """
        # NOTE by the moment we include the CUIT (VAT AR) validation also here because we extend the messages
        # errors to be more friendly to the user. In a future when Flectra improves the base_vat message errors
        # we can change this method and use the base_vat.check_vat_ar method.
        l10n_ar_partners = self.filtered(
            lambda x: x.l10n_latam_identification_type_id.l10n_ar_afip_code)
        l10n_ar_partners.l10n_ar_identification_validation()
        # Partners with an AR identification type are excluded from the
        # generic vat check run by super().
        return super(ResPartner, self - l10n_ar_partners).check_vat()

    @api.model
    def _commercial_fields(self):
        # Propagate the AFIP responsibility from the commercial partner to
        # its contacts.
        return super()._commercial_fields() + ['l10n_ar_afip_responsibility_type_id']

    def ensure_vat(self):
        """ This method is a helper that returns the VAT number is this one is defined if not raise an UserError.

        VAT is not mandatory field but for some Argentinian operations the VAT is required, for eg validate an
        electronic invoice, build a report, etc.

        This method can be used to validate is the VAT is proper defined in the partner """
        self.ensure_one()
        if not self.l10n_ar_vat:
            raise UserError(_('No VAT configured for partner [%i] %s') % (self.id, self.name))
        return self.l10n_ar_vat

    def _get_validation_module(self):
        # Return the stdnum module able to validate this partner's
        # identification type, or None for unsupported AFIP codes.
        self.ensure_one()
        if self.l10n_latam_identification_type_id.l10n_ar_afip_code in ['80', '86']:
            # '80' CUIT / '86' CUIL share the CUIT checksum algorithm.
            return stdnum.ar.cuit
        elif self.l10n_latam_identification_type_id.l10n_ar_afip_code == '96':
            return stdnum.ar.dni

    def l10n_ar_identification_validation(self):
        """ Validate the identification number of each partner with its
        matching stdnum module, raising a user-friendly ValidationError. """
        for rec in self.filtered('vat'):
            try:
                module = rec._get_validation_module()
            except Exception as error:
                module = False
                # NOTE(review): 'runbot' is a non-standard logging level,
                # presumably registered elsewhere in this codebase -- confirm.
                _logger.runbot("Argentinian document was not validated: %s", repr(error))
            if not module:
                # Identification type not handled: nothing to validate.
                continue
            try:
                module.validate(rec.vat)
            except module.InvalidChecksum:
                raise ValidationError(_('The validation digit is not valid for "%s"',
                                        rec.l10n_latam_identification_type_id.name))
            except module.InvalidLength:
                raise ValidationError(_('Invalid length for "%s"',
                                        rec.l10n_latam_identification_type_id.name))
            except module.InvalidFormat:
                raise ValidationError(_('Only numbers allowed for "%s"',
                                        rec.l10n_latam_identification_type_id.name))
            except Exception as error:
                raise ValidationError(repr(error))

    def _get_id_number_sanitize(self):
        """ Sanitize the identification number. Return the digits/integer value of the identification number
        If not vat number defined return 0 """
        self.ensure_one()
        if not self.vat:
            return 0
        if self.l10n_latam_identification_type_id.l10n_ar_afip_code in ['80', '86']:
            # Compact is the number clean up, remove all separators leave only digits
            res = int(stdnum.ar.cuit.compact(self.vat))
        else:
            # Generic fallback: keep only the digit characters.
            id_number = re.sub('[^0-9]', '', self.vat)
            res = int(id_number)
        return res
# Signup (auth_signup) partner extensions: signup token lifecycle and the
# computation of signup/reset-password URLs.
class ResPartner(models.Model):
    _inherit = 'res.partner'

    # Token fields are restricted to ERP managers and never copied on duplication.
    signup_token = fields.Char(copy=False, groups="base.group_erp_manager")
    signup_type = fields.Char(string='Signup Token Type', copy=False, groups="base.group_erp_manager")
    signup_expiration = fields.Datetime(copy=False, groups="base.group_erp_manager")
    signup_valid = fields.Boolean(compute='_compute_signup_valid', string='Signup Token is Valid')
    signup_url = fields.Char(compute='_compute_signup_url', string='Signup URL')

    @api.multi
    @api.depends('signup_token', 'signup_expiration')
    def _compute_signup_valid(self):
        # A token is valid when it exists and is not expired. Token fields are
        # group-protected, hence the parallel sudo iteration.
        dt = now()
        for partner, partner_sudo in pycompat.izip(self, self.sudo()):
            partner.signup_valid = bool(partner_sudo.signup_token) and \
                (not partner_sudo.signup_expiration or dt <= partner_sudo.signup_expiration)

    @api.multi
    def _compute_signup_url(self):
        """ proxy for function field towards actual implementation """
        result = self.sudo()._get_signup_url_for_action()
        for partner in self:
            # Reading the signup url of an internal user requires write access
            # on res.users; enforce it explicitly.
            if any(u.has_group('base.group_user') for u in partner.user_ids if u != self.env.user):
                self.env['res.users'].check_access_rights('write')
            partner.signup_url = result.get(partner.id, False)

    @api.multi
    def _get_signup_url_for_action(self, action=None, view_type=None, menu_id=None, res_id=None, model=None):
        """ generate a signup url for the given partner ids and action, possibly overriding
            the url state components (menu_id, id, view_type) """
        res = dict.fromkeys(self.ids, False)
        base_url = self.env['ir.config_parameter'].sudo().get_param('web.base.url')
        for partner in self:
            # when required, make sure the partner has a valid signup token
            if self.env.context.get('signup_valid') and not partner.user_ids:
                partner.sudo().signup_prepare()

            route = 'login'
            # the parameters to encode for the query
            query = dict(db=self.env.cr.dbname)
            signup_type = self.env.context.get('signup_force_type_in_url', partner.sudo().signup_type or '')
            if signup_type:
                route = 'reset_password' if signup_type == 'reset' else signup_type

            if partner.sudo().signup_token and signup_type:
                query['token'] = partner.sudo().signup_token
            elif partner.user_ids:
                query['login'] = partner.user_ids[0].login
            else:
                continue  # no signup token, no user, thus no signup url!

            # Optional url state components go in the fragment (after '#').
            fragment = dict()
            base = '/web#'
            if action == '/mail/view':
                base = '/mail/view?'
            elif action:
                fragment['action'] = action
            if view_type:
                fragment['view_type'] = view_type
            if menu_id:
                fragment['menu_id'] = menu_id
            if model:
                fragment['model'] = model
            if res_id:
                fragment['res_id'] = res_id

            if fragment:
                query['redirect'] = base + werkzeug.urls.url_encode(fragment)

            res[partner.id] = werkzeug.urls.url_join(
                base_url, "/web/%s?%s" % (route, werkzeug.urls.url_encode(query)))
        return res

    @api.multi
    def action_signup_prepare(self):
        # UI button proxy for signup_prepare().
        return self.signup_prepare()

    def signup_get_auth_param(self):
        """ Get a signup token related to the partner if signup is enabled.
            If the partner already has a user, get the login parameter.
        """
        res = defaultdict(dict)
        allow_signup = self.env['ir.config_parameter'].sudo().get_param(
            'auth_signup.allow_uninvited', 'False').lower() == 'true'
        for partner in self:
            if allow_signup and not partner.user_ids:
                partner = partner.sudo()
                partner.signup_prepare()
                res[partner.id]['auth_signup_token'] = partner.signup_token
            elif partner.user_ids:
                res[partner.id]['auth_login'] = partner.user_ids[0].login
        return res

    @api.multi
    def signup_cancel(self):
        # Invalidate any pending signup token for these partners.
        return self.write({'signup_token': False, 'signup_type': False, 'signup_expiration': False})

    @api.multi
    def signup_prepare(self, signup_type="signup", expiration=False):
        """ generate a new token for the partners with the given validity, if necessary
            :param expiration: the expiration datetime of the token (string, optional)
        """
        for partner in self:
            # Only (re)generate when an expiration is forced or the current
            # token is missing/expired.
            if expiration or not partner.signup_valid:
                token = random_token()
                # Extremely unlikely collision loop: ensure token uniqueness.
                while self._signup_retrieve_partner(token):
                    token = random_token()
                partner.write({'signup_token': token, 'signup_type': signup_type, 'signup_expiration': expiration})
        return True

    @api.model
    def _signup_retrieve_partner(self, token, check_validity=False, raise_exception=False):
        """ find the partner corresponding to a token, and possibly check its validity
            :param token: the token to resolve
            :param check_validity: if True, also check validity
            :param raise_exception: if True, raise exception instead of returning False
            :return: partner (browse record) or False (if raise_exception is False)
        """
        partner = self.search([('signup_token', '=', token)], limit=1)
        if not partner:
            if raise_exception:
                raise exceptions.UserError(_("Signup token '%s' is not valid") % token)
            return False
        if check_validity and not partner.signup_valid:
            if raise_exception:
                raise exceptions.UserError(_("Signup token '%s' is no longer valid") % token)
            return False
        return partner

    @api.model
    def signup_retrieve_info(self, token):
        """ retrieve the user info about the token
            :return: a dictionary with the user information:
                - 'db': the name of the database
                - 'token': the token, if token is valid
                - 'name': the name of the partner, if token is valid
                - 'login': the user login, if the user already exists
                - 'email': the partner email, if the user does not exist
        """
        partner = self._signup_retrieve_partner(token, raise_exception=True)
        res = {'db': self.env.cr.dbname}
        if partner.signup_valid:
            res['token'] = token
            res['name'] = partner.name
        if partner.user_ids:
            res['login'] = partner.user_ids[0].login
        else:
            res['email'] = res['login'] = partner.email or ''
        return res
class MailBlackListMixin(models.AbstractModel):
    """ Mixin that is inherited by all model with opt out. This mixin stores a normalized
    email based on primary_email field.

    A normalized email is considered as :
        - having a left part + @ + a right part (the domain can be without '.something')
        - being lower case
        - having no name before the address. Typically, having no 'Name <>'
    Ex:
        - Formatted Email : 'Name <*****@*****.**>'
        - Normalized Email : '*****@*****.**'

    The primary email field can be specified on the parent model, if it differs from the default one ('email')
    The email_normalized field can than be used on that model to search quickly on emails (by simple comparison
    and not using time consuming regex anymore).

    Using this email_normalized field, blacklist status is computed.

    Mail Thread capabilities are required for this mixin. """

    _name = 'mail.thread.blacklist'
    _inherit = ['mail.thread']
    _description = 'Mail Blacklist mixin'
    # Name of the field holding the email address; overridable by parent models.
    _primary_email = 'email'

    email_normalized = fields.Char(
        string='Normalized Email', compute="_compute_email_normalized", compute_sudo=True,
        store=True, invisible=True,
        help="This field is used to search on email address as the primary email field can contain more than strictly an email address.")
    # Note : is_blacklisted should only be used for display. As the compute is not depending on the blacklist,
    # once read, it won't be re-computed again if the blacklist is modified in the same request.
    is_blacklisted = fields.Boolean(
        string='Blacklist', compute="_compute_is_blacklisted", compute_sudo=True, store=False,
        search="_search_is_blacklisted", groups="base.group_user",
        help="If the email address is on the blacklist, the contact won't receive mass mailing anymore, from any list")
    # messaging
    message_bounce = fields.Integer('Bounce', help="Counter of the number of bounced emails for this contact", default=0)

    @api.depends(lambda self: [self._primary_email])
    def _compute_email_normalized(self):
        # Keep a lower-cased, name-stripped copy of the primary email so SQL
        # comparisons against the blacklist are cheap.
        self._assert_primary_email()
        for record in self:
            record.email_normalized = tools.email_normalize(record[self._primary_email])

    @api.model
    def _search_is_blacklisted(self, operator, value):
        # Assumes operator is '=' or '!=' and value is True or False
        # Flush pending ORM writes so the raw SQL below sees current data.
        self.flush(['email_normalized'])
        self.env['mail.blacklist'].flush(['email', 'active'])
        self._assert_primary_email()
        if operator != '=':
            if operator == '!=' and isinstance(value, bool):
                # Normalize '!=' to the equivalent '=' search.
                value = not value
            else:
                raise NotImplementedError()

        if value:
            query = """
                SELECT m.id
                    FROM mail_blacklist bl
                    JOIN %s m
                    ON m.email_normalized = bl.email AND bl.active
            """
        else:
            query = """
                SELECT m.id
                    FROM %s m
                    LEFT JOIN mail_blacklist bl
                    ON m.email_normalized = bl.email AND bl.active
                    WHERE bl.id IS NULL
            """
        # %s is the model's own table name (internal, not user input).
        self._cr.execute(query % self._table)
        res = self._cr.fetchall()
        if not res:
            # Always-false leaf when no record matches.
            return [(0, '=', 1)]
        return [('id', 'in', [r[0] for r in res])]

    @api.depends('email_normalized')
    def _compute_is_blacklisted(self):
        # TODO : Should remove the sudo as compute_sudo defined on methods.
        # But if user doesn't have access to mail.blacklist, doesn't work without sudo().
        blacklist = set(self.env['mail.blacklist'].sudo().search(
            [('email', 'in', self.mapped('email_normalized'))]).mapped('email'))
        for record in self:
            record.is_blacklisted = record.email_normalized in blacklist

    def _assert_primary_email(self):
        # Guard: the mixin is misconfigured unless _primary_email names an
        # existing Char field on the model.
        if not hasattr(self, "_primary_email") or not isinstance(self._primary_email, str):
            raise UserError(_('Invalid primary email field on model %s', self._name))
        if self._primary_email not in self._fields or self._fields[self._primary_email].type != 'char':
            raise UserError(_('Invalid primary email field on model %s', self._name))

    def _message_receive_bounce(self, email, partner):
        """ Override of mail.thread generic method. Purpose is to increment the
        bounce counter of the record. """
        super(MailBlackListMixin, self)._message_receive_bounce(email, partner)
        for record in self:
            record.message_bounce = record.message_bounce + 1

    def _message_reset_bounce(self, email):
        """ Override of mail.thread generic method. Purpose is to reset the
        bounce counter of the record. """
        super(MailBlackListMixin, self)._message_reset_bounce(email)
        self.write({'message_bounce': 0})

    def mail_action_blacklist_remove(self):
        # wizard access rights currently not working as expected and allows users without access to
        # open this wizard, therefore we check to make sure they have access before the wizard opens.
        can_access = self.env['mail.blacklist'].check_access_rights('write', raise_exception=False)
        if can_access:
            return {
                'name': _('Are you sure you want to unblacklist this Email Address?'),
                'type': 'ir.actions.act_window',
                'view_mode': 'form',
                'res_model': 'mail.blacklist.remove',
                'target': 'new',
            }
        else:
            raise AccessError(_("You do not have the access right to unblacklist emails. Please contact your administrator."))
# Contract log attached to a fleet vehicle: lifecycle states, recurring cost
# generation (cron) and expiration management (cron).
class FleetVehicleLogContract(models.Model):
    _inherit = ['mail.thread']
    _inherits = {'fleet.vehicle.cost': 'cost_id'}
    _name = 'fleet.vehicle.log.contract'
    _description = 'Contract information on a vehicle'
    _order = 'state desc,expiration_date'

    def compute_next_year_date(self, strdate):
        """ Return `strdate` (a Date string) shifted one year forward, as a
        Date string. Used as default expiration date. """
        oneyear = relativedelta(years=1)
        start_date = fields.Date.from_string(strdate)
        return fields.Date.to_string(start_date + oneyear)

    @api.model
    def default_get(self, default_fields):
        """ Default the cost part of the record to a 'contract' cost of
        subtype leasing, dated today. """
        res = super(FleetVehicleLogContract, self).default_get(default_fields)
        contract = self.env.ref('fleet.type_contract_leasing', raise_if_not_found=False)
        res.update({
            'date': fields.Date.context_today(self),
            'cost_subtype_id': contract and contract.id or False,
            'cost_type': 'contract'
        })
        return res

    name = fields.Text(compute='_compute_contract_name', store=True)
    active = fields.Boolean(default=True)
    start_date = fields.Date(
        'Contract Start Date', default=fields.Date.context_today,
        help='Date when the coverage of the contract begins')
    expiration_date = fields.Date(
        'Contract Expiration Date',
        default=lambda self: self.compute_next_year_date(fields.Date.context_today(self)),
        help='Date when the coverage of the contract expirates (by default, one year after begin date)')
    days_left = fields.Integer(compute='_compute_days_left', string='Warning Date')
    insurer_id = fields.Many2one('res.partner', 'Vendor')
    purchaser_id = fields.Many2one(
        'res.partner', 'Contractor',
        default=lambda self: self.env.user.partner_id.id,
        help='Person to which the contract is signed for')
    ins_ref = fields.Char('Contract Reference', size=64, copy=False)
    state = fields.Selection(
        [('futur', 'Incoming'), ('open', 'In Progress'), ('expired', 'Expired'),
         ('diesoon', 'Expiring Soon'), ('closed', 'Closed')],
        'Status', default='open', readonly=True,
        help='Choose whether the contract is still valid or not',
        track_visibility="onchange", copy=False)
    notes = fields.Text(
        'Terms and Conditions',
        help='Write here all supplementary information relative to this contract', copy=False)
    cost_generated = fields.Float(
        'Recurring Cost Amount',
        help="Costs paid at regular intervals, depending on the cost frequency. "
             "If the cost frequency is set to unique, the cost will be logged at the start date")
    cost_frequency = fields.Selection(
        [('no', 'No'), ('daily', 'Daily'), ('weekly', 'Weekly'), ('monthly', 'Monthly'), ('yearly', 'Yearly')],
        'Recurring Cost Frequency', default='no', help='Frequency of the recuring cost', required=True)
    generated_cost_ids = fields.One2many('fleet.vehicle.cost', 'contract_id', 'Generated Costs')
    sum_cost = fields.Float(compute='_compute_sum_cost', string='Indicative Costs Total')
    cost_id = fields.Many2one('fleet.vehicle.cost', 'Cost', required=True, ondelete='cascade')
    # we need to keep this field as a related with store=True because the graph view doesn't support
    # (1) to address fields from inherited table
    # (2) fields that aren't stored in database
    cost_amount = fields.Float(related='cost_id.amount', string='Amount', store=True)
    odometer = fields.Float(
        string='Odometer at creation',
        help='Odometer measure of the vehicle at the moment of the contract creation')

    @api.depends('vehicle_id', 'cost_subtype_id', 'date')
    def _compute_contract_name(self):
        # Name is "<vehicle> / <cost subtype> / <date>", parts omitted when unset.
        for record in self:
            name = record.vehicle_id.name
            if record.cost_subtype_id.name:
                name += ' / ' + record.cost_subtype_id.name
            if record.date:
                name += ' / ' + record.date
            record.name = name

    @api.depends('expiration_date', 'state')
    def _compute_days_left(self):
        """return a dict with as value for each contract an integer
        if contract is in an open state and is overdue, return 0
        if contract is in a closed state, return -1
        otherwise return the number of days before the contract expires
        """
        for record in self:
            if (record.expiration_date and (record.state == 'open' or record.state == 'expired')):
                today = fields.Date.from_string(fields.Date.today())
                renew_date = fields.Date.from_string(record.expiration_date)
                diff_time = (renew_date - today).days
                record.days_left = diff_time > 0 and diff_time or 0
            else:
                record.days_left = -1

    @api.depends('cost_ids.amount')
    def _compute_sum_cost(self):
        # Indicative total of all costs attached to the contract.
        for contract in self:
            contract.sum_cost = sum(contract.cost_ids.mapped('amount'))

    @api.onchange('vehicle_id')
    def _onchange_vehicle(self):
        # Align the odometer unit with the selected vehicle.
        if self.vehicle_id:
            self.odometer_unit = self.vehicle_id.odometer_unit

    @api.multi
    def contract_close(self):
        for record in self:
            record.state = 'closed'

    @api.multi
    def contract_open(self):
        for record in self:
            record.state = 'open'

    @api.multi
    def act_renew_contract(self):
        """ Duplicate the contract, shifted by its own duration, and open the
        copy in a form view. Only callable on a single record. """
        assert len(self.ids) == 1, "This operation should only be done for 1 single contract at a time, as it it suppose to open a window as result"
        for element in self:
            # compute end date
            startdate = fields.Date.from_string(element.start_date)
            enddate = fields.Date.from_string(element.expiration_date)
            diffdate = (enddate - startdate)
            default = {
                'date': fields.Date.context_today(self),
                'start_date': fields.Date.to_string(
                    fields.Date.from_string(element.expiration_date) + relativedelta(days=1)),
                'expiration_date': fields.Date.to_string(enddate + diffdate),
            }
            newid = element.copy(default).id
        # NOTE(review): 'view_type'/'view_mode' values look swapped compared to
        # the usual action-dict convention -- kept as-is to preserve behavior.
        return {
            'name': _("Renew Contract"),
            'view_mode': 'form',
            'view_id': self.env.ref('fleet.fleet_vehicle_log_contract_view_form').id,
            'view_type': 'tree,form',
            'res_model': 'fleet.vehicle.log.contract',
            'type': 'ir.actions.act_window',
            'domain': '[]',
            'res_id': newid,
            'context': {'active_id': newid},
        }

    @api.model
    def scheduler_manage_auto_costs(self):
        # This method is called by a cron task
        # It creates costs for contracts having the "recurring cost" field setted, depending on their frequency
        # For example, if a contract has a reccuring cost of 200 with a weekly frequency, this method creates a cost of 200 on the
        # first day of each week, from the date of the last recurring costs in the database to today
        # If the contract has not yet any recurring costs in the database, the method generates the recurring costs
        # from the start_date to today
        # The created costs are associated to a contract thanks to the many2one field contract_id
        # If the contract has no start_date, no cost will be created, even if the contract has recurring costs
        VehicleCost = self.env['fleet.vehicle.cost']
        deltas = {
            'yearly': relativedelta(years=+1),
            'monthly': relativedelta(months=+1),
            'weekly': relativedelta(weeks=+1),
            'daily': relativedelta(days=+1),
        }
        contracts = self.env['fleet.vehicle.log.contract'].search(
            [('state', '!=', 'closed')], offset=0, limit=None, order=None)
        for contract in contracts:
            if not contract.start_date or contract.cost_frequency == 'no':
                continue
            found = False
            last_cost_date = contract.start_date
            if contract.generated_cost_ids:
                last_autogenerated_cost = VehicleCost.search(
                    [('contract_id', '=', contract.id), ('auto_generated', '=', True)],
                    offset=0, limit=1, order='date desc')
                if last_autogenerated_cost:
                    found = True
                    last_cost_date = last_autogenerated_cost.date
            startdate = fields.Date.from_string(last_cost_date)
            if found:
                # Do not re-create the last generated cost: start one period later.
                startdate += deltas.get(contract.cost_frequency)
            today = fields.Date.from_string(fields.Date.context_today(self))
            # BUGFIX: the loop condition previously used the bitwise `&`
            # operator instead of `and`, and parsed `expiration_date` on every
            # iteration even when unset; a contract without an expiration date
            # then raised a TypeError (date compared to a non-date) and aborted
            # the whole cron run. A missing expiration date is now treated as
            # "runs until today", and the parse is hoisted out of the loop.
            limit_date = (fields.Date.from_string(contract.expiration_date)
                          if contract.expiration_date else today)
            while startdate <= today and startdate <= limit_date:
                data = {
                    'amount': contract.cost_generated,
                    'date': fields.Date.context_today(self),
                    'vehicle_id': contract.vehicle_id.id,
                    'cost_subtype_id': contract.cost_subtype_id.id,
                    'contract_id': contract.id,
                    'auto_generated': True
                }
                self.env['fleet.vehicle.cost'].create(data)
                startdate += deltas.get(contract.cost_frequency)
        return True

    @api.model
    def scheduler_manage_contract_expiration(self):
        # This method is called by a cron task
        # It manages the state of a contract, possibly by posting a message on the vehicle concerned and updating its status
        date_today = fields.Date.from_string(fields.Date.today())
        in_fifteen_days = fields.Date.to_string(date_today + relativedelta(days=+15))
        nearly_expired_contracts = self.search(
            [('state', '=', 'open'), ('expiration_date', '<', in_fifteen_days)])

        # Count soon-to-expire contracts per vehicle to post one message each.
        res = {}
        for contract in nearly_expired_contracts:
            if contract.vehicle_id.id in res:
                res[contract.vehicle_id.id] += 1
            else:
                res[contract.vehicle_id.id] = 1

        Vehicle = self.env['fleet.vehicle']
        for vehicle, value in res.items():
            Vehicle.browse(vehicle).message_post(body=_(
                '%s contract(s) will expire soon and should be renewed and/or closed!') % value)
        nearly_expired_contracts.write({'state': 'diesoon'})

        expired_contracts = self.search(
            [('state', '!=', 'expired'), ('expiration_date', '<', fields.Date.today())])
        expired_contracts.write({'state': 'expired'})

        futur_contracts = self.search(
            [('state', 'not in', ['futur', 'closed']), ('start_date', '>', fields.Date.today())])
        futur_contracts.write({'state': 'futur'})

        now_running_contracts = self.search(
            [('state', '=', 'futur'), ('start_date', '<=', fields.Date.today())])
        now_running_contracts.write({'state': 'open'})

    @api.model
    def run_scheduler(self):
        # Single cron entry point: generate recurring costs then refresh states.
        self.scheduler_manage_auto_costs()
        self.scheduler_manage_contract_expiration()