Example #1
0
from odoo import api, fields, models, tools


class SaleReport(models.Model):
    _name = "sale.report"
    _description = "Sales Analysis Report"
    _auto = False
    _rec_name = 'date'
    _order = 'date desc'

    @api.model
    def _get_done_states(self):
        return ['sale', 'done', 'paid']

    name = fields.Char('Order Reference', readonly=True)
    date = fields.Datetime('Order Date', readonly=True)
    product_id = fields.Many2one('product.product',
                                 'Product Variant',
                                 readonly=True)
    product_uom = fields.Many2one('uom.uom', 'Unit of Measure', readonly=True)
    product_uom_qty = fields.Float('Qty Ordered', readonly=True)
    qty_delivered = fields.Float('Qty Delivered', readonly=True)
    qty_to_invoice = fields.Float('Qty To Invoice', readonly=True)
    qty_invoiced = fields.Float('Qty Invoiced', readonly=True)
    partner_id = fields.Many2one('res.partner', 'Customer', readonly=True)
    company_id = fields.Many2one('res.company', 'Company', readonly=True)
    user_id = fields.Many2one('res.users', 'Salesperson', readonly=True)
    price_total = fields.Float('Total', readonly=True)
    price_subtotal = fields.Float('Untaxed Total', readonly=True)
    untaxed_amount_to_invoice = fields.Float('Untaxed Amount To Invoice',
                                             readonly=True)
    untaxed_amount_invoiced = fields.Float('Untaxed Amount Invoiced',
                                           readonly=True)
    product_tmpl_id = fields.Many2one('product.template',
                                      'Product',
                                      readonly=True)
    categ_id = fields.Many2one('product.category',
                               'Product Category',
                               readonly=True)
    nbr = fields.Integer('# of Lines', readonly=True)
    pricelist_id = fields.Many2one('product.pricelist',
                                   'Pricelist',
                                   readonly=True)
    analytic_account_id = fields.Many2one('account.analytic.account',
                                          'Analytic Account',
                                          readonly=True)
    team_id = fields.Many2one('crm.team', 'Sales Team', readonly=True)
    country_id = fields.Many2one('res.country',
                                 'Customer Country',
                                 readonly=True)
    industry_id = fields.Many2one('res.partner.industry',
                                  'Customer Industry',
                                  readonly=True)
    commercial_partner_id = fields.Many2one('res.partner',
                                            'Customer Entity',
                                            readonly=True)
    state = fields.Selection([
        ('draft', 'Draft Quotation'),
        ('sent', 'Quotation Sent'),
        ('sale', 'Sales Order'),
        ('done', 'Sales Done'),
        ('cancel', 'Cancelled'),
    ],
                             string='Status',
                             readonly=True)
    weight = fields.Float('Gross Weight', readonly=True)
    volume = fields.Float('Volume', readonly=True)

    discount = fields.Float('Discount %', readonly=True)
    discount_amount = fields.Float('Discount Amount', readonly=True)
    campaign_id = fields.Many2one('utm.campaign', 'Campaign')
    medium_id = fields.Many2one('utm.medium', 'Medium')
    source_id = fields.Many2one('utm.source', 'Source')

    order_id = fields.Many2one('sale.order', 'Order #', readonly=True)

    def _select_sale(self, fields=None):
        if not fields:
            fields = {}
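        # Amount columns are expressed in company currency: each amount is
        # divided by s.currency_rate, falling back to 1.0 when the rate is
        # missing or zero (the nested COALESCE/CASE). Lines without a product
        # (sections/notes) contribute 0, and the row id falls back to -s.id
        # for orders that have no lines at all.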
        select_ = """
            coalesce(min(l.id), -s.id) as id,
            l.product_id as product_id,
            t.uom_id as product_uom,
            CASE WHEN l.product_id IS NOT NULL THEN sum(l.product_uom_qty / u.factor * u2.factor) ELSE 0 END as product_uom_qty,
            CASE WHEN l.product_id IS NOT NULL THEN sum(l.qty_delivered / u.factor * u2.factor) ELSE 0 END as qty_delivered,
            CASE WHEN l.product_id IS NOT NULL THEN sum(l.qty_invoiced / u.factor * u2.factor) ELSE 0 END as qty_invoiced,
            CASE WHEN l.product_id IS NOT NULL THEN sum(l.qty_to_invoice / u.factor * u2.factor) ELSE 0 END as qty_to_invoice,
            CASE WHEN l.product_id IS NOT NULL THEN sum(l.price_total / CASE COALESCE(s.currency_rate, 0) WHEN 0 THEN 1.0 ELSE s.currency_rate END) ELSE 0 END as price_total,
            CASE WHEN l.product_id IS NOT NULL THEN sum(l.price_subtotal / CASE COALESCE(s.currency_rate, 0) WHEN 0 THEN 1.0 ELSE s.currency_rate END) ELSE 0 END as price_subtotal,
            CASE WHEN l.product_id IS NOT NULL THEN sum(l.untaxed_amount_to_invoice / CASE COALESCE(s.currency_rate, 0) WHEN 0 THEN 1.0 ELSE s.currency_rate END) ELSE 0 END as untaxed_amount_to_invoice,
            CASE WHEN l.product_id IS NOT NULL THEN sum(l.untaxed_amount_invoiced / CASE COALESCE(s.currency_rate, 0) WHEN 0 THEN 1.0 ELSE s.currency_rate END) ELSE 0 END as untaxed_amount_invoiced,
            count(*) as nbr,
            s.name as name,
            s.date_order as date,
            s.state as state,
            s.partner_id as partner_id,
            s.user_id as user_id,
            s.company_id as company_id,
            s.campaign_id as campaign_id,
            s.medium_id as medium_id,
            s.source_id as source_id,
            extract(epoch from avg(date_trunc('day',s.date_order)-date_trunc('day',s.create_date)))/(24*60*60)::decimal(16,2) as delay,
            t.categ_id as categ_id,
            s.pricelist_id as pricelist_id,
            s.analytic_account_id as analytic_account_id,
            s.team_id as team_id,
            p.product_tmpl_id,
            partner.country_id as country_id,
            partner.industry_id as industry_id,
            partner.commercial_partner_id as commercial_partner_id,
            CASE WHEN l.product_id IS NOT NULL THEN sum(p.weight * l.product_uom_qty / u.factor * u2.factor) ELSE 0 END as weight,
            CASE WHEN l.product_id IS NOT NULL THEN sum(p.volume * l.product_uom_qty / u.factor * u2.factor) ELSE 0 END as volume,
            l.discount as discount,
            CASE WHEN l.product_id IS NOT NULL THEN sum((l.price_unit * l.product_uom_qty * l.discount / 100.0 / CASE COALESCE(s.currency_rate, 0) WHEN 0 THEN 1.0 ELSE s.currency_rate END))ELSE 0 END as discount_amount,
            s.id as order_id
        """

        for field in fields.values():
            select_ += field
        return select_

    def _from_sale(self, from_clause=''):
        from_ = """
                sale_order_line l
                      right outer join sale_order s on (s.id=l.order_id)
                      join res_partner partner on s.partner_id = partner.id
                        left join product_product p on (l.product_id=p.id)
                            left join product_template t on (p.product_tmpl_id=t.id)
                    left join uom_uom u on (u.id=l.product_uom)
                    left join uom_uom u2 on (u2.id=t.uom_id)
                    left join product_pricelist pp on (s.pricelist_id = pp.id)
                %s
        """ % from_clause
        return from_

    def _group_by_sale(self, groupby=''):
        groupby_ = """
            l.product_id,
            l.order_id,
            t.uom_id,
            t.categ_id,
            s.name,
            s.date_order,
            s.partner_id,
            s.user_id,
            s.state,
            s.company_id,
            s.campaign_id,
            s.medium_id,
            s.source_id,
            s.pricelist_id,
            s.analytic_account_id,
            s.team_id,
            p.product_tmpl_id,
            partner.country_id,
            partner.industry_id,
            partner.commercial_partner_id,
            l.discount,
            s.id %s
        """ % (groupby)
        return groupby_

    def _query(self, with_clause='', fields=None, groupby='', from_clause=''):
        if not fields:
            fields = {}
        with_ = ("WITH %s" % with_clause) if with_clause else ""
        return '%s (SELECT %s FROM %s WHERE l.display_type IS NULL GROUP BY %s)' % \
               (with_, self._select_sale(fields), self._from_sale(from_clause), self._group_by_sale(groupby))

    def init(self):
        # self._table = sale_report
        tools.drop_view_if_exists(self.env.cr, self._table)
        self.env.cr.execute("""CREATE or REPLACE VIEW %s as (%s)""" %
                            (self._table, self._query()))
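
A minimal usage sketch (not part of the module above), assuming a standard Odoo
environment `env` is available: because sale.report is a read-only SQL view, it
is normally consumed through search/read_group rather than create/write. The
helper name and the grouping choice are illustrative.

def total_sales_by_salesperson(env):
    """Sum untaxed totals per salesperson from the sale.report view."""
    groups = env['sale.report'].read_group(
        domain=[('state', 'in', ('sale', 'done'))],
        fields=['price_subtotal'],
        groupby=['user_id'],
    )
    # each group carries the aggregated 'price_subtotal' and an (id, name) pair
    return {g['user_id']: g['price_subtotal'] for g in groups}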
Example #2
0
from odoo import _, api, exceptions, fields, models


class QcInspection(models.Model):
    _name = 'qc.inspection'
    _description = 'Quality control inspection'
    _inherit = ['mail.thread', 'mail.activity.mixin']

    @api.depends('inspection_lines', 'inspection_lines.success')
    def _compute_success(self):
        for i in self:
            i.success = all([x.success for x in i.inspection_lines])

    @api.multi
    def _links_get(self):
        link_obj = self.env['res.request.link']
        return [(r.object, r.name) for r in link_obj.search([])]

    @api.depends('object_id')
    def _compute_product_id(self):
        for i in self:
            if i.object_id and i.object_id._name == 'product.product':
                i.product_id = i.object_id
            else:
                i.product_id = False

    name = fields.Char(
        string='Inspection number', required=True, default='/',
        readonly=True, states={'draft': [('readonly', False)]}, copy=False)
    date = fields.Datetime(
        string='Date', required=True, readonly=True, copy=False,
        default=fields.Datetime.now,
        states={'draft': [('readonly', False)]})
    object_id = fields.Reference(
        string='Reference', selection=_links_get, readonly=True,
        states={'draft': [('readonly', False)]}, ondelete="set null")
    product_id = fields.Many2one(
        comodel_name="product.product", compute="_compute_product_id",
        store=True, help="Product associated with the inspection",
        oldname='product')
    qty = fields.Float(string="Quantity", default=1.0)
    test = fields.Many2one(
        comodel_name='qc.test', string='Test', readonly=True)
    inspection_lines = fields.One2many(
        comodel_name='qc.inspection.line', inverse_name='inspection_id',
        string='Inspection lines', readonly=True,
        states={'ready': [('readonly', False)]})
    internal_notes = fields.Text(string='Internal notes')
    external_notes = fields.Text(
        string='External notes',
        states={'success': [('readonly', True)],
                'failed': [('readonly', True)]})
    state = fields.Selection(
        [('draft', 'Draft'),
         ('ready', 'Ready'),
         ('waiting', 'Waiting supervisor approval'),
         ('success', 'Quality success'),
         ('failed', 'Quality failed'),
         ('canceled', 'Canceled')],
        string='State', readonly=True, default='draft',
        track_visibility='onchange')
    success = fields.Boolean(
        compute="_compute_success", string='Success',
        help='This field will be marked if all tests have succeeded.',
        store=True)
    auto_generated = fields.Boolean(
        string='Auto-generated', readonly=True, copy=False,
        help='If an inspection is auto-generated, it can be canceled but not '
             'removed.')
    company_id = fields.Many2one(
        comodel_name='res.company', string='Company', readonly=True,
        states={'draft': [('readonly', False)]},
        default=lambda self: self.env['res.company']._company_default_get(
            'qc.inspection'))
    user = fields.Many2one(
        comodel_name='res.users', string='Responsible',
        track_visibility='always', default=lambda self: self.env.user)

    @api.model
    def create(self, vals):
        if vals.get('name', '/') == '/':
            vals['name'] = self.env['ir.sequence'] \
                .next_by_code('qc.inspection')
        return super(QcInspection, self).create(vals)

    @api.multi
    def unlink(self):
        for inspection in self:
            if inspection.auto_generated:
                raise exceptions.UserError(
                    _("You cannot remove an auto-generated inspection."))
            if inspection.state != 'draft':
                raise exceptions.UserError(
                    _("You cannot remove an inspection that is not in draft "
                      "state."))
        return super(QcInspection, self).unlink()

    @api.multi
    def action_draft(self):
        self.write({'state': 'draft'})

    @api.multi
    def action_todo(self):
        for inspection in self:
            if not inspection.test:
                raise exceptions.UserError(
                    _("You must first set the test to perform."))
        self.write({'state': 'ready'})

    @api.multi
    def action_confirm(self):
        for inspection in self:
            for line in inspection.inspection_lines:
                if line.question_type == 'qualitative':
                    if not line.qualitative_value:
                        raise exceptions.UserError(
                            _("You should provide an answer for all "
                              "qualitative questions."))
                else:
                    if not line.uom_id:
                        raise exceptions.UserError(
                            _("You should provide a unit of measure for "
                              "quantitative questions."))
            if inspection.success:
                inspection.state = 'success'
            else:
                inspection.state = 'waiting'

    @api.multi
    def action_approve(self):
        for inspection in self:
            if inspection.success:
                inspection.state = 'success'
            else:
                inspection.state = 'failed'

    @api.multi
    def action_cancel(self):
        self.write({'state': 'canceled'})

    @api.multi
    def set_test(self, trigger_line, force_fill=False):
        for inspection in self:
            header = self._prepare_inspection_header(
                inspection.object_id, trigger_line)
            del header['state']  # don't change current status
            del header['auto_generated']  # don't change auto_generated flag
            del header['user']  # don't change current user
            inspection.write(header)
            inspection.inspection_lines.unlink()
            inspection.inspection_lines = inspection._prepare_inspection_lines(
                trigger_line.test, force_fill=force_fill)

    @api.multi
    def _make_inspection(self, object_ref, trigger_line):
        """Overridable hook method for creating inspection from test.
        :param object_ref: Object instance
        :param trigger_line: Trigger line instance
        :return: Inspection object
        """
        inspection = self.create(self._prepare_inspection_header(
            object_ref, trigger_line))
        inspection.set_test(trigger_line)
        return inspection

    @api.multi
    def _prepare_inspection_header(self, object_ref, trigger_line):
        """Overridable hook method for preparing inspection header.
        :param object_ref: Object instance
        :param trigger_line: Trigger line instance
        :return: Dictionary of values for creating the inspection
        """
        return {
            'object_id': object_ref and '%s,%s' % (object_ref._name,
                                                   object_ref.id) or False,
            'state': 'ready',
            'test': trigger_line.test.id,
            'user': trigger_line.user.id,
            'auto_generated': True,
        }

    @api.multi
    def _prepare_inspection_lines(self, test, force_fill=False):
        new_data = []
        for line in test.test_lines:
            data = self._prepare_inspection_line(
                test, line, fill=test.fill_correct_values or force_fill)
            new_data.append((0, 0, data))
        return new_data

    @api.multi
    def _prepare_inspection_line(self, test, line, fill=None):
        data = {
            'name': line.name,
            'test_line': line.id,
            'notes': line.notes,
            'min_value': line.min_value,
            'max_value': line.max_value,
            'test_uom_id': line.uom_id.id,
            'uom_id': line.uom_id.id,
            'question_type': line.type,
            'possible_ql_values': [x.id for x in line.ql_values]
        }
        if fill:
            if line.type == 'qualitative':
                # Fill with the first correct value found
                for value in line.ql_values:
                    if value.ok:
                        data['qualitative_value'] = value.id
                        break
            else:
                # Fill with a value inside the interval
                data['quantitative_value'] = (line.min_value +
                                              line.max_value) * 0.5
        return data
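
A minimal usage sketch (not part of the module above), assuming an Odoo
environment `env`, a qc.trigger.line record `trigger_line` and a source record
`obj` are available; the helper name is illustrative.

def run_auto_inspection(env, obj, trigger_line):
    """Create an auto-generated inspection and push it through its workflow."""
    inspection = env['qc.inspection']._make_inspection(obj, trigger_line)
    # inspection lines are pre-filled from the test; answers would normally be
    # entered here before confirming
    inspection.action_confirm()        # -> 'success' or 'waiting'
    if inspection.state == 'waiting':
        inspection.action_approve()    # supervisor decision: success / failed
    return inspection.state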
Example #3
0
import time
from itertools import groupby
from operator import itemgetter

from odoo import _, api, fields, models
from odoo.exceptions import UserError
from odoo.tools import DEFAULT_SERVER_DATETIME_FORMAT, float_compare, float_round
# PROCUREMENT_PRIORITIES is assumed to be provided by the stock module
# (defined next to stock.move in standard Odoo); adjust the import path to
# the actual source in your codebase.
from odoo.addons.stock.models.stock_move import PROCUREMENT_PRIORITIES


class Picking(models.Model):
    _name = "stock.picking"
    _inherit = ['mail.thread', 'mail.activity.mixin']
    _description = "Transfer"
    _order = "priority desc, date asc, id desc"

    name = fields.Char('Reference',
                       default='/',
                       copy=False,
                       index=True,
                       states={
                           'done': [('readonly', True)],
                           'cancel': [('readonly', True)]
                       })
    origin = fields.Char('Source Document',
                         index=True,
                         states={
                             'done': [('readonly', True)],
                             'cancel': [('readonly', True)]
                         },
                         help="Reference of the document")
    note = fields.Text('Notes')

    backorder_id = fields.Many2one(
        'stock.picking',
        'Back Order of',
        copy=False,
        index=True,
        states={
            'done': [('readonly', True)],
            'cancel': [('readonly', True)]
        },
        help=
        "If this shipment was split, then this field links to the shipment which contains the already processed part."
    )

    move_type = fields.Selection(
        [('direct', 'As soon as possible'),
         ('one', 'When all products are ready')],
        'Shipping Policy',
        default='direct',
        required=True,
        states={
            'done': [('readonly', True)],
            'cancel': [('readonly', True)]
        },
        help="It specifies goods to be deliver partially or all at once")

    state = fields.Selection(
        [
            ('draft', 'Draft'),
            ('waiting', 'Waiting Another Operation'),
            ('confirmed', 'Waiting'),
            ('assigned', 'Ready'),
            ('done', 'Done'),
            ('cancel', 'Cancelled'),
        ],
        string='Status',
        compute='_compute_state',
        copy=False,
        index=True,
        readonly=True,
        store=True,
        track_visibility='onchange',
        help=
        " * Draft: not confirmed yet and will not be scheduled until confirmed.\n"
        " * Waiting Another Operation: waiting for another move to proceed before it becomes automatically available (e.g. in Make-To-Order flows).\n"
        " * Waiting: if it is not ready to be sent because the required products could not be reserved.\n"
        " * Ready: products are reserved and ready to be sent. If the shipping policy is 'As soon as possible' this happens as soon as anything is reserved.\n"
        " * Done: has been processed, can't be modified or cancelled anymore.\n"
        " * Cancelled: has been cancelled, can't be confirmed anymore.")

    group_id = fields.Many2one('procurement.group',
                               'Procurement Group',
                               readonly=True,
                               related='move_lines.group_id',
                               store=True)

    priority = fields.Selection(
        PROCUREMENT_PRIORITIES,
        string='Priority',
        compute='_compute_priority',
        inverse='_set_priority',
        store=True,
        # default='1', required=True,  # TDE: required, depending on moves ? strange
        index=True,
        track_visibility='onchange',
        states={
            'done': [('readonly', True)],
            'cancel': [('readonly', True)]
        },
        help=
        "Priority for this picking. Setting manually a value here would set it as priority for all the moves"
    )
    scheduled_date = fields.Datetime(
        'Scheduled Date',
        compute='_compute_scheduled_date',
        inverse='_set_scheduled_date',
        store=True,
        index=True,
        track_visibility='onchange',
        states={
            'done': [('readonly', True)],
            'cancel': [('readonly', True)]
        },
        help=
        "Scheduled time for the first part of the shipment to be processed. Setting manually a value here would set it as expected date for all the stock moves."
    )
    date = fields.Datetime('Creation Date',
                           default=fields.Datetime.now,
                           index=True,
                           track_visibility='onchange',
                           states={
                               'done': [('readonly', True)],
                               'cancel': [('readonly', True)]
                           },
                           help="Creation Date, usually the time of the order")
    date_done = fields.Datetime('Date of Transfer',
                                copy=False,
                                readonly=True,
                                help="Completion Date of Transfer")

    location_id = fields.Many2one(
        'stock.location',
        "Source Location",
        default=lambda self: self.env['stock.picking.type'].browse(
            self._context.get('default_picking_type_id')
        ).default_location_src_id,
        readonly=True,
        required=True,
        states={'draft': [('readonly', False)]})
    location_dest_id = fields.Many2one(
        'stock.location',
        "Destination Location",
        default=lambda self: self.env['stock.picking.type'].browse(
            self._context.get('default_picking_type_id')
        ).default_location_dest_id,
        readonly=True,
        required=True,
        states={'draft': [('readonly', False)]})
    move_lines = fields.One2many('stock.move',
                                 'picking_id',
                                 string="Stock Moves",
                                 copy=True)
    has_scrap_move = fields.Boolean('Has Scrap Moves',
                                    compute='_has_scrap_move')
    picking_type_id = fields.Many2one('stock.picking.type',
                                      'Operation Type',
                                      required=True,
                                      states={
                                          'done': [('readonly', True)],
                                          'cancel': [('readonly', True)]
                                      })
    picking_type_code = fields.Selection([('incoming', 'Vendors'),
                                          ('outgoing', 'Customers'),
                                          ('internal', 'Internal')],
                                         related='picking_type_id.code',
                                         readonly=True)
    picking_type_entire_packs = fields.Boolean(
        related='picking_type_id.show_entire_packs', readonly=True)

    partner_id = fields.Many2one('res.partner',
                                 'Partner',
                                 states={
                                     'done': [('readonly', True)],
                                     'cancel': [('readonly', True)]
                                 })
    company_id = fields.Many2one('res.company',
                                 'Company',
                                 default=lambda self: self.env['res.company'].
                                 _company_default_get('stock.picking'),
                                 index=True,
                                 required=True,
                                 states={
                                     'done': [('readonly', True)],
                                     'cancel': [('readonly', True)]
                                 })

    move_line_ids = fields.One2many('stock.move.line', 'picking_id',
                                    'Operations')

    move_line_exist = fields.Boolean(
        'Has Pack Operations',
        compute='_compute_move_line_exist',
        help='Check the existence of pack operation on the picking')

    has_packages = fields.Boolean(
        'Has Packages',
        compute='_compute_has_packages',
        help='Check the existence of destination packages on move lines')

    show_check_availability = fields.Boolean(
        compute='_compute_show_check_availability',
        help=
        'Technical field used to compute whether the check availability button should be shown.'
    )
    show_mark_as_todo = fields.Boolean(
        compute='_compute_show_mark_as_todo',
        help=
        'Technical field used to compute whether the mark as todo button should be shown.'
    )
    show_validate = fields.Boolean(
        compute='_compute_show_validate',
        help=
        'Technical field used to compute whether the validate button should be shown.'
    )

    owner_id = fields.Many2one('res.partner',
                               'Owner',
                               states={
                                   'done': [('readonly', True)],
                                   'cancel': [('readonly', True)]
                               },
                               help="Default Owner")
    printed = fields.Boolean('Printed')
    is_locked = fields.Boolean(
        default=True,
        help='When the picking is not done this allows changing the '
        'initial demand. When the picking is done this allows '
        'changing the done quantities.')
    # Used to search on pickings
    product_id = fields.Many2one('product.product',
                                 'Product',
                                 related='move_lines.product_id')
    show_operations = fields.Boolean(compute='_compute_show_operations')

    _sql_constraints = [
        ('name_uniq', 'unique(name, company_id)',
         'Reference must be unique per company!'),
    ]

    @api.depends('picking_type_id.show_operations')
    def _compute_show_operations(self):
        for picking in self:
            if self.env.context.get('force_detailed_view'):
                picking.show_operations = True
                break
            if picking.picking_type_id.show_operations:
                if (picking.state == 'draft'
                        and not self.env.context.get('planned_picking')
                    ) or picking.state != 'draft':
                    picking.show_operations = True
                else:
                    picking.show_operations = False
            else:
                picking.show_operations = False

    @api.depends('move_type', 'move_lines.state', 'move_lines.picking_id')
    @api.one
    def _compute_state(self):
        ''' State of a picking depends on the state of its related stock.move
        - Draft: only used for "planned pickings"
        - Waiting: if the picking is not ready to be sent so if
          - (a) no quantity could be reserved at all or if
          - (b) some quantities could be reserved and the shipping policy is "deliver all at once"
        - Waiting another move: if the picking is waiting for another move
        - Ready: if the picking is ready to be sent so if:
          - (a) all quantities are reserved or if
          - (b) some quantities could be reserved and the shipping policy is "as soon as possible"
        - Done: if the picking is done.
        - Cancelled: if the picking is cancelled
        '''
        if not self.move_lines:
            self.state = 'draft'
        elif any(move.state == 'draft'
                 for move in self.move_lines):  # TDE FIXME: should be all ?
            self.state = 'draft'
        elif all(move.state == 'cancel' for move in self.move_lines):
            self.state = 'cancel'
        elif all(move.state in ['cancel', 'done'] for move in self.move_lines):
            self.state = 'done'
        else:
            relevant_move_state = self.move_lines._get_relevant_state_among_moves(
            )
            if relevant_move_state == 'partially_available':
                self.state = 'assigned'
            else:
                self.state = relevant_move_state

    @api.one
    @api.depends('move_lines.priority')
    def _compute_priority(self):
        if self.mapped('move_lines'):
            priorities = [
                priority
                for priority in self.mapped('move_lines.priority') if priority
            ] or ['1']
            self.priority = max(priorities)
        else:
            self.priority = '1'

    @api.one
    def _set_priority(self):
        self.move_lines.write({'priority': self.priority})

    @api.one
    @api.depends('move_lines.date_expected')
    def _compute_scheduled_date(self):
        if self.move_type == 'direct':
            self.scheduled_date = min(
                self.move_lines.mapped('date_expected') or [False])
        else:
            self.scheduled_date = max(
                self.move_lines.mapped('date_expected') or [False])

    @api.one
    def _set_scheduled_date(self):
        self.move_lines.write({'date_expected': self.scheduled_date})

    @api.one
    def _has_scrap_move(self):
        # TDE FIXME: better implementation
        self.has_scrap_move = bool(self.env['stock.move'].search_count([
            ('picking_id', '=', self.id), ('scrapped', '=', True)
        ]))

    @api.one
    def _compute_move_line_exist(self):
        self.move_line_exist = bool(self.move_line_ids)

    @api.one
    def _compute_has_packages(self):
        has_packages = False
        for pack_op in self.move_line_ids:
            if pack_op.result_package_id:
                has_packages = True
                break
        self.has_packages = has_packages

    @api.multi
    def _compute_show_check_availability(self):
        for picking in self:
            has_moves_to_reserve = any(
                move.state in ('waiting', 'confirmed', 'partially_available')
                and float_compare(move.product_uom_qty,
                                  0,
                                  precision_rounding=move.product_uom.rounding)
                for move in picking.move_lines)
            picking.show_check_availability = picking.is_locked and picking.state in (
                'confirmed', 'waiting', 'assigned') and has_moves_to_reserve

    @api.multi
    @api.depends('state', 'move_lines')
    def _compute_show_mark_as_todo(self):
        for picking in self:
            if not picking.move_lines:
                picking.show_mark_as_todo = False
            elif self._context.get(
                    'planned_picking') and picking.state == 'draft':
                picking.show_mark_as_todo = True
            elif picking.state != 'draft' or not picking.id:
                picking.show_mark_as_todo = False
            else:
                picking.show_mark_as_todo = True

    @api.multi
    @api.depends('state', 'is_locked')
    def _compute_show_validate(self):
        for picking in self:
            if self._context.get(
                    'planned_picking') and picking.state == 'draft':
                picking.show_validate = False
            elif picking.state not in ('draft', 'confirmed',
                                       'assigned') or not picking.is_locked:
                picking.show_validate = False
            else:
                picking.show_validate = True

    @api.onchange('picking_type_id', 'partner_id')
    def onchange_picking_type(self):
        if self.picking_type_id:
            if self.picking_type_id.default_location_src_id:
                location_id = self.picking_type_id.default_location_src_id.id
            elif self.partner_id:
                location_id = self.partner_id.property_stock_supplier.id
            else:
                customerloc, location_id = self.env[
                    'stock.warehouse']._get_partner_locations()

            if self.picking_type_id.default_location_dest_id:
                location_dest_id = self.picking_type_id.default_location_dest_id.id
            elif self.partner_id:
                location_dest_id = self.partner_id.property_stock_customer.id
            else:
                location_dest_id, supplierloc = self.env[
                    'stock.warehouse']._get_partner_locations()

            self.location_id = location_id
            self.location_dest_id = location_dest_id
        # TDE CLEANME move into onchange_partner_id
        if self.partner_id:
            if self.partner_id.picking_warn == 'no-message' and self.partner_id.parent_id:
                partner = self.partner_id.parent_id
            elif self.partner_id.picking_warn not in (
                    'no-message', 'block'
            ) and self.partner_id.parent_id.picking_warn == 'block':
                partner = self.partner_id.parent_id
            else:
                partner = self.partner_id
            if partner.picking_warn != 'no-message':
                if partner.picking_warn == 'block':
                    self.partner_id = False
                return {
                    'warning': {
                        'title': ("Warning for %s") % partner.name,
                        'message': partner.picking_warn_msg
                    }
                }

    @api.model
    def create(self, vals):
        # TDE FIXME: clean that brol
        defaults = self.default_get(['name', 'picking_type_id'])
        if vals.get('name', '/') == '/' and defaults.get(
                'name', '/') == '/' and vals.get(
                    'picking_type_id', defaults.get('picking_type_id')):
            vals['name'] = self.env['stock.picking.type'].browse(
                vals.get(
                    'picking_type_id',
                    defaults.get('picking_type_id'))).sequence_id.next_by_id()

        # TDE FIXME: what ?
        # As the on_change in one2many list is WIP, we will overwrite the locations on the stock moves here
        # As it is a create the format will be a list of (0, 0, dict)
        if vals.get('move_lines') and vals.get('location_id') and vals.get(
                'location_dest_id'):
            for move in vals['move_lines']:
                if len(move) == 3:
                    move[2]['location_id'] = vals['location_id']
                    move[2]['location_dest_id'] = vals['location_dest_id']
        res = super(Picking, self).create(vals)
        res._autoconfirm_picking()
        return res

    @api.multi
    def write(self, vals):
        res = super(Picking, self).write(vals)
        # Change locations of moves if those of the picking change
        after_vals = {}
        if vals.get('location_id'):
            after_vals['location_id'] = vals['location_id']
        if vals.get('location_dest_id'):
            after_vals['location_dest_id'] = vals['location_dest_id']
        if after_vals:
            self.mapped('move_lines').filtered(
                lambda move: not move.scrapped).write(after_vals)
        if vals.get('move_lines'):
            self._autoconfirm_picking()
        return res

    @api.multi
    def unlink(self):
        self.mapped('move_lines')._action_cancel()
        self.mapped('move_lines').unlink()  # Checks if moves are not done
        return super(Picking, self).unlink()

    # Actions
    # ----------------------------------------

    @api.one
    def action_assign_owner(self):
        self.move_line_ids.write({'owner_id': self.owner_id.id})

    @api.multi
    def do_print_picking(self):
        self.write({'printed': True})
        return self.env.ref('stock.action_report_picking').report_action(self)

    @api.multi
    def action_confirm(self):
        # call `_action_confirm` on every draft move
        self.mapped('move_lines')\
            .filtered(lambda move: move.state == 'draft')\
            ._action_confirm()
        # call `_action_assign` on every confirmed move which location_id bypasses the reservation
        self.filtered(lambda picking: picking.location_id.usage in ('supplier', 'inventory', 'production') and picking.state == 'confirmed')\
            .mapped('move_lines')._action_assign()
        return True

    @api.multi
    def action_assign(self):
        """ Check availability of picking moves.
        This has the effect of changing the state and reserve quants on available moves, and may
        also impact the state of the picking as it is computed based on move's states.
        @return: True
        """
        self.filtered(
            lambda picking: picking.state == 'draft').action_confirm()
        moves = self.mapped('move_lines').filtered(
            lambda move: move.state not in ('draft', 'cancel', 'done'))
        if not moves:
            raise UserError(_('Nothing to check the availability for.'))
        moves._action_assign()
        return True

    @api.multi
    def force_assign(self):
        """ Changes state of picking to available if moves are confirmed or waiting.
        @return: True
        """
        self.mapped('move_lines').filtered(
            lambda move: move.state in
            ['confirmed', 'waiting', 'partially_available'])._force_assign()
        return True

    @api.multi
    def action_cancel(self):
        self.mapped('move_lines')._action_cancel()
        self.write({'is_locked': True})
        return True

    @api.multi
    def action_done(self):
        """Changes picking state to done by processing the Stock Moves of the Picking

        Normally that happens when the button "Done" is pressed on a Picking view.
        @return: True
        """
        # TDE FIXME: remove decorator when migration the remaining
        # TDE FIXME: draft -> automatically done, if waiting ?? CLEAR ME
        todo_moves = self.mapped('move_lines').filtered(
            lambda self: self.state in
            ['draft', 'partially_available', 'assigned', 'confirmed'])
        # Check if there are ops not linked to moves yet
        for pick in self:
            # # Explode manually added packages
            # for ops in pick.move_line_ids.filtered(lambda x: not x.move_id and not x.product_id):
            #     for quant in ops.package_id.quant_ids: #Or use get_content for multiple levels
            #         self.move_line_ids.create({'product_id': quant.product_id.id,
            #                                    'package_id': quant.package_id.id,
            #                                    'result_package_id': ops.result_package_id,
            #                                    'lot_id': quant.lot_id.id,
            #                                    'owner_id': quant.owner_id.id,
            #                                    'product_uom_id': quant.product_id.uom_id.id,
            #                                    'product_qty': quant.qty,
            #                                    'qty_done': quant.qty,
            #                                    'location_id': quant.location_id.id, # Could be ops too
            #                                    'location_dest_id': ops.location_dest_id.id,
            #                                    'picking_id': pick.id
            #                                    }) # Might change first element
            # # Link existing moves or add moves when no one is related
            for ops in pick.move_line_ids.filtered(lambda x: not x.move_id):
                # Search move with this product
                moves = pick.move_lines.filtered(
                    lambda x: x.product_id == ops.product_id)
                if moves:  #could search move that needs it the most (that has some quantities left)
                    ops.move_id = moves[0].id
                else:
                    new_move = self.env['stock.move'].create({
                        'name': _('New Move:') + ops.product_id.display_name,
                        'product_id': ops.product_id.id,
                        'product_uom_qty': ops.qty_done,
                        'product_uom': ops.product_uom_id.id,
                        'location_id': pick.location_id.id,
                        'location_dest_id': pick.location_dest_id.id,
                        'picking_id': pick.id,
                    })
                    ops.move_id = new_move.id
                    new_move._action_confirm()
                    todo_moves |= new_move
                    #'qty_done': ops.qty_done})
        todo_moves._action_done()
        self.write({'date_done': fields.Datetime.now()})
        return True

    do_transfer = action_done  #TODO:replace later

    @api.multi
    def _check_entire_pack(self):
        """ This function check if entire packs are moved in the picking"""
        for picking in self:
            origin_packages = picking.move_line_ids.mapped("package_id")
            for pack in origin_packages:
                all_in = True
                packops = picking.move_line_ids.filtered(
                    lambda x: x.package_id == pack)
                keys = ['product_id', 'lot_id']
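                # Compare quantities per (product, lot): the pack can only be
                # kept as a whole if the operations cover exactly what the
                # package contains.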

                grouped_quants = {}
                for k, g in groupby(sorted(pack.quant_ids,
                                           key=itemgetter(*keys)),
                                    key=itemgetter(*keys)):
                    grouped_quants[k] = sum(self.env['stock.quant'].concat(
                        *list(g)).mapped('quantity'))

                grouped_ops = {}
                for k, g in groupby(sorted(packops, key=itemgetter(*keys)),
                                    key=itemgetter(*keys)):
                    grouped_ops[k] = sum(self.env['stock.move.line'].concat(
                        *list(g)).mapped('product_qty'))
                if any(grouped_quants[key] - grouped_ops.get(key, 0) != 0 for key in grouped_quants)\
                        or any(grouped_ops[key] - grouped_quants[key] != 0 for key in grouped_ops):
                    all_in = False
                if all_in and packops:
                    packops.write({'result_package_id': pack.id})

    @api.multi
    def do_unreserve(self):
        for move in self:
            for move_line in move.move_lines:
                move_line._do_unreserve()
        self.write({'state': 'confirmed'})

    @api.multi
    def button_validate(self):
        self.ensure_one()
        if not self.move_lines and not self.move_line_ids:
            raise UserError(_('Please add some lines to move'))

        # If no lots when needed, raise error
        picking_type = self.picking_type_id
        no_quantities_done = all(line.qty_done == 0.0
                                 for line in self.move_line_ids)
        no_initial_demand = all(move.product_uom_qty == 0.0
                                for move in self.move_lines)
        if no_initial_demand and no_quantities_done:
            raise UserError(
                _('You cannot validate a transfer if you have not processed any quantity.'
                  ))

        if picking_type.use_create_lots or picking_type.use_existing_lots:
            lines_to_check = self.move_line_ids
            if not no_quantities_done:
                lines_to_check = lines_to_check.filtered(
                    lambda line: float_compare(line.qty_done,
                                               0,
                                               precision_rounding=line.
                                               product_uom_id.rounding))

            for line in lines_to_check:
                product = line.product_id
                if product and product.tracking != 'none' and (
                        line.qty_done == 0 or
                    (not line.lot_name and not line.lot_id)):
                    raise UserError(
                        _('You need to supply a lot/serial number for %s.') %
                        product.name)

        if no_quantities_done:
            view = self.env.ref('stock.view_immediate_transfer')
            wiz = self.env['stock.immediate.transfer'].create(
                {'pick_ids': [(4, self.id)]})
            return {
                'name': _('Immediate Transfer?'),
                'type': 'ir.actions.act_window',
                'view_type': 'form',
                'view_mode': 'form',
                'res_model': 'stock.immediate.transfer',
                'views': [(view.id, 'form')],
                'view_id': view.id,
                'target': 'new',
                'res_id': wiz.id,
                'context': self.env.context,
            }

        if self._get_overprocessed_stock_moves(
        ) and not self._context.get('skip_overprocessed_check'):
            view = self.env.ref('stock.view_overprocessed_transfer')
            wiz = self.env['stock.overprocessed.transfer'].create(
                {'picking_id': self.id})
            return {
                'type': 'ir.actions.act_window',
                'view_type': 'form',
                'view_mode': 'form',
                'res_model': 'stock.overprocessed.transfer',
                'views': [(view.id, 'form')],
                'view_id': view.id,
                'target': 'new',
                'res_id': wiz.id,
                'context': self.env.context,
            }

        # Check backorder should check for other barcodes
        if self._check_backorder():
            return self.action_generate_backorder_wizard()
        self.action_done()
        return

    def action_generate_backorder_wizard(self):
        view = self.env.ref('stock.view_backorder_confirmation')
        wiz = self.env['stock.backorder.confirmation'].create(
            {'pick_ids': [(4, p.id) for p in self]})
        return {
            'name': _('Create Backorder?'),
            'type': 'ir.actions.act_window',
            'view_type': 'form',
            'view_mode': 'form',
            'res_model': 'stock.backorder.confirmation',
            'views': [(view.id, 'form')],
            'view_id': view.id,
            'target': 'new',
            'res_id': wiz.id,
            'context': self.env.context,
        }

    def action_toggle_is_locked(self):
        self.ensure_one()
        self.is_locked = not self.is_locked
        return True

    def _check_backorder(self):
        """ This method will loop over all the move lines of self and
        check if creating a backorder is necessary. This method is
        called during button_validate if the user has already processed
        some quantities and in the immediate transfer wizard that is
        displayed if the user has not processed any quantities.

        :return: True if a backorder is necessary else False
        """
        quantity_todo = {}
        quantity_done = {}
        for move in self.mapped('move_lines'):
            quantity_todo.setdefault(move.product_id.id, 0)
            quantity_done.setdefault(move.product_id.id, 0)
            quantity_todo[move.product_id.id] += move.product_uom_qty
            quantity_done[move.product_id.id] += move.quantity_done
        for ops in self.mapped('move_line_ids').filtered(
                lambda x: x.package_id and not x.product_id and not x.move_id):
            for quant in ops.package_id.quant_ids:
                quantity_done.setdefault(quant.product_id.id, 0)
                quantity_done[quant.product_id.id] += quant.qty
        for pack in self.mapped('move_line_ids').filtered(
                lambda x: x.product_id and not x.move_id):
            quantity_done.setdefault(pack.product_id.id, 0)
            quantity_done[pack.product_id.id] += pack.qty_done
        return any(quantity_done[x] < quantity_todo.get(x, 0)
                   for x in quantity_done)

    @api.multi
    def _autoconfirm_picking(self):
        if not self._context.get('planned_picking'):
            for picking in self.filtered(lambda picking: picking.state not in (
                    'done', 'cancel') and picking.move_lines):
                picking.action_confirm()

    def _get_overprocessed_stock_moves(self):
        self.ensure_one()
        return self.move_lines.filtered(
            lambda move: move.product_uom_qty != 0 and float_compare(
                move.quantity_done,
                move.product_uom_qty,
                precision_rounding=move.product_uom.rounding) == 1)

    @api.multi
    def _create_backorder(self, backorder_moves=[]):
        """ Move all non-done lines into a new backorder picking. If the key 'do_only_split' is given in the context, then move all lines not in context.get('split', []) instead of all non-done lines.
        """
        # TDE note: o2o conversion, todo multi
        backorders = self.env['stock.picking']
        for picking in self:
            backorder_moves = backorder_moves or picking.move_lines
            if self._context.get('do_only_split'):
                not_done_bo_moves = backorder_moves.filtered(
                    lambda move: move.id not in self._context.get('split', []))
            else:
                not_done_bo_moves = backorder_moves.filtered(
                    lambda move: move.state not in ('done', 'cancel'))
            if not not_done_bo_moves:
                continue
            backorder_picking = picking.copy({
                'name': '/',
                'move_lines': [],
                'move_line_ids': [],
                'backorder_id': picking.id
            })
            picking.message_post(
                body=_("Back order <em>%s</em> <b>created</b>.") %
                (backorder_picking.name))
            not_done_bo_moves.write({'picking_id': backorder_picking.id})
            if not picking.date_done:
                picking.write({
                    'date_done':
                    time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)
                })
            backorder_picking.action_confirm()
            backorder_picking.action_assign()
            backorders |= backorder_picking
        return backorders

    def _put_in_pack(self):
        package = False
        for pick in self:
            operations = pick.move_line_ids.filtered(
                lambda o: o.qty_done > 0 and not o.result_package_id)
            operation_ids = self.env['stock.move.line']
            if operations:
                package = self.env['stock.quant.package'].create({})
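                # Fully processed operations go straight into the new package;
                # partially processed ones are split so that only the done
                # quantity is packed and the remainder stays to be done.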
                for operation in operations:
                    if float_compare(operation.qty_done,
                                     operation.product_uom_qty,
                                     precision_rounding=operation.
                                     product_uom_id.rounding) >= 0:
                        operation_ids |= operation
                    else:
                        quantity_left_todo = float_round(
                            operation.product_uom_qty - operation.qty_done,
                            precision_rounding=operation.product_uom_id.
                            rounding,
                            rounding_method='UP')
                        new_operation = operation.copy(
                            default={
                                'product_uom_qty': operation.qty_done,
                                'qty_done': operation.qty_done
                            })
                        operation.write({
                            'product_uom_qty': quantity_left_todo,
                            'qty_done': 0.0
                        })
                        operation_ids |= new_operation

                operation_ids.write({'result_package_id': package.id})
            else:
                raise UserError(
                    _('Please process some quantities to put in the pack first!'
                      ))
        return package

    def put_in_pack(self):
        return self._put_in_pack()

    def button_scrap(self):
        self.ensure_one()
        products = self.env['product.product']
        for move in self.move_lines:
            if move.state not in (
                    'draft', 'cancel') and move.product_id.type in ('product',
                                                                    'consu'):
                products |= move.product_id
        return {
            'name': _('Scrap'),
            'view_type': 'form',
            'view_mode': 'form',
            'res_model': 'stock.scrap',
            'view_id': self.env.ref('stock.stock_scrap_form_view2').id,
            'type': 'ir.actions.act_window',
            'context': {
                'default_picking_id': self.id,
                'product_ids': products.ids
            },
            'target': 'new',
        }

    def action_see_move_scrap(self):
        self.ensure_one()
        action = self.env.ref('stock.action_stock_scrap').read()[0]
        scraps = self.env['stock.scrap'].search([('picking_id', '=', self.id)])
        action['domain'] = [('id', 'in', scraps.ids)]
        return action

    def action_see_packages(self):
        self.ensure_one()
        action = self.env.ref('stock.action_package_view').read()[0]
        packages = self.move_line_ids.mapped('result_package_id')
        action['domain'] = [('id', 'in', packages.ids)]
        action['context'] = {'picking_id': self.id}
        return action
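
A minimal usage sketch (not part of the module above), assuming an existing
stock.picking record; the helper name is illustrative and full quantities are
assumed to be processed so that no confirmation wizard is triggered.

def process_transfer(picking):
    """Confirm, reserve and validate a transfer in one go."""
    if picking.state == 'draft':
        picking.action_confirm()                 # confirm draft moves
    picking.action_assign()                      # reserve available quantities
    for line in picking.move_line_ids:
        line.qty_done = line.product_uom_qty     # process the reserved quantity
    # button_validate may still return a wizard action (immediate transfer,
    # overprocessing or backorder) depending on the checks above
    return picking.button_validate()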
Example #4
0
from odoo import api, fields, models

# MASS_MAILING_BUSINESS_MODELS (the list of models that may be mass-mailed) is
# assumed to be defined at module level in this file, as in the standard
# mass_mailing module.


class MassMailing(models.Model):
    """ MassMailing models a wave of emails for a mass mailign campaign.
    A mass mailing is an occurence of sending emails. """

    _name = 'mail.mass_mailing'
    _description = 'Mass Mailing'
    # number of periods for tracking mail_mail statistics
    _period_number = 6
    _order = 'sent_date DESC'
    _inherits = {'utm.source': 'source_id'}
    _rec_name = "source_id"

    @api.model
    def default_get(self, fields):
        res = super(MassMailing, self).default_get(fields)
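        # When the target model is partner-like, replies go to a specified
        # email address; otherwise they are routed back to the record's thread.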
        if 'reply_to_mode' in fields and 'reply_to_mode' not in res and res.get(
                'mailing_model_real'):
            if res['mailing_model_real'] in [
                    'res.partner', 'mail.mass_mailing.contact'
            ]:
                res['reply_to_mode'] = 'email'
            else:
                res['reply_to_mode'] = 'thread'
        return res

    active = fields.Boolean(default=True)
    email_from = fields.Char(
        string='From',
        required=True,
        default=lambda self: self.env['mail.message']._get_default_from())
    create_date = fields.Datetime(string='Creation Date')
    sent_date = fields.Datetime(string='Sent Date', oldname='date', copy=False)
    schedule_date = fields.Datetime(string='Schedule in the Future')
    body_html = fields.Html(string='Body', sanitize_attributes=False)
    attachment_ids = fields.Many2many('ir.attachment',
                                      'mass_mailing_ir_attachments_rel',
                                      'mass_mailing_id',
                                      'attachment_id',
                                      string='Attachments')
    keep_archives = fields.Boolean(string='Keep Archives')
    mass_mailing_campaign_id = fields.Many2one('mail.mass_mailing.campaign',
                                               string='Mass Mailing Campaign')
    campaign_id = fields.Many2one(
        'utm.campaign',
        string='Campaign',
        help=
        "This name helps you tracking your different campaign efforts, e.g. Fall_Drive, Christmas_Special"
    )
    source_id = fields.Many2one(
        'utm.source',
        string='Subject',
        required=True,
        ondelete='cascade',
        help=
        "This is the link source, e.g. Search Engine, another domain, or name of email list"
    )
    medium_id = fields.Many2one(
        'utm.medium',
        string='Medium',
        help="This is the delivery method, e.g. Postcard, Email, or Banner Ad",
        default=lambda self: self.env.ref('utm.utm_medium_email'))
    clicks_ratio = fields.Integer(compute="_compute_clicks_ratio",
                                  string="Number of Clicks")
    state = fields.Selection([('draft', 'Draft'), ('in_queue', 'In Queue'),
                              ('sending', 'Sending'), ('done', 'Sent')],
                             string='Status',
                             required=True,
                             copy=False,
                             default='draft')
    color = fields.Integer(string='Color Index')
    # mailing options
    reply_to_mode = fields.Selection(
        [('thread', 'Followers of leads/applicants'),
         ('email', 'Specified Email Address')],
        string='Reply-To Mode',
        required=True)
    reply_to = fields.Char(
        string='Reply To',
        help='Preferred Reply-To Address',
        default=lambda self: self.env['mail.message']._get_default_from())
    # recipients
    mailing_model_real = fields.Char(compute='_compute_model',
                                     string='Recipients Real Model',
                                     default='mail.mass_mailing.contact',
                                     required=True)
    mailing_model_id = fields.Many2one(
        'ir.model',
        string='Recipients Model',
        domain=[('model', 'in', MASS_MAILING_BUSINESS_MODELS)],
        default=lambda self: self.env.ref(
            'mass_mailing.model_mail_mass_mailing_list').id)
    mailing_model_name = fields.Char(related='mailing_model_id.model',
                                     string='Recipients Model Name',
                                     readonly=True,
                                     related_sudo=True)
    mailing_domain = fields.Char(string='Domain', oldname='domain', default=[])
    contact_list_ids = fields.Many2many('mail.mass_mailing.list',
                                        'mail_mass_mailing_list_rel',
                                        string='Mailing Lists')
    contact_ab_pc = fields.Integer(
        string='A/B Testing percentage',
        help=
        'Percentage of the contacts that will be mailed. Recipients will be taken randomly.',
        default=100)
    # statistics data
    statistics_ids = fields.One2many('mail.mail.statistics',
                                     'mass_mailing_id',
                                     string='Emails Statistics')
    total = fields.Integer(compute="_compute_total")
    scheduled = fields.Integer(compute="_compute_statistics")
    failed = fields.Integer(compute="_compute_statistics")
    sent = fields.Integer(compute="_compute_statistics")
    delivered = fields.Integer(compute="_compute_statistics")
    opened = fields.Integer(compute="_compute_statistics")
    replied = fields.Integer(compute="_compute_statistics")
    bounced = fields.Integer(compute="_compute_statistics")
    received_ratio = fields.Integer(compute="_compute_statistics",
                                    string='Received Ratio')
    opened_ratio = fields.Integer(compute="_compute_statistics",
                                  string='Opened Ratio')
    replied_ratio = fields.Integer(compute="_compute_statistics",
                                   string='Replied Ratio')
    bounced_ratio = fields.Integer(compute="_compute_statistics",
                                   string='Bounced Ratio')
    next_departure = fields.Datetime(compute="_compute_next_departure",
                                     string='Scheduled date')

    def _compute_total(self):
        for mass_mailing in self:
            mass_mailing.total = len(mass_mailing.sudo().get_recipients())

    def _compute_clicks_ratio(self):
        self.env.cr.execute(
            """
            SELECT COUNT(DISTINCT(stats.id)) AS nb_mails, COUNT(DISTINCT(clicks.mail_stat_id)) AS nb_clicks, stats.mass_mailing_id AS id
            FROM mail_mail_statistics AS stats
            LEFT OUTER JOIN link_tracker_click AS clicks ON clicks.mail_stat_id = stats.id
            WHERE stats.mass_mailing_id IN %s
            GROUP BY stats.mass_mailing_id
        """, (tuple(self.ids), ))

        mass_mailing_data = self.env.cr.dictfetchall()
        mapped_data = dict([(m['id'], 100 * m['nb_clicks'] / m['nb_mails'])
                            for m in mass_mailing_data])
        for mass_mailing in self:
            mass_mailing.clicks_ratio = mapped_data.get(mass_mailing.id, 0)

    @api.depends('mailing_model_id')
    def _compute_model(self):
        for record in self:
            record.mailing_model_real = (
                record.mailing_model_name != 'mail.mass_mailing.list'
            ) and record.mailing_model_name or 'mail.mass_mailing.contact'

    def _compute_statistics(self):
        """ Compute statistics of the mass mailing """
        self.env.cr.execute(
            """
            SELECT
                m.id as mailing_id,
                COUNT(s.id) AS total,
                COUNT(CASE WHEN s.sent is not null THEN 1 ELSE null END) AS sent,
                COUNT(CASE WHEN s.scheduled is not null AND s.sent is null AND s.exception is null THEN 1 ELSE null END) AS scheduled,
                COUNT(CASE WHEN s.scheduled is not null AND s.sent is null AND s.exception is not null THEN 1 ELSE null END) AS failed,
                COUNT(CASE WHEN s.sent is not null AND s.bounced is null THEN 1 ELSE null END) AS delivered,
                COUNT(CASE WHEN s.opened is not null THEN 1 ELSE null END) AS opened,
                COUNT(CASE WHEN s.replied is not null THEN 1 ELSE null END) AS replied,
                COUNT(CASE WHEN s.bounced is not null THEN 1 ELSE null END) AS bounced
            FROM
                mail_mail_statistics s
            RIGHT JOIN
                mail_mass_mailing m
                ON (m.id = s.mass_mailing_id)
            WHERE
                m.id IN %s
            GROUP BY
                m.id
        """, (tuple(self.ids), ))
        for row in self.env.cr.dictfetchall():
            total = row.pop('total') or 1
            row['received_ratio'] = 100.0 * row['delivered'] / total
            row['opened_ratio'] = 100.0 * row['opened'] / total
            row['replied_ratio'] = 100.0 * row['replied'] / total
            row['bounced_ratio'] = 100.0 * row['bounced'] / total
            self.browse(row.pop('mailing_id')).update(row)

    @api.multi
    def _unsubscribe_token(self, res_id, email):
        """Generate a secure hash for this mailing list and parameters.

        This is appended to the unsubscription URL and then checked at
        unsubscription time to ensure no malicious unsubscriptions are
        performed.

        :param int res_id:
            ID of the resource that will be unsubscribed.

        :param str email:
            Email of the resource that will be unsubscribed.
        """
        secret = self.env["ir.config_parameter"].sudo().get_param(
            "database.secret")
        token = (self.env.cr.dbname, self.id, int(res_id), tools.ustr(email))
        return hmac.new(secret.encode('utf-8'),
                        repr(token).encode('utf-8'),
                        hashlib.sha512).hexdigest()

    def _compute_next_departure(self):
        cron_next_call = self.env.ref(
            'mass_mailing.ir_cron_mass_mailing_queue').sudo().nextcall
        str2dt = fields.Datetime.from_string
        cron_time = str2dt(cron_next_call)
        for mass_mailing in self:
            if mass_mailing.schedule_date:
                schedule_date = str2dt(mass_mailing.schedule_date)
                mass_mailing.next_departure = max(schedule_date, cron_time)
            else:
                mass_mailing.next_departure = cron_time

    @api.onchange('mass_mailing_campaign_id')
    def _onchange_mass_mailing_campaign_id(self):
        if self.mass_mailing_campaign_id:
            dic = {
                'campaign_id': self.mass_mailing_campaign_id.campaign_id,
                'source_id': self.mass_mailing_campaign_id.source_id,
                'medium_id': self.mass_mailing_campaign_id.medium_id
            }
            self.update(dic)

    @api.onchange('mailing_model_id', 'contact_list_ids')
    def _onchange_model_and_list(self):
        if self.mailing_model_name == 'mail.mass_mailing.list':
            if self.contact_list_ids:
                self.mailing_domain = "[('list_ids', 'in', [%s]), ('opt_out', '=', False)]" % (
                    ','.join(str(id) for id in self.contact_list_ids.ids), )
            else:
                self.mailing_domain = "[(0, '=', 1)]"
        elif self.mailing_model_name and 'opt_out' in self.env[
                self.mailing_model_name]._fields and not self.mailing_domain:
            self.mailing_domain = "[('opt_out', '=', False)]"
        self.body_html = "on_change_model_and_list"

    #------------------------------------------------------
    # Technical stuff
    #------------------------------------------------------

    @api.model
    def name_create(self, name):
        """ _rec_name is source_id, creates a utm.source instead """
        mass_mailing = self.create({'name': name})
        return mass_mailing.name_get()[0]

    @api.multi
    def copy(self, default=None):
        self.ensure_one()
        default = dict(default or {}, name=_('%s (copy)') % self.name)
        return super(MassMailing, self).copy(default=default)

    @api.model
    def read_group(self,
                   domain,
                   fields,
                   groupby,
                   offset=0,
                   limit=None,
                   orderby=False,
                   lazy=True):
        """ Override read_group to always display all states. """
        if groupby and groupby[0] == "state":
            # Default result structure
            states = [('draft', _('Draft')), ('in_queue', _('In Queue')),
                      ('sending', _('Sending')), ('done', _('Sent'))]
            read_group_all_states = [{
                '__context': {
                    'group_by': groupby[1:]
                },
                '__domain':
                domain + [('state', '=', state_value)],
                'state':
                state_value,
                'state_count':
                0,
            } for state_value, state_name in states]
            # Get standard results
            read_group_res = super(MassMailing,
                                   self).read_group(domain,
                                                    fields,
                                                    groupby,
                                                    offset=offset,
                                                    limit=limit,
                                                    orderby=orderby)
            # Update standard results with default results
            result = []
            for state_value, state_name in states:
                res = [x for x in read_group_res if x['state'] == state_value]
                if not res:
                    res = [
                        x for x in read_group_all_states
                        if x['state'] == state_value
                    ]
                res[0]['state'] = state_value
                result.append(res[0])
            return result
        else:
            return super(MassMailing, self).read_group(domain,
                                                       fields,
                                                       groupby,
                                                       offset=offset,
                                                       limit=limit,
                                                       orderby=orderby,
                                                       lazy=lazy)

    def update_opt_out(self, email, res_ids, value):
        model = self.env[self.mailing_model_real].with_context(
            active_test=False)
        if 'opt_out' in model._fields:
            email_fname = 'email_from'
            if 'email' in model._fields:
                email_fname = 'email'
            records = model.search([('id', 'in', res_ids),
                                    (email_fname, 'ilike', email)])
            records.write({'opt_out': value})

    #------------------------------------------------------
    # Views & Actions
    #------------------------------------------------------

    @api.multi
    def action_duplicate(self):
        self.ensure_one()
        mass_mailing_copy = self.copy()
        if mass_mailing_copy:
            context = dict(self.env.context)
            context['form_view_initial_mode'] = 'edit'
            return {
                'type': 'ir.actions.act_window',
                'view_type': 'form',
                'view_mode': 'form',
                'res_model': 'mail.mass_mailing',
                'res_id': mass_mailing_copy.id,
                'context': context,
            }
        return False

    @api.multi
    def action_test_mailing(self):
        self.ensure_one()
        ctx = dict(self.env.context, default_mass_mailing_id=self.id)
        return {
            'name': _('Test Mailing'),
            'type': 'ir.actions.act_window',
            'view_mode': 'form',
            'res_model': 'mail.mass_mailing.test',
            'target': 'new',
            'context': ctx,
        }

    @api.multi
    def put_in_queue(self):
        self.write({'sent_date': fields.Datetime.now(), 'state': 'in_queue'})

    @api.multi
    def cancel_mass_mailing(self):
        self.write({'state': 'draft'})

    @api.multi
    def retry_failed_mail(self):
        failed_mails = self.env['mail.mail'].search([
            ('mailing_id', 'in', self.ids), ('state', '=', 'exception')
        ])
        failed_mails.mapped('statistics_ids').unlink()
        failed_mails.sudo().unlink()
        self.write({'state': 'in_queue'})

    #------------------------------------------------------
    # Email Sending
    #------------------------------------------------------

    def _get_blacklist(self):
        """Returns a set of emails opted-out in target model"""
        # TODO: implement a global blacklist table, to easily share
        # it and update it.
        self.ensure_one()
        blacklist = set()
        target = self.env[self.mailing_model_real]
        mail_field = 'email' if 'email' in target._fields else 'email_from'
        if 'opt_out' in target._fields:
            # avoid loading a large number of records in memory
            # + use a basic heuristic for extracting emails
            query = """
                SELECT lower(substring(%(mail_field)s, '([^ ,;<@]+@[^> ,;]+)'))
                  FROM %(target)s
                 WHERE opt_out AND
                       substring(%(mail_field)s, '([^ ,;<@]+@[^> ,;]+)') IS NOT NULL;
            """
            query = query % {'target': target._table, 'mail_field': mail_field}
            self._cr.execute(query)
            blacklist = set(m[0] for m in self._cr.fetchall())
            _logger.info("Mass-mailing %s targets %s, blacklist: %s emails",
                         self, target._name, len(blacklist))
        else:
            _logger.info("Mass-mailing %s targets %s, no blacklist available",
                         self, target._name)
        return blacklist

    def _get_seen_list(self):
        """Returns a set of emails already targeted by current mailing/campaign (no duplicates)"""
        self.ensure_one()
        target = self.env[self.mailing_model_real]
        mail_field = 'email' if 'email' in target._fields else 'email_from'
        # avoid loading a large number of records in memory
        # + use a basic heuristic for extracting emails
        query = """
            SELECT lower(substring(%(mail_field)s, '([^ ,;<@]+@[^> ,;]+)'))
              FROM mail_mail_statistics s
              JOIN %(target)s t ON (s.res_id = t.id)
             WHERE substring(%(mail_field)s, '([^ ,;<@]+@[^> ,;]+)') IS NOT NULL
        """
        if self.mass_mailing_campaign_id.unique_ab_testing:
            query += """
               AND s.mass_mailing_campaign_id = %%(mailing_campaign_id)s;
            """
        else:
            query += """
               AND s.mass_mailing_id = %%(mailing_id)s;
            """
        query = query % {'target': target._table, 'mail_field': mail_field}
        params = {
            'mailing_id': self.id,
            'mailing_campaign_id': self.mass_mailing_campaign_id.id
        }
        self._cr.execute(query, params)
        seen_list = set(m[0] for m in self._cr.fetchall())
        _logger.info("Mass-mailing %s has already reached %s %s emails", self,
                     len(seen_list), target._name)
        return seen_list

    def _get_mass_mailing_context(self):
        """Returns extra context items with pre-filled blacklist and seen list for massmailing"""
        return {
            'mass_mailing_blacklist': self._get_blacklist(),
            'mass_mailing_seen_list': self._get_seen_list(),
        }

    def get_recipients(self):
        if self.mailing_domain:
            domain = safe_eval(self.mailing_domain)
            res_ids = self.env[self.mailing_model_real].search(domain).ids
        else:
            res_ids = []
            domain = [('id', 'in', res_ids)]

        # randomly choose a fragment
        if self.contact_ab_pc < 100:
            contact_nbr = self.env[self.mailing_model_real].search_count(
                domain)
            topick = int(contact_nbr / 100.0 * self.contact_ab_pc)
            if self.mass_mailing_campaign_id and self.mass_mailing_campaign_id.unique_ab_testing:
                already_mailed = self.mass_mailing_campaign_id.get_recipients(
                )[self.mass_mailing_campaign_id.id]
            else:
                already_mailed = set([])
            remaining = set(res_ids).difference(already_mailed)
            if topick > len(remaining):
                topick = len(remaining)
            res_ids = random.sample(remaining, topick)
        return res_ids

    def get_remaining_recipients(self):
        res_ids = self.get_recipients()
        already_mailed = self.env['mail.mail.statistics'].search_read(
            [('model', '=', self.mailing_model_real),
             ('res_id', 'in', res_ids), ('mass_mailing_id', '=', self.id)],
            ['res_id'])
        already_mailed_res_ids = [
            record['res_id'] for record in already_mailed
        ]
        return list(set(res_ids) - set(already_mailed_res_ids))

    def send_mail(self, res_ids=None):
        author_id = self.env.user.partner_id.id

        for mailing in self:
            if not res_ids:
                res_ids = mailing.get_remaining_recipients()
            if not res_ids:
                raise UserError(_('Please select recipients.'))

            # Convert links in absolute URLs before the application of the shortener
            mailing.body_html = self.env['mail.template']._replace_local_links(
                mailing.body_html)

            composer_values = {
                'author_id':
                author_id,
                'attachment_ids':
                [(4, attachment.id) for attachment in mailing.attachment_ids],
                'body':
                mailing.convert_links()[mailing.id],
                'subject':
                mailing.name,
                'model':
                mailing.mailing_model_real,
                'email_from':
                mailing.email_from,
                'record_name':
                False,
                'composition_mode':
                'mass_mail',
                'mass_mailing_id':
                mailing.id,
                'mailing_list_ids':
                [(4, l.id) for l in mailing.contact_list_ids],
                'no_auto_thread':
                mailing.reply_to_mode != 'thread',
            }
            if mailing.reply_to_mode == 'email':
                composer_values['reply_to'] = mailing.reply_to

            composer = self.env['mail.compose.message'].with_context(
                active_ids=res_ids).create(composer_values)
            extra_context = self._get_mass_mailing_context()
            composer = composer.with_context(active_ids=res_ids,
                                             **extra_context)
            # auto-commit except in testing mode
            auto_commit = not getattr(threading.currentThread(), 'testing',
                                      False)
            composer.send_mail(auto_commit=auto_commit)
            mailing.state = 'done'
        return True

    def convert_links(self):
        res = {}
        for mass_mailing in self:
            utm_mixin = mass_mailing.mass_mailing_campaign_id if mass_mailing.mass_mailing_campaign_id else mass_mailing
            html = mass_mailing.body_html if mass_mailing.body_html else ''

            vals = {'mass_mailing_id': mass_mailing.id}

            if mass_mailing.mass_mailing_campaign_id:
                vals[
                    'mass_mailing_campaign_id'] = mass_mailing.mass_mailing_campaign_id.id
            if utm_mixin.campaign_id:
                vals['campaign_id'] = utm_mixin.campaign_id.id
            if utm_mixin.source_id:
                vals['source_id'] = utm_mixin.source_id.id
            if utm_mixin.medium_id:
                vals['medium_id'] = utm_mixin.medium_id.id

            res[mass_mailing.id] = self.env['link.tracker'].convert_links(
                html, vals, blacklist=['/unsubscribe_from_list'])

        return res

    @api.model
    def _process_mass_mailing_queue(self):
        mass_mailings = self.search([('state', 'in', ('in_queue', 'sending')),
                                     '|',
                                     ('schedule_date', '<',
                                      fields.Datetime.now()),
                                     ('schedule_date', '=', False)])
        for mass_mailing in mass_mailings:
            user = mass_mailing.write_uid or self.env.user
            mass_mailing = mass_mailing.with_context(**user.sudo(
                user=user).context_get())
            if len(mass_mailing.get_remaining_recipients()) > 0:
                mass_mailing.state = 'sending'
                mass_mailing.send_mail()
            else:
                mass_mailing.state = 'done'
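
The _unsubscribe_token helper above signs the database name, mailing id, record id and email with the database.secret system parameter. Below is a minimal, plain-Python sketch of how such a token could be recomputed and checked at unsubscription time; verify_unsubscribe_token and all values used here are hypothetical stand-ins, not part of the module:

import hashlib
import hmac

def build_unsubscribe_token(secret, dbname, mailing_id, res_id, email):
    # Same recipe as _unsubscribe_token: HMAC-SHA512 over the repr of the tuple.
    token = (dbname, mailing_id, int(res_id), str(email))
    return hmac.new(secret.encode('utf-8'),
                    repr(token).encode('utf-8'),
                    hashlib.sha512).hexdigest()

def verify_unsubscribe_token(secret, dbname, mailing_id, res_id, email, received):
    # Hypothetical check: recompute the signature and compare in constant time.
    expected = build_unsubscribe_token(secret, dbname, mailing_id, res_id, email)
    return hmac.compare_digest(expected, received)

token = build_unsubscribe_token('s3cr3t', 'mydb', 5, 42, 'foo@example.com')
assert verify_unsubscribe_token('s3cr3t', 'mydb', 5, 42, 'foo@example.com', token)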
Example #5
0
class Recurring(models.Model):
    _name = "recurring"
    _description = "Recurring"

    @api.model
    def default_get(self, fields):
        res = super(Recurring, self).default_get(fields)
        active_model = self._context.get('active_model')
        active_id = self._context.get('active_id')
        if active_model and active_id:
            record = self.env[active_model].browse(active_id)
            if 'partner_id' in self.env[active_model]._fields:
                res['partner_id'] = record.partner_id.id
            else:
                res['name'] = record.name
                if not res['name']:
                    res['name'] = record.number
        return res

    @api.onchange('partner_id')
    def _onchange_partner_id(self):
        active_model = self._context.get('active_model')
        active_id = self._context.get('active_id')
        if self.partner_id and active_model and active_id:
            record = self.env[active_model].browse(active_id)
            name = record.name
            if not name:
                name = record.number
            if name:
                self.name = name + '-' + self.partner_id.name
            else:
                self.name = self.partner_id.name

    @api.constrains('partner_id', 'doc_source')
    def _check_partner_id_doc_source(self):
        for record in self:
            if record.partner_id and record.doc_source and 'partner_id' in \
                    self.env[record.doc_source._name]._fields and \
                    record.doc_source.partner_id != record.partner_id:
                raise ValidationError(
                    _('Error! Source Document should be related to partner %s')
                    % record.doc_source.partner_id.name)

    name = fields.Char(string='Name')
    active = fields.Boolean(
        help="If the active field is set to False, it will allow you to hide "
             "the recurring without removing it.", default=True)
    partner_id = fields.Many2one('res.partner', string='Partner')
    notes = fields.Text(string='Internal Notes')
    user_id = fields.Many2one('res.users', string='User',
                              default=lambda self: self.env.user)
    interval_number = fields.Integer(string='Interval Qty', default=1)
    interval_type = fields.Selection(
        [('minutes', 'Minutes'), ('hours', 'Hours'), ('days', 'Days'),
         ('weeks', 'Weeks'), ('months', 'Months')], string='Interval Unit',
        default='months')
    exec_init = fields.Integer(string='Number of Documents')
    date_init = fields.Datetime(string='First Date',
                                default=fields.Datetime.now)
    state = fields.Selection(
        [('draft', 'Draft'), ('running', 'Running'), ('done', 'Done')],
        string='Status', copy=False, default='draft')
    doc_source = fields.Reference(
        selection=_get_document_types, string='Source Document',
        help="User can choose the source document on which he wants to "
             "create documents")
    doc_lines = fields.One2many('recurring.history',
                                'recurring_id', string='Documents created')
    cron_id = fields.Many2one('ir.cron', string='Cron Job',
                              help="Scheduler which runs on recurring",
                              states={'running': [('readonly', True)],
                                      'done': [('readonly', True)]})
    note = fields.Text(string='Notes',
                       help="Description or Summary of Recurring")

    @api.model
    def _auto_end(self):
        super(Recurring, self)._auto_end()
        # drop the FK from recurring to ir.cron, as it would cause deadlocks
        # during cron job execution. When model_copy() tries to write() on the
        # recurring, it has to wait for an ExclusiveLock on the cron job
        # record, but the latter is locked by the cron system for the duration
        # of the job!
        # FIXME: the recurring module should be reviewed to simplify the
        # scheduling process and to use a unique cron job for all recurrings,
        # so that it never needs to be updated during its execution.
        self.env.cr.execute("ALTER TABLE %s DROP CONSTRAINT %s" % (
            self._table, '%s_cron_id_fkey' % self._table))

    @api.multi
    def create_recurring_type(self):
        rec_doc_obj = self.env['recurring.document']
        ir_model_id = self.env['ir.model'].search(
            [('model', '=', self._context.get('active_model', False))])
        rec_doc_id = rec_doc_obj.search([('model', '=', ir_model_id.id)])
        if not rec_doc_id:
            rec_doc_id = rec_doc_obj.create({
                'name': ir_model_id.name,
                'model': ir_model_id.id,
            })
        return rec_doc_id

    @api.multi
    def btn_recurring(self):
        self.ensure_one()
        rec_doc_id = self.create_recurring_type()
        if rec_doc_id:
            active_model = self._context.get('active_model')
            active_id = self._context.get('active_id')
            if active_id and active_model:
                record = self.env[active_model].browse(active_id)
                self.doc_source = record._name + "," + str(record.id)
                record.recurring_id = self.id
                record.rec_source_id = record.id
            if self._context.get('process') == 'start':
                self.set_process()

    @api.multi
    def set_process(self):
        for recurring in self:
            model = 'recurring'
            cron_data = {
                'name': recurring.name,
                'interval_number': recurring.interval_number,
                'interval_type': recurring.interval_type,
                'numbercall': recurring.exec_init,
                'nextcall': recurring.date_init,
                'model_id': self.env['ir.model'].search(
                    [('model', '=', model)]).id,
                'priority': 6,
                'user_id': recurring.user_id.id,
                'state': 'code',
                'code': 'model._cron_model_copy('+repr([recurring.id])+')',
            }
            cron = self.env['ir.cron'].sudo().create(cron_data)
            recurring.write({'cron_id': cron.id, 'state': 'running'})

    @api.multi
    def set_recurring_id(self):
        if self.doc_source and \
                'recurring_id' in self.env[self.doc_source._name]._fields and \
                'rec_source_id' in self.env[self.doc_source._name]._fields:
            rec_id = self.env[self.doc_source._name].browse(self.doc_source.id)
            if not rec_id.recurring_id and not rec_id.rec_source_id:
                rec_id.recurring_id = self.id
                rec_id.rec_source_id = self.doc_source.id
            else:
                raise ValidationError(
                    _('Document is already recurring'))

    @api.model
    def create(self, vals):
        if vals.get('doc_source', False) and self.search(
                [('doc_source', '=', vals['doc_source'])]):
            raise ValidationError(
                _('Recurring of the selected Source Document already exist'))
        res = super(Recurring, self).create(vals)
        res.set_recurring_id()
        return res

    @api.multi
    def write(self, values):
        doc_source_id = False
        if values.get('doc_source', False):
            doc_source_id = self.doc_source
        res = super(Recurring, self).write(values)
        if doc_source_id:
            rec_id = self.env[doc_source_id._name].browse(doc_source_id.id)
            rec_id.recurring_id = False
            self.set_recurring_id()
        return res

    @api.multi
    def get_recurring(self, model, active_id):
        result = self.env.ref('recurring.action_recurring_form').read()[0]
        record = self.env[model].browse(active_id)
        rec_ids = self.env['recurring'].search(
            [('doc_source', '=', record._name + "," + str(record.id))])
        result['domain'] = [('id', 'in', rec_ids.ids)]
        return result

    @api.multi
    def get_recurring_documents(self, model, action, recurring_id):
        result = self.env.ref(action).read()[0]
        res_ids = self.env[model].search(
            [('recurring_id', '=', recurring_id.id)])
        result['domain'] = [('id', 'in', res_ids.ids)]
        return result

    @api.model
    def _cron_model_copy(self, ids):
        self.browse(ids).model_copy()

    @api.multi
    def model_copy(self):
        for recurring in self.filtered(lambda sub: sub.cron_id):
            if not recurring.doc_source.exists():
                raise UserError(_('Please provide another source '
                                  'document.\nThis one does not exist!'))

            default = {}
            documents = self.env['recurring.document'].search(
                [('model.model', '=', recurring.doc_source._name)], limit=1)
            fieldnames = dict((f.field.name, f.value == 'date' and
                               fields.Date.today() or False)
                              for f in documents.field_ids)
            default.update(fieldnames)
            # if there was only one remaining document to generate
            # the recurring is over and we mark it as being done
            if recurring.cron_id.numbercall == 1:
                recurring.write({'state': 'done'})
            else:
                recurring.write({'state': 'running'})
            copied_doc = recurring.doc_source.copy(default)
            self.env['recurring.history'].create({
                'recurring_id': recurring.id,
                'date': fields.Datetime.now(),
                'document_id': '%s,%s' % (recurring.doc_source._name,
                                          copied_doc.id)})

    @api.multi
    def unlink(self):
        if any(self.filtered(lambda s: s.state == "running")):
            raise UserError(_('You cannot delete an active recurring!'))
        return super(Recurring, self).unlink()

    @api.multi
    def set_done(self):
        self.mapped('cron_id').write({'active': False})
        self.write({'state': 'done'})

    @api.multi
    def set_draft(self):
        self.write({'state': 'draft'})
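
Two string conventions appear in this class: doc_source is a fields.Reference stored as a "model.name,id" string (built in btn_recurring and searched on in get_recurring), and set_process generates the cron's code from a literal id list via repr(). A minimal standalone sketch with hypothetical ids:

# Hypothetical values; only the string formats mirror the code above.
recurring_id = 7
doc_source = "%s,%s" % ('sale.order', 42)
cron_code = 'model._cron_model_copy(' + repr([recurring_id]) + ')'
assert doc_source == 'sale.order,42'
assert cron_code == 'model._cron_model_copy([7])'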
Example #6
0
class MailTracking(models.Model):
    _name = 'mail.tracking.value'
    _description = 'Mail Tracking Value'
    _rec_name = 'field'
    _order = 'tracking_sequence asc'

    field = fields.Many2one('ir.model.fields',
                            required=True,
                            readonly=1,
                            ondelete='cascade')
    field_desc = fields.Char('Field Description', required=True, readonly=1)
    field_type = fields.Char('Field Type')
    field_groups = fields.Char(compute='_compute_field_groups')

    old_value_integer = fields.Integer('Old Value Integer', readonly=1)
    old_value_float = fields.Float('Old Value Float', readonly=1)
    old_value_monetary = fields.Float('Old Value Monetary', readonly=1)
    old_value_char = fields.Char('Old Value Char', readonly=1)
    old_value_text = fields.Text('Old Value Text', readonly=1)
    old_value_datetime = fields.Datetime('Old Value DateTime', readonly=1)

    new_value_integer = fields.Integer('New Value Integer', readonly=1)
    new_value_float = fields.Float('New Value Float', readonly=1)
    new_value_monetary = fields.Float('New Value Monetary', readonly=1)
    new_value_char = fields.Char('New Value Char', readonly=1)
    new_value_text = fields.Text('New Value Text', readonly=1)
    new_value_datetime = fields.Datetime('New Value Datetime', readonly=1)

    mail_message_id = fields.Many2one('mail.message',
                                      'Message ID',
                                      required=True,
                                      index=True,
                                      ondelete='cascade')

    tracking_sequence = fields.Integer('Tracking field sequence',
                                       readonly=1,
                                       default=100)

    def _compute_field_groups(self):
        for tracking in self:
            model = self.env[tracking.mail_message_id.model]
            field = model._fields.get(tracking.field.name)
            tracking.field_groups = field.groups if field else 'base.group_system'

    @api.model
    def create_tracking_values(self, initial_value, new_value, col_name,
                               col_info, tracking_sequence, model_name):
        tracked = True

        field = self.env['ir.model.fields']._get(model_name, col_name)
        if not field:
            return

        values = {
            'field': field.id,
            'field_desc': col_info['string'],
            'field_type': col_info['type'],
            'tracking_sequence': tracking_sequence
        }

        if col_info['type'] in [
                'integer', 'float', 'char', 'text', 'datetime', 'monetary'
        ]:
            values.update({
                'old_value_%s' % col_info['type']: initial_value,
                'new_value_%s' % col_info['type']: new_value
            })
        elif col_info['type'] == 'date':
            values.update({
                'old_value_datetime':
                initial_value and fields.Datetime.to_string(
                    datetime.combine(fields.Date.from_string(initial_value),
                                     datetime.min.time())) or False,
                'new_value_datetime':
                new_value and fields.Datetime.to_string(
                    datetime.combine(fields.Date.from_string(new_value),
                                     datetime.min.time())) or False,
            })
        elif col_info['type'] == 'boolean':
            values.update({
                'old_value_integer': initial_value,
                'new_value_integer': new_value
            })
        elif col_info['type'] == 'selection':
            values.update({
                'old_value_char':
                initial_value and dict(col_info['selection'])[initial_value]
                or '',
                'new_value_char':
                new_value and dict(col_info['selection'])[new_value] or ''
            })
        elif col_info['type'] == 'many2one':
            values.update({
                'old_value_integer':
                initial_value and initial_value.id or 0,
                'new_value_integer':
                new_value and new_value.id or 0,
                'old_value_char':
                initial_value and initial_value.sudo().name_get()[0][1] or '',
                'new_value_char':
                new_value and new_value.sudo().name_get()[0][1] or ''
            })
        else:
            tracked = False

        if tracked:
            return values
        return {}

    def get_display_value(self, type):
        assert type in ('new', 'old')
        result = []
        for record in self:
            if record.field_type in [
                    'integer', 'float', 'char', 'text', 'monetary'
            ]:
                result.append(
                    getattr(record, '%s_value_%s' % (type, record.field_type)))
            elif record.field_type == 'datetime':
                if record['%s_value_datetime' % type]:
                    new_datetime = getattr(record, '%s_value_datetime' % type)
                    result.append('%sZ' % new_datetime)
                else:
                    result.append(record['%s_value_datetime' % type])
            elif record.field_type == 'date':
                if record['%s_value_datetime' % type]:
                    new_date = record['%s_value_datetime' % type]
                    result.append(fields.Date.to_string(new_date))
                else:
                    result.append(record['%s_value_datetime' % type])
            elif record.field_type == 'boolean':
                result.append(bool(record['%s_value_integer' % type]))
            else:
                result.append(record['%s_value_char' % type])
        return result

    def get_old_display_value(self):
        # grep : # old_value_integer | old_value_datetime | old_value_char
        return self.get_display_value('old')

    def get_new_display_value(self):
        # grep : # new_value_integer | new_value_datetime | new_value_char
        return self.get_display_value('new')
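
For selection fields, create_tracking_values stores the human-readable labels rather than the raw keys, using dict(col_info['selection']) as a lookup table. A standalone sketch of that branch with a hypothetical selection list:

# Hypothetical selection definition and values.
selection = [('draft', 'Draft'), ('sent', 'Quotation Sent'), ('sale', 'Sales Order')]
initial_value, new_value = 'draft', 'sale'
values = {
    'old_value_char': initial_value and dict(selection)[initial_value] or '',
    'new_value_char': new_value and dict(selection)[new_value] or '',
}
assert values == {'old_value_char': 'Draft', 'new_value_char': 'Sales Order'}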
Example #7
0
class OauthAccessToken(models.Model):
    _name = 'oauth.access_token'

    token = fields.Char('Access Token', required=True)
    user_id = fields.Many2one('res.users', string='User', required=True)
    expires = fields.Datetime('Expires', required=True)
    scope = fields.Char('Scope')

    @api.multi
    def _get_access_token(self, user_id=None, create=False):
        if not user_id:
            user_id = self.env.user.id

        access_token = self.env['oauth.access_token'].sudo().search(
            [('user_id', '=', user_id)], order='id DESC', limit=1)
        if access_token:
            access_token = access_token[0]
            if access_token.is_expired():
                access_token = None
        if not access_token and create:
            expires = datetime.now() + timedelta(seconds=int(
                self.env.ref(
                    'rest_api.oauth2_access_token_expires_in').sudo().value))
            vals = {
                'user_id': user_id,
                'scope': 'userinfo',
                'expires': expires.strftime(DEFAULT_SERVER_DATETIME_FORMAT),
                'token': oauthlib_common.generate_token(),
            }
            access_token = self.env['oauth.access_token'].sudo().create(vals)
            # we have to commit now, because /oauth2/tokeninfo could
            # be called before we finish current transaction.
            self._cr.commit()
        if not access_token:
            return None
        return access_token.token

    @api.multi
    def is_valid(self, scopes=None):
        """
        Checks if the access token is valid.

        :param scopes: An iterable containing the scopes to check or None
        """
        self.ensure_one()
        return not self.is_expired() and self._allow_scopes(scopes)

    @api.multi
    def is_expired(self):
        self.ensure_one()
        return datetime.now() > fields.Datetime.from_string(self.expires)

    @api.multi
    def _allow_scopes(self, scopes):
        self.ensure_one()
        if not scopes:
            return True

        provided_scopes = set(self.scope.split())
        resource_scopes = set(scopes)

        return resource_scopes.issubset(provided_scopes)
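
_allow_scopes treats the token's space-separated scope field as a set and only grants access when every requested scope is present. A standalone sketch with hypothetical scopes:

# Hypothetical scope values.
granted = 'userinfo email'          # the token's scope field
requested = ['userinfo']            # scopes required by the resource

provided_scopes = set(granted.split())
resource_scopes = set(requested)
assert resource_scopes.issubset(provided_scopes)    # allowed
assert not {'admin'}.issubset(provided_scopes)      # missing scope -> denied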
Example #8
0
class ImLivechatReportChannel(models.Model):
    """ Livechat Support Report on the Channels """

    _name = "im_livechat.report.channel"
    _description = "Livechat Support Report"
    _order = 'start_date, technical_name'
    _auto = False

    uuid = fields.Char('UUID', readonly=True)
    channel_id = fields.Many2one('mail.channel', 'Conversation', readonly=True)
    channel_name = fields.Char('Channel Name', readonly=True)
    technical_name = fields.Char('Code', readonly=True)
    livechat_channel_id = fields.Many2one('im_livechat.channel',
                                          'Channel',
                                          readonly=True)
    start_date = fields.Datetime('Start Date of session',
                                 readonly=True,
                                 help="Start date of the conversation")
    start_date_hour = fields.Char('Hour of start Date of session',
                                  readonly=True)
    duration = fields.Float('Average duration',
                            digits=(16, 2),
                            readonly=True,
                            group_operator="avg",
                            help="Duration of the conversation (in seconds)")
    nbr_speaker = fields.Integer('# of speakers',
                                 readonly=True,
                                 group_operator="avg",
                                 help="Number of different speakers")
    nbr_message = fields.Integer('Average message',
                                 readonly=True,
                                 group_operator="avg",
                                 help="Number of message in the conversation")
    partner_id = fields.Many2one('res.partner', 'Operator', readonly=True)

    @api.model_cr
    def init(self):
        # Note: start_date_hour must be removed once read_group allows grouping on the hour of a datetime. Don't forget to change the view!
        tools.drop_view_if_exists(self.env.cr, 'im_livechat_report_channel')
        self.env.cr.execute("""
            CREATE OR REPLACE VIEW im_livechat_report_channel AS (
                SELECT
                    C.id as id,
                    C.uuid as uuid,
                    C.id as channel_id,
                    C.name as channel_name,
                    CONCAT(L.name, ' / ', C.id) as technical_name,
                    C.livechat_channel_id as livechat_channel_id,
                    C.create_date as start_date,
                    to_char(date_trunc('hour', C.create_date), 'YYYY-MM-DD HH24:MI:SS') as start_date_hour,
                    EXTRACT('epoch' FROM (max((SELECT (max(M.create_date)) FROM mail_message M JOIN mail_message_mail_channel_rel R ON (R.mail_message_id = M.id) WHERE R.mail_channel_id = C.id))-C.create_date)) as duration,
                    count(distinct P.id) as nbr_speaker,
                    count(distinct M.id) as nbr_message,
                    MAX(S.partner_id) as partner_id
                FROM mail_channel C
                    JOIN mail_message_mail_channel_rel R ON (C.id = R.mail_channel_id)
                    JOIN mail_message M ON (M.id = R.mail_message_id)
                    JOIN mail_channel_partner S ON (S.channel_id = C.id)
                    JOIN im_livechat_channel L ON (L.id = C.livechat_channel_id)
                    LEFT JOIN res_partner P ON (M.author_id = P.id)
                GROUP BY C.id, C.name, C.livechat_channel_id, L.name, C.create_date, C.uuid
            )
        """)
Example #9
0
class BlogPost(models.Model):
    _name = "blog.post"
    _description = "Blog Post"
    _inherit = [
        'mail.thread', 'website.seo.metadata', 'website.published.multi.mixin',
        'website.cover_properties.mixin'
    ]
    _order = 'id DESC'
    _mail_post_access = 'read'

    def _compute_website_url(self):
        super(BlogPost, self)._compute_website_url()
        for blog_post in self:
            blog_post.website_url = "/blog/%s/%s" % (slug(
                blog_post.blog_id), slug(blog_post))

    def _default_content(self):
        return '''
            <p class="o_default_snippet_text">''' + _(
            "Start writing here...") + '''</p>
        '''

    name = fields.Char('Title', required=True, translate=True, default='')
    subtitle = fields.Char('Sub Title', translate=True)
    author_id = fields.Many2one('res.partner',
                                'Author',
                                default=lambda self: self.env.user.partner_id)
    author_avatar = fields.Binary(related='author_id.image_128',
                                  string="Avatar",
                                  readonly=False)
    author_name = fields.Char(related='author_id.display_name',
                              string="Author Name",
                              readonly=False,
                              store=True)
    active = fields.Boolean('Active', default=True)
    blog_id = fields.Many2one('blog.blog',
                              'Blog',
                              required=True,
                              ondelete='cascade')
    tag_ids = fields.Many2many('blog.tag', string='Tags')
    content = fields.Html('Content',
                          default=_default_content,
                          translate=html_translate,
                          sanitize=False)
    teaser = fields.Text('Teaser',
                         compute='_compute_teaser',
                         inverse='_set_teaser')
    teaser_manual = fields.Text(string='Teaser Content')

    website_message_ids = fields.One2many(
        domain=lambda self: [('model', '=', self._name),
                             ('message_type', '=', 'comment')])

    # creation / update stuff
    create_date = fields.Datetime('Created on', index=True, readonly=True)
    published_date = fields.Datetime('Published Date')
    post_date = fields.Datetime(
        'Publishing date',
        compute='_compute_post_date',
        inverse='_set_post_date',
        store=True,
        help=
        "The blog post will be visible for your visitors as of this date on the website if it is set as published."
    )
    create_uid = fields.Many2one('res.users',
                                 'Created by',
                                 index=True,
                                 readonly=True)
    write_date = fields.Datetime('Last Updated on', index=True, readonly=True)
    write_uid = fields.Many2one('res.users',
                                'Last Contributor',
                                index=True,
                                readonly=True)
    visits = fields.Integer('No of Views', copy=False, default=0)
    website_id = fields.Many2one(related='blog_id.website_id',
                                 readonly=True,
                                 store=True)

    @api.depends('content', 'teaser_manual')
    def _compute_teaser(self):
        for blog_post in self:
            if blog_post.teaser_manual:
                blog_post.teaser = blog_post.teaser_manual
            else:
                content = html2plaintext(blog_post.content).replace('\n', ' ')
                blog_post.teaser = content[:200] + '...'

    def _set_teaser(self):
        for blog_post in self:
            blog_post.teaser_manual = blog_post.teaser

    @api.depends('create_date', 'published_date')
    def _compute_post_date(self):
        for blog_post in self:
            if blog_post.published_date:
                blog_post.post_date = blog_post.published_date
            else:
                blog_post.post_date = blog_post.create_date

    def _set_post_date(self):
        for blog_post in self:
            blog_post.published_date = blog_post.post_date
            if not blog_post.published_date:
                blog_post._write(dict(post_date=blog_post.create_date)
                                 )  # don't trigger the inverse function

    def _check_for_publication(self, vals):
        if vals.get('is_published'):
            for post in self.filtered(lambda p: p.active):
                post.blog_id.message_post_with_view(
                    'website_blog.blog_post_template_new_post',
                    subject=post.name,
                    values={'post': post},
                    subtype_id=self.env['ir.model.data'].xmlid_to_res_id(
                        'website_blog.mt_blog_blog_published'))
            return True
        return False

    @api.model
    def create(self, vals):
        post_id = super(BlogPost,
                        self.with_context(mail_create_nolog=True)).create(vals)
        post_id._check_for_publication(vals)
        return post_id

    def write(self, vals):
        result = True
        # archiving a blog post unpublishes it
        if 'active' in vals and not vals['active']:
            vals['is_published'] = False
        for post in self:
            copy_vals = dict(vals)
            published_in_vals = set(
                vals.keys()) & {'is_published', 'website_published'}
            if (published_in_vals and 'published_date' not in vals
                    and (not post.published_date
                         or post.published_date <= fields.Datetime.now())):
                copy_vals['published_date'] = vals[list(
                    published_in_vals)[0]] and fields.Datetime.now() or False
            result &= super(BlogPost, post).write(copy_vals)
        self._check_for_publication(vals)
        return result

    @api.returns('self', lambda value: value.id)
    def copy_data(self, default=None):
        self.ensure_one()
        name = _("%s (copy)", self.name)
        default = dict(default or {}, name=name)
        return super(BlogPost, self).copy_data(default)

    def get_access_action(self, access_uid=None):
        """ Instead of the classic form view, redirect to the post on website
        directly if user is an employee or if the post is published. """
        self.ensure_one()
        user = access_uid and self.env['res.users'].sudo().browse(
            access_uid) or self.env.user
        if user.share and not self.sudo().website_published:
            return super(BlogPost, self).get_access_action(access_uid)
        return {
            'type': 'ir.actions.act_url',
            'url': self.website_url,
            'target': 'self',
            'target_type': 'public',
            'res_id': self.id,
        }

    def _notify_get_groups(self, msg_vals=None):
        """ Add access button to everyone if the document is published. """
        groups = super(BlogPost, self)._notify_get_groups(msg_vals=msg_vals)

        if self.website_published:
            for group_name, group_method, group_data in groups:
                group_data['has_button_access'] = True

        return groups

    def _notify_record_by_inbox(self,
                                message,
                                recipients_data,
                                msg_vals=False,
                                **kwargs):
        """ Override to avoid keeping all notified recipients of a comment.
        We avoid tracking needaction on post comments. Only emails should be
        sufficient. """
        if msg_vals.get('message_type', message.message_type) == 'comment':
            return
        return super(BlogPost, self)._notify_record_by_inbox(message,
                                                             recipients_data,
                                                             msg_vals=msg_vals,
                                                             **kwargs)

    def _default_website_meta(self):
        res = super(BlogPost, self)._default_website_meta()
        res['default_opengraph']['og:description'] = res['default_twitter'][
            'twitter:description'] = self.subtitle
        res['default_opengraph']['og:type'] = 'article'
        res['default_opengraph']['article:published_time'] = self.post_date
        res['default_opengraph']['article:modified_time'] = self.write_date
        res['default_opengraph']['article:tag'] = self.tag_ids.mapped('name')
        # background-image might contain single quotes eg `url('/my/url')`
        res['default_opengraph']['og:image'] = res['default_twitter'][
            'twitter:image'] = json_scriptsafe.loads(
                self.cover_properties).get('background-image',
                                           'none')[4:-1].strip("'")
        res['default_opengraph']['og:title'] = res['default_twitter'][
            'twitter:title'] = self.name
        res['default_meta_description'] = self.subtitle
        return res
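
_default_website_meta pulls the Open Graph image out of the cover_properties JSON, where background-image holds a CSS value such as url('/my/url'); slicing with [4:-1] drops the url( prefix and closing parenthesis, and strip("'") removes the quotes. A standalone sketch with a hypothetical payload, using the standard json module in place of json_scriptsafe:

import json

# Hypothetical cover_properties value as stored on the record.
cover_properties = json.dumps(
    {'background-image': "url('/web/image/blog.post/7/cover')"})
value = json.loads(cover_properties).get('background-image', 'none')
og_image = value[4:-1].strip("'")   # "url('/my/url')" -> "/my/url"
assert og_image == '/web/image/blog.post/7/cover'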
Example #10
0
class Applicant(models.Model):
    _name = "hr.applicant"
    _description = "Applicant"
    _order = "priority desc, id desc"
    _inherit = ['mail.thread', 'mail.activity.mixin', 'utm.mixin']

    def _default_stage_id(self):
        if self._context.get('default_job_id'):
            ids = self.env['hr.recruitment.stage'].search([
                '|', ('job_id', '=', False),
                ('job_id', '=', self._context['default_job_id']),
                ('fold', '=', False)
            ],
                                                          order='sequence asc',
                                                          limit=1).ids
            if ids:
                return ids[0]
        return False

    def _default_company_id(self):
        company_id = False
        if self._context.get('default_department_id'):
            department = self.env['hr.department'].browse(
                self._context['default_department_id'])
            company_id = department.company_id.id
        if not company_id:
            company_id = self.env['res.company']._company_default_get(
                'hr.applicant')
        return company_id

    name = fields.Char("Subject / Application Name", required=True)
    active = fields.Boolean(
        "Active",
        default=True,
        help=
        "If the active field is set to false, it will allow you to hide the case without removing it."
    )
    description = fields.Text("Description")
    email_from = fields.Char("Email",
                             size=128,
                             help="These people will receive email.")
    email_cc = fields.Text(
        "Watchers Emails",
        size=252,
        help=
        "These email addresses will be added to the CC field of all inbound and outbound emails for this record before being sent. Separate multiple email addresses with a comma"
    )
    probability = fields.Float("Probability")
    partner_id = fields.Many2one('res.partner', "Contact")
    create_date = fields.Datetime("Creation Date", readonly=True, index=True)
    write_date = fields.Datetime("Update Date", readonly=True)
    stage_id = fields.Many2one(
        'hr.recruitment.stage',
        'Stage',
        track_visibility='onchange',
        domain="['|', ('job_id', '=', False), ('job_id', '=', job_id)]",
        copy=False,
        index=True,
        group_expand='_read_group_stage_ids',
        default=_default_stage_id)
    last_stage_id = fields.Many2one(
        'hr.recruitment.stage',
        "Last Stage",
        help=
        "Stage of the applicant before being in the current stage. Used for lost cases analysis."
    )
    categ_ids = fields.Many2many('hr.applicant.category', string="Tags")
    company_id = fields.Many2one('res.company',
                                 "Company",
                                 default=_default_company_id)
    user_id = fields.Many2one('res.users',
                              "Responsible",
                              track_visibility="onchange",
                              default=lambda self: self.env.uid)
    date_closed = fields.Datetime("Closed", readonly=True, index=True)
    date_open = fields.Datetime("Assigned", readonly=True, index=True)
    date_last_stage_update = fields.Datetime("Last Stage Update",
                                             index=True,
                                             default=fields.Datetime.now)
    priority = fields.Selection(AVAILABLE_PRIORITIES,
                                "Appreciation",
                                default='0')
    job_id = fields.Many2one('hr.job', "Applied Job")
    salary_proposed_extra = fields.Char(
        "Proposed Salary Extra",
        help="Salary Proposed by the Organisation, extra advantages")
    salary_expected_extra = fields.Char(
        "Expected Salary Extra",
        help="Salary Expected by Applicant, extra advantages")
    salary_proposed = fields.Float("Proposed Salary",
                                   group_operator="avg",
                                   help="Salary Proposed by the Organisation")
    salary_expected = fields.Float("Expected Salary",
                                   group_operator="avg",
                                   help="Salary Expected by Applicant")
    availability = fields.Date(
        "Availability",
        help=
        "The date at which the applicant will be available to start working")
    partner_name = fields.Char("Applicant's Name")
    partner_phone = fields.Char("Phone", size=32)
    partner_mobile = fields.Char("Mobile", size=32)
    type_id = fields.Many2one('hr.recruitment.degree', "Degree")
    department_id = fields.Many2one('hr.department', "Department")
    reference = fields.Char("Referred By")
    day_open = fields.Float(compute='_compute_day', string="Days to Open")
    day_close = fields.Float(compute='_compute_day', string="Days to Close")
    delay_close = fields.Float(compute="_compute_day",
                               string='Delay to Close',
                               readonly=True,
                               group_operator="avg",
                               help="Number of days to close",
                               store=True)
    color = fields.Integer("Color Index", default=0)
    emp_id = fields.Many2one('hr.employee',
                             string="Employee",
                             track_visibility="onchange",
                             help="Employee linked to the applicant.")
    user_email = fields.Char(related='user_id.email',
                             string="User Email",
                             readonly=True)
    attachment_number = fields.Integer(compute='_get_attachment_number',
                                       string="Number of Attachments")
    employee_name = fields.Char(related='emp_id.name', string="Employee Name")
    attachment_ids = fields.One2many('ir.attachment',
                                     'res_id',
                                     domain=[('res_model', '=', 'hr.applicant')
                                             ],
                                     string='Attachments')

    @api.depends('date_open', 'date_closed')
    @api.one
    def _compute_day(self):
        if self.date_open:
            date_create = datetime.strptime(
                self.create_date, tools.DEFAULT_SERVER_DATETIME_FORMAT)
            date_open = datetime.strptime(self.date_open,
                                          tools.DEFAULT_SERVER_DATETIME_FORMAT)
            self.day_open = (date_open - date_create).total_seconds() / (24.0 *
                                                                         3600)

        if self.date_closed:
            date_create = datetime.strptime(
                self.create_date, tools.DEFAULT_SERVER_DATETIME_FORMAT)
            date_closed = datetime.strptime(
                self.date_closed, tools.DEFAULT_SERVER_DATETIME_FORMAT)
            self.day_close = (date_closed -
                              date_create).total_seconds() / (24.0 * 3600)
            self.delay_close = self.day_close - self.day_open
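
    # Illustrative note (not part of the original module): day_open and
    # day_close are expressed in days. For example, an applicant created on
    # 2018-01-01 00:00:00 and assigned on 2018-01-02 12:00:00 gets
    # day_open = 1.5, and delay_close is simply day_close - day_open.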

    @api.multi
    def _get_attachment_number(self):
        read_group_res = self.env['ir.attachment'].read_group(
            [('res_model', '=', 'hr.applicant'),
             ('res_id', 'in', self.ids)], ['res_id'], ['res_id'])
        attach_data = dict(
            (res['res_id'], res['res_id_count']) for res in read_group_res)
        for record in self:
            record.attachment_number = attach_data.get(record.id, 0)

    @api.model
    def _read_group_stage_ids(self, stages, domain, order):
        # retrieve job_id from the context and write the domain: ids + contextual columns (job or default)
        job_id = self._context.get('default_job_id')
        search_domain = [('job_id', '=', False)]
        if job_id:
            search_domain = ['|', ('job_id', '=', job_id)] + search_domain
        if stages:
            search_domain = ['|', ('id', 'in', stages.ids)] + search_domain

        stage_ids = stages._search(search_domain,
                                   order=order,
                                   access_rights_uid=SUPERUSER_ID)
        return stages.browse(stage_ids)
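
    # Illustrative note (not part of the original code): with
    # default_job_id = 42 in the context and no stage ids to preserve, the
    # search domain built above is
    #     ['|', ('job_id', '=', 42), ('job_id', '=', False)]
    # i.e. job-specific stages plus the generic (job-less) ones, searched as
    # SUPERUSER_ID so every kanban column is returned regardless of the
    # current user's access rights.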

    @api.onchange('job_id')
    def onchange_job_id(self):
        vals = self._onchange_job_id_internal(self.job_id.id)
        self.department_id = vals['value']['department_id']
        self.user_id = vals['value']['user_id']
        self.stage_id = vals['value']['stage_id']

    def _onchange_job_id_internal(self, job_id):
        department_id = False
        user_id = False
        stage_id = self.stage_id.id
        if job_id:
            job = self.env['hr.job'].browse(job_id)
            department_id = job.department_id.id
            user_id = job.user_id.id
            if not self.stage_id:
                stage_ids = self.env['hr.recruitment.stage'].search(
                    [
                        '|', ('job_id', '=', False), ('job_id', '=', job.id),
                        ('fold', '=', False)
                    ],
                    order='sequence asc',
                    limit=1).ids
                stage_id = stage_ids[0] if stage_ids else False

        return {
            'value': {
                'department_id': department_id,
                'user_id': user_id,
                'stage_id': stage_id
            }
        }

    @api.onchange('partner_id')
    def onchange_partner_id(self):
        self.partner_phone = self.partner_id.phone
        self.partner_mobile = self.partner_id.mobile
        self.email_from = self.partner_id.email

    @api.onchange('stage_id')
    def onchange_stage_id(self):
        vals = self._onchange_stage_id_internal(self.stage_id.id)
        if vals['value'].get('date_closed'):
            self.date_closed = vals['value']['date_closed']

    def _onchange_stage_id_internal(self, stage_id):
        if not stage_id:
            return {'value': {}}
        stage = self.env['hr.recruitment.stage'].browse(stage_id)
        if stage.fold:
            return {'value': {'date_closed': fields.Datetime.now()}}
        return {'value': {'date_closed': False}}

    @api.model
    def create(self, vals):
        if vals.get('department_id'
                    ) and not self._context.get('default_department_id'):
            self = self.with_context(
                default_department_id=vals.get('department_id'))
        if vals.get('job_id') or self._context.get('default_job_id'):
            job_id = vals.get('job_id') or self._context.get('default_job_id')
            for key, value in self._onchange_job_id_internal(
                    job_id)['value'].items():
                if key not in vals:
                    vals[key] = value
        if vals.get('user_id'):
            vals['date_open'] = fields.Datetime.now()
        if 'stage_id' in vals:
            vals.update(
                self._onchange_stage_id_internal(
                    vals.get('stage_id'))['value'])
        return super(Applicant,
                     self.with_context(mail_create_nolog=True)).create(vals)

    @api.multi
    def write(self, vals):
        # user_id change: update date_open
        if vals.get('user_id'):
            vals['date_open'] = fields.Datetime.now()
        # stage_id: track last stage before update
        if 'stage_id' in vals:
            vals['date_last_stage_update'] = fields.Datetime.now()
            vals.update(
                self._onchange_stage_id_internal(
                    vals.get('stage_id'))['value'])
            for applicant in self:
                vals['last_stage_id'] = applicant.stage_id.id
                res = super(Applicant, applicant).write(vals)
        else:
            res = super(Applicant, self).write(vals)
        return res

    @api.model
    def get_empty_list_help(self, help):
        return super(
            Applicant,
            self.with_context(
                empty_list_help_model='hr.job',
                empty_list_help_id=self.env.context.get('default_job_id'),
                empty_list_help_document_name=_(
                    "job applicants"))).get_empty_list_help(help)

    @api.multi
    def action_get_created_employee(self):
        self.ensure_one()
        action = self.env['ir.actions.act_window'].for_xml_id(
            'hr', 'open_view_employee_list')
        action['res_id'] = self.mapped('emp_id').ids[0]
        return action

    @api.multi
    def action_makeMeeting(self):
        """ This opens Meeting's calendar view to schedule meeting on current applicant
            @return: Dictionary value for created Meeting view
        """
        self.ensure_one()
        partners = self.partner_id | self.user_id.partner_id | self.department_id.manager_id.user_id.partner_id

        category = self.env.ref('hr_recruitment.categ_meet_interview')
        res = self.env['ir.actions.act_window'].for_xml_id(
            'calendar', 'action_calendar_event')
        res['context'] = {
            'search_default_partner_ids': self.partner_id.name,
            'default_partner_ids': partners.ids,
            'default_user_id': self.env.uid,
            'default_name': self.name,
            'default_categ_ids': category and [category.id] or False,
        }
        return res

    @api.multi
    def action_get_attachment_tree_view(self):
        attachment_action = self.env.ref('base.action_attachment')
        action = attachment_action.read()[0]
        action['context'] = {
            'default_res_model': self._name,
            'default_res_id': self.ids[0]
        }
        action['domain'] = str(
            ['&', ('res_model', '=', self._name), ('res_id', 'in', self.ids)])
        action['search_view_id'] = (self.env.ref(
            'hr_recruitment.ir_attachment_view_search_inherit_hr_recruitment').
                                    id, )
        return action

    @api.multi
    def _track_template(self, tracking):
        res = super(Applicant, self)._track_template(tracking)
        applicant = self[0]
        changes, dummy = tracking[applicant.id]
        if 'stage_id' in changes and applicant.stage_id.template_id:
            res['stage_id'] = (applicant.stage_id.template_id, {
                'composition_mode': 'mass_mail'
            })
        return res

    @api.multi
    def _track_subtype(self, init_values):
        record = self[0]
        if 'emp_id' in init_values and record.emp_id and record.emp_id.active:
            return 'hr_recruitment.mt_applicant_hired'
        elif 'stage_id' in init_values and record.stage_id and record.stage_id.sequence <= 1:
            return 'hr_recruitment.mt_applicant_new'
        elif 'stage_id' in init_values and record.stage_id and record.stage_id.sequence > 1:
            return 'hr_recruitment.mt_applicant_stage_changed'
        return super(Applicant, self)._track_subtype(init_values)

    @api.model
    def message_get_reply_to(self, ids, default=None):
        """ Override to get the reply_to of the parent project. """
        applicants = self.sudo().browse(ids)
        aliases = self.env['hr.job'].message_get_reply_to(
            applicants.mapped('job_id').ids, default=default)
        return dict(
            (applicant.id,
             aliases.get(applicant.job_id and applicant.job_id.id or 0, False))
            for applicant in applicants)

    @api.multi
    def message_get_suggested_recipients(self):
        recipients = super(Applicant, self).message_get_suggested_recipients()
        for applicant in self:
            if applicant.partner_id:
                applicant._message_add_suggested_recipient(
                    recipients,
                    partner=applicant.partner_id,
                    reason=_('Contact'))
            elif applicant.email_from:
                applicant._message_add_suggested_recipient(
                    recipients,
                    email=applicant.email_from,
                    reason=_('Contact Email'))
        return recipients

    @api.model
    def message_new(self, msg, custom_values=None):
        """ Overrides mail_thread message_new that is called by the mailgateway
            through message_process.
            This override updates the document according to the email.
        """
        # remove default author when going through the mail gateway. Indeed we
        # do not want to explicitly set user_id to False; however we do not
        # want the gateway user to be responsible if no other responsible is
        # found.
        self = self.with_context(default_user_id=False)
        val = msg.get('from').split('<')[0]
        defaults = {
            'name': msg.get('subject') or _("No Subject"),
            'partner_name': val,
            'email_from': msg.get('from'),
            'email_cc': msg.get('cc'),
            'partner_id': msg.get('author_id', False),
        }
        if msg.get('priority'):
            defaults['priority'] = msg.get('priority')
        if custom_values:
            defaults.update(custom_values)
        return super(Applicant, self).message_new(msg, custom_values=defaults)
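
    # Hypothetical example (assumed values, not from the source): an email
    # with From: "Jane Doe <jane@example.com>" and Subject: "Developer role"
    # would yield roughly
    #     defaults = {'name': 'Developer role', 'partner_name': 'Jane Doe ',
    #                 'email_from': 'Jane Doe <jane@example.com>',
    #                 'email_cc': None, 'partner_id': False}
    # before custom_values are merged on top of it.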

    def _message_post_after_hook(self, message):
        if self.email_from and not self.partner_id:
            # We consider that posting a message with a specified recipient (not a follower, a specific one)
            # on a document without a customer means that it was created through the chatter using
            # suggested recipients. This heuristic lets us avoid ugly hacks in JS.
            new_partner = message.partner_ids.filtered(
                lambda partner: partner.email == self.email_from)
            if new_partner:
                self.search([('partner_id', '=', False),
                             ('email_from', '=', new_partner.email),
                             ('stage_id.fold', '=', False)
                             ]).write({'partner_id': new_partner.id})
        return super(Applicant, self)._message_post_after_hook(message)
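
    # Illustrative note (not part of the original code): if a chatter message
    # on an applicant without a partner explicitly targets a partner whose
    # email matches email_from, that partner is also propagated to every
    # other non-folded applicant sharing the same email and still missing a
    # partner_id.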

    @api.multi
    def create_employee_from_applicant(self):
        """ Create an hr.employee from the hr.applicants """
        employee = False
        for applicant in self:
            contact_name = False
            if applicant.partner_id:
                address_id = applicant.partner_id.address_get(['contact'
                                                               ])['contact']
                contact_name = applicant.partner_id.name_get()[0][1]
            else:
                new_partner_id = self.env['res.partner'].create({
                    'is_company':
                    False,
                    'name':
                    applicant.partner_name,
                    'email':
                    applicant.email_from,
                    'phone':
                    applicant.partner_phone,
                    'mobile':
                    applicant.partner_mobile
                })
                address_id = new_partner_id.address_get(['contact'])['contact']
            if applicant.job_id and (applicant.partner_name or contact_name):
                applicant.job_id.write({
                    'no_of_hired_employee':
                    applicant.job_id.no_of_hired_employee + 1
                })
                employee = self.env['hr.employee'].create({
                    'name':
                    applicant.partner_name or contact_name,
                    'job_id':
                    applicant.job_id.id,
                    'address_home_id':
                    address_id,
                    'department_id':
                    applicant.department_id.id or False,
                    'address_id':
                    applicant.company_id and applicant.company_id.partner_id
                    and applicant.company_id.partner_id.id or False,
                    'work_email':
                    applicant.department_id
                    and applicant.department_id.company_id
                    and applicant.department_id.company_id.email or False,
                    'work_phone':
                    applicant.department_id
                    and applicant.department_id.company_id
                    and applicant.department_id.company_id.phone or False
                })
                applicant.write({'emp_id': employee.id})
                applicant.job_id.message_post(
                    body=_('New Employee %s Hired') %
                    (applicant.partner_name or applicant.name),
                    subtype="hr_recruitment.mt_job_applicant_hired")
                employee._broadcast_welcome()
            else:
                raise UserError(
                    _('You must define an Applied Job and a Contact Name for this applicant.'
                      ))

        employee_action = self.env.ref('hr.open_view_employee_list')
        dict_act_window = employee_action.read([])[0]
        if employee:
            dict_act_window['res_id'] = employee.id
        dict_act_window['view_mode'] = 'form,tree'
        return dict_act_window

    @api.multi
    def archive_applicant(self):
        self.write({'active': False})

    @api.multi
    def reset_applicant(self):
        """ Reinsert the applicant into the recruitment pipe in the first stage"""
        default_stage_id = self._default_stage_id()
        self.write({'active': True, 'stage_id': default_stage_id})
Example #11
0
class AeatSiiResult(models.Model):
    _name = 'aeat.sii.result'

    TYPE = [('normal', 'Normal'), ('recc', 'RECC')]

    csv = fields.Char(string='CSV')
    vat_presenter = fields.Char(string='Vat Presenter')
    timestamp_presentation = fields.Datetime(string='Timestamp Presentation')
    id_version_sii = fields.Char(string='ID Version SII')
    name = fields.Char(string='Company Name')
    vat_agent = fields.Char(string='Vat Agent')
    vat = fields.Char(string='Vat')
    type_communication = fields.Char(string='Type Communication')
    sent_state = fields.Char(string='Sent State')
    vat_emitting = fields.Char(string='Vat Emitting')
    country_code = fields.Char(string='Country Code')
    type_id = fields.Char(string='Type ID')
    number_id = fields.Char(string='ID')
    serial_number = fields.Char(string='Serial number')
    serial_number_resume = fields.Char(string='Serial number end resume')
    date = fields.Date(string='Date Invoice')
    registry_state = fields.Char(string='Registry State')
    registry_error_code = fields.Char(string='Registry Error Code')
    registry_error_description = fields.Char(
        string='Registry Error Description')
    registry_csv = fields.Char(string='CSV')
    inv_type = fields.Selection(TYPE, 'Type')
    invoice_id = fields.Many2one(comodel_name='account.invoice',
                                 string='Invoice')

    _order = 'id desc'

    def _prepare_vals(self, model_id, res, inv_type, fault, model):
        vals = {
            'csv': False,
            'vat_presenter': False,
            'timestamp_presentation': False,
            'id_version_sii': False,
            'name': False,
            'vat_agent': False,
            'vat': False,
            'type_communication': False,
            'sent_state': False,
            'vat_emitting': False,
            'country_code': False,
            'type_id': False,
            'number_id': False,
            'serial_number': False,
            'serial_number_resume': False,
            'date': False,
            'registry_state': False,
            'registry_error_code': False,
            'registry_error_description': False,
            'registry_csv': False,
            'inv_type': inv_type,
        }
        if model == 'account.invoice':
            vals['invoice_id'] = model_id.id
        if fault:
            vals['registry_error_description'] = fault
        else:
            if 'CSV' in res:
                vals['csv'] = res['CSV']
            if 'DatosPresentacion' in res and res['DatosPresentacion']:
                if 'NIFPresentador' in res['DatosPresentacion']:
                    vals['vat_presenter'] = res['DatosPresentacion'][
                        'NIFPresentador']
                if 'TimestampPresentacion' in res['DatosPresentacion']:
                    date = datetime.datetime.strptime(
                        res['DatosPresentacion']['TimestampPresentacion'],
                        '%d-%m-%Y %H:%M:%S')
                    new_date = datetime.datetime.strftime(
                        date, '%Y-%m-%d %H:%M:%S')
                    vals['timestamp_presentation'] = new_date
            if 'Cabecera' in res:
                if 'IDVersionSii' in res['Cabecera']:
                    vals['id_version_sii'] = res['Cabecera']['IDVersionSii']
                if 'Titular' in res['Cabecera']:
                    if 'NombreRazon' in res['Cabecera']['Titular']:
                        vals['name'] = res['Cabecera']['Titular'][
                            'NombreRazon']
                    if 'NIFRepresentante' in res['Cabecera']['Titular']:
                        vals['vat_agent'] = res['Cabecera']['Titular'][
                            'NIFRepresentante']
                    if 'NIF' in res['Cabecera']['Titular']:
                        vals['vat'] = res['Cabecera']['Titular']['NIF']
                if 'TipoComunicacion' in res['Cabecera']:
                    vals['type_communication'] = res['Cabecera'][
                        'TipoComunicacion']
            if 'EstadoEnvio' in res:
                vals['sent_state'] = res['EstadoEnvio']
            if 'RespuestaLinea' in res:
                reply = res['RespuestaLinea'][0]
                if 'IDFactura' in reply:
                    if 'IDEmisorFactura' in reply['IDFactura']:
                        if 'NIF' in reply['IDFactura']['IDEmisorFactura']:
                            vals['vat_emitting'] = \
                                reply['IDFactura']['IDEmisorFactura']['NIF']
                        if 'IDOtro' in reply['IDFactura']['IDEmisorFactura']:
                            if reply['IDFactura']['IDEmisorFactura']['IDOtro']:
                                if 'CodigoPais' in reply['IDFactura'][
                                        'IDEmisorFactura']['IDOtro']:
                                    vals['country_code'] = reply['IDFactura'][
                                        'IDEmisorFactura']['IDOtro'][
                                            'CodigoPais']
                                if 'IDType' in reply['IDFactura'][
                                        'IDEmisorFactura']['IDOtro']:
                                    vals['type_id'] = reply['IDFactura'][
                                        'IDEmisorFactura']['IDOtro']['IDType']
                                if 'ID' in reply['IDFactura'][
                                        'IDEmisorFactura']['IDOtro']:
                                    vals['number_id'] = reply['IDFactura'][
                                        'IDEmisorFactura']['IDOtro']['ID']
                    if 'NumSerieFacturaEmisor' in reply['IDFactura']:
                        vals['serial_number'] = \
                            reply['IDFactura']['NumSerieFacturaEmisor']
                    if 'NumSerieFacturaEmisorResumenFin' in reply['IDFactura']:
                        vals['serial_number_resume'] = \
                            reply['IDFactura'][
                                'NumSerieFacturaEmisorResumenFin']
                    if 'FechaExpedicionFacturaEmisor' in reply['IDFactura']:
                        date = datetime.datetime.strptime(
                            reply['IDFactura']['FechaExpedicionFacturaEmisor'],
                            '%d-%m-%Y')
                        new_date = datetime.datetime.strftime(date, '%Y-%m-%d')
                        vals['date'] = new_date
                if 'EstadoRegistro' in reply:
                    vals['registry_state'] = reply['EstadoRegistro']
                if 'CodigoErrorRegistro' in reply:
                    vals['registry_error_code'] = reply['CodigoErrorRegistro']
                if 'DescripcionErrorRegistro' in reply:
                    vals['registry_error_description'] = \
                        reply['DescripcionErrorRegistro']
                if 'CSV' in reply:
                    vals['registry_csv'] = reply['CSV']
        return vals
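
    # Hypothetical response fragment (not real AEAT data) and the values it
    # would map to:
    #     res = {'CSV': 'ABC123', 'EstadoEnvio': 'Correcto',
    #            'RespuestaLinea': [{'EstadoRegistro': 'Correcto',
    #                                'CSV': 'ABC123'}]}
    # gives vals with csv='ABC123', sent_state='Correcto',
    # registry_state='Correcto' and registry_csv='ABC123'; every other key
    # keeps its False default, plus the inv_type passed in.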

    def create_result(self, model_id, res, inv_type, fault, model):
        vals = self._prepare_vals(model_id, res, inv_type, fault, model)
        self.create(vals)
Example #12
0
class MrpProductionRequest(models.Model):
    _name = "mrp.production.request"
    _description = "Manufacturing Request"
    _inherit = "mail.thread"
    _order = "date_planned_start desc, id desc"

    @api.model
    def _company_get(self):
        company_id = self.env['res.company']._company_default_get(self._name)
        return self.env['res.company'].browse(company_id.id)

    @api.model
    def _get_default_requested_by(self):
        return self.env.user

    name = fields.Char(default="/",
                       required=True,
                       readonly=True,
                       states={'draft': [('readonly', False)]})
    origin = fields.Char(string='Source Document',
                         readonly=True,
                         states={'draft': [('readonly', False)]})
    requested_by = fields.Many2one(comodel_name='res.users',
                                   string='Requested by',
                                   default=_get_default_requested_by,
                                   required=True,
                                   track_visibility='onchange',
                                   readonly=True,
                                   states={'draft': [('readonly', False)]})
    assigned_to = fields.Many2one(
        comodel_name='res.users',
        string='Approver',
        track_visibility='onchange',
        readonly=True,
        states={'draft': [('readonly', False)]},
        domain=lambda self: [('groups_id', 'in',
                              self.env.ref(
                                  'mrp_production_request.'
                                  'group_mrp_production_request_manager').id)])
    description = fields.Text('Description')
    date_planned_start = fields.Datetime(
        'Deadline Start',
        copy=False,
        default=fields.Datetime.now,
        index=True,
        required=True,
        states={'confirmed': [('readonly', False)]},
        oldname="date_planned")
    date_planned_finished = fields.Datetime(
        'Deadline End',
        copy=False,
        default=fields.Datetime.now,
        index=True,
        states={'confirmed': [('readonly', False)]})
    company_id = fields.Many2one(comodel_name='res.company',
                                 string='Company',
                                 required=True,
                                 default=_company_get)
    mrp_production_ids = fields.One2many(
        comodel_name="mrp.production",
        string="Manufacturing Orders",
        inverse_name="mrp_production_request_id",
        readonly=True)
    mrp_production_count = fields.Integer(
        compute="_compute_mrp_production_count",
        string="MO's Count",
    )
    state = fields.Selection(selection=[("draft", "Draft"),
                                        ("to_approve", "To Be Approved"),
                                        ("approved", "Approved"),
                                        ("done", "Done"),
                                        ("cancel", "Cancelled")],
                             index=True,
                             track_visibility='onchange',
                             required=True,
                             copy=False,
                             default='draft')
    procurement_group_id = fields.Many2one(string='Procurement Group',
                                           comodel_name='procurement.group',
                                           copy=False)
    propagate = fields.Boolean(
        'Propagate cancel and split',
        help='If checked, when the previous move of the move '
        '(which was generated by a next procurement) is cancelled '
        'or split, the move generated by this move will too')
    product_id = fields.Many2one(comodel_name="product.product",
                                 string="Product",
                                 required=True,
                                 domain=[('type', 'in', ['product', 'consu'])],
                                 track_visibility="onchange",
                                 readonly=True,
                                 states={'draft': [('readonly', False)]})
    product_tmpl_id = fields.Many2one(comodel_name='product.template',
                                      string='Product Template',
                                      related='product_id.product_tmpl_id')
    product_qty = fields.Float(
        string="Required Quantity",
        required=True,
        track_visibility='onchange',
        digits=dp.get_precision('Product Unit of Measure'),
        default=1.0,
        readonly=True,
        states={'draft': [('readonly', False)]})
    product_uom_id = fields.Many2one(
        comodel_name='product.uom',
        string='Unit of Measure',
        readonly=True,
        states={'draft': [('readonly', False)]},
        domain="[('category_id', '=', category_uom_id)]")
    category_uom_id = fields.Many2one(related="product_uom_id.category_id")
    manufactured_qty = fields.Float(
        string="Quantity in Manufacturing Orders",
        compute="_compute_manufactured_qty",
        store=True,
        readonly=True,
        digits=dp.get_precision('Product Unit of Measure'),
        help="Sum of the quantities in Manufacturing Orders (in any state).")
    done_qty = fields.Float(
        string="Quantity Done",
        store=True,
        readonly=True,
        compute="_compute_manufactured_qty",
        digits=dp.get_precision('Product Unit of Measure'),
        help="Sum of the quantities in all done Manufacturing Orders.")
    pending_qty = fields.Float(
        string="Pending Quantity",
        compute="_compute_manufactured_qty",
        store=True,
        digits=dp.get_precision('Product Unit of Measure'),
        readonly=True,
        help="Quantity pending to add to Manufacturing Orders "
        "to fulfill the Manufacturing Request requirement.")
    bom_id = fields.Many2one(comodel_name="mrp.bom",
                             string="Bill of Materials",
                             required=True,
                             readonly=True,
                             states={'draft': [('readonly', False)]})
    routing_id = fields.Many2one(
        comodel_name='mrp.routing',
        string='Routing',
        ondelete='set null',
        readonly=True,
        states={'draft': [('readonly', False)]},
        help="The list of operations (list of work centers) to produce "
        "the finished product. The routing is mainly used to compute "
        "work center costs during operations and to plan future loads "
        "on work centers based on production plannification.")
    location_src_id = fields.Many2one(
        comodel_name='stock.location',
        string='Raw Materials Location',
        default=lambda self: self.env['stock.location'].browse(self.env[
            'mrp.production']._get_default_location_src_id()),
        required=True,
        readonly=True,
        states={'draft': [('readonly', False)]})
    location_dest_id = fields.Many2one(
        comodel_name='stock.location',
        string='Finished Products Location',
        default=lambda self: self.env['stock.location'].browse(self.env[
            'mrp.production']._get_default_location_dest_id()),
        required=True,
        readonly=True,
        states={'draft': [('readonly', False)]})
    picking_type_id = fields.Many2one(
        comodel_name='stock.picking.type',
        string='Picking Type',
        default=lambda self: self.env['stock.picking.type'].browse(self.env[
            'mrp.production']._get_default_picking_type()),
        required=True,
        readonly=True,
        states={'draft': [('readonly', False)]})
    move_dest_ids = fields.One2many(
        comodel_name='stock.move',
        inverse_name='created_mrp_production_request_id',
        string="Stock Movements of Produced Goods")
    orderpoint_id = fields.Many2one(comodel_name='stock.warehouse.orderpoint',
                                    string='Orderpoint')

    _sql_constraints = [
        ('name_uniq', 'unique(name, company_id)',
         'Reference must be unique per Company!'),
    ]

    @api.model
    def _get_mo_valid_states(self):
        return ['planned', 'confirmed', 'progress', 'done']

    @api.multi
    @api.depends('mrp_production_ids', 'mrp_production_ids.state', 'state')
    def _compute_manufactured_qty(self):
        valid_states = self._get_mo_valid_states()
        for req in self:
            done_mo = req.mrp_production_ids.filtered(
                lambda mo: mo.state == 'done').mapped('product_qty')
            req.done_qty = sum(done_mo)
            valid_mo = req.mrp_production_ids.filtered(
                lambda mo: mo.state in valid_states).mapped('product_qty')
            req.manufactured_qty = sum(valid_mo)
            req.pending_qty = max(req.product_qty - req.manufactured_qty, 0.0)
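
    # Worked example (illustrative figures only): a request for 10 units with
    # one MO of 4 units done and another of 3 units in progress gives
    # done_qty = 4, manufactured_qty = 7 and pending_qty = max(10 - 7, 0) = 3.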

    @api.multi
    def _compute_mrp_production_count(self):
        for rec in self:
            rec.mrp_production_count = len(rec.mrp_production_ids)

    @api.onchange('product_id')
    def _onchange_product_id(self):
        self.product_uom_id = self.product_id.uom_id
        self.bom_id = self.env['mrp.bom']._bom_find(
            product=self.product_id,
            company_id=self.company_id.id,
            picking_type=self.picking_type_id)

    @api.multi
    def _subscribe_assigned_user(self, vals):
        self.ensure_one()
        if vals.get('assigned_to'):
            self.message_subscribe_users(user_ids=[self.assigned_to.id])

    @api.model
    def _create_sequence(self, vals):
        if not vals.get('name') or vals.get('name') == '/':
            vals['name'] = self.env['ir.sequence'].next_by_code(
                'mrp.production.request') or '/'
        return vals

    @api.model
    def create(self, vals):
        """Add sequence if name is not defined and subscribe to the thread
        the user assigned to the request."""
        vals = self._create_sequence(vals)
        res = super(MrpProductionRequest, self).create(vals)
        res._subscribe_assigned_user(vals)
        return res
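
    # Sketch of the intended usage (example values are assumptions):
    #     self.env['mrp.production.request'].create({
    #         'product_id': product.id,
    #         'product_qty': 5.0,
    #         'bom_id': bom.id,
    #         'assigned_to': approver.id,
    #     })
    # leaves `name` at its '/' default, so _create_sequence fills it from the
    # 'mrp.production.request' ir.sequence, and the approver is subscribed to
    # the record's mail thread.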

    @api.multi
    def write(self, vals):
        res = super(MrpProductionRequest, self).write(vals)
        for request in self:
            request._subscribe_assigned_user(vals)
        return res

    @api.multi
    def button_to_approve(self):
        self.write({'state': 'to_approve'})
        return True

    @api.multi
    def button_approved(self):
        self.write({'state': 'approved'})
        return True

    @api.multi
    def button_done(self):
        self.write({'state': 'done'})
        return True

    @api.multi
    def _check_reset_allowed(self):
        if any([
                s in self._get_mo_valid_states()
                for s in self.mapped('mrp_production_ids.state')
        ]):
            raise UserError(
                _("You cannot reset a manufacturing request if the related "
                  "manufacturing orders are not cancelled."))

    @api.multi
    def button_draft(self):
        self._check_reset_allowed()
        self.write({'state': 'draft'})
        return True

    @api.multi
    def _check_cancel_allowed(self):
        if any([s == 'done' for s in self.mapped('state')]):
            raise UserError(
                _('You cannot reject a manufacturing request related to '
                  'done procurement orders.'))

    @api.multi
    def button_cancel(self):
        self._check_cancel_allowed()
        self.write({'state': 'cancel'})
        self.mapped('move_dest_ids').filtered(
            lambda r: r.state != 'cancel')._action_cancel()
        return True

    @api.multi
    def action_view_mrp_productions(self):
        action = self.env.ref('mrp.mrp_production_action')
        result = action.read()[0]
        result['context'] = {}
        mos = self.mapped('mrp_production_ids')
        # choose the view_mode accordingly
        if len(mos) != 1:
            result['domain'] = [('id', 'in', mos.ids)]
        elif len(mos) == 1:
            form = self.env.ref('mrp.mrp_production_form_view', False)
            result['views'] = [(form and form.id or False, 'form')]
            result['res_id'] = mos[0].id
        return result
Example #13
0
class MrpWorkorder(models.Model):
    _name = 'mrp.workorder'
    _description = 'Work Order'
    _inherit = ['mail.thread']

    name = fields.Char('Work Order',
                       required=True,
                       states={
                           'done': [('readonly', True)],
                           'cancel': [('readonly', True)]
                       })

    workcenter_id = fields.Many2one('mrp.workcenter',
                                    'Work Center',
                                    required=True,
                                    states={
                                        'done': [('readonly', True)],
                                        'cancel': [('readonly', True)]
                                    })
    working_state = fields.Selection(string='Workcenter Status',
                                     related='workcenter_id.working_state',
                                     help='Technical: used in views only')

    production_id = fields.Many2one('mrp.production',
                                    'Manufacturing Order',
                                    index=True,
                                    ondelete='cascade',
                                    required=True,
                                    track_visibility='onchange',
                                    states={
                                        'done': [('readonly', True)],
                                        'cancel': [('readonly', True)]
                                    })
    product_id = fields.Many2one('product.product',
                                 'Product',
                                 related='production_id.product_id',
                                 readonly=True,
                                 help='Technical: used in views only.',
                                 store=True)
    product_uom_id = fields.Many2one('product.uom',
                                     'Unit of Measure',
                                     related='production_id.product_uom_id',
                                     readonly=True,
                                     help='Technical: used in views only.')
    production_availability = fields.Selection(
        string='Stock Availability',
        readonly=True,
        related='production_id.availability',
        store=True,
        help='Technical: used in views and domains only.')
    production_state = fields.Selection(string='Production State',
                                        readonly=True,
                                        related='production_id.state',
                                        help='Technical: used in views only.')
    product_tracking = fields.Selection(
        string='Product Tracking',
        related='production_id.product_id.tracking',
        help='Technical: used in views only.')
    qty_production = fields.Float('Original Production Quantity',
                                  readonly=True,
                                  related='production_id.product_qty')
    qty_remaining = fields.Float(
        'Quantity To Be Produced',
        compute='_compute_qty_remaining',
        digits=dp.get_precision('Product Unit of Measure'))
    qty_produced = fields.Float(
        'Quantity',
        default=0.0,
        readonly=True,
        digits=dp.get_precision('Product Unit of Measure'),
        help="The number of products already handled by this work order")
    qty_producing = fields.Float(
        'Currently Produced Quantity',
        default=1.0,
        digits=dp.get_precision('Product Unit of Measure'),
        states={
            'done': [('readonly', True)],
            'cancel': [('readonly', True)]
        })
    is_produced = fields.Boolean(string="Has Been Produced",
                                 compute='_compute_is_produced')

    state = fields.Selection([('pending', 'Pending'), ('ready', 'Ready'),
                              ('progress', 'In Progress'),
                              ('done', 'Finished'), ('cancel', 'Cancelled')],
                             string='Status',
                             default='pending')
    date_planned_start = fields.Datetime('Scheduled Date Start',
                                         states={
                                             'done': [('readonly', True)],
                                             'cancel': [('readonly', True)]
                                         })
    date_planned_finished = fields.Datetime('Scheduled Date Finished',
                                            states={
                                                'done': [('readonly', True)],
                                                'cancel': [('readonly', True)]
                                            })
    date_start = fields.Datetime('Effective Start Date',
                                 states={
                                     'done': [('readonly', True)],
                                     'cancel': [('readonly', True)]
                                 })
    date_finished = fields.Datetime('Effective End Date',
                                    states={
                                        'done': [('readonly', True)],
                                        'cancel': [('readonly', True)]
                                    })

    duration_expected = fields.Float('Expected Duration',
                                     digits=(16, 2),
                                     states={
                                         'done': [('readonly', True)],
                                         'cancel': [('readonly', True)]
                                     },
                                     help="Expected duration (in minutes)")
    duration = fields.Float('Real Duration',
                            compute='_compute_duration',
                            readonly=True,
                            store=True)
    duration_unit = fields.Float('Duration Per Unit',
                                 compute='_compute_duration',
                                 readonly=True,
                                 store=True)
    duration_percent = fields.Integer('Duration Deviation (%)',
                                      compute='_compute_duration',
                                      group_operator="avg",
                                      readonly=True,
                                      store=True)

    operation_id = fields.Many2one(
        'mrp.routing.workcenter', 'Operation'
    )  # Should be used differently as BoM can change in the meantime
    worksheet = fields.Binary('Worksheet',
                              related='operation_id.worksheet',
                              readonly=True)
    move_raw_ids = fields.One2many('stock.move', 'workorder_id', 'Moves')
    move_line_ids = fields.One2many(
        'stock.move.line',
        'workorder_id',
        'Moves to Track',
        domain=[('done_wo', '=', True)],
        help=
        "Inventory moves for which you must scan a lot number at this work order"
    )
    active_move_line_ids = fields.One2many('stock.move.line',
                                           'workorder_id',
                                           domain=[('done_wo', '=', False)])
    final_lot_id = fields.Many2one('stock.production.lot',
                                   'Lot/Serial Number',
                                   domain="[('product_id', '=', product_id)]",
                                   states={
                                       'done': [('readonly', True)],
                                       'cancel': [('readonly', True)]
                                   })
    tracking = fields.Selection(related='production_id.product_id.tracking')
    time_ids = fields.One2many('mrp.workcenter.productivity', 'workorder_id')
    is_user_working = fields.Boolean(
        'Is the Current User Working',
        compute='_compute_is_user_working',
        help="Technical field indicating whether the current user is working. "
    )
    production_messages = fields.Html('Workorder Message',
                                      compute='_compute_production_messages')

    next_work_order_id = fields.Many2one('mrp.workorder', "Next Work Order")
    scrap_ids = fields.One2many('stock.scrap', 'workorder_id')
    scrap_count = fields.Integer(compute='_compute_scrap_move_count',
                                 string='Scrap Move')
    production_date = fields.Datetime(
        'Production Date',
        related='production_id.date_planned_start',
        store=True)
    color = fields.Integer('Color', compute='_compute_color')
    capacity = fields.Float(
        'Capacity',
        default=1.0,
        help="Number of pieces that can be produced in parallel.")

    @api.multi
    def name_get(self):
        return [(wo.id, "%s - %s - %s" %
                 (wo.production_id.name, wo.product_id.name, wo.name))
                for wo in self]

    @api.one
    @api.depends('production_id.product_qty', 'qty_produced')
    def _compute_is_produced(self):
        rounding = self.production_id.product_uom_id.rounding
        self.is_produced = float_compare(self.qty_produced,
                                         self.production_id.product_qty,
                                         precision_rounding=rounding) >= 0

    @api.one
    @api.depends('time_ids.duration', 'qty_produced')
    def _compute_duration(self):
        self.duration = sum(self.time_ids.mapped('duration'))
        self.duration_unit = round(self.duration / max(self.qty_produced, 1),
                                   2)  # rounding 2 because it is a time
        if self.duration_expected:
            self.duration_percent = 100 * (
                self.duration_expected -
                self.duration) / self.duration_expected
        else:
            self.duration_percent = 0
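
    # Illustrative arithmetic (not part of the original code): 90 real
    # minutes against an expected 120 gives
    # duration_percent = 100 * (120 - 90) / 120 = 25, i.e. 25% faster than
    # planned; a negative value would mean the work order ran late.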

    def _compute_is_user_working(self):
        """ Checks whether the current user is working """
        for order in self:
            if order.time_ids.filtered(
                    lambda x: (x.user_id.id == self.env.user.id) and
                (not x.date_end) and (x.loss_type in
                                      ('productive', 'performance'))):
                order.is_user_working = True
            else:
                order.is_user_working = False

    @api.depends('production_id', 'workcenter_id', 'production_id.bom_id')
    def _compute_production_messages(self):
        ProductionMessage = self.env['mrp.message']
        for workorder in self:
            domain = [('valid_until', '>=', fields.Date.today()), '|',
                      ('workcenter_id', '=', False),
                      ('workcenter_id', '=', workorder.workcenter_id.id),
                      '|', '|', '|',
                      ('product_id', '=', workorder.product_id.id), '&',
                      ('product_id', '=', False),
                      ('product_tmpl_id', '=',
                       workorder.product_id.product_tmpl_id.id),
                      ('bom_id', '=', workorder.production_id.bom_id.id),
                      ('routing_id', '=',
                       workorder.operation_id.routing_id.id)]
            messages = ProductionMessage.search(domain).mapped('message')
            workorder.production_messages = "<br/>".join(messages) or False

    @api.multi
    def _compute_scrap_move_count(self):
        data = self.env['stock.scrap'].read_group(
            [('workorder_id', 'in', self.ids)], ['workorder_id'],
            ['workorder_id'])
        count_data = dict((item['workorder_id'][0], item['workorder_id_count'])
                          for item in data)
        for workorder in self:
            workorder.scrap_count = count_data.get(workorder.id, 0)

    @api.multi
    @api.depends('date_planned_finished',
                 'production_id.date_planned_finished')
    def _compute_color(self):
        late_orders = self.filtered(
            lambda x: x.production_id.date_planned_finished and x.
            date_planned_finished > x.production_id.date_planned_finished)
        for order in late_orders:
            order.color = 4
        for order in (self - late_orders):
            order.color = 2

    @api.onchange('qty_producing')
    def _onchange_qty_producing(self):
        """ Update stock.move.lot records, according to the new qty currently
        produced. """
        moves = self.move_raw_ids.filtered(
            lambda move: move.state not in
            ('done', 'cancel') and move.product_id.tracking != 'none' and move.
            product_id.id != self.production_id.product_id.id)
        for move in moves:
            move_lots = self.active_move_line_ids.filtered(
                lambda move_lot: move_lot.move_id == move)
            if not move_lots:
                continue
            rounding = move.product_uom.rounding
            new_qty = float_round(move.unit_factor * self.qty_producing,
                                  precision_rounding=rounding)
            if move.product_id.tracking == 'lot':
                move_lots[0].product_qty = new_qty
                move_lots[0].qty_done = new_qty
            elif move.product_id.tracking == 'serial':
                # Create extra pseudo record
                qty_todo = float_round(new_qty -
                                       sum(move_lots.mapped('qty_done')),
                                       precision_rounding=rounding)
                if float_compare(qty_todo, 0.0,
                                 precision_rounding=rounding) > 0:
                    while float_compare(
                            qty_todo, 0.0, precision_rounding=rounding) > 0:
                        self.active_move_line_ids += self.env[
                            'stock.move.line'].new({
                                'move_id':
                                move.id,
                                'product_id':
                                move.product_id.id,
                                'lot_id':
                                False,
                                'product_uom_qty':
                                0.0,
                                'product_uom_id':
                                move.product_uom.id,
                                'qty_done':
                                min(1.0, qty_todo),
                                'workorder_id':
                                self.id,
                                'done_wo':
                                False,
                                'location_id':
                                move.location_id.id,
                                'location_dest_id':
                                move.location_dest_id.id,
                            })
                        qty_todo -= 1
                elif float_compare(qty_todo, 0.0,
                                   precision_rounding=rounding) < 0:
                    qty_todo = abs(qty_todo)
                    for move_lot in move_lots:
                        if float_compare(
                                qty_todo, 0, precision_rounding=rounding) <= 0:
                            break
                        if not move_lot.lot_id and float_compare(
                                qty_todo,
                                move_lot.qty_done,
                                precision_rounding=rounding) >= 0:
                            qty_todo = float_round(qty_todo -
                                                   move_lot.qty_done,
                                                   precision_rounding=rounding)
                            self.active_move_line_ids -= move_lot  # Difference operator
                        else:
                            #move_lot.product_qty = move_lot.product_qty - qty_todo
                            if float_compare(move_lot.qty_done - qty_todo,
                                             0,
                                             precision_rounding=rounding) == 1:
                                move_lot.qty_done = move_lot.qty_done - qty_todo
                            else:
                                move_lot.qty_done = 0
                            qty_todo = 0
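
    # Illustrative note (example figures, assumptions only): for a raw move
    # with unit_factor = 2 and qty_producing = 3, new_qty is 6. A lot-tracked
    # component gets its first line set to qty_done = 6, while a
    # serial-tracked one ends up with pseudo move lines of qty_done = 1 each,
    # created or trimmed until their total matches the new quantity.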

    @api.multi
    def write(self, values):
        if ('date_planned_start' in values or 'date_planned_finished'
                in values) and any(workorder.state == 'done'
                                   for workorder in self):
            raise UserError(_('You can not change the finished work order.'))
        return super(MrpWorkorder, self).write(values)

    def _generate_lot_ids(self):
        """ Generate stock move lines """
        self.ensure_one()
        MoveLine = self.env['stock.move.line']
        tracked_moves = self.move_raw_ids.filtered(
            lambda move: move.state not in
            ('done', 'cancel') and move.product_id.tracking != 'none' and move.
            product_id != self.production_id.product_id and move.bom_line_id)
        for move in tracked_moves:
            qty = move.unit_factor * self.qty_producing
            if move.product_id.tracking == 'serial':
                while float_compare(
                        qty, 0.0,
                        precision_rounding=move.product_uom.rounding) > 0:
                    MoveLine.create({
                        'move_id':
                        move.id,
                        'product_uom_qty':
                        0,
                        'product_uom_id':
                        move.product_uom.id,
                        'qty_done':
                        min(1, qty),
                        'production_id':
                        self.production_id.id,
                        'workorder_id':
                        self.id,
                        'product_id':
                        move.product_id.id,
                        'done_wo':
                        False,
                        'location_id':
                        move.location_id.id,
                        'location_dest_id':
                        move.location_dest_id.id,
                    })
                    qty -= 1
            else:
                MoveLine.create({
                    'move_id': move.id,
                    'product_uom_qty': 0,
                    'product_uom_id': move.product_uom.id,
                    'qty_done': qty,
                    'product_id': move.product_id.id,
                    'production_id': self.production_id.id,
                    'workorder_id': self.id,
                    'done_wo': False,
                    'location_id': move.location_id.id,
                    'location_dest_id': move.location_dest_id.id,
                })

    def _assign_default_final_lot_id(self):
        self.final_lot_id = self.env['stock.production.lot'].search(
            [('use_next_on_work_order_id', '=', self.id)],
            order='create_date, id',
            limit=1)

    @api.multi
    def record_production(self):
        self.ensure_one()
        if self.qty_producing <= 0:
            raise UserError(
                _('Please set the quantity you are currently producing. It should be different from zero.'
                  ))

        if (self.production_id.product_id.tracking !=
                'none') and not self.final_lot_id and self.move_raw_ids:
            raise UserError(
                _('You should provide a lot/serial number for the final product'
                  ))

        # Update quantities done on each raw material line
        # For each untracked component without any 'temporary' move lines,
        # (the new workorder tablet view allows registering consumed quantities for untracked components)
        # we assume that only the theoretical quantity was used
        for move in self.move_raw_ids:
            if (move.has_tracking == 'none' and move.state not in ('done', 'cancel')
                    and move.bom_line_id and move.unit_factor
                    and not move.move_line_ids.filtered(lambda ml: not ml.done_wo)):
                rounding = move.product_uom.rounding
                if self.product_id.tracking != 'none':
                    qty_to_add = float_round(self.qty_producing *
                                             move.unit_factor,
                                             precision_rounding=rounding)
                    move._generate_consumed_move_line(qty_to_add,
                                                      self.final_lot_id)
                else:
                    move.quantity_done += float_round(
                        self.qty_producing * move.unit_factor,
                        precision_rounding=rounding)

        # Transfer quantities from temporary to final move lots or make them final
        for move_line in self.active_move_line_ids:
            # Check if move_line already exists
            if move_line.qty_done <= 0:  # rounding...
                move_line.sudo().unlink()
                continue
            if move_line.product_id.tracking != 'none' and not move_line.lot_id:
                raise UserError(
                    _('You should provide a lot/serial number for a component')
                )
            # Search other move_line where it could be added:
            lots = self.move_line_ids.filtered(
                lambda x: (x.lot_id.id == move_line.lot_id.id) and
                (not x.lot_produced_id) and (not x.done_move) and
                (x.product_id == move_line.product_id))
            if lots:
                lots[0].qty_done += move_line.qty_done
                lots[0].lot_produced_id = self.final_lot_id.id
                move_line.sudo().unlink()
            else:
                move_line.lot_produced_id = self.final_lot_id.id
                move_line.done_wo = True

        # Once a piece is produced, you can launch the next work order
        if self.next_work_order_id.state == 'pending':
            self.next_work_order_id.state = 'ready'

        self.move_line_ids.filtered(
            lambda move_line: not move_line.done_move and
            not move_line.lot_produced_id and move_line.qty_done > 0
        ).write({
            'lot_produced_id': self.final_lot_id.id,
            'lot_produced_qty': self.qty_producing,
        })

        # If this is the last work order, post the lots used
        # TODO: should this be the same as checking whether something has been done for every workorder?
        if not self.next_work_order_id:
            production_move = self.production_id.move_finished_ids.filtered(
                lambda x: x.product_id.id == self.production_id.product_id.id
                and x.state not in ('done', 'cancel'))
            if production_move.has_tracking != 'none':
                move_line = production_move.move_line_ids.filtered(
                    lambda x: x.lot_id.id == self.final_lot_id.id)
                if move_line:
                    move_line.product_uom_qty += self.qty_producing
                else:
                    move_line.create({
                        'move_id': production_move.id,
                        'product_id': production_move.product_id.id,
                        'lot_id': self.final_lot_id.id,
                        'product_uom_qty': self.qty_producing,
                        'product_uom_id': production_move.product_uom.id,
                        'qty_done': self.qty_producing,
                        'workorder_id': self.id,
                        'location_id': production_move.location_id.id,
                        'location_dest_id': production_move.location_dest_id.id,
                    })
            else:
                production_move.quantity_done += self.qty_producing

        if not self.next_work_order_id:
            for by_product_move in self.production_id.move_finished_ids.filtered(
                    lambda x: x.product_id.id != self.production_id.product_id.id
                    and x.state not in ('done', 'cancel')):
                if by_product_move.has_tracking == 'none':
                    by_product_move.quantity_done += self.qty_producing * by_product_move.unit_factor

        # Update workorder quantity produced
        self.qty_produced += self.qty_producing

        if self.final_lot_id:
            self.final_lot_id.use_next_on_work_order_id = self.next_work_order_id
            self.final_lot_id = False

        # Set the quantity to produce next
        rounding = self.production_id.product_uom_id.rounding
        if float_compare(self.qty_produced,
                         self.production_id.product_qty,
                         precision_rounding=rounding) >= 0:
            self.qty_producing = 0
        elif self.production_id.product_id.tracking == 'serial':
            self._assign_default_final_lot_id()
            self.qty_producing = 1.0
            self._generate_lot_ids()
        else:
            self.qty_producing = float_round(self.production_id.product_qty -
                                             self.qty_produced,
                                             precision_rounding=rounding)
            self._generate_lot_ids()

        if self.next_work_order_id and self.production_id.product_id.tracking != 'none':
            self.next_work_order_id._assign_default_final_lot_id()

        if float_compare(self.qty_produced,
                         self.production_id.product_qty,
                         precision_rounding=rounding) >= 0:
            self.button_finish()
        return True

    @api.multi
    def button_start(self):
        # TDE CLEANME
        timeline = self.env['mrp.workcenter.productivity']
        if self.duration < self.duration_expected:
            loss_id = self.env['mrp.workcenter.productivity.loss'].search(
                [('loss_type', '=', 'productive')], limit=1)
            if not len(loss_id):
                raise UserError(
                    _("You need to define at least one productivity loss in the category 'Productivity'. Create one from the Manufacturing app, menu: Configuration / Productivity Losses."
                      ))
        else:
            loss_id = self.env['mrp.workcenter.productivity.loss'].search(
                [('loss_type', '=', 'performance')], limit=1)
            if not len(loss_id):
                raise UserError(
                    _("You need to define at least one productivity loss in the category 'Performance'. Create one from the Manufacturing app, menu: Configuration / Productivity Losses."
                      ))
        for workorder in self:
            if workorder.production_id.state != 'progress':
                workorder.production_id.write({
                    'state': 'progress',
                    'date_start': datetime.now(),
                })
            timeline.create({
                'workorder_id': workorder.id,
                'workcenter_id': workorder.workcenter_id.id,
                'description': _('Time Tracking: ') + self.env.user.name,
                'loss_id': loss_id[0].id,
                'date_start': datetime.now(),
                'user_id': self.env.user.id,
            })
        return self.write({
            'state': 'progress',
            'date_start': datetime.now(),
        })

    @api.multi
    def button_finish(self):
        self.ensure_one()
        self.end_all()
        return self.write({
            'state': 'done',
            'date_finished': fields.Datetime.now()
        })

    @api.multi
    def end_previous(self, doall=False):
        """
        :param doall: when True, close all open time lines on the work orders;
            otherwise close only the open time line of the current user.
        """
        # TDE CLEANME
        timeline_obj = self.env['mrp.workcenter.productivity']
        domain = [('workorder_id', 'in', self.ids), ('date_end', '=', False)]
        if not doall:
            domain.append(('user_id', '=', self.env.user.id))
        not_productive_timelines = timeline_obj.browse()
        for timeline in timeline_obj.search(domain,
                                            limit=None if doall else 1):
            wo = timeline.workorder_id
            if wo.duration_expected <= wo.duration:
                if timeline.loss_type == 'productive':
                    not_productive_timelines += timeline
                timeline.write({'date_end': fields.Datetime.now()})
            else:
                maxdate = fields.Datetime.from_string(
                    timeline.date_start) + relativedelta(
                        minutes=wo.duration_expected - wo.duration)
                enddate = datetime.now()
                if maxdate > enddate:
                    timeline.write({'date_end': enddate})
                else:
                    timeline.write({'date_end': maxdate})
                    not_productive_timelines += timeline.copy({
                        'date_start': maxdate,
                        'date_end': enddate,
                    })
        if not_productive_timelines:
            loss_id = self.env['mrp.workcenter.productivity.loss'].search(
                [('loss_type', '=', 'performance')], limit=1)
            if not len(loss_id):
                raise UserError(
                    _("You need to define at least one unactive productivity loss in the category 'Performance'. Create one from the Manufacturing app, menu: Configuration / Productivity Losses."
                      ))
            not_productive_timelines.write({'loss_id': loss_id.id})
        return True
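
    # Worked sketch of the splitting above (hypothetical values, not from the source): a time
    # line started at 10:00 on a work order with duration_expected 60 and duration 20 gives
    # maxdate = 10:00 + 40 min = 10:40. Stopping at 10:30 simply closes the line at 10:30;
    # stopping at 11:00 closes it at 10:40 and copies the 10:40-11:00 remainder, which is then
    # reclassified under the first 'performance' productivity loss found.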

    @api.multi
    def end_all(self):
        return self.end_previous(doall=True)

    @api.multi
    def button_pending(self):
        self.end_previous()
        return True

    @api.multi
    def button_unblock(self):
        for order in self:
            order.workcenter_id.unblock()
        return True

    @api.multi
    def action_cancel(self):
        return self.write({'state': 'cancel'})

    @api.multi
    def button_done(self):
        if any(x.state in ('done', 'cancel') for x in self):
            raise UserError(
                _('A work order is already done or cancelled!'))
        self.end_all()
        return self.write({'state': 'done', 'date_finished': datetime.now()})

    @api.multi
    def button_scrap(self):
        self.ensure_one()
        return {
            'name': _('Scrap'),
            'view_type': 'form',
            'view_mode': 'form',
            'res_model': 'stock.scrap',
            'view_id': self.env.ref('stock.stock_scrap_form_view2').id,
            'type': 'ir.actions.act_window',
            'context': {
                'default_workorder_id': self.id,
                'default_production_id': self.production_id.id,
                'product_ids': (
                    self.production_id.move_raw_ids.filtered(
                        lambda x: x.state not in ('done', 'cancel'))
                    | self.production_id.move_finished_ids.filtered(
                        lambda x: x.state == 'done')
                ).mapped('product_id').ids,
            },
            # 'context': {'product_ids': self.move_raw_ids.filtered(lambda x: x.state not in ('done', 'cancel')).mapped('product_id').ids + [self.production_id.product_id.id]},
            'target': 'new',
        }

    @api.multi
    def action_see_move_scrap(self):
        self.ensure_one()
        action = self.env.ref('stock.action_stock_scrap').read()[0]
        action['domain'] = [('workorder_id', '=', self.id)]
        return action

    @api.multi
    @api.depends('qty_production', 'qty_produced')
    def _compute_qty_remaining(self):
        for wo in self:
            wo.qty_remaining = float_round(
                wo.qty_production - wo.qty_produced,
                precision_rounding=wo.production_id.product_uom_id.rounding)
Example #14
0
class SaleOrder(models.Model):
    """Add several date fields to Sales Orders, computed or user-entered"""
    _inherit = 'sale.order'

    commitment_date = fields.Datetime(
        compute='_compute_commitment_date',
        string='Commitment Date',
        store=True,
        help="Date by which the products are sure to be delivered. This is "
        "a date that you can promise to the customer, based on the "
        "Product Lead Times.")
    requested_date = fields.Datetime(
        'Requested Date',
        readonly=True,
        states={
            'draft': [('readonly', False)],
            'sent': [('readonly', False)]
        },
        copy=False,
        help="Date by which the customer has requested the items to be "
        "delivered.\n"
        "When this Order gets confirmed, the Delivery Order's "
        "expected date will be computed based on this date and the "
        "Company's Security Delay.\n"
        "Leave this field empty if you want the Delivery Order to be "
        "processed as soon as possible. In that case the expected "
        "date will be computed using the default method: based on "
        "the Product Lead Times and the Company's Security Delay.")
    effective_date = fields.Date(
        compute='_compute_picking_ids',
        string='Effective Date',
        store=True,
        help="Date on which the first Delivery Order was created.")

    @api.depends('date_order', 'order_line.customer_lead')
    def _compute_commitment_date(self):
        """Compute the commitment date"""
        for order in self:
            dates_list = []
            order_datetime = fields.Datetime.from_string(order.date_order)
            for line in order.order_line.filtered(
                    lambda x: x.state != 'cancel' and not x._is_delivery()):
                dt = order_datetime + timedelta(days=line.customer_lead or 0.0)
                dates_list.append(dt)
            if dates_list:
                commit_date = min(
                    dates_list) if order.picking_policy == 'direct' else max(
                        dates_list)
                order.commitment_date = fields.Datetime.to_string(commit_date)
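
    # Illustration of the rule above (hypothetical values): with date_order 2021-01-01 and two
    # non-cancelled, non-delivery lines having customer_lead 3.0 and 7.0 days, the candidate
    # dates are Jan 4 and Jan 8; picking_policy 'direct' promises the earliest (Jan 4), any
    # other policy the latest (Jan 8).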

    def _compute_picking_ids(self):
        super(SaleOrder, self)._compute_picking_ids()
        for order in self:
            dates_list = []
            for pick in order.picking_ids:
                dates_list.append(fields.Datetime.from_string(pick.date))
            if dates_list:
                order.effective_date = fields.Datetime.to_string(
                    min(dates_list))

    @api.onchange('requested_date')
    def onchange_requested_date(self):
        """Warn if the requested dates is sooner than the commitment date"""
        if (self.requested_date and self.commitment_date
                and self.requested_date < self.commitment_date):
            return {
                'warning': {
                    'title': _('Requested date is too soon!'),
                    'message': _("The date requested by the customer is "
                                 "sooner than the commitment date. You may be "
                                 "unable to honor the customer's request."),
                }
            }
Example #15
0
class MrpWorkcenterProductivity(models.Model):
    _name = "mrp.workcenter.productivity"
    _description = "Workcenter Productivity Log"
    _order = "id desc"
    _rec_name = "loss_id"
    _check_company_auto = True

    def _get_default_company_id(self):
        company_id = False
        if self.env.context.get('default_company_id'):
            company_id = self.env.context['default_company_id']
        if not company_id and self.env.context.get('default_workorder_id'):
            workorder = self.env['mrp.workorder'].browse(
                self.env.context['default_workorder_id'])
            company_id = workorder.company_id
        if not company_id and self.env.context.get('default_workcenter_id'):
            workcenter = self.env['mrp.workcenter'].browse(
                self.env.context['default_workcenter_id'])
            company_id = workcenter.company_id
        if not company_id:
            company_id = self.env.company
        return company_id

    production_id = fields.Many2one('mrp.production',
                                    string='Manufacturing Order',
                                    related='workorder_id.production_id',
                                    readonly=True)
    workcenter_id = fields.Many2one('mrp.workcenter',
                                    "Work Center",
                                    required=True,
                                    check_company=True)
    company_id = fields.Many2one(
        'res.company',
        required=True,
        index=True,
        default=lambda self: self._get_default_company_id())
    workorder_id = fields.Many2one('mrp.workorder',
                                   'Work Order',
                                   check_company=True)
    user_id = fields.Many2one('res.users',
                              "User",
                              default=lambda self: self.env.uid)
    loss_id = fields.Many2one('mrp.workcenter.productivity.loss',
                              "Loss Reason",
                              ondelete='restrict',
                              required=True)
    loss_type = fields.Selection(string="Effectiveness",
                                 related='loss_id.loss_type',
                                 store=True,
                                 readonly=False)
    description = fields.Text('Description')
    date_start = fields.Datetime('Start Date',
                                 default=fields.Datetime.now,
                                 required=True)
    date_end = fields.Datetime('End Date')
    duration = fields.Float('Duration',
                            compute='_compute_duration',
                            store=True)

    @api.depends('date_end', 'date_start')
    def _compute_duration(self):
        for blocktime in self:
            if blocktime.date_start and blocktime.date_end:
                d1 = fields.Datetime.from_string(blocktime.date_start)
                d2 = fields.Datetime.from_string(blocktime.date_end)
                diff = d2 - d1
                if (blocktime.loss_type not in ('productive', 'performance')
                    ) and blocktime.workcenter_id.resource_calendar_id:
                    r = blocktime.workcenter_id._get_work_days_data_batch(
                        d1, d2)[blocktime.workcenter_id.id]['hours']
                    blocktime.duration = round(r * 60, 2)
                else:
                    blocktime.duration = round(diff.total_seconds() / 60.0, 2)
            else:
                blocktime.duration = 0.0
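
    # Worked example (hypothetical values): date_start 10:00 and date_end 10:45 with a
    # 'productive' loss type gives diff.total_seconds() == 2700, so duration == 45.0 minutes.
    # For other loss types on a workcenter with a resource calendar, only the working hours
    # between the two dates are counted (converted to minutes).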

    def button_block(self):
        self.ensure_one()
        self.workcenter_id.order_ids.end_all()
Example #16
0
class Users(models.Model):
    """ User class. A res.users record models an OpenERP user and is different
        from an employee.

        res.users class now inherits from res.partner. The partner model is
        used to store the data related to the partner: lang, name, address,
        avatar, ... The user model is now dedicated to technical data.
    """
    _name = "res.users"
    _description = 'Users'
    _inherits = {'res.partner': 'partner_id'}
    _order = 'name, login'
    __uid_cache = defaultdict(dict)  # {dbname: {uid: password}}

    # Users can write on a few of their own fields (but not their groups, for example)
    SELF_WRITEABLE_FIELDS = [
        'signature', 'action_id', 'company_id', 'email', 'name', 'image',
        'image_medium', 'image_small', 'lang', 'tz'
    ]
    # Users can read a few of their own fields
    SELF_READABLE_FIELDS = [
        'signature', 'company_id', 'login', 'email', 'name', 'image',
        'image_medium', 'image_small', 'lang', 'tz', 'tz_offset', 'groups_id',
        'partner_id', '__last_update', 'action_id'
    ]

    def _default_groups(self):
        default_user = self.env.ref('base.default_user',
                                    raise_if_not_found=False)
        return (default_user or self.env['res.users']).sudo().groups_id

    def _companies_count(self):
        return self.env['res.company'].sudo().search_count([])

    partner_id = fields.Many2one('res.partner',
                                 required=True,
                                 ondelete='restrict',
                                 auto_join=True,
                                 string='Related Partner',
                                 help='Partner-related data of the user')
    login = fields.Char(required=True, help="Used to log into the system")
    password = fields.Char(
        default='',
        invisible=True,
        copy=False,
        help=
        "Keep empty if you don't want the user to be able to connect on the system."
    )
    new_password = fields.Char(string='Set Password',
        compute='_compute_password', inverse='_inverse_password',
        help="Specify a value only when creating a user or if you're "\
             "changing the user's password, otherwise leave empty. After "\
             "a change of password, the user has to login again.")
    signature = fields.Html()
    active = fields.Boolean(default=True)
    action_id = fields.Many2one(
        'ir.actions.actions',
        string='Home Action',
        help=
        "If specified, this action will be opened at log on for this user, in addition to the standard menu."
    )
    groups_id = fields.Many2many('res.groups',
                                 'res_groups_users_rel',
                                 'uid',
                                 'gid',
                                 string='Groups',
                                 default=_default_groups)
    log_ids = fields.One2many('res.users.log',
                              'create_uid',
                              string='User log entries')
    login_date = fields.Datetime(related='log_ids.create_date',
                                 string='Latest connection')
    share = fields.Boolean(
        compute='_compute_share',
        compute_sudo=True,
        string='Share User',
        store=True,
        help=
        "External user with limited access, created only for the purpose of sharing data."
    )
    companies_count = fields.Integer(compute='_compute_companies_count',
                                     string="Number of Companies",
                                     default=_companies_count)
    tz_offset = fields.Char(compute='_compute_tz_offset',
                            string='Timezone offset',
                            invisible=True)

    @api.model
    def _get_company(self):
        return self.env.user.company_id

    # Special behavior for this field: res.company.search() will only return the companies
    # available to the current user (should be the user's companies?), when the user_preference
    # context is set.
    company_id = fields.Many2one(
        'res.company',
        string='Company',
        required=True,
        default=_get_company,
        help='The company this user is currently working for.',
        context={'user_preference': True})
    company_ids = fields.Many2many('res.company',
                                   'res_company_users_rel',
                                   'user_id',
                                   'cid',
                                   string='Companies',
                                   default=_get_company)

    # overridden inherited fields to bypass access rights, in case you have
    # access to the user but not its corresponding partner
    name = fields.Char(related='partner_id.name', inherited=True)
    email = fields.Char(related='partner_id.email', inherited=True)

    _sql_constraints = [('login_key', 'UNIQUE (login)',
                         'You cannot have two users with the same login!')]

    def _compute_password(self):
        for user in self:
            user.password = ''

    def _inverse_password(self):
        for user in self:
            if not user.new_password:
                # Do not update the password if no value is provided, ignore silently.
                # For example web client submits False values for all empty fields.
                continue
            if user == self.env.user:
                # To change their own password, users must use the client-specific change password wizard,
                # so that the new password is immediately used for further RPC requests, otherwise the user
                # will face unexpected 'Access Denied' exceptions.
                raise UserError(
                    _('Please use the change password wizard (in User Preferences or User menu) to change your own password.'
                      ))
            else:
                user.password = user.new_password

    @api.depends('groups_id')
    def _compute_share(self):
        for user in self:
            user.share = not user.has_group('base.group_user')

    @api.multi
    def _compute_companies_count(self):
        companies_count = self._companies_count()
        for user in self:
            user.companies_count = companies_count

    @api.depends('tz')
    def _compute_tz_offset(self):
        for user in self:
            user.tz_offset = datetime.datetime.now(
                pytz.timezone(user.tz or 'GMT')).strftime('%z')

    @api.onchange('login')
    def on_change_login(self):
        if self.login and tools.single_email_re.match(self.login):
            self.email = self.login

    @api.onchange('parent_id')
    def onchange_parent_id(self):
        return self.mapped('partner_id').onchange_parent_id()

    def _read_from_database(self, field_names, inherited_field_names=[]):
        super(Users, self)._read_from_database(field_names,
                                               inherited_field_names)
        canwrite = self.check_access_rights('write', raise_exception=False)
        if not canwrite and set(USER_PRIVATE_FIELDS).intersection(field_names):
            for record in self:
                for f in USER_PRIVATE_FIELDS:
                    try:
                        record._cache[f]
                        record._cache[f] = '********'
                    except Exception:
                        # skip SpecialValue (e.g. for missing record or access right)
                        pass

    @api.multi
    @api.constrains('company_id', 'company_ids')
    def _check_company(self):
        if any(user.company_ids and user.company_id not in user.company_ids
               for user in self):
            raise ValidationError(
                _('The chosen company is not in the allowed companies for this user'
                  ))

    @api.multi
    @api.constrains('action_id')
    def _check_action_id(self):
        action_open_website = self.env.ref('base.action_open_website',
                                           raise_if_not_found=False)
        if action_open_website and any(
                user.action_id.id == action_open_website.id for user in self):
            raise ValidationError(
                _('The "App Switcher" action cannot be selected as home action.'
                  ))

    @api.multi
    def read(self, fields=None, load='_classic_read'):
        if fields and self == self.env.user:
            for key in fields:
                if not (key in self.SELF_READABLE_FIELDS
                        or key.startswith('context_')):
                    break
            else:
                # safe fields only, so we read as super-user to bypass access rights
                self = self.sudo()

        return super(Users, self).read(fields=fields, load=load)

    @api.model
    def read_group(self,
                   domain,
                   fields,
                   groupby,
                   offset=0,
                   limit=None,
                   orderby=False,
                   lazy=True):
        groupby_fields = set([groupby] if isinstance(
            groupby, pycompat.string_types) else groupby)
        if groupby_fields.intersection(USER_PRIVATE_FIELDS):
            raise AccessError(_("Invalid 'group by' parameter"))
        return super(Users, self).read_group(domain,
                                             fields,
                                             groupby,
                                             offset=offset,
                                             limit=limit,
                                             orderby=orderby,
                                             lazy=lazy)

    @api.model
    def _search(self,
                args,
                offset=0,
                limit=None,
                order=None,
                count=False,
                access_rights_uid=None):
        if self._uid != SUPERUSER_ID and args:
            domain_fields = {
                term[0]
                for term in args if isinstance(term, (tuple, list))
            }
            if domain_fields.intersection(USER_PRIVATE_FIELDS):
                raise AccessError(_('Invalid search criterion'))
        return super(Users, self)._search(args,
                                          offset=offset,
                                          limit=limit,
                                          order=order,
                                          count=count,
                                          access_rights_uid=access_rights_uid)

    @api.model
    def create(self, vals):
        user = super(Users, self).create(vals)
        user.partner_id.active = user.active
        if user.partner_id.company_id:
            user.partner_id.write({'company_id': user.company_id.id})
        return user

    @api.multi
    def write(self, values):
        if values.get('active') == False:
            for user in self:
                if user.id == SUPERUSER_ID:
                    raise UserError(_("You cannot deactivate the admin user."))
                elif user.id == self._uid:
                    raise UserError(
                        _("You cannot deactivate the user you're currently logged in as."
                          ))

        if self == self.env.user:
            for key in list(values):
                if not (key in self.SELF_WRITEABLE_FIELDS
                        or key.startswith('context_')):
                    break
            else:
                if 'company_id' in values:
                    if values[
                            'company_id'] not in self.env.user.company_ids.ids:
                        del values['company_id']
                # safe fields only, so we write as super-user to bypass access rights
                self = self.sudo()

        res = super(Users, self).write(values)
        if 'company_id' in values:
            for user in self:
                # if partner is global we keep it that way
                if user.partner_id.company_id and user.partner_id.company_id.id != values[
                        'company_id']:
                    user.partner_id.write({'company_id': user.company_id.id})
            # clear default ir values when company changes
            self.env['ir.default'].clear_caches()

        # clear caches linked to the users
        if 'groups_id' in values:
            self.env['ir.model.access'].call_cache_clearing_methods()
            self.env['ir.rule'].clear_caches()
            self.has_group.clear_cache(self)
        if any(
                key.startswith('context_') or key in ('lang', 'tz')
                for key in values):
            self.context_get.clear_cache(self)
        if any(key in values for key in ['active'] + USER_PRIVATE_FIELDS):
            db = self._cr.dbname
            for id in self.ids:
                self.__uid_cache[db].pop(id, None)
        if any(key in values for key in self._get_session_token_fields()):
            self._invalidate_session_cache()

        return res

    @api.multi
    def unlink(self):
        if SUPERUSER_ID in self.ids:
            raise UserError(
                _('You cannot remove the admin user as it is used internally for resources created by Odoo (updates, module installation, ...)'
                  ))
        db = self._cr.dbname
        for id in self.ids:
            self.__uid_cache[db].pop(id, None)
        self._invalidate_session_cache()
        return super(Users, self).unlink()

    @api.model
    def name_search(self, name='', args=None, operator='ilike', limit=100):
        if args is None:
            args = []
        users = self.browse()
        if name and operator in ['=', 'ilike']:
            users = self.search([('login', '=', name)] + args, limit=limit)
        if not users:
            users = self.search([('name', operator, name)] + args, limit=limit)
        return users.name_get()

    @api.multi
    def copy(self, default=None):
        self.ensure_one()
        default = dict(default or {})
        if ('name' not in default) and ('partner_id' not in default):
            default['name'] = _("%s (copy)") % self.name
        if 'login' not in default:
            default['login'] = _("%s (copy)") % self.login
        return super(Users, self).copy(default)

    @api.model
    @tools.ormcache('self._uid')
    def context_get(self):
        user = self.env.user
        # determine field names to read
        name_to_key = {
            name: name[8:] if name.startswith('context_') else name
            for name in self._fields
            if name.startswith('context_') or name in ('lang', 'tz')
        }
        # use read() to not read other fields: this must work while modifying
        # the schema of models res.users or res.partner
        values = user.read(list(name_to_key), load=False)[0]
        return {key: values[name] for name, key in name_to_key.items()}
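
    # Sketch of the returned mapping (assuming only the standard 'lang' and 'tz' fields plus a
    # hypothetical 'context_foo' field): context_get() would yield something like
    # {'lang': 'en_US', 'tz': 'Europe/Brussels', 'foo': <value>}; the 'context_' prefix is
    # stripped from field names to build the context keys.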

    @api.model
    @api.returns('ir.actions.act_window', lambda record: record.id)
    def action_get(self):
        return self.sudo().env.ref('base.action_res_users_my')

    def check_super(self, passwd):
        return check_super(passwd)

    @api.model
    def check_credentials(self, password):
        """ Override this method to plug additional authentication methods"""
        if not password:
            raise AccessDenied()
        user = self.sudo().search([('id', '=', self._uid),
                                   ('password', '=', password)])
        if not user:
            raise AccessDenied()
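
    # Override sketch (an assumption based on the hook described in the docstring above): an
    # authentication module could extend res.users and consult another credential store before
    # giving up, e.g.:
    #
    #     @api.model
    #     def check_credentials(self, password):
    #         try:
    #             return super(Users, self).check_credentials(password)
    #         except AccessDenied:
    #             self._check_external_credentials(password)  # hypothetical helper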

    @api.model
    def _update_last_login(self):
        # only create new records to avoid any side-effect on concurrent transactions
        # extra records will be deleted by the periodical garbage collection
        self.env['res.users.log'].create({})  # populated by defaults

    @classmethod
    def _login(cls, db, login, password):
        if not password:
            return False
        user_id = False
        try:
            with cls.pool.cursor() as cr:
                self = api.Environment(cr, SUPERUSER_ID, {})[cls._name]
                user = self.search([('login', '=', login)])
                if user:
                    user_id = user.id
                    user.sudo(user_id).check_credentials(password)
                    user.sudo(user_id)._update_last_login()
        except AccessDenied:
            user_id = False

        status = "successful" if user_id else "failed"
        ip = request.httprequest.environ['REMOTE_ADDR'] if request else 'n/a'
        _logger.info("Login %s for db:%s login:%s from %s", status, db, login,
                     ip)

        return user_id

    @classmethod
    def authenticate(cls, db, login, password, user_agent_env):
        """Verifies and returns the user ID corresponding to the given
          ``login`` and ``password`` combination, or False if there was
          no matching user.
           :param str db: the database on which user is trying to authenticate
           :param str login: username
           :param str password: user password
           :param dict user_agent_env: environment dictionary describing any
               relevant environment attributes
        """
        uid = cls._login(db, login, password)
        if uid == SUPERUSER_ID:
            # Successfully logged in as admin!
            # Attempt to guess the web base url...
            if user_agent_env and user_agent_env.get('base_location'):
                try:
                    with cls.pool.cursor() as cr:
                        base = user_agent_env['base_location']
                        ICP = api.Environment(cr, uid,
                                              {})['ir.config_parameter']
                        if not ICP.get_param('web.base.url.freeze'):
                            ICP.set_param('web.base.url', base)
                except Exception:
                    _logger.exception(
                        "Failed to update web.base.url configuration parameter"
                    )
        return uid

    @classmethod
    def check(cls, db, uid, passwd):
        """Verifies that the given (uid, password) is authorized for the database ``db`` and
           raise an exception if it is not."""
        if not passwd:
            # empty passwords disallowed for obvious security reasons
            raise AccessDenied()
        db = cls.pool.db_name
        if cls.__uid_cache[db].get(uid) == passwd:
            return
        cr = cls.pool.cursor()
        try:
            self = api.Environment(cr, uid, {})[cls._name]
            self.check_credentials(passwd)
            cls.__uid_cache[db][uid] = passwd
        finally:
            cr.close()

    def _get_session_token_fields(self):
        return {'id', 'login', 'password', 'active'}

    @tools.ormcache('sid')
    def _compute_session_token(self, sid):
        """ Compute a session token given a session id and a user id """
        # retrieve the fields used to generate the session token
        session_fields = ', '.join(sorted(self._get_session_token_fields()))
        self.env.cr.execute(
            """SELECT %s, (SELECT value FROM ir_config_parameter WHERE key='database.secret')
                                FROM res_users
                                WHERE id=%%s""" % (session_fields),
            (self.id, ))
        if self.env.cr.rowcount != 1:
            self._invalidate_session_cache()
            return False
        data_fields = self.env.cr.fetchone()
        # generate hmac key
        key = (u'%s' % (data_fields, )).encode('utf-8')
        # hmac the session id
        data = sid.encode('utf-8')
        h = hmac.new(key, data, sha256)
        # keep in the cache the token
        return h.hexdigest()
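
    # Equivalent computation, for illustration (assuming the default session token fields
    # 'active', 'id', 'login', 'password' plus the database.secret parameter):
    #
    #     key = (u'%s' % ((active, id, login, password, db_secret),)).encode('utf-8')
    #     token = hmac.new(key, sid.encode('utf-8'), sha256).hexdigest()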

    @api.multi
    def _invalidate_session_cache(self):
        """ Clear the sessions cache """
        self._compute_session_token.clear_cache(self)

    @api.model
    def change_password(self, old_passwd, new_passwd):
        """Change current user password. Old password must be provided explicitly
        to prevent hijacking an existing user session, or for cases where the cleartext
        password is not used to authenticate requests.

        :return: True
        :raise: flectra.exceptions.AccessDenied when old password is wrong
        :raise: flectra.exceptions.UserError when new password is not set or empty
        """
        self.check(self._cr.dbname, self._uid, old_passwd)
        if new_passwd:
            # use self.env.user here, because it has uid=SUPERUSER_ID
            return self.env.user.write({'password': new_passwd})
        raise UserError(
            _("Setting empty passwords is not allowed for security reasons!"))

    @api.multi
    def preference_save(self):
        return {
            'type': 'ir.actions.client',
            'tag': 'reload_context',
        }

    @api.multi
    def preference_change_password(self):
        return {
            'type': 'ir.actions.client',
            'tag': 'change_password',
            'target': 'new',
        }

    @api.model
    def has_group(self, group_ext_id):
        # use singleton's id if called on a non-empty recordset, otherwise
        # context uid
        uid = self.id or self._uid
        return self.sudo(user=uid)._has_group(group_ext_id)

    @api.model
    @tools.ormcache('self._uid', 'group_ext_id')
    def _has_group(self, group_ext_id):
        """Checks whether user belongs to given group.

        :param str group_ext_id: external ID (XML ID) of the group.
           Must be provided in fully-qualified form (``module.ext_id``), as there
           is no implicit module to use.
        :return: True if the current user is a member of the group with the
           given external ID (XML ID), else False.
        """
        assert group_ext_id and '.' in group_ext_id, "External ID must be fully qualified"
        module, ext_id = group_ext_id.split('.')
        self._cr.execute(
            """SELECT 1 FROM res_groups_users_rel WHERE uid=%s AND gid IN
                            (SELECT res_id FROM ir_model_data WHERE module=%s AND name=%s)""",
            (self._uid, module, ext_id))
        return bool(self._cr.fetchone())

    # for a few places explicitly clearing the has_group cache
    has_group.clear_cache = _has_group.clear_cache
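
    # Usage sketch: business code usually checks membership through the current user, e.g.
    #
    #     if self.env.user.has_group('base.group_user'):
    #         ...  # internal-user-only behaviour
    #
    # The XML ID must be fully qualified, as enforced by the assert in _has_group.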

    @api.multi
    def _is_public(self):
        self.ensure_one()
        return self.has_group('base.group_public')

    @api.multi
    def _is_system(self):
        self.ensure_one()
        return self.has_group('base.group_system')

    @api.multi
    def _is_admin(self):
        self.ensure_one()
        return self._is_superuser() or self.has_group('base.group_erp_manager')

    @api.multi
    def _is_superuser(self):
        self.ensure_one()
        return self.id == SUPERUSER_ID

    @api.model
    def get_company_currency_id(self):
        return self.env.user.company_id.currency_id.id
Example #17
0
class StockMoveLine(models.Model):
    _name = "stock.move.line"
    _description = "Packing Operation"
    _rec_name = "product_id"
    _order = "result_package_id desc, id"

    picking_id = fields.Many2one(
        'stock.picking', 'Stock Picking',
        help='The stock operation where the packing has been made')
    move_id = fields.Many2one(
        'stock.move', 'Stock Move',
        help="Change to a better name")
    product_id = fields.Many2one('product.product', 'Product', ondelete="cascade")
    product_uom_id = fields.Many2one('product.uom', 'Unit of Measure', required=True)
    product_qty = fields.Float(
        'Real Reserved Quantity', digits=0,
        compute='_compute_product_qty', inverse='_set_product_qty', store=True)
    product_uom_qty = fields.Float('Reserved', default=0.0, digits=dp.get_precision('Product Unit of Measure'), required=True)
    ordered_qty = fields.Float('Ordered Quantity', digits=dp.get_precision('Product Unit of Measure'))
    qty_done = fields.Float('Done', default=0.0, digits=dp.get_precision('Product Unit of Measure'), copy=False)
    package_id = fields.Many2one('stock.quant.package', 'Source Package', ondelete='restrict')
    lot_id = fields.Many2one('stock.production.lot', 'Lot')
    lot_name = fields.Char('Lot/Serial Number')
    result_package_id = fields.Many2one(
        'stock.quant.package', 'Destination Package',
        ondelete='restrict', required=False,
        help="If set, the operations are packed into this package")
    date = fields.Datetime('Date', default=fields.Datetime.now, required=True)
    owner_id = fields.Many2one('res.partner', 'Owner', help="Owner of the quants")
    location_id = fields.Many2one('stock.location', 'From', required=True)
    location_dest_id = fields.Many2one('stock.location', 'To', required=True)
    from_loc = fields.Char(compute='_compute_location_description')
    to_loc = fields.Char(compute='_compute_location_description')
    lots_visible = fields.Boolean(compute='_compute_lots_visible')
    state = fields.Selection(related='move_id.state', store=True)
    is_initial_demand_editable = fields.Boolean(related='move_id.is_initial_demand_editable')
    is_locked = fields.Boolean(related='move_id.is_locked', default=True, readonly=True)
    consume_line_ids = fields.Many2many('stock.move.line', 'stock_move_line_consume_rel', 'consume_line_id', 'produce_line_id', help="Technical link to see who consumed what. ")
    produce_line_ids = fields.Many2many('stock.move.line', 'stock_move_line_consume_rel', 'produce_line_id', 'consume_line_id', help="Technical link to see which line was produced with this. ")
    reference = fields.Char(related='move_id.reference', store=True)
    in_entire_package = fields.Boolean(compute='_compute_in_entire_package')

    @api.one
    def _compute_location_description(self):
        for operation, operation_sudo in izip(self, self.sudo()):
            operation.from_loc = '%s%s' % (operation_sudo.location_id.name, operation.product_id and operation_sudo.package_id.name or '')
            operation.to_loc = '%s%s' % (operation_sudo.location_dest_id.name, operation_sudo.result_package_id.name or '')

    @api.one
    @api.depends('picking_id.picking_type_id', 'product_id.tracking')
    def _compute_lots_visible(self):
        picking = self.picking_id
        if picking.picking_type_id and self.product_id.tracking != 'none':  # TDE FIXME: not sure correctly migrated
            self.lots_visible = picking.picking_type_id.use_existing_lots or picking.picking_type_id.use_create_lots
        else:
            self.lots_visible = self.product_id.tracking != 'none'

    @api.one
    @api.depends('product_id', 'product_uom_id', 'product_uom_qty')
    def _compute_product_qty(self):
        self.product_qty = self.product_uom_id._compute_quantity(self.product_uom_qty, self.product_id.uom_id, rounding_method='HALF-UP')

    @api.one
    def _set_product_qty(self):
        """ The meaning of product_qty field changed lately and is now a functional field computing the quantity
        in the default product UoM. This code has been added to raise an error if a write is made given a value
        for `product_qty`, where the same write should set the `product_uom_qty` field instead, in order to
        detect errors. """
        raise UserError(_('The requested operation cannot be processed because of a programming error setting the `product_qty` field instead of the `product_uom_qty`.'))

    def _compute_in_entire_package(self):
        """ This method check if the move line is in an entire pack shown in the picking."""
        for ml in self:
            picking_id = ml.picking_id
            ml.in_entire_package = picking_id and picking_id.picking_type_entire_packs and picking_id.state != 'done'\
                                   and ml.result_package_id and ml.result_package_id in picking_id.entire_package_ids

    @api.constrains('product_uom_qty')
    def check_reserved_done_quantity(self):
        for move_line in self:
            if move_line.state == 'done' and not float_is_zero(move_line.product_uom_qty, precision_rounding=self.env['decimal.precision'].precision_get('Product Unit of Measure')):
                raise ValidationError(_('A done move line should never have a reserved quantity.'))

    @api.onchange('product_id', 'product_uom_id')
    def onchange_product_id(self):
        if self.product_id:
            self.lots_visible = self.product_id.tracking != 'none'
            if not self.product_uom_id or self.product_uom_id.category_id != self.product_id.uom_id.category_id:
                if self.move_id.product_uom:
                    self.product_uom_id = self.move_id.product_uom.id
                else:
                    self.product_uom_id = self.product_id.uom_id.id
            res = {'domain': {'product_uom_id': [('category_id', '=', self.product_uom_id.category_id.id)]}}
        else:
            res = {'domain': {'product_uom_id': []}}
        return res

    @api.onchange('lot_name', 'lot_id')
    def onchange_serial_number(self):
        res = {}
        if self.product_id.tracking == 'serial':
            self.qty_done = 1
            move_lines_to_check = self._get_similar_move_lines() - self
            message = move_lines_to_check._check_for_duplicated_serial_numbers()
            if message:
                res['warning'] = {'title': _('Warning'), 'message': message}
        return res

    @api.constrains('qty_done')
    def _check_positive_qty_done(self):
        if any([ml.qty_done < 0 for ml in self]):
            raise ValidationError(_('You cannot enter negative quantities!'))

    @api.constrains('lot_id', 'lot_name', 'qty_done')
    def _check_unique_serial_number(self):
        for ml in self.filtered(lambda ml: ml.move_id.product_id.tracking == 'serial' and (ml.lot_id or ml.lot_name)):
            move_lines_to_check = ml._get_similar_move_lines()
            message = move_lines_to_check._check_for_duplicated_serial_numbers()
            if message:
                raise ValidationError(message)
            if float_compare(ml.qty_done, 1.0, precision_rounding=ml.move_id.product_id.uom_id.rounding) == 1:
                raise UserError(_(
                    'You can only process 1.0 %s for products with unique serial number.') % ml.product_id.uom_id.name)
            if ml.lot_name:
                already_exist = self.env['stock.production.lot'].search(
                    [('name', '=', ml.lot_name), ('product_id', '=', ml.product_id.id)])
                if already_exist:
                    return _('You have already assigned this serial number to this product. Please correct the serial numbers encoded.')

    def _get_similar_move_lines(self):
        self.ensure_one()
        lines = self.env['stock.move.line']
        picking_id = self.move_id.picking_id if self.move_id else self.picking_id
        if picking_id:
            lines |= picking_id.move_line_ids.filtered(lambda ml: ml.product_id == self.product_id and (ml.lot_id or ml.lot_name))
        return lines

    def _check_for_duplicated_serial_numbers(self):
        """ This method is used in _check_unique_serial_number and in onchange_serial_number to check that a same serial number is not used twice amongst the recordset passed.

        :return: an error message directed to the user if needed else False
        """
        if self.mapped('lot_id'):
            lots_map = [(ml.product_id.id, ml.lot_id.name) for ml in self]
            recorded_serials_counter = Counter(lots_map)
            for (product_id, lot_id), occurrences in recorded_serials_counter.items():
                if occurrences > 1 and lot_id is not False:
                    return _('You cannot use the same serial number twice. Please correct the serial numbers encoded.')
        elif self.mapped('lot_name'):
            lots_map = [(ml.product_id.id, ml.lot_name) for ml in self]
            recorded_serials_counter = Counter(lots_map)
            for (product_id, lot_id), occurrences in recorded_serials_counter.items():
                if occurrences > 1 and lot_id is not False:
                    return _('You cannot use the same serial number twice. Please correct the serial numbers encoded.')
        return False
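
    # Minimal illustration of the Counter-based check above (hypothetical values): for two
    # move lines of product id 42 both carrying lot name 'SN-001',
    #     Counter([(42, 'SN-001'), (42, 'SN-001')]) == Counter({(42, 'SN-001'): 2})
    # so the occurrence count exceeds 1 and the duplicate-serial message is returned.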

    @api.model
    def create(self, vals):
        vals['ordered_qty'] = vals.get('product_uom_qty')

        # If the move line is created directly from the picking view and the
        # picking is already done, we should generate an associated done move.
        if 'picking_id' in vals and 'move_id' not in vals:
            picking = self.env['stock.picking'].browse(vals['picking_id'])
            if picking.state == 'done':
                product = self.env['product.product'].browse(vals['product_id'])
                new_move = self.env['stock.move'].create({
                    'name': _('New Move:') + product.display_name,
                    'product_id': product.id,
                    'product_uom_qty': 'qty_done' in vals and vals['qty_done'] or 0,
                    'product_uom': vals['product_uom_id'],
                    'location_id': 'location_id' in vals and vals['location_id'] or picking.location_id.id,
                    'location_dest_id': 'location_dest_id' in vals and vals['location_dest_id'] or picking.location_dest_id.id,
                    'state': 'done',
                    'additional': True,
                    'picking_id': picking.id,
                })
                vals['move_id'] = new_move.id

        ml = super(StockMoveLine, self).create(vals)
        if ml.state == 'done':
            if ml.product_id.type == 'product':
                Quant = self.env['stock.quant']
                quantity = ml.product_uom_id._compute_quantity(ml.qty_done, ml.move_id.product_id.uom_id,rounding_method='HALF-UP')
                in_date = None
                available_qty, in_date = Quant._update_available_quantity(ml.product_id, ml.location_id, -quantity, lot_id=ml.lot_id, package_id=ml.package_id, owner_id=ml.owner_id)
                if available_qty < 0 and ml.lot_id:
                    # see if we can compensate the negative quants with some untracked quants
                    untracked_qty = Quant._get_available_quantity(ml.product_id, ml.location_id, lot_id=False, package_id=ml.package_id, owner_id=ml.owner_id, strict=True)
                    if untracked_qty:
                        taken_from_untracked_qty = min(untracked_qty, abs(quantity))
                        Quant._update_available_quantity(ml.product_id, ml.location_id, -taken_from_untracked_qty, lot_id=False, package_id=ml.package_id, owner_id=ml.owner_id)
                        Quant._update_available_quantity(ml.product_id, ml.location_id, taken_from_untracked_qty, lot_id=ml.lot_id, package_id=ml.package_id, owner_id=ml.owner_id)
                Quant._update_available_quantity(ml.product_id, ml.location_dest_id, quantity, lot_id=ml.lot_id, package_id=ml.result_package_id, owner_id=ml.owner_id, in_date=in_date)
            next_moves = ml.move_id.move_dest_ids.filtered(lambda move: move.state not in ('done', 'cancel'))
            next_moves._do_unreserve()
            next_moves._action_assign()
        return ml

    def write(self, vals):
        """ Through the interface, we allow users to change the charateristics of a move line. If a
        quantity has been reserved for this move line, we impact the reservation directly to free
        the old quants and allocate the new ones.
        """
        if self.env.context.get('bypass_reservation_update'):
            return super(StockMoveLine, self).write(vals)

        Quant = self.env['stock.quant']
        precision = self.env['decimal.precision'].precision_get('Product Unit of Measure')
        # Changing the reserved quantity is forbidden in the interface, but it is needed in the
        # case of a stock.move split.
        # TODO Move me in the update
        if 'product_uom_qty' in vals:
            for ml in self.filtered(lambda m: m.state in ('partially_available', 'assigned') and m.product_id.type == 'product'):
                if not ml.location_id.should_bypass_reservation():
                    qty_to_decrease = ml.product_qty - ml.product_uom_id._compute_quantity(vals['product_uom_qty'], ml.product_id.uom_id, rounding_method='HALF-UP')
                    try:
                        Quant._update_reserved_quantity(ml.product_id, ml.location_id, -qty_to_decrease, lot_id=ml.lot_id, package_id=ml.package_id, owner_id=ml.owner_id, strict=True)
                    except UserError:
                        if ml.lot_id:
                            Quant._update_reserved_quantity(ml.product_id, ml.location_id, -qty_to_decrease, lot_id=False, package_id=ml.package_id, owner_id=ml.owner_id, strict=True)
                        else:
                            raise

        triggers = [
            ('location_id', 'stock.location'),
            ('location_dest_id', 'stock.location'),
            ('lot_id', 'stock.production.lot'),
            ('package_id', 'stock.quant.package'),
            ('result_package_id', 'stock.quant.package'),
            ('owner_id', 'res.partner')
        ]
        updates = {}
        for key, model in triggers:
            if key in vals:
                updates[key] = self.env[model].browse(vals[key])

        if updates:
            for ml in self.filtered(lambda ml: ml.state in ['partially_available', 'assigned'] and ml.product_id.type == 'product'):
                if not ml.location_id.should_bypass_reservation():
                    try:
                        Quant._update_reserved_quantity(ml.product_id, ml.location_id, -ml.product_qty, lot_id=ml.lot_id, package_id=ml.package_id, owner_id=ml.owner_id, strict=True)
                    except UserError:
                        if ml.lot_id:
                            Quant._update_reserved_quantity(ml.product_id, ml.location_id, -ml.product_qty, lot_id=False, package_id=ml.package_id, owner_id=ml.owner_id, strict=True)
                        else:
                            raise

                if not updates.get('location_id', ml.location_id).should_bypass_reservation():
                    new_product_qty = 0
                    try:
                        q = Quant._update_reserved_quantity(ml.product_id, updates.get('location_id', ml.location_id), ml.product_qty, lot_id=updates.get('lot_id', ml.lot_id),
                                                             package_id=updates.get('package_id', ml.package_id), owner_id=updates.get('owner_id', ml.owner_id), strict=True)
                        new_product_qty = sum([x[1] for x in q])
                    except UserError:
                        if updates.get('lot_id'):
                            # If we were not able to reserve on tracked quants, we can use untracked ones.
                            try:
                                q = Quant._update_reserved_quantity(ml.product_id, updates.get('location_id', ml.location_id), ml.product_qty, lot_id=False,
                                                                     package_id=updates.get('package_id', ml.package_id), owner_id=updates.get('owner_id', ml.owner_id), strict=True)
                                new_product_qty = sum([x[1] for x in q])
                            except UserError:
                                pass
                    if new_product_qty != ml.product_qty:
                        new_product_uom_qty = ml.product_id.uom_id._compute_quantity(new_product_qty, ml.product_uom_id, rounding_method='HALF-UP')
                        ml.with_context(bypass_reservation_update=True).product_uom_qty = new_product_uom_qty

        # When editing a done move line, the reserved availability of a potential chained move is
        # impacted, so we re-run `_action_assign` on the concerned moves.
        next_moves = self.env['stock.move']
        if updates or 'qty_done' in vals:
            for ml in self.filtered(lambda ml: ml.move_id.state == 'done' and ml.product_id.type == 'product'):
                # undo the original move line
                qty_done_orig = ml.move_id.product_uom._compute_quantity(ml.qty_done, ml.move_id.product_id.uom_id, rounding_method='HALF-UP')
                in_date = Quant._update_available_quantity(ml.product_id, ml.location_dest_id, -qty_done_orig, lot_id=ml.lot_id,
                                                      package_id=ml.result_package_id, owner_id=ml.owner_id)[1]
                Quant._update_available_quantity(ml.product_id, ml.location_id, qty_done_orig, lot_id=ml.lot_id,
                                                      package_id=ml.package_id, owner_id=ml.owner_id, in_date=in_date)

                # move what's been actually done
                product_id = ml.product_id
                location_id = updates.get('location_id', ml.location_id)
                location_dest_id = updates.get('location_dest_id', ml.location_dest_id)
                qty_done = vals.get('qty_done', ml.qty_done)
                lot_id = updates.get('lot_id', ml.lot_id)
                package_id = updates.get('package_id', ml.package_id)
                result_package_id = updates.get('result_package_id', ml.result_package_id)
                owner_id = updates.get('owner_id', ml.owner_id)
                quantity = ml.move_id.product_uom._compute_quantity(qty_done, ml.move_id.product_id.uom_id, rounding_method='HALF-UP')
                if not location_id.should_bypass_reservation():
                    ml._free_reservation(product_id, location_id, quantity, lot_id=lot_id, package_id=package_id, owner_id=owner_id)
                if not float_is_zero(quantity, precision_digits=precision):
                    available_qty, in_date = Quant._update_available_quantity(product_id, location_id, -quantity, lot_id=lot_id, package_id=package_id, owner_id=owner_id)
                    if available_qty < 0 and lot_id:
                        # see if we can compensate the negative quants with some untracked quants
                        untracked_qty = Quant._get_available_quantity(product_id, location_id, lot_id=False, package_id=package_id, owner_id=owner_id, strict=True)
                        if untracked_qty:
                            taken_from_untracked_qty = min(untracked_qty, abs(available_qty))
                            Quant._update_available_quantity(product_id, location_id, -taken_from_untracked_qty, lot_id=False, package_id=package_id, owner_id=owner_id)
                            Quant._update_available_quantity(product_id, location_id, taken_from_untracked_qty, lot_id=lot_id, package_id=package_id, owner_id=owner_id)
                            if not location_id.should_bypass_reservation():
                                ml._free_reservation(ml.product_id, location_id, untracked_qty, lot_id=False, package_id=package_id, owner_id=owner_id)
                    Quant._update_available_quantity(product_id, location_dest_id, quantity, lot_id=lot_id, package_id=result_package_id, owner_id=owner_id, in_date=in_date)
                # Unreserve and re-reserve the following moves so the move lines carry the real reserved quantity.
                next_moves |= ml.move_id.move_dest_ids.filtered(lambda move: move.state not in ('done', 'cancel'))

                # Log a note
                if ml.picking_id:
                    ml._log_message(ml.picking_id, ml, 'stock.track_move_template', vals)

        res = super(StockMoveLine, self).write(vals)

        # Update scrap object linked to move_lines to the new quantity.
        if 'qty_done' in vals:
            for move in self.mapped('move_id'):
                if move.scrapped:
                    move.scrap_ids.write({'scrap_qty': move.quantity_done})

        # Since stock_account values a move according to its `product_uom_qty`, any done stock move
        # should have its `product_uom_qty` equal to its `quantity_done`; that is what the move's
        # `action_done` does, so we replicate that behaviour here.
        if updates or 'qty_done' in vals:
            moves = self.filtered(lambda ml: ml.move_id.state == 'done').mapped('move_id')
            for move in moves:
                move.product_uom_qty = move.quantity_done
        next_moves._do_unreserve()
        next_moves._action_assign()
        return res

    def unlink(self):
        precision = self.env['decimal.precision'].precision_get('Product Unit of Measure')
        for ml in self:
            if ml.state in ('done', 'cancel'):
                raise UserError(_('You can not delete product moves if the picking is done. You can only correct the done quantities.'))
            # Unlinking a move line should unreserve.
            if ml.product_id.type == 'product' and not ml.location_id.should_bypass_reservation() and not float_is_zero(ml.product_qty, precision_digits=precision):
                self.env['stock.quant']._update_reserved_quantity(ml.product_id, ml.location_id, -ml.product_qty, lot_id=ml.lot_id,
                                                                   package_id=ml.package_id, owner_id=ml.owner_id, strict=True)
        moves = self.mapped('move_id')
        res = super(StockMoveLine, self).unlink()
        if moves:
            moves._recompute_state()
        return res

    def _action_done(self):
        """ This method is called during a move's `action_done`. It'll actually move a quant from
        the source location to the destination location, and unreserve if needed in the source
        location.

        This method is intended to be called on all the move lines of a move. It is not intended
        to be called when editing a `done` move; that case is handled by the override of `write`
        above.
        """

        # First, we loop over all the move lines to do a preliminary check: `qty_done` should not
        # be negative and, according to the presence of a picking type or a linked inventory
        # adjustment, enforce some rules on the `lot_id` field. If `qty_done` is null, we unlink
        # the line. It is mandatory in order to free the reservation and correctly apply
        # `action_done` on the next move lines.
        ml_to_delete = self.env['stock.move.line']
        for ml in self:
            qty_done_float_compared = float_compare(ml.qty_done, 0, precision_rounding=ml.product_uom_id.rounding)
            if qty_done_float_compared > 0:
                if ml.product_id.tracking != 'none':
                    picking_type_id = ml.move_id.picking_type_id
                    if picking_type_id:
                        if picking_type_id.use_create_lots:
                            # If a picking type is linked, we may have to create a production lot on
                            # the fly before assigning it to the move line if the user checked both
                            # `use_create_lots` and `use_existing_lots`.
                            if ml.lot_name and not ml.lot_id:
                                lot = self.env['stock.production.lot'].create(
                                    {'name': ml.lot_name, 'product_id': ml.product_id.id}
                                )
                                ml.write({'lot_id': lot.id})
                        elif not picking_type_id.use_create_lots and not picking_type_id.use_existing_lots:
                            # If the user disabled both `use_create_lots` and `use_existing_lots`
                            # checkboxes on the picking type, he's allowed to enter tracked
                            # products without a `lot_id`.
                            continue
                    elif ml.move_id.inventory_id:
                        # If an inventory adjustment is linked, the user is allowed to enter
                        # tracked products without a `lot_id`.
                        continue

                    if not ml.lot_id:
                        raise UserError(_('You need to supply a lot/serial number for %s.') % ml.product_id.name)
            elif qty_done_float_compared < 0:
                raise UserError(_('No negative quantities allowed'))
            else:
                ml_to_delete |= ml
        ml_to_delete.unlink()

        # Now, we can actually move the quant.
        for ml in self - ml_to_delete:
            if ml.product_id.type == 'product':
                Quant = self.env['stock.quant']
                rounding = ml.product_uom_id.rounding

                # if this move line is force assigned, unreserve elsewhere if needed
                if not ml.location_id.should_bypass_reservation() and float_compare(ml.qty_done, ml.product_qty, precision_rounding=rounding) > 0:
                    extra_qty = ml.qty_done - ml.product_qty
                    ml._free_reservation(ml.product_id, ml.location_id, extra_qty, lot_id=ml.lot_id, package_id=ml.package_id, owner_id=ml.owner_id)
                # unreserve what's been reserved
                if not ml.location_id.should_bypass_reservation() and ml.product_id.type == 'product' and ml.product_qty:
                    try:
                        Quant._update_reserved_quantity(ml.product_id, ml.location_id, -ml.product_qty, lot_id=ml.lot_id, package_id=ml.package_id, owner_id=ml.owner_id, strict=True)
                    except UserError:
                        Quant._update_reserved_quantity(ml.product_id, ml.location_id, -ml.product_qty, lot_id=False, package_id=ml.package_id, owner_id=ml.owner_id, strict=True)

                # move what's been actually done
                quantity = ml.product_uom_id._compute_quantity(ml.qty_done, ml.move_id.product_id.uom_id, rounding_method='HALF-UP')
                available_qty, in_date = Quant._update_available_quantity(ml.product_id, ml.location_id, -quantity, lot_id=ml.lot_id, package_id=ml.package_id, owner_id=ml.owner_id)
                if available_qty < 0 and ml.lot_id:
                    # see if we can compensate the negative quants with some untracked quants
                    untracked_qty = Quant._get_available_quantity(ml.product_id, ml.location_id, lot_id=False, package_id=ml.package_id, owner_id=ml.owner_id, strict=True)
                    if untracked_qty:
                        taken_from_untracked_qty = min(untracked_qty, abs(quantity))
                        Quant._update_available_quantity(ml.product_id, ml.location_id, -taken_from_untracked_qty, lot_id=False, package_id=ml.package_id, owner_id=ml.owner_id)
                        Quant._update_available_quantity(ml.product_id, ml.location_id, taken_from_untracked_qty, lot_id=ml.lot_id, package_id=ml.package_id, owner_id=ml.owner_id)
                Quant._update_available_quantity(ml.product_id, ml.location_dest_id, quantity, lot_id=ml.lot_id, package_id=ml.result_package_id, owner_id=ml.owner_id, in_date=in_date)
        # Reset the reserved quantity as we just moved it to the destination location.
        (self - ml_to_delete).with_context(bypass_reservation_update=True).write({'product_uom_qty': 0.00})
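        # Illustrative walkthrough of the compensation step above (the numbers are made up, not
        # from the source): say 5 units of a lot-tracked product are done while the source
        # location holds 3 units under that lot and 4 untracked units. Removing 5 drives the lot
        # quant to -2, so min(4, 5) == 4 units are transferred from the untracked quant to the
        # lot quant: untracked goes 4 -> 0 and the lot goes -2 -> +2, i.e. the 2 units physically
        # left on hand are now attributed to the lot instead of remaining a negative quant.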

    def _log_message(self, record, move, template, vals):
        data = vals.copy()
        if 'lot_id' in vals and vals['lot_id'] != move.lot_id.id:
            data['lot_name'] = self.env['stock.production.lot'].browse(vals.get('lot_id')).name
        if 'location_id' in vals:
            data['location_name'] = self.env['stock.location'].browse(vals.get('location_id')).name
        if 'location_dest_id' in vals:
            data['location_dest_name'] = self.env['stock.location'].browse(vals.get('location_dest_id')).name
        if 'package_id' in vals and vals['package_id'] != move.package_id.id:
            data['package_name'] = self.env['stock.quant.package'].browse(vals.get('package_id')).name
        if 'result_package_id' in vals and vals['result_package_id'] != move.result_package_id.id:
            data['result_package_name'] = self.env['stock.quant.package'].browse(vals.get('result_package_id')).name
        if 'owner_id' in vals and vals['owner_id'] != move.owner_id.id:
            data['owner_name'] = self.env['res.partner'].browse(vals.get('owner_id')).name
        record.message_post_with_view(template, values={'move': move, 'vals': dict(vals, **data)}, subtype_id=self.env.ref('mail.mt_note').id)

    def _free_reservation(self, product_id, location_id, quantity, lot_id=None, package_id=None, owner_id=None):
        """ When editing a done move line or validating one with some forced quantities, it is
        possible to impact quants that were not reserved. It is therefore necessary to edit or
        unlink the move lines that reserved a quantity now unavailable.
        """
        self.ensure_one()

        # Check the available quantity, with the `strict` kw set to `True`. If the available
        # quantity is greater than the quantity now unavailable, there is nothing to do.
        available_quantity = self.env['stock.quant']._get_available_quantity(
            product_id, location_id, lot_id=lot_id, package_id=package_id, owner_id=owner_id, strict=True
        )
        if quantity > available_quantity:
            # We now have to find the move lines that reserved our now unavailable quantity. We
            # take care to exclude ourselves and the move lines where work has already been done.
            outdated_move_lines_domain = [
                ('move_id.state', 'not in', ['done', 'cancel']),
                ('product_id', '=', product_id.id),
                ('lot_id', '=', lot_id.id if lot_id else False),
                ('location_id', '=', location_id.id),
                ('owner_id', '=', owner_id.id if owner_id else False),
                ('package_id', '=', package_id.id if package_id else False),
                ('product_qty', '>', 0.0),
                ('id', '!=', self.id),
            ]
            outdated_candidates = self.env['stock.move.line'].search(outdated_move_lines_domain)

            # As the move's state is not computed over the move lines, we'll have to manually
            # recompute the moves whose lines we adapted.
            move_to_recompute_state = self.env['stock.move']

            rounding = self.product_uom_id.rounding
            for candidate in outdated_candidates:
                if float_compare(candidate.product_qty, quantity, precision_rounding=rounding) <= 0:
                    quantity -= candidate.product_qty
                    move_to_recompute_state |= candidate.move_id
                    if candidate.qty_done:
                        candidate.product_uom_qty = 0.0
                    else:
                        candidate.unlink()
                else:
                    # split this move line and assign the new part to our extra move
                    quantity_split = float_round(
                        candidate.product_qty - quantity,
                        precision_rounding=self.product_uom_id.rounding,
                        rounding_method='UP')
                    candidate.product_uom_qty = self.product_id.uom_id._compute_quantity(quantity_split, self.product_uom_id, rounding_method='HALF-UP')
                    quantity -= quantity_split
                    move_to_recompute_state |= candidate.move_id
                if quantity == 0.0:
                    break
            move_to_recompute_state._recompute_state()
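

# A minimal, runnable sketch (outside Odoo) of the reservation-stealing loop in
# `_free_reservation` above: walk over candidate reservations and release quantity from them
# until the amount that must be freed is covered. This is a simplified model of the intent,
# not a line-by-line port; the helper name and data shapes are illustrative only.
def free_reservation_sketch(candidate_reserved_quantities, quantity_to_free):
    """Return the new reserved quantity of each candidate after freeing `quantity_to_free`."""
    remaining = quantity_to_free
    new_reserved = []
    for reserved in candidate_reserved_quantities:
        if remaining <= 0.0:
            new_reserved.append(reserved)  # nothing left to free
        elif reserved <= remaining:
            remaining -= reserved          # consume the whole reservation
            new_reserved.append(0.0)
        else:
            new_reserved.append(reserved - remaining)  # partial split keeps the remainder
            remaining = 0.0
    return new_reserved


# Freeing 4 units from reservations of 2, 3 and 4 units empties the first candidate,
# trims the second from 3 to 1 and leaves the third untouched.
assert free_reservation_sketch([2.0, 3.0, 4.0], 4.0) == [0.0, 1.0, 4.0]

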
class ProductWishlist(models.Model):
    _name = 'product.wishlist'
    _sql_constraints = [
        ("session_or_partner_id",
         "CHECK(session IS NULL != partner_id IS NULL)",
         "Need a session or partner, but never both."),
        ("product_unique_session", "UNIQUE(product_id, session)",
         "Duplicated wishlisted product for this session."),
        ("product_unique_partner_id", "UNIQUE(product_id, partner_id)",
         "Duplicated wishlisted product for this partner."),
    ]

    partner_id = fields.Many2one('res.partner', string='Owner')
    session = fields.Char(
        help="Website session identifier where this product was wishlisted.")
    product_id = fields.Many2one('product.product',
                                 string='Product',
                                 required=True)
    currency_id = fields.Many2one('res.currency',
                                  related='pricelist_id.currency_id',
                                  readonly=True)
    pricelist_id = fields.Many2one('product.pricelist',
                                   string='Pricelist',
                                   help='Pricelist when added')
    price = fields.Monetary(
        digits=0,
        currency_field='currency_id',
        string='Price',
        help='Price of the product when it has been added in the wishlist')
    price_new = fields.Float(
        compute='compute_new_price',
        string='Current price',
        help='Current price of this product, using same pricelist, ...')
    website_id = fields.Many2one('website', required=True)
    create_date = fields.Datetime('Added Date', readonly=True, required=True)
    active = fields.Boolean(default=True, required=True)

    @api.multi
    @api.depends('pricelist_id', 'currency_id', 'product_id')
    def compute_new_price(self):
        for wish in self:
            wish.price_new = wish.product_id.with_context(
                pricelist=wish.pricelist_id.id).website_price

    @api.model
    def current(self):
        """Get all wishlist items that belong to current user or session,
        filter products that are unpublished."""
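        # The search domain below uses Odoo's prefix (Polish) notation: the leading "|" combines
        # the next term with the "&" group after it, so it reads as: partner_id is the current
        # user's partner, OR (partner_id is unset AND session equals the current session).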
        return self.sudo().search([
            "|",
            ("partner_id", "=", self.env.user.partner_id.id),
            "&",
            ("partner_id", "=", False),
            ("session", "=", self.env.user.current_session),
        ]).filtered('product_id.product_tmpl_id.website_published').sudo(
            user=self.env.user)

    @api.model
    def _add_to_wishlist(self,
                         pricelist_id,
                         currency_id,
                         website_id,
                         price,
                         product_id,
                         partner_id=False,
                         session=False):
        wish = self.env['product.wishlist'].create({
            'partner_id': partner_id,
            'session': session,
            'product_id': product_id,
            'currency_id': currency_id,
            'pricelist_id': pricelist_id,
            'price': price,
            'website_id': website_id,
        })
        return wish

    @api.model
    def _join_current_user_and_session(self):
        """Assign all dangling session wishlisted products to user."""
        session_wishes = self.search([
            ("session", "=", self.env.user.current_session),
            ("partner_id", "=", False),
        ])
        partner_wishes = self.search([
            ("partner_id", "=", self.env.user.partner_id.id),
        ])
        partner_products = partner_wishes.mapped("product_id")
        # Remove session products already present for the user
        duplicated_wishes = session_wishes.filtered(
            lambda wish: wish.product_id <= partner_products)
        session_wishes -= duplicated_wishes
        duplicated_wishes.unlink()
        # Assign the rest to the user
        session_wishes.write({
            "partner_id": self.env.user.partner_id.id,
            "session": False,
        })

    @api.model
    def _garbage_collector(self, *args, **kwargs):
        """Remove wishlists for unexisting sessions."""
        self.search([
            ("create_date", "<",
             fields.Datetime.to_string(datetime.now() - timedelta(
                 weeks=kwargs.get('wishlist_week', 5)))),
            ("partner_id", "=", False),
        ]).unlink()
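

# A small, runnable sketch (outside Odoo) of the merge performed by
# `_join_current_user_and_session` above: wishes recorded against an anonymous session are
# dropped when the logged-in partner already wishes the product, and reassigned to the
# partner otherwise. Plain tuples stand in for records; all names here are illustrative only.
def merge_session_wishes(session_wishes, partner_product_ids, partner_id):
    """session_wishes: list of (product_id, owner) pairs; returns the reassigned wishes."""
    merged = []
    for product_id, _owner in session_wishes:
        if product_id in partner_product_ids:
            continue  # duplicate of an existing partner wish: would be unlinked
        merged.append((product_id, partner_id))  # dangling session wish: assign to the partner
    return merged


# The partner already wishes product 10, so only product 20 is kept and reassigned.
assert merge_session_wishes([(10, 'session'), (20, 'session')], {10}, 42) == [(20, 42)]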
Example #19
0
class SaleReport(models.Model):
    _name = "sale.report"
    _inherit = ['ir.branch.company.mixin']
    _description = "Sales Orders Statistics"
    _auto = False
    _rec_name = 'date'
    _order = 'date desc'

    name = fields.Char('Order Reference', readonly=True)
    date = fields.Datetime('Date Order', readonly=True)
    confirmation_date = fields.Datetime('Confirmation Date', readonly=True)
    product_id = fields.Many2one('product.product', 'Product', readonly=True)
    product_uom = fields.Many2one('product.uom', 'Unit of Measure', readonly=True)
    product_uom_qty = fields.Float('Qty Ordered', readonly=True)
    qty_delivered = fields.Float('Qty Delivered', readonly=True)
    qty_to_invoice = fields.Float('Qty To Invoice', readonly=True)
    qty_invoiced = fields.Float('Qty Invoiced', readonly=True)
    partner_id = fields.Many2one('res.partner', 'Partner', readonly=True)
    company_id = fields.Many2one('res.company', 'Company', readonly=True)
    user_id = fields.Many2one('res.users', 'Salesperson', readonly=True)
    price_total = fields.Float('Total', readonly=True)
    price_subtotal = fields.Float('Untaxed Total', readonly=True)
    amt_to_invoice = fields.Float('Amount To Invoice', readonly=True)
    amt_invoiced = fields.Float('Amount Invoiced', readonly=True)
    product_tmpl_id = fields.Many2one('product.template', 'Product Template', readonly=True)
    categ_id = fields.Many2one('product.category', 'Product Category', readonly=True)
    nbr = fields.Integer('# of Lines', readonly=True)
    pricelist_id = fields.Many2one('product.pricelist', 'Pricelist', readonly=True)
    analytic_account_id = fields.Many2one('account.analytic.account', 'Analytic Account', readonly=True)
    team_id = fields.Many2one('crm.team', 'Sales Channel', readonly=True, oldname='section_id')
    country_id = fields.Many2one('res.country', 'Partner Country', readonly=True)
    commercial_partner_id = fields.Many2one('res.partner', 'Commercial Entity', readonly=True)
    state = fields.Selection([
        ('draft', 'Draft Quotation'),
        ('sent', 'Quotation Sent'),
        ('sale', 'Sales Order'),
        ('done', 'Sales Done'),
        ('cancel', 'Cancelled'),
        ], string='Status', readonly=True)
    weight = fields.Float('Gross Weight', readonly=True)
    volume = fields.Float('Volume', readonly=True)

    def _select(self):
        select_str = """
            WITH currency_rate as (%s)
             SELECT min(l.id) as id,
                    l.product_id as product_id,
                    t.uom_id as product_uom,
                    sum(l.product_uom_qty / u.factor * u2.factor) as product_uom_qty,
                    sum(l.qty_delivered / u.factor * u2.factor) as qty_delivered,
                    sum(l.qty_invoiced / u.factor * u2.factor) as qty_invoiced,
                    sum(l.qty_to_invoice / u.factor * u2.factor) as qty_to_invoice,
                    sum(l.price_total / COALESCE(cr.rate, 1.0)) as price_total,
                    sum(l.price_subtotal / COALESCE(cr.rate, 1.0)) as price_subtotal,
                    sum(l.amt_to_invoice / COALESCE(cr.rate, 1.0)) as amt_to_invoice,
                    sum(l.amt_invoiced / COALESCE(cr.rate, 1.0)) as amt_invoiced,
                    count(*) as nbr,
                    s.name as name,
                    s.date_order as date,
                    s.confirmation_date as confirmation_date,
                    s.state as state,
                    s.partner_id as partner_id,
                    s.user_id as user_id,
                    s.company_id as company_id,
                    s.branch_id as branch_id,
                    extract(epoch from avg(date_trunc('day',s.date_order)-date_trunc('day',s.create_date)))/(24*60*60)::decimal(16,2) as delay,
                    t.categ_id as categ_id,
                    s.pricelist_id as pricelist_id,
                    s.analytic_account_id as analytic_account_id,
                    s.team_id as team_id,
                    p.product_tmpl_id,
                    partner.country_id as country_id,
                    partner.commercial_partner_id as commercial_partner_id,
                    sum(p.weight * l.product_uom_qty / u.factor * u2.factor) as weight,
                    sum(p.volume * l.product_uom_qty / u.factor * u2.factor) as volume
        """ % self.env['res.currency']._select_companies_rates()
        return select_str

    def _from(self):
        from_str = """
                sale_order_line l
                      join sale_order s on (l.order_id=s.id)
                      join res_partner partner on s.partner_id = partner.id
                        left join product_product p on (l.product_id=p.id)
                            left join product_template t on (p.product_tmpl_id=t.id)
                    left join product_uom u on (u.id=l.product_uom)
                    left join product_uom u2 on (u2.id=t.uom_id)
                    left join product_pricelist pp on (s.pricelist_id = pp.id)
                    left join currency_rate cr on (cr.currency_id = pp.currency_id and
                        cr.company_id = s.company_id and
                        cr.date_start <= coalesce(s.date_order, now()) and
                        (cr.date_end is null or cr.date_end > coalesce(s.date_order, now())))
        """
        return from_str

    def _group_by(self):
        group_by_str = """
            GROUP BY l.product_id,
                    l.order_id,
                    t.uom_id,
                    t.categ_id,
                    s.name,
                    s.date_order,
                    s.confirmation_date,
                    s.partner_id,
                    s.user_id,
                    s.state,
                    s.company_id,
                    s.branch_id,
                    s.pricelist_id,
                    s.analytic_account_id,
                    s.team_id,
                    p.product_tmpl_id,
                    partner.country_id,
                    partner.commercial_partner_id
        """
        return group_by_str

    @api.model_cr
    def init(self):
        # self._table = sale_report
        tools.drop_view_if_exists(self.env.cr, self._table)
        self.env.cr.execute("""CREATE or REPLACE VIEW %s as (
            %s
            FROM ( %s )
            %s
            )""" % (self._table, self._select(), self._from(), self._group_by()))
Example #20
0
class HelpdeskTicket(models.Model):
    _name = 'helpdesk.ticket'
    _inherit = [
        'mail.thread.cc', 'mail.thread', 'mail.activity.mixin', 'utm.mixin',
        'portal.mixin', 'rating.mixin'
    ]

    _description = 'Helpdesk Ticket'
    _rec_name = 'issue_name'

    def _default_team_id(self):
        team_id = self.env['helpdesk.team'].search(
            [('visibility_member_ids', 'in', self.env.uid)], limit=1).id
        if not team_id:
            team_id = self.env['helpdesk.team'].search([], limit=1).id
        return team_id

    uid = fields.Many2one('res.users', default=lambda self: self.env.uid)
    issue_name = fields.Char(string='Subject',
                             required=True,
                             index=True,
                             tracking=True)
    team_id = fields.Many2one('helpdesk.team',
                              string='Helpdesk Team',
                              default=_default_team_id,
                              tracking=True)
    help_description = fields.Text()
    active = fields.Boolean(default=True)
    tag_ids = fields.Many2many('helpdesk.tag', string='Tags')
    company_id = fields.Many2one(related='team_id.company_id',
                                 string='Company',
                                 store=True,
                                 readonly=True)
    user_id = fields.Many2one('res.users', string='Assigned to', tracking=True)
    color = fields.Integer(string='Color Index')
    ticket_seq = fields.Char('Ticket No', default='New', copy=False)
    priority = fields.Selection([('1', 'Low'), ('2', 'Medium'), ('3', 'High')],
                                default='1')
    partner_id = fields.Many2one('res.partner',
                                 string='Related Partner',
                                 tracking=True)
    partner_name = fields.Char('Customer Name')
    email = fields.Char(string='Email')
    issue_type_id = fields.Many2one('issue.type',
                                    string='Issue Type',
                                    store=True)
    start_date = fields.Datetime(string='Ticket Created Date',
                                 default=fields.Datetime.now,
                                 tracking=True)
    end_date = fields.Datetime(string='Ticket Close Date', tracking=True)
    attachment_ids = fields.One2many(
        'ir.attachment',
        compute='_compute_attachments',
        string="Main Attachments",
        help="Attachment that don't come from message.")
    attachments_count = fields.Integer(compute='_compute_attachments',
                                       string='Add Attachments')
    is_accessible = fields.Boolean('Is Accessible',
                                   compute='_compute_is_accessible')
    is_assigned = fields.Boolean('Is Assigned',
                                 compute='_compute_is_accessible')
    stage_id = fields.Many2one('helpdesk.stage',
                               string='Stage',
                               index=True,
                               tracking=True,
                               readonly=False,
                               store=True,
                               copy=False,
                               ondelete='restrict')

    feedback = fields.Text('Comment', help="Reason of the rating")

    rating_last_value = fields.Float('Rating Last Value',
                                     groups='base.group_user',
                                     compute='_compute_rating_last_value',
                                     compute_sudo=True,
                                     store=True)
    is_rating = fields.Boolean("Is Rating")

    def action_assign_to_me(self):
        if not self.user_id:
            self.user_id = self.env.user

    def _merge_ticket_attachments(self, tickets):
        self.ensure_one()

        def _get_attachments(ticket_id):
            return self.env['ir.attachment'].search([
                ('res_model', '=', self._name), ('res_id', '=', ticket_id)
            ])

        first_attachments = _get_attachments(self.id)
        count = 1
        for ticket in tickets:
            attachments = _get_attachments(ticket.id)
            for attachment in attachments:
                values = {'res_id': self.id}
                for attachment_in_first in first_attachments:
                    if attachment.name == attachment_in_first.name:
                        values['name'] = "%s (%s)" % (attachment.name, count)
                count += 1
                attachment.write(values)
        return True

    @api.depends('user_id')
    def _compute_team_id(self):
        for ticket in self:
            if not ticket.user_id:
                continue
            user = ticket.user_id
            if ticket.team_id and user in ticket.team_id.member_ids | ticket.team_id.user_id:
                continue
            team = self.env['helpdesk.team']._get_default_team_id(
                user_id=user.id)
            ticket.team_id = team.id

    @api.model
    def message_new(self, msg_dict, custom_values=None):
        if self.env.user.has_group('base.group_portal'):
            self = self.with_context(default_user_id=False)
        if self._uid == self.env.ref('base.user_root').id:
            self = self.with_context(default_user_id=False)

        if custom_values is None:
            custom_values = {}
        defaults = {
            'issue_name': msg_dict.get('subject') or _("No Subject"),
            'email': msg_dict.get('from'),
            'partner_id': msg_dict.get('author_id', False),
            'team_id': custom_values.get('team_id', False),
        }

        if 'company_id' not in defaults and 'team_id' in defaults:
            defaults['company_id'] = self.env['helpdesk.team'].browse(
                defaults['team_id']).company_id.id
        return super(HelpdeskTicket, self).message_new(msg_dict,
                                                       custom_values=defaults)

    @api.model
    def _default_access_token(self):
        return uuid.uuid4().hex

    access_token = fields.Char('Access Token', default=_default_access_token)

    @api.model
    def default_get(self, default_fields):
        vals = super(HelpdeskTicket, self).default_get(default_fields)
        if 'team_id' not in default_fields:
            team_id = self._default_team_id()
            vals.update({'team_id': team_id})
        if 'team_id' in vals:
            user_dict = {}
            team_id = self.env['helpdesk.team'].search([("id", "=",
                                                         vals['team_id'])])
            if team_id.assignment_method == 'balanced':
                for rec in team_id.member_ids.ids:
                    ticket = self.env['helpdesk.ticket'].search_count([
                        ('team_id', '=', team_id.id), ('user_id', '=', rec)
                    ])
                    user_dict.update({rec: ticket})
                temp = min(user_dict.values())
                res = [key for key in user_dict if user_dict[key] == temp]
                vals['user_id'] = res[0]

            if team_id.assignment_method == 'random':
                for member in team_id.member_ids:
                    vals['user_id'] = member.id
        return vals

    @api.onchange('stage_id')
    def onchange_end_date(self):
        if self.stage_id.stage_type == 'done':
            self.end_date = datetime.today()

    def _creation_subtype(self):
        return self.env.ref('helpdesk_basic.mt_ticket_new')

    def _track_subtype(self, init_values):
        self.ensure_one()
        if 'user_id' in init_values and self.user_id:
            return self.env.ref('helpdesk_basic.mt_ticket_new')
        elif 'stage_id' in init_values and self.stage_id and \
                self.stage_id.sequence <= 1:
            return self.env.ref('helpdesk_basic.mt_ticket_new')
        elif 'stage_id' in init_values:
            return self.env.ref('helpdesk_basic.mt_ticket_stage')
        return super(HelpdeskTicket, self)._track_subtype(init_values)

    def _message_get_suggested_recipients(self):
        recipients = super(HelpdeskTicket,
                           self)._message_get_suggested_recipients()
        try:
            for ticket in self:
                if ticket.team_id.message_follower_ids:
                    ticket.sudo()._message_add_suggested_recipient(
                        recipients,
                        partner=ticket.partner_id,
                        reason=_('Customer'))
                elif ticket.email:
                    ticket.sudo()._message_add_suggested_recipient(
                        recipients,
                        email=ticket.email,
                        reason=_('Customer Email'))
        except AccessError:
            pass
        return recipients

    def get_valid_email(self, msg):
        emails_list = []
        valid_email = tools.email_split((msg.get('to') or '') + ',' +
                                        (msg.get('cc') or ''))
        team_aliases = self.mapped('team_id.alias_name')
        for eml in valid_email:
            if eml.split('@')[0] not in team_aliases:
                emails_list += [eml]
        return emails_list

    @api.model_create_multi
    def create(self, values):
        for vals in values:
            if not vals.get('ticket_seq') or vals['ticket_seq'] == _('New'):
                vals['ticket_seq'] = self.env['ir.sequence'].next_by_code(
                    'helpdesk.ticket') or _('New')

            partner_id = vals.get('partner_id', False)
            partner_name = vals.get('email', False)
            partner_email = vals.get('email', False)
            if partner_email and partner_name and not partner_id:
                try:
                    vals['partner_id'] = self.env[
                        'res.partner'].find_or_create(
                            self.get_valid_email({
                                'to': partner_email,
                                'cc': ''
                            })[0]).id
                except UnicodeEncodeError:

                    vals['partner_id'] = self.env['res.partner'].create({
                        'name':
                        partner_name,
                        'email':
                        partner_email,
                    }).id

        partners = self.env['res.partner'].browse([
            vals['partner_id'] for vals in values if 'partner_id' in vals
            and vals.get('partner_id') and 'email' not in vals
        ])
        partner_email_map = {partner.id: partner.email for partner in partners}
        partner_name_map = {partner.id: partner.name for partner in partners}

        for vals in values:
            if vals.get('issue_type_id'):
                team = self.env['helpdesk.team'].search([
                    ('issue_type_ids', '=', int(vals.get('issue_type_id')))
                ])
                if team:
                    vals.update({'team_id': team.id})
            if vals.get('partner_id') in partner_name_map:
                vals['partner_name'] = partner_name_map.get(vals['partner_id'])

            if vals.get('team_id'):
                team = self.env['helpdesk.team'].browse(vals['team_id'])
                vals.update({
                    'stage_id':
                    team.stage_ids and team.stage_ids[0].id or False
                })

        tickets = super(HelpdeskTicket, self).create(values)
        for ticket in tickets:
            if ticket.partner_id:
                ticket.message_subscribe(partner_ids=ticket.partner_id.ids)

        return tickets

    def _track_template(self, changes):
        res = super(HelpdeskTicket, self)._track_template(changes)
        ticket = self[0]
        if ticket.stage_id.stage_type == 'draft':
            if 'stage_id' in changes and ticket.team_id.mail_template_id:
                res['team_id'] = (ticket.team_id.mail_template_id, {
                    'auto_delete_message':
                    True,
                    'subtype_id':
                    self.env['ir.model.data'].xmlid_to_res_id('mail.mt_note'),
                    'email_layout_xmlid':
                    'mail.mail_notification_light'
                })
        if ticket.stage_id.stage_type == 'done':
            if 'stage_id' in changes and ticket.team_id.mail_close_tmpl_id:
                res['team_id'] = (ticket.team_id.mail_close_tmpl_id, {
                    'auto_delete_message':
                    True,
                    'subtype_id':
                    self.env['ir.model.data'].xmlid_to_res_id('mail.mt_note'),
                    'email_layout_xmlid':
                    'mail.mail_notification_light'
                })
        return res

    @api.onchange('partner_id')
    def onchange_partner_id(self):
        if self.partner_id:
            self.email = self.partner_id.email

    @api.onchange('team_id')
    def onchange_team_id(self):
        self.user_id = False
        if self.team_id:
            self.stage_id = \
                self.team_id.stage_ids and self.team_id.stage_ids.ids[0]

    def _compute_is_accessible(self):
        has_group = self.env.user.has_group('base.group_no_one')
        for ticket in self:
            ticket.is_accessible = has_group or \
                self.env.user.partner_id.id == ticket.partner_id.id
            ticket.is_assigned = has_group or \
                self.env.user.id == ticket.user_id.id

    def _compute_attachments(self):
        for ticket in self:
            attachment_ids = self.env['ir.attachment'].search([
                ('res_model', '=', ticket._name), ('res_id', '=', ticket.id)
            ])
            ticket.attachments_count = len(attachment_ids.ids)
            ticket.attachment_ids = attachment_ids

    def _auto_rating_request_mail(self):
        template = self.env.ref(
            'helpdesk_basic.ticket_rating_mail_template')
        tickets = self.env['helpdesk.ticket'].search([])
        for ticket in tickets.filtered(
                lambda r: r.stage_id.stage_type == 'done'
                and r.team_id.is_rating):
            if not ticket.is_rating:
                template.send_mail(res_id=ticket.id, force_send=True)
            ticket.is_rating = True
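

# A runnable sketch (outside Odoo) of the 'balanced' assignment used in `default_get` above:
# count the open tickets of each team member and assign the new ticket to the member with the
# fewest. The data below is made up and `pick_balanced_member` is an illustrative name, not
# an API of the module.
def pick_balanced_member(ticket_counts):
    """ticket_counts: dict mapping user id -> number of tickets already in the team."""
    if not ticket_counts:
        return False
    lowest = min(ticket_counts.values())
    # Like the original, ties are broken by taking the first member with the lowest count.
    return next(user_id for user_id, count in ticket_counts.items() if count == lowest)


# User 7 has the fewest open tickets, so a new ticket would be assigned to them.
assert pick_balanced_member({3: 5, 7: 2, 9: 4}) == 7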
Example #21
0
class ResPartner(models.Model):
    _name = 'res.partner'
    _inherit = 'res.partner'

    @api.multi
    def _credit_debit_get(self):
        tables, where_clause, where_params = self.env[
            'account.move.line']._query_get()
        where_params = [tuple(self.ids)] + where_params
        if where_clause:
            where_clause = 'AND ' + where_clause
        self._cr.execute(
            """SELECT account_move_line.partner_id, act.type, SUM(account_move_line.amount_residual)
                      FROM account_move_line
                      LEFT JOIN account_account a ON (account_move_line.account_id=a.id)
                      LEFT JOIN account_account_type act ON (a.user_type_id=act.id)
                      WHERE act.type IN ('receivable','payable')
                      AND account_move_line.partner_id IN %s
                      AND account_move_line.reconciled IS FALSE
                      """ + where_clause + """
                      GROUP BY account_move_line.partner_id, act.type
                      """, where_params)
        for pid, type, val in self._cr.fetchall():
            partner = self.browse(pid)
            if type == 'receivable':
                partner.credit = val
            elif type == 'payable':
                partner.debit = -val

    @api.multi
    def _asset_difference_search(self, account_type, operator, operand):
        if operator not in ('<', '=', '>', '>=', '<='):
            return []
        if type(operand) not in (float, int):
            return []
        sign = 1
        if account_type == 'payable':
            sign = -1
        res = self._cr.execute(
            '''
            SELECT partner.id
            FROM res_partner partner
            LEFT JOIN account_move_line aml ON aml.partner_id = partner.id
            RIGHT JOIN account_account acc ON aml.account_id = acc.id
            WHERE acc.internal_type = %s
              AND NOT acc.deprecated
            GROUP BY partner.id
            HAVING %s * COALESCE(SUM(aml.amount_residual), 0) ''' + operator +
            ''' %s''', (account_type, sign, operand))
        res = self._cr.fetchall()
        if not res:
            return [('id', '=', '0')]
        return [('id', 'in', [r[0] for r in res])]

    @api.model
    def _credit_search(self, operator, operand):
        return self._asset_difference_search('receivable', operator, operand)

    @api.model
    def _debit_search(self, operator, operand):
        return self._asset_difference_search('payable', operator, operand)

    @api.multi
    def _invoice_total(self):
        account_invoice_report = self.env['account.invoice.report']
        if not self.ids:
            self.total_invoiced = 0.0
            return True

        user_currency_id = self.env.user.company_id.currency_id.id
        all_partners_and_children = {}
        all_partner_ids = []
        for partner in self:
            # price_total is in the company currency
            all_partners_and_children[partner] = self.with_context(
                active_test=False).search([('id', 'child_of', partner.id)]).ids
            all_partner_ids += all_partners_and_children[partner]

        # searching account.invoice.report via the orm is comparatively expensive
        # (generates queries "id in []" forcing to build the full table).
        # In simple cases where all invoices are in the same currency as the user's company,
        # access these elements directly.

        # generate where clause to include multicompany rules
        where_query = account_invoice_report._where_calc([
            ('partner_id', 'in', all_partner_ids),
            ('state', 'not in', ['draft', 'cancel']),
            ('type', 'in', ('out_invoice', 'out_refund'))
        ])
        account_invoice_report._apply_ir_rules(where_query, 'read')
        from_clause, where_clause, where_clause_params = where_query.get_sql()

        # price_total is in the company currency
        query = """
                  SELECT SUM(price_total) as total, partner_id
                    FROM account_invoice_report account_invoice_report
                   WHERE %s
                   GROUP BY partner_id
                """ % where_clause
        self.env.cr.execute(query, where_clause_params)
        price_totals = self.env.cr.dictfetchall()
        for partner, child_ids in all_partners_and_children.items():
            partner.total_invoiced = sum(price['total']
                                         for price in price_totals
                                         if price['partner_id'] in child_ids)

    @api.multi
    def _compute_journal_item_count(self):
        AccountMoveLine = self.env['account.move.line']
        for partner in self:
            partner.journal_item_count = AccountMoveLine.search_count([
                ('partner_id', '=', partner.id)
            ])

    @api.multi
    def _compute_contracts_count(self):
        AccountAnalyticAccount = self.env['account.analytic.account']
        for partner in self:
            partner.contracts_count = AccountAnalyticAccount.search_count([
                ('partner_id', '=', partner.id)
            ])

    def get_followup_lines_domain(self,
                                  date,
                                  overdue_only=False,
                                  only_unblocked=False):
        domain = [('reconciled', '=', False),
                  ('account_id.deprecated', '=', False),
                  ('account_id.internal_type', '=', 'receivable'), '|',
                  ('debit', '!=', 0), ('credit', '!=', 0),
                  ('company_id', '=', self.env.user.company_id.id)]
        if only_unblocked:
            domain += [('blocked', '=', False)]
        if self.ids:
            if 'exclude_given_ids' in self._context:
                domain += [('partner_id', 'not in', self.ids)]
            else:
                domain += [('partner_id', 'in', self.ids)]
        #adding the overdue lines
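        # The overdue domain below is written in Odoo's prefix notation: "|" combines the two
        # "&" groups, so it means (date_maturity is set AND date_maturity < date) OR
        # (date_maturity is unset AND the move date < date).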
        overdue_domain = [
            '|', '&', ('date_maturity', '!=', False),
            ('date_maturity', '<', date), '&', ('date_maturity', '=', False),
            ('date', '<', date)
        ]
        if overdue_only:
            domain += overdue_domain
        return domain

    @api.one
    def _compute_has_unreconciled_entries(self):
        # Avoid useless work if has_unreconciled_entries is not relevant for this partner
        if not self.active or not self.is_company and self.parent_id:
            return
        self.env.cr.execute(
            """ SELECT 1 FROM(
                    SELECT
                        p.last_time_entries_checked AS last_time_entries_checked,
                        MAX(l.write_date) AS max_date
                    FROM
                        account_move_line l
                        RIGHT JOIN account_account a ON (a.id = l.account_id)
                        RIGHT JOIN res_partner p ON (l.partner_id = p.id)
                    WHERE
                        p.id = %s
                        AND EXISTS (
                            SELECT 1
                            FROM account_move_line l
                            WHERE l.account_id = a.id
                            AND l.partner_id = p.id
                            AND l.amount_residual > 0
                        )
                        AND EXISTS (
                            SELECT 1
                            FROM account_move_line l
                            WHERE l.account_id = a.id
                            AND l.partner_id = p.id
                            AND l.amount_residual < 0
                        )
                    GROUP BY p.last_time_entries_checked
                ) as s
                WHERE (last_time_entries_checked IS NULL OR max_date > last_time_entries_checked)
            """, (self.id, ))
        self.has_unreconciled_entries = self.env.cr.rowcount == 1

    @api.multi
    def mark_as_reconciled(self):
        self.env['account.partial.reconcile'].check_access_rights('write')
        return self.sudo().with_context(
            company_id=self.env.user.company_id.id).write({
                'last_time_entries_checked':
                time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)
            })

    @api.one
    def _get_company_currency(self):
        if self.company_id:
            self.currency_id = self.sudo().company_id.currency_id
        else:
            self.currency_id = self.env.user.company_id.currency_id

    credit = fields.Monetary(compute='_credit_debit_get',
                             search=_credit_search,
                             string='Total Receivable',
                             help="Total amount this customer owes you.")
    debit = fields.Monetary(
        compute='_credit_debit_get',
        search=_debit_search,
        string='Total Payable',
        help="Total amount you have to pay to this vendor.")
    debit_limit = fields.Monetary('Payable Limit')
    total_invoiced = fields.Monetary(compute='_invoice_total',
                                     string="Total Invoiced",
                                     groups='account.group_account_invoice')
    currency_id = fields.Many2one(
        'res.currency',
        compute='_get_company_currency',
        readonly=True,
        string="Currency",
        help='Utility field to express amount currency')
    contracts_count = fields.Integer(compute='_compute_contracts_count',
                                     string="Contracts",
                                     type='integer')
    journal_item_count = fields.Integer(compute='_compute_journal_item_count',
                                        string="Journal Items",
                                        type="integer")
    property_account_payable_id = fields.Many2one(
        'account.account',
        company_dependent=True,
        string="Account Payable",
        oldname="property_account_payable",
        domain=
        "[('internal_type', '=', 'payable'), ('deprecated', '=', False)]",
        help=
        "This account will be used instead of the default one as the payable account for the current partner",
        required=True)
    property_account_receivable_id = fields.Many2one(
        'account.account',
        company_dependent=True,
        string="Account Receivable",
        oldname="property_account_receivable",
        domain=
        "[('internal_type', '=', 'receivable'), ('deprecated', '=', False)]",
        help=
        "This account will be used instead of the default one as the receivable account for the current partner",
        required=True)
    property_account_position_id = fields.Many2one(
        'account.fiscal.position',
        company_dependent=True,
        string="Fiscal Position",
        help=
        "The fiscal position will determine taxes and accounts used for the partner.",
        oldname="property_account_position")
    property_payment_term_id = fields.Many2one(
        'account.payment.term',
        company_dependent=True,
        string='Customer Payment Terms',
        help=
        "This payment term will be used instead of the default one for sales orders and customer invoices",
        oldname="property_payment_term")
    property_supplier_payment_term_id = fields.Many2one(
        'account.payment.term',
        company_dependent=True,
        string='Vendor Payment Terms',
        help=
        "This payment term will be used instead of the default one for purchase orders and vendor bills",
        oldname="property_supplier_payment_term")
    ref_company_ids = fields.One2many(
        'res.company',
        'partner_id',
        string='Companies that refer to this partner',
        oldname="ref_companies")
    has_unreconciled_entries = fields.Boolean(
        compute='_compute_has_unreconciled_entries',
        help=
        "The partner has at least one unreconciled debit and credit since last time the invoices & payments matching was performed."
    )
    last_time_entries_checked = fields.Datetime(
        oldname='last_reconciliation_date',
        string='Latest Invoices & Payments Matching Date',
        readonly=True,
        copy=False,
        help=
        'Last time the invoices & payments matching was performed for this partner. '
        'It is set either if there\'s not at least an unreconciled debit and an unreconciled credit '
        'or if you click the "Done" button.')
    invoice_ids = fields.One2many('account.invoice',
                                  'partner_id',
                                  string='Invoices',
                                  readonly=True,
                                  copy=False)
    contract_ids = fields.One2many('account.analytic.account',
                                   'partner_id',
                                   string='Contracts',
                                   readonly=True)
    bank_account_count = fields.Integer(compute='_compute_bank_count',
                                        string="Bank")
    trust = fields.Selection([('good', 'Good Debtor'),
                              ('normal', 'Normal Debtor'),
                              ('bad', 'Bad Debtor')],
                             string='Degree of trust you have in this debtor',
                             default='normal',
                             company_dependent=True)
    invoice_warn = fields.Selection(WARNING_MESSAGE,
                                    'Invoice',
                                    help=WARNING_HELP,
                                    required=True,
                                    default="no-message")
    invoice_warn_msg = fields.Text('Message for Invoice')

    @api.multi
    def _compute_bank_count(self):
        bank_data = self.env['res.partner.bank'].read_group(
            [('partner_id', 'in', self.ids)], ['partner_id'], ['partner_id'])
        mapped_data = dict([(bank['partner_id'][0], bank['partner_id_count'])
                            for bank in bank_data])
        for partner in self:
            partner.bank_account_count = mapped_data.get(partner.id, 0)

    def _find_accounting_partner(self, partner):
        ''' Find the partner for which the accounting entries will be created '''
        return partner.commercial_partner_id

    @api.model
    def _commercial_fields(self):
        return super(ResPartner, self)._commercial_fields() + \
            ['debit_limit', 'property_account_payable_id', 'property_account_receivable_id', 'property_account_position_id',
             'property_payment_term_id', 'property_supplier_payment_term_id', 'last_time_entries_checked']

    @api.multi
    def action_view_partner_invoices(self):
        self.ensure_one()
        action = self.env.ref(
            'account.action_invoice_refund_out_tree').read()[0]
        action['domain'] = literal_eval(action['domain'])
        action['domain'].append(('partner_id', 'child_of', self.id))
        return action

    @api.onchange('company_id')
    def _onchange_company_id(self):
        company = self.company_id or self.env.user.company_id
        return {
            'domain': {
                'property_account_position_id':
                [('company_id', 'in', [company.id, False])]
            }
        }
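# Hedged usage sketch, not part of the source above: it assumes an Odoo shell
# where `env` is available and at least one partner exists, and it only calls
# what the partner accounting extension above defines.
partner = env['res.partner'].search([], limit=1)
# journal entries are always booked on the commercial entity of the partner
assert env['res.partner']._find_accounting_partner(partner) == partner.commercial_partner_id
# bank_account_count is computed in batch with a single read_group on res.partner.bank
print(partner.bank_account_count)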
Example #22
0
class Property(models.Model):
    _name = 'ir.property'
    _description = 'Company Property'

    name = fields.Char(index=True)
    res_id = fields.Char(
        string='Resource',
        index=True,
        help="If not set, acts as a default value for new resources",
    )
    company_id = fields.Many2one('res.company', string='Company', index=True)
    fields_id = fields.Many2one('ir.model.fields',
                                string='Field',
                                ondelete='cascade',
                                required=True)
    value_float = fields.Float()
    value_integer = fields.Integer()
    value_text = fields.Text()  # will contain (char, text)
    value_binary = fields.Binary(attachment=False)
    value_reference = fields.Char()
    value_datetime = fields.Datetime()
    type = fields.Selection([
        ('char', 'Char'),
        ('float', 'Float'),
        ('boolean', 'Boolean'),
        ('integer', 'Integer'),
        ('text', 'Text'),
        ('binary', 'Binary'),
        ('many2one', 'Many2One'),
        ('date', 'Date'),
        ('datetime', 'DateTime'),
        ('selection', 'Selection'),
    ],
                            required=True,
                            default='many2one',
                            index=True)

    def init(self):
        # Ensure there is at most one active variant for each combination.
        query = """
            CREATE UNIQUE INDEX IF NOT EXISTS ir_property_unique_index
            ON %s (fields_id, COALESCE(company_id, 0), COALESCE(res_id, ''))
        """
        self.env.cr.execute(query % self._table)

    def _update_values(self, values):
        if 'value' not in values:
            return values
        value = values.pop('value')

        prop = None
        type_ = values.get('type')
        if not type_:
            if self:
                prop = self[0]
                type_ = prop.type
            else:
                type_ = self._fields['type'].default(self)

        field = TYPE2FIELD.get(type_)
        if not field:
            raise UserError(_('Invalid type'))

        if field == 'value_reference':
            if not value:
                value = False
            elif isinstance(value, models.BaseModel):
                value = '%s,%d' % (value._name, value.id)
            elif isinstance(value, int):
                field_id = values.get('fields_id')
                if not field_id:
                    if not prop:
                        raise ValueError()
                    field_id = prop.fields_id
                else:
                    field_id = self.env['ir.model.fields'].browse(field_id)

                value = '%s,%d' % (field_id.sudo().relation, value)

        values[field] = value
        return values

    def write(self, values):
        # if any of the records we're writing on has a res_id=False *or*
        # we're writing a res_id=False on any record
        default_set = False
        if self._ids:
            self.env.cr.execute(
                'SELECT EXISTS (SELECT 1 FROM ir_property WHERE id in %s AND res_id IS NULL)',
                [self._ids])
            # EXISTS returns exactly one row; read its boolean result instead
            # of relying on rowcount (which would always be 1 here)
            default_set = self.env.cr.fetchone()[0] or values.get(
                'res_id') is False
        r = super(Property, self).write(self._update_values(values))
        if default_set:
            # DLE P44: test `test_27_company_dependent`
            # Easy solution, need to flush write when changing a property.
            # Maybe it would be better to be able to compute all impacted cache value and update those instead
            # Then clear_caches must be removed as well.
            self.flush()
            self.clear_caches()
        return r

    @api.model_create_multi
    def create(self, vals_list):
        vals_list = [self._update_values(vals) for vals in vals_list]
        created_default = any(not v.get('res_id') for v in vals_list)
        r = super(Property, self).create(vals_list)
        if created_default:
            # DLE P44: test `test_27_company_dependent`
            self.flush()
            self.clear_caches()
        return r

    def unlink(self):
        default_deleted = False
        if self._ids:
            self.env.cr.execute(
                'SELECT EXISTS (SELECT 1 FROM ir_property WHERE id in %s AND res_id IS NULL)',
                [self._ids])
            default_deleted = self.env.cr.fetchone()[0]
        r = super().unlink()
        if default_deleted:
            self.clear_caches()
        return r

    def get_by_record(self):
        self.ensure_one()
        if self.type in ('char', 'text', 'selection'):
            return self.value_text
        elif self.type == 'float':
            return self.value_float
        elif self.type == 'boolean':
            return bool(self.value_integer)
        elif self.type == 'integer':
            return self.value_integer
        elif self.type == 'binary':
            return self.value_binary
        elif self.type == 'many2one':
            if not self.value_reference:
                return False
            model, resource_id = self.value_reference.split(',')
            return self.env[model].browse(int(resource_id)).exists()
        elif self.type == 'datetime':
            return self.value_datetime
        elif self.type == 'date':
            if not self.value_datetime:
                return False
            return fields.Date.to_string(
                fields.Datetime.from_string(self.value_datetime))
        return False

    @api.model
    def _set_default(self, name, model, value, company=False):
        """ Set the given field's generic value for the given company.

        :param name: the field's name
        :param model: the field's model name
        :param value: the field's value
        :param company: the company (record or id)
        """
        field_id = self.env['ir.model.fields']._get(model, name).id
        company_id = int(company) if company else False
        prop = self.sudo().search([
            ('fields_id', '=', field_id),
            ('company_id', '=', company_id),
            ('res_id', '=', False),
        ])
        if prop:
            prop.write({'value': value})
        else:
            prop.create({
                'fields_id': field_id,
                'company_id': company_id,
                'res_id': False,
                'name': name,
                'value': value,
                'type': self.env[model]._fields[name].type,
            })

    @api.model
    def _get(self, name, model, res_id=False):
        """ Get the given field's generic value for the record.

        :param name: the field's name
        :param model: the field's model name
        :param res_id: optional resource, format: "<id>" (int) or
                       "<model>,<id>" (str)
        """
        if not res_id:
            t, v = self._get_default_property(name, model)
            if not v or t != 'many2one':
                return v
            return self.env[v[0]].browse(v[1])

        p = self._get_property(name, model, res_id=res_id)
        if p:
            return p.get_by_record()
        return False

    # only cache Property._get(res_id=False), i.e. the default-value lookup;
    # caching per-record lookups would be sub-optimal.
    COMPANY_KEY = "self.env.company.id"

    @ormcache(COMPANY_KEY, 'name', 'model')
    def _get_default_property(self, name, model):
        prop = self._get_property(name, model, res_id=False)
        if not prop:
            return None, False
        v = prop.get_by_record()
        if prop.type != 'many2one':
            return prop.type, v
        return 'many2one', v and (v._name, v.id)

    def _get_property(self, name, model, res_id):
        domain = self._get_domain(name, model)
        if domain is not None:
            if res_id and isinstance(res_id, int):
                res_id = "%s,%s" % (model, res_id)
            domain = [('res_id', '=', res_id)] + domain
            # make the search with company_id asc to make sure that properties
            # specific to a company are given first
            return self.sudo().search(domain, limit=1, order='company_id')
        return self.sudo().browse(())

    def _get_domain(self, prop_name, model):
        field_id = self.env['ir.model.fields']._get(model, prop_name).id
        if not field_id:
            return None
        company_id = self.env.company.id
        return [('fields_id', '=', field_id),
                ('company_id', 'in', [company_id, False])]

    @api.model
    def _get_multi(self, name, model, ids):
        """ Read the property field `name` for the records of model `model` with
            the given `ids`, and return a dictionary mapping `ids` to their
            corresponding value.
        """
        if not ids:
            return {}

        field = self.env[model]._fields[name]
        field_id = self.env['ir.model.fields']._get(model, name).id
        company_id = self.env.company.id

        if field.type == 'many2one':
            comodel = self.env[field.comodel_name]
            model_pos = len(model) + 2
            value_pos = len(comodel._name) + 2
            # retrieve values: both p.res_id and p.value_reference are formatted
            # as "<rec._name>,<rec.id>"; the purpose of the LEFT JOIN is to
            # return the value id if it exists, NULL otherwise
            query = """
                SELECT substr(p.res_id, %s)::integer, r.id
                FROM ir_property p
                LEFT JOIN {} r ON substr(p.value_reference, %s)::integer=r.id
                WHERE p.fields_id=%s
                    AND (p.company_id=%s OR p.company_id IS NULL)
                    AND (p.res_id IN %s OR p.res_id IS NULL)
                ORDER BY p.company_id NULLS FIRST
            """.format(comodel._table)
            params = [model_pos, value_pos, field_id, company_id]
            clean = comodel.browse

        elif field.type in TYPE2FIELD:
            model_pos = len(model) + 2
            # retrieve values: p.res_id is formatted as "<rec._name>,<rec.id>"
            query = """
                SELECT substr(p.res_id, %s)::integer, p.{}
                FROM ir_property p
                WHERE p.fields_id=%s
                    AND (p.company_id=%s OR p.company_id IS NULL)
                    AND (p.res_id IN %s OR p.res_id IS NULL)
                ORDER BY p.company_id NULLS FIRST
            """.format(TYPE2FIELD[field.type])
            params = [model_pos, field_id, company_id]
            clean = TYPE2CLEAN[field.type]

        else:
            return dict.fromkeys(ids, False)

        # retrieve values
        cr = self.env.cr
        result = {}
        refs = {"%s,%s" % (model, id) for id in ids}
        for sub_refs in cr.split_for_in_conditions(refs):
            cr.execute(query, params + [sub_refs])
            result.update(cr.fetchall())

        # determine all values and format them
        default = result.get(None, None)
        return {id: clean(result.get(id, default)) for id in ids}

    @api.model
    def _set_multi(self, name, model, values, default_value=None):
        """ Assign the property field `name` for the records of model `model`
            with `values` (dictionary mapping record ids to their value).
            If the value for a given record is the same as the default
            value, the property entry will not be stored, to avoid bloating
            the database.
            If `default_value` is provided, that value will be used instead
            of the computed default value, to determine whether the value
            for a record should be stored or not.
        """
        def clean(value):
            return value.id if isinstance(value, models.BaseModel) else value

        if not values:
            return

        if default_value is None:
            domain = self._get_domain(name, model)
            if domain is None:
                raise Exception()
            # retrieve the default value for the field
            default_value = clean(self._get(name, model))

        # retrieve the properties corresponding to the given record ids
        field_id = self.env['ir.model.fields']._get(model, name).id
        company_id = self.env.company.id
        refs = {('%s,%s' % (model, id)): id for id in values}
        props = self.sudo().search([
            ('fields_id', '=', field_id),
            ('company_id', '=', company_id),
            ('res_id', 'in', list(refs)),
        ])

        # modify existing properties
        for prop in props:
            id = refs.pop(prop.res_id)
            value = clean(values[id])
            if value == default_value:
                # avoid prop.unlink(), as it clears the record cache that can
                # contain the value of other properties to set on record!
                self._cr.execute("DELETE FROM ir_property WHERE id=%s",
                                 [prop.id])
            elif value != clean(prop.get_by_record()):
                prop.write({'value': value})

        # create new properties for records that do not have one yet
        vals_list = []
        for ref, id in refs.items():
            value = clean(values[id])
            if value != default_value:
                vals_list.append({
                    'fields_id': field_id,
                    'company_id': company_id,
                    'res_id': ref,
                    'name': name,
                    'value': value,
                    'type': self.env[model]._fields[name].type,
                })
        self.sudo().create(vals_list)

    @api.model
    def search_multi(self, name, model, operator, value):
        """ Return a domain for the records that match the given condition. """
        default_matches = False
        negate = False

        # For "is set" and "is not set", same logic for all types
        if operator == 'in' and False in value:
            operator = 'not in'
            negate = True
        elif operator == 'not in' and False not in value:
            operator = 'in'
            negate = True
        elif operator in ('!=', 'not like', 'not ilike') and value:
            operator = TERM_OPERATORS_NEGATION[operator]
            negate = True
        elif operator == '=' and not value:
            operator = '!='
            negate = True

        field = self.env[model]._fields[name]

        if field.type == 'many2one':

            def makeref(value):
                return value and f'{field.comodel_name},{value}'

            if operator in ('=', '!=', '<=', '<', '>', '>='):
                value = makeref(value)
            elif operator in ('in', 'not in'):
                value = [makeref(v) for v in value]
            elif operator in ('=like', '=ilike', 'like', 'not like', 'ilike',
                              'not ilike'):
                # most probably inefficient... but correct
                target = self.env[field.comodel_name]
                target_names = target.name_search(value,
                                                  operator=operator,
                                                  limit=None)
                target_ids = [n[0] for n in target_names]
                operator, value = 'in', [makeref(v) for v in target_ids]

        elif field.type in ('integer', 'float'):
            # No record is created in ir.property if the field's type is float or integer with a value
            # equal to 0. So, to match the records linked to a property equal to 0, the negation of the
            # operator must be taken to compute the good ids, and the domain returned to match the
            # searched records is just the opposite.
            value = float(value) if field.type == 'float' else int(value)
            if operator == '>=' and value <= 0:
                operator = '<'
                negate = True
            elif operator == '>' and value < 0:
                operator = '<='
                negate = True
            elif operator == '<=' and value >= 0:
                operator = '>'
                negate = True
            elif operator == '<' and value > 0:
                operator = '>='
                negate = True

        elif field.type == 'boolean':
            # the value must be mapped to an integer value
            value = int(value)

        # retrieve the properties that match the condition
        domain = self._get_domain(name, model)
        if domain is None:
            raise Exception()
        props = self.search(domain +
                            [(TYPE2FIELD[field.type], operator, value)])

        # retrieve the records corresponding to the properties that match
        good_ids = []
        for prop in props:
            if prop.res_id:
                __, res_id = prop.res_id.split(',')
                good_ids.append(int(res_id))
            else:
                default_matches = True

        if default_matches:
            # exclude all records with a property that does not match
            props = self.search(domain + [('res_id', '!=', False)])
            all_ids = {
                int(res_id.split(',')[1])
                for res_id in props.mapped('res_id')
            }
            bad_ids = list(all_ids - set(good_ids))
            if negate:
                return [('id', 'in', bad_ids)]
            else:
                return [('id', 'not in', bad_ids)]
        elif negate:
            return [('id', 'not in', good_ids)]
        else:
            return [('id', 'in', good_ids)]
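# Hedged usage sketch, not part of the source above: it assumes an Odoo shell
# with `env` available and uses only the ir.property helpers defined above.
# The records below are whatever the database happens to contain.
Property = env['ir.property']
partners = env['res.partner'].search([], limit=3)
term = env['account.payment.term'].search([], limit=1)
# company-wide default for a company-dependent field of res.partner
Property._set_default('property_payment_term_id', 'res.partner', term)
# per-record values; entries equal to the default are simply not stored
Property._set_multi('property_payment_term_id', 'res.partner',
                    {p.id: term for p in partners})
# _get_multi maps record ids to their resolved values in one query
print(Property._get_multi('property_payment_term_id', 'res.partner',
                          partners.ids))
# the generic (default) value is read with _get and no res_id
print(Property._get('property_payment_term_id', 'res.partner'))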
Example #23
0
class MassMailingContact(models.Model):
    """Model of a contact. This model is different from the partner model
    because it holds only some basic information: name, email. The purpose is to
    be able to deal with large contact list to email without bloating the partner
    base."""
    _name = 'mail.mass_mailing.contact'
    _inherit = 'mail.thread'
    _description = 'Mass Mailing Contact'
    _order = 'email'
    _rec_name = 'email'

    name = fields.Char()
    company_name = fields.Char(string='Company Name')
    title_id = fields.Many2one('res.partner.title', string='Title')
    email = fields.Char(required=True)
    create_date = fields.Datetime(string='Creation Date')
    list_ids = fields.Many2many('mail.mass_mailing.list',
                                'mail_mass_mailing_contact_list_rel',
                                'contact_id',
                                'list_id',
                                string='Mailing Lists')
    opt_out = fields.Boolean(
        string='Opt Out',
        help=
        'The contact has chosen not to receive mails anymore from this list')
    unsubscription_date = fields.Datetime(string='Unsubscription Date')
    message_bounce = fields.Integer(
        string='Bounced',
        help='Counter of the number of bounced emails for this contact.',
        default=0)
    country_id = fields.Many2one('res.country', string='Country')
    tag_ids = fields.Many2many('res.partner.category', string='Tags')

    @api.model
    def create(self, vals):
        if 'opt_out' in vals:
            vals['unsubscription_date'] = vals[
                'opt_out'] and fields.Datetime.now()
        return super(MassMailingContact, self).create(vals)

    @api.multi
    def write(self, vals):
        if 'opt_out' in vals:
            vals['unsubscription_date'] = vals[
                'opt_out'] and fields.Datetime.now()
        return super(MassMailingContact, self).write(vals)

    def get_name_email(self, name):
        name, email = self.env['res.partner']._parse_partner_name(name)
        if name and not email:
            email = name
        if email and not name:
            name = email
        return name, email

    @api.model
    def name_create(self, name):
        name, email = self.get_name_email(name)
        contact = self.create({'name': name, 'email': email})
        return contact.name_get()[0]

    @api.model
    def add_to_list(self, name, list_id):
        name, email = self.get_name_email(name)
        contact = self.create({
            'name': name,
            'email': email,
            'list_ids': [(4, list_id)]
        })
        return contact.name_get()[0]

    @api.multi
    def message_get_default_recipients(self):
        return dict((record.id, {
            'partner_ids': [],
            'email_to': record.email,
            'email_cc': False
        }) for record in self)
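# Hedged usage sketch, not part of the source above: it assumes an Odoo shell
# with `env` and an existing mailing list; the email addresses are made up.
mailing_list = env['mail.mass_mailing.list'].search([], limit=1)
Contact = env['mail.mass_mailing.contact']
# name_create parses "Name <email>" through res.partner and keeps both parts
contact_id, display_name = Contact.name_create('Jane Doe <jane@example.com>')
# add_to_list creates the contact and links it to the list ((4, id) = link)
Contact.add_to_list('John Doe <john@example.com>', mailing_list.id)
# writing opt_out also stamps unsubscription_date (see write() above)
Contact.browse(contact_id).write({'opt_out': True})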
Example #24
0
class IrAttachment(models.Model):
    """Attachments are used to link binary files or url to any openerp document.

    External attachment storage
    ---------------------------

    The computed field ``datas`` is implemented using ``_file_read``,
    ``_file_write`` and ``_file_delete``, which can be overridden to implement
    other storage engines. Such methods should check for other location pseudo
    uri (example: hdfs://hadoopserver).

    The default implementation is the file:dirname location that stores files
    on the local filesystem using name based on their sha1 hash
    """
    _name = 'ir.attachment'
    _order = 'id desc'

    @api.depends('res_model', 'res_id')
    def _compute_res_name(self):
        for attachment in self:
            if attachment.res_model and attachment.res_id:
                record = self.env[attachment.res_model].browse(
                    attachment.res_id)
                attachment.res_name = record.display_name

    @api.model
    def _storage(self):
        return self.env['ir.config_parameter'].sudo().get_param(
            'ir_attachment.location', 'file')

    @api.model
    def _filestore(self):
        return config.filestore(self._cr.dbname)

    @api.model
    def force_storage(self):
        """Force all attachments to be stored in the currently configured storage"""
        if not self.env.user._is_admin():
            raise AccessError(
                _('Only administrators can execute this action.'))

        # domain to retrieve the attachments to migrate
        domain = {
            'db': [('store_fname', '!=', False)],
            'file': [('db_datas', '!=', False)],
        }[self._storage()]

        for attach in self.search(domain):
            attach.write({'datas': attach.datas})
        return True

    @api.model
    def _full_path(self, path):
        # sanitize path
        path = re.sub('[.]', '', path)
        path = path.strip('/\\')
        return os.path.join(self._filestore(), path)

    @api.model
    def _get_path(self, bin_data, sha):
        # retro compatibility
        fname = sha[:3] + '/' + sha
        full_path = self._full_path(fname)
        if os.path.isfile(full_path):
            return fname, full_path  # keep existing path

        # scatter files across 256 dirs
        # we use '/' in the db (even on windows)
        fname = sha[:2] + '/' + sha
        full_path = self._full_path(fname)
        dirname = os.path.dirname(full_path)
        if not os.path.isdir(dirname):
            os.makedirs(dirname)
        return fname, full_path

    @api.model
    def _file_read(self, fname, bin_size=False):
        full_path = self._full_path(fname)
        r = ''
        try:
            if bin_size:
                r = human_size(os.path.getsize(full_path))
            else:
                r = base64.b64encode(open(full_path, 'rb').read())
        except (IOError, OSError):
            _logger.info("_read_file reading %s", full_path, exc_info=True)
        return r

    @api.model
    def _file_write(self, value, checksum):
        bin_value = base64.b64decode(value)
        fname, full_path = self._get_path(bin_value, checksum)
        if not os.path.exists(full_path):
            try:
                with open(full_path, 'wb') as fp:
                    fp.write(bin_value)
                # add fname to checklist, in case the transaction aborts
                self._mark_for_gc(fname)
            except IOError:
                _logger.info("_file_write writing %s",
                             full_path,
                             exc_info=True)
        return fname

    @api.model
    def _file_delete(self, fname):
        # simply add fname to checklist, it will be garbage-collected later
        self._mark_for_gc(fname)

    def _mark_for_gc(self, fname):
        """ Add ``fname`` in a checklist for the filestore garbage collection. """
        # we use a spooldir: add an empty file in the subdirectory 'checklist'
        full_path = os.path.join(self._full_path('checklist'), fname)
        if not os.path.exists(full_path):
            dirname = os.path.dirname(full_path)
            if not os.path.isdir(dirname):
                with tools.ignore(OSError):
                    os.makedirs(dirname)
            open(full_path, 'ab').close()

    @api.model
    def _file_gc(self):
        """ Perform the garbage collection of the filestore. """
        if self._storage() != 'file':
            return

        # Continue in a new transaction. The LOCK statement below must be the
        # first one in the current transaction, otherwise the database snapshot
        # used by it may not contain the most recent changes made to the table
        # ir_attachment! Indeed, if concurrent transactions create attachments,
        # the LOCK statement will wait until those concurrent transactions end.
        # But this transaction will not see the new attachments if it has done
        # other requests before the LOCK (like the method _storage() above).
        cr = self._cr
        cr.commit()

        # prevent all concurrent updates on ir_attachment while collecting!
        cr.execute("LOCK ir_attachment IN SHARE MODE")

        # retrieve the file names from the checklist
        checklist = {}
        for dirpath, _, filenames in os.walk(self._full_path('checklist')):
            dirname = os.path.basename(dirpath)
            for filename in filenames:
                fname = "%s/%s" % (dirname, filename)
                checklist[fname] = os.path.join(dirpath, filename)

        # determine which files to keep among the checklist
        whitelist = set()
        for names in cr.split_for_in_conditions(checklist):
            cr.execute(
                "SELECT store_fname FROM ir_attachment WHERE store_fname IN %s",
                [names])
            whitelist.update(row[0] for row in cr.fetchall())

        # remove garbage files, and clean up checklist
        removed = 0
        for fname, filepath in checklist.items():
            if fname not in whitelist:
                try:
                    os.unlink(self._full_path(fname))
                    removed += 1
                except (OSError, IOError):
                    _logger.info("_file_gc could not unlink %s",
                                 self._full_path(fname),
                                 exc_info=True)
            with tools.ignore(OSError):
                os.unlink(filepath)

        # commit to release the lock
        cr.commit()
        _logger.info("filestore gc %d checked, %d removed", len(checklist),
                     removed)

    @api.depends('store_fname', 'db_datas')
    def _compute_datas(self):
        bin_size = self._context.get('bin_size')
        for attach in self:
            if attach.store_fname:
                attach.datas = self._file_read(attach.store_fname, bin_size)
            else:
                attach.datas = attach.db_datas

    def _inverse_datas(self):
        location = self._storage()
        for attach in self:
            # compute the fields that depend on datas
            value = attach.datas
            bin_data = base64.b64decode(value) if value else b''
            vals = {
                'file_size':
                len(bin_data),
                'checksum':
                self._compute_checksum(bin_data),
                'index_content':
                self._index(bin_data, attach.datas_fname, attach.mimetype),
                'store_fname':
                False,
                'db_datas':
                value,
            }
            if value and location != 'db':
                # save it to the filestore
                vals['store_fname'] = self._file_write(value, vals['checksum'])
                vals['db_datas'] = False

            # take current location in filestore to possibly garbage-collect it
            fname = attach.store_fname
            # write as superuser, as user probably does not have write access
            super(IrAttachment, attach.sudo()).write(vals)
            if fname:
                self._file_delete(fname)

    def _compute_checksum(self, bin_data):
        """ compute the checksum for the given datas
            :param bin_data : datas in its binary form
        """
        # an empty file has a checksum too (for caching)
        return hashlib.sha1(bin_data or b'').hexdigest()

    def _compute_mimetype(self, values):
        """ compute the mimetype of the given values
            :param values : dict of values to create or write an ir_attachment
            :return mime : string indicating the mimetype, or application/octet-stream by default
        """
        mimetype = None
        if values.get('mimetype'):
            mimetype = values['mimetype']
        if not mimetype and values.get('datas_fname'):
            mimetype = mimetypes.guess_type(values['datas_fname'])[0]
        if not mimetype and values.get('url'):
            mimetype = mimetypes.guess_type(values['url'])[0]
        if values.get('datas') and (not mimetype
                                    or mimetype == 'application/octet-stream'):
            mimetype = guess_mimetype(base64.b64decode(values['datas']))
        return mimetype or 'application/octet-stream'

    def _check_contents(self, values):
        mimetype = values['mimetype'] = self._compute_mimetype(values)
        xml_like = 'ht' in mimetype or 'xml' in mimetype  # hta, html, xhtml, etc.
        force_text = (xml_like and
                      (not self.env.user._is_admin()
                       or self.env.context.get('attachments_mime_plainxml')))
        if force_text:
            values['mimetype'] = 'text/plain'
        return values

    @api.model
    def _index(self, bin_data, datas_fname, file_type):
        """ compute the index content of the given filename, or binary data.
            This is a python implementation of the unix command 'strings'.
            :param bin_data : datas in binary form
            :return index_content : string containing all the printable characters of the binary data
        """
        index_content = False
        if file_type:
            index_content = file_type.split('/')[0]
            if index_content == 'text':  # compute index_content only for text type
                words = re.findall(b"[\x20-\x7E]{4,}", bin_data)
                index_content = b"\n".join(words).decode('ascii')
        return index_content

    name = fields.Char('Attachment Name', required=True)
    datas_fname = fields.Char('File Name')
    description = fields.Text('Description')
    res_name = fields.Char('Resource Name',
                           compute='_compute_res_name',
                           store=True)
    res_model = fields.Char(
        'Resource Model',
        readonly=True,
        help="The database object this attachment will be attached to.")
    res_field = fields.Char('Resource Field', readonly=True)
    res_id = fields.Integer('Resource ID',
                            readonly=True,
                            help="The record id this is attached to.")
    create_date = fields.Datetime('Date Created', readonly=True)
    create_uid = fields.Many2one('res.users', string='Owner', readonly=True)
    company_id = fields.Many2one('res.company',
                                 string='Company',
                                 change_default=True,
                                 default=lambda self: self.env['res.company'].
                                 _company_default_get('ir.attachment'))
    type = fields.Selection(
        [('url', 'URL'), ('binary', 'File')],
        string='Type',
        required=True,
        default='binary',
        change_default=True,
        help=
        "You can either upload a file from your computer or copy/paste an internet link to your file."
    )
    url = fields.Char('Url', index=True, size=1024)
    public = fields.Boolean('Is public document')

    # for external access
    access_token = fields.Char('Access Token')

    # the field 'datas' is computed and may use the other fields below
    datas = fields.Binary(string='File Content',
                          compute='_compute_datas',
                          inverse='_inverse_datas')
    db_datas = fields.Binary('Database Data')
    store_fname = fields.Char('Stored Filename')
    file_size = fields.Integer('File Size', readonly=True)
    checksum = fields.Char("Checksum/SHA1", size=40, index=True, readonly=True)
    mimetype = fields.Char('Mime Type', readonly=True)
    index_content = fields.Text('Indexed Content',
                                readonly=True,
                                prefetch=False)

    @api.model_cr_context
    def _auto_init(self):
        res = super(IrAttachment, self)._auto_init()
        tools.create_index(self._cr, 'ir_attachment_res_idx', self._table,
                           ['res_model', 'res_id'])
        return res

    @api.model
    def check(self, mode, values=None):
        """Restricts the access to an ir.attachment, according to referred model
        In the 'document' module, it is overriden to relax this hard rule, since
        more complex ones apply there.
        """
        # collect the records to check (by model)
        model_ids = defaultdict(set)  # {model_name: set(ids)}
        require_employee = False
        if self:
            self._cr.execute(
                'SELECT res_model, res_id, create_uid, public FROM ir_attachment WHERE id IN %s',
                [tuple(self.ids)])
            for res_model, res_id, create_uid, public in self._cr.fetchall():
                if public and mode == 'read':
                    continue
                if not (res_model and res_id):
                    if create_uid != self._uid:
                        require_employee = True
                    continue
                model_ids[res_model].add(res_id)
        if values and values.get('res_model') and values.get('res_id'):
            model_ids[values['res_model']].add(values['res_id'])

        # check access rights on the records
        for res_model, res_ids in model_ids.items():
            # ignore attachments that are not attached to a resource anymore
            # when checking access rights (resource was deleted but attachment
            # was not)
            if res_model not in self.env:
                require_employee = True
                continue
            records = self.env[res_model].browse(res_ids).exists()
            if len(records) < len(res_ids):
                require_employee = True
            # For related models, check if we can write to the model, as unlinking
            # and creating attachments can be seen as an update to the model
            records.check_access_rights('write' if mode in (
                'create', 'unlink') else mode)
            records.check_access_rule(mode)

        if require_employee:
            if not (self.env.user._is_admin()
                    or self.env.user.has_group('base.group_user')):
                raise AccessError(
                    _("Sorry, you are not allowed to access this document."))

    @api.model
    def _search(self,
                args,
                offset=0,
                limit=None,
                order=None,
                count=False,
                access_rights_uid=None):
        # add res_field=False in domain if not present; the arg[0] trick below
        # works for domain items and '&'/'|'/'!' operators too
        if not any(arg[0] in ('id', 'res_field') for arg in args):
            args.insert(0, ('res_field', '=', False))

        ids = super(IrAttachment,
                    self)._search(args,
                                  offset=offset,
                                  limit=limit,
                                  order=order,
                                  count=False,
                                  access_rights_uid=access_rights_uid)

        if self._uid == SUPERUSER_ID:
            # rules do not apply for the superuser
            return len(ids) if count else ids

        if not ids:
            return 0 if count else []

        # Work with a set, as list.remove() is prohibitive for large lists of documents
        # (takes 20+ seconds on a db with 100k docs during search_count()!)
        orig_ids = ids
        ids = set(ids)

        # For attachments, the permissions of the document they are attached to
        # apply, so we must remove attachments for which the user cannot access
        # the linked document.
        # Use pure SQL rather than read() as it is about 50% faster for large dbs (100k+ docs),
        # and the permissions are checked in super() and below anyway.
        model_attachments = defaultdict(
            lambda: defaultdict(set))  # {res_model: {res_id: set(ids)}}
        self._cr.execute(
            """SELECT id, res_model, res_id, public FROM ir_attachment WHERE id IN %s""",
            [tuple(ids)])
        for row in self._cr.dictfetchall():
            if not row['res_model'] or row['public']:
                continue
            # model_attachments = {res_model: {res_id: set(ids)}}
            model_attachments[row['res_model']][row['res_id']].add(row['id'])

        # To avoid multiple queries for each attachment found, checks are
        # performed in batch as much as possible.
        for res_model, targets in model_attachments.items():
            if res_model not in self.env:
                continue
            if not self.env[res_model].check_access_rights('read', False):
                # remove all corresponding attachment ids
                ids.difference_update(itertools.chain(*targets.values()))
                continue
            # filter ids according to what access rules permit
            target_ids = list(targets)
            allowed = self.env[res_model].with_context(
                active_test=False).search([('id', 'in', target_ids)])
            for res_id in set(target_ids).difference(allowed.ids):
                ids.difference_update(targets[res_id])

        # sort result according to the original sort ordering
        result = [id for id in orig_ids if id in ids]
        return len(result) if count else list(result)

    @api.multi
    def read(self, fields=None, load='_classic_read'):
        self.check('read')
        return super(IrAttachment, self).read(fields, load=load)

    @api.multi
    def write(self, vals):
        self.check('write', values=vals)
        # remove computed fields depending on datas
        for field in ('file_size', 'checksum'):
            vals.pop(field, False)
        if 'mimetype' in vals or 'datas' in vals:
            vals = self._check_contents(vals)
        return super(IrAttachment, self).write(vals)

    @api.multi
    def copy(self, default=None):
        self.check('write')
        return super(IrAttachment, self).copy(default)

    @api.multi
    def unlink(self):
        self.check('unlink')

        # First delete in the database, *then* in the filesystem if the
        # database allowed it. Helps avoid errors when concurrent transactions
        # are deleting the same file, and some of the transactions are
        # rolled back by PostgreSQL (due to concurrent updates detection).
        to_delete = set(attach.store_fname for attach in self
                        if attach.store_fname)
        res = super(IrAttachment, self).unlink()
        for file_path in to_delete:
            self._file_delete(file_path)

        return res

    @api.model
    def create(self, values):
        # remove computed fields depending on datas
        for field in ('file_size', 'checksum'):
            values.pop(field, False)
        values = self._check_contents(values)
        self.browse().check('write', values=values)
        return super(IrAttachment, self).create(values)

    @api.model
    def action_get(self):
        return self.env['ir.actions.act_window'].for_xml_id(
            'base', 'action_attachment')
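# Hedged sketch, not part of the source above: the class docstring says other
# storage engines can be plugged in by overriding _file_read, _file_write and
# _file_delete. The backend below is purely hypothetical; the my_blob_* helpers
# do not exist anywhere and only mark where a real engine would be called.
class IrAttachmentBlobStore(models.Model):
    _inherit = 'ir.attachment'

    @api.model
    def _file_read(self, fname, bin_size=False):
        if fname.startswith('blob://'):
            data = my_blob_read(fname)  # hypothetical helper
            return human_size(len(data)) if bin_size else base64.b64encode(data)
        return super(IrAttachmentBlobStore, self)._file_read(fname, bin_size)

    @api.model
    def _file_write(self, value, checksum):
        fname = 'blob://%s' % checksum
        my_blob_write(fname, base64.b64decode(value))  # hypothetical helper
        return fname

    @api.model
    def _file_delete(self, fname):
        if fname.startswith('blob://'):
            my_blob_delete(fname)  # hypothetical helper
        else:
            super(IrAttachmentBlobStore, self)._file_delete(fname)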
Example #25
0
class ResPartner(models.Model):
    _inherit = 'res.partner'

    signup_token = fields.Char(copy=False, groups="base.group_erp_manager")
    signup_type = fields.Char(string='Signup Token Type',
                              copy=False,
                              groups="base.group_erp_manager")
    signup_expiration = fields.Datetime(copy=False,
                                        groups="base.group_erp_manager")
    signup_valid = fields.Boolean(compute='_compute_signup_valid',
                                  string='Signup Token is Valid')
    signup_url = fields.Char(compute='_compute_signup_url',
                             string='Signup URL')

    @api.depends('signup_token', 'signup_expiration')
    def _compute_signup_valid(self):
        dt = now()
        for partner, partner_sudo in zip(self, self.sudo()):
            partner.signup_valid = bool(partner_sudo.signup_token) and \
            (not partner_sudo.signup_expiration or dt <= partner_sudo.signup_expiration)

    def _compute_signup_url(self):
        """ proxy for function field towards actual implementation """
        result = self.sudo()._get_signup_url_for_action()
        for partner in self:
            if any(
                    u.has_group('base.group_user') for u in partner.user_ids
                    if u != self.env.user):
                self.env['res.users'].check_access_rights('write')
            partner.signup_url = result.get(partner.id, False)

    def _get_signup_url_for_action(self,
                                   url=None,
                                   action=None,
                                   view_type=None,
                                   menu_id=None,
                                   res_id=None,
                                   model=None):
        """ generate a signup url for the given partner ids and action, possibly overriding
            the url state components (menu_id, id, view_type) """

        res = dict.fromkeys(self.ids, False)
        for partner in self:
            base_url = partner.get_base_url()
            # when required, make sure the partner has a valid signup token
            if self.env.context.get('signup_valid') and not partner.user_ids:
                partner.sudo().signup_prepare()

            route = 'login'
            # the parameters to encode for the query
            query = dict(db=self.env.cr.dbname)
            signup_type = self.env.context.get(
                'signup_force_type_in_url',
                partner.sudo().signup_type or '')
            if signup_type:
                route = 'reset_password' if signup_type == 'reset' else signup_type

            if partner.sudo().signup_token and signup_type:
                query['token'] = partner.sudo().signup_token
            elif partner.user_ids:
                query['login'] = partner.user_ids[0].login
            else:
                continue  # no signup token, no user, thus no signup url!

            if url:
                query['redirect'] = url
            else:
                fragment = dict()
                base = '/web#'
                if action == '/mail/view':
                    base = '/mail/view?'
                elif action:
                    fragment['action'] = action
                if view_type:
                    fragment['view_type'] = view_type
                if menu_id:
                    fragment['menu_id'] = menu_id
                if model:
                    fragment['model'] = model
                if res_id:
                    fragment['res_id'] = res_id

                if fragment:
                    query['redirect'] = base + werkzeug.urls.url_encode(
                        fragment)

            url = "/web/%s?%s" % (route, werkzeug.urls.url_encode(query))
            if not self.env.context.get('relative_url'):
                url = werkzeug.urls.url_join(base_url, url)
            res[partner.id] = url

        return res

    def action_signup_prepare(self):
        return self.signup_prepare()

    def signup_get_auth_param(self):
        """ Get a signup token related to the partner if signup is enabled.
            If the partner already has a user, get the login parameter.
        """
        if not self.env.user.has_group(
                'base.group_user') and not self.env.is_admin():
            raise exceptions.AccessDenied()

        res = defaultdict(dict)

        allow_signup = self.env['res.users']._get_signup_invitation_scope(
        ) == 'b2c'
        for partner in self:
            partner = partner.sudo()
            if allow_signup and not partner.user_ids:
                partner.signup_prepare()
                res[partner.id]['auth_signup_token'] = partner.signup_token
            elif partner.user_ids:
                res[partner.id]['auth_login'] = partner.user_ids[0].login
        return res

    def signup_cancel(self):
        return self.write({
            'signup_token': False,
            'signup_type': False,
            'signup_expiration': False
        })

    def signup_prepare(self, signup_type="signup", expiration=False):
        """ generate a new token for the partners with the given validity, if necessary
            :param expiration: the expiration datetime of the token (string, optional)
        """
        for partner in self:
            if expiration or not partner.signup_valid:
                token = random_token()
                while self._signup_retrieve_partner(token):
                    token = random_token()
                partner.write({
                    'signup_token': token,
                    'signup_type': signup_type,
                    'signup_expiration': expiration
                })
        return True

    @api.model
    def _signup_retrieve_partner(self,
                                 token,
                                 check_validity=False,
                                 raise_exception=False):
        """ find the partner corresponding to a token, and possibly check its validity
            :param token: the token to resolve
            :param check_validity: if True, also check validity
            :param raise_exception: if True, raise exception instead of returning False
            :return: partner (browse record) or False (if raise_exception is False)
        """
        partner = self.search([('signup_token', '=', token)], limit=1)
        if not partner:
            if raise_exception:
                raise exceptions.UserError(
                    _("Signup token '%s' is not valid", token))
            return False
        if check_validity and not partner.signup_valid:
            if raise_exception:
                raise exceptions.UserError(
                    _("Signup token '%s' is no longer valid", token))
            return False
        return partner

    @api.model
    def signup_retrieve_info(self, token):
        """ retrieve the user info about the token
            :return: a dictionary with the user information:
                - 'db': the name of the database
                - 'token': the token, if token is valid
                - 'name': the name of the partner, if token is valid
                - 'login': the user login, if the user already exists
                - 'email': the partner email, if the user does not exist
        """
        partner = self._signup_retrieve_partner(token, raise_exception=True)
        res = {'db': self.env.cr.dbname}
        if partner.signup_valid:
            res['token'] = token
            res['name'] = partner.name
        if partner.user_ids:
            res['login'] = partner.user_ids[0].login
        else:
            res['email'] = res['login'] = partner.email or ''
        return res
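# Hedged usage sketch, not part of the source above: assumes an Odoo shell with
# `env` (superuser), so the restricted signup_* fields are readable; it only
# calls the signup helpers defined in the class above.
partner = env['res.partner'].search([('email', '!=', False)], limit=1)
# generate a token (only regenerated if no valid one exists, unless an expiration is passed)
partner.sudo().signup_prepare(signup_type='reset')
# build the URL embedding that token; relative_url keeps it host-relative
url = partner.sudo().with_context(relative_url=True)._get_signup_url_for_action()[partner.id]
# resolve the token back to the partner and the info the signup page needs
info = env['res.partner'].signup_retrieve_info(partner.sudo().signup_token)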
Example #26
0
class HrAttendance(models.Model):
    _name = "hr.attendance"
    _description = "Attendance"
    _order = "check_in desc"

    def _default_employee(self):
        return self.env['hr.employee'].search([('user_id', '=', self.env.uid)],
                                              limit=1)

    employee_id = fields.Many2one('hr.employee',
                                  string="Employee",
                                  default=_default_employee,
                                  required=True,
                                  ondelete='cascade',
                                  index=True)
    department_id = fields.Many2one('hr.department',
                                    string="Department",
                                    related="employee_id.department_id",
                                    readonly=True)
    check_in = fields.Datetime(string="Check In",
                               default=fields.Datetime.now,
                               required=True)
    check_out = fields.Datetime(string="Check Out")
    worked_hours = fields.Float(string='Worked Hours',
                                compute='_compute_worked_hours',
                                store=True,
                                readonly=True)

    @api.multi
    def name_get(self):
        result = []
        for attendance in self:
            if not attendance.check_out:
                result.append(
                    (attendance.id, _("%(empl_name)s from %(check_in)s") % {
                        'empl_name':
                        attendance.employee_id.name,
                        'check_in':
                        fields.Datetime.to_string(
                            fields.Datetime.context_timestamp(
                                attendance,
                                fields.Datetime.from_string(
                                    attendance.check_in))),
                    }))
            else:
                result.append(
                    (attendance.id,
                     _("%(empl_name)s from %(check_in)s to %(check_out)s") % {
                         'empl_name':
                         attendance.employee_id.name,
                         'check_in':
                         fields.Datetime.to_string(
                             fields.Datetime.context_timestamp(
                                 attendance,
                                 fields.Datetime.from_string(
                                     attendance.check_in))),
                         'check_out':
                         fields.Datetime.to_string(
                             fields.Datetime.context_timestamp(
                                 attendance,
                                 fields.Datetime.from_string(
                                     attendance.check_out))),
                     }))
        return result

    @api.depends('check_in', 'check_out')
    def _compute_worked_hours(self):
        for attendance in self:
            if attendance.check_out:
                delta = datetime.strptime(
                    attendance.check_out,
                    DEFAULT_SERVER_DATETIME_FORMAT) - datetime.strptime(
                        attendance.check_in, DEFAULT_SERVER_DATETIME_FORMAT)
                attendance.worked_hours = delta.total_seconds() / 3600.0

    @api.constrains('check_in', 'check_out')
    def _check_validity_check_in_check_out(self):
        """ verifies if check_in is earlier than check_out. """
        for attendance in self:
            if attendance.check_in and attendance.check_out:
                if attendance.check_out < attendance.check_in:
                    raise exceptions.ValidationError(
                        _('"Check Out" time cannot be earlier than "Check In" time.'
                          ))

    @api.constrains('check_in', 'check_out', 'employee_id')
    def _check_validity(self):
        """ Verifies the validity of the attendance record compared to the others from the same employee.
            For the same employee we must have :
                * maximum 1 "open" attendance record (without check_out)
                * no overlapping time slices with previous employee records
        """
        for attendance in self:
            # we take the latest attendance before our check_in time and check it doesn't overlap with ours
            last_attendance_before_check_in = self.env['hr.attendance'].search(
                [
                    ('employee_id', '=', attendance.employee_id.id),
                    ('check_in', '<=', attendance.check_in),
                    ('id', '!=', attendance.id),
                ],
                order='check_in desc',
                limit=1)
            if last_attendance_before_check_in and last_attendance_before_check_in.check_out and last_attendance_before_check_in.check_out > attendance.check_in:
                raise exceptions.ValidationError(
                    _("Cannot create new attendance record for %(empl_name)s, the employee was already checked in on %(datetime)s"
                      ) % {
                          'empl_name':
                          attendance.employee_id.name,
                          'datetime':
                          fields.Datetime.to_string(
                              fields.Datetime.context_timestamp(
                                  self,
                                  fields.Datetime.from_string(
                                      attendance.check_in))),
                      })

            if not attendance.check_out:
                # if our attendance is "open" (no check_out), we verify there is no other "open" attendance
                no_check_out_attendances = self.env['hr.attendance'].search([
                    ('employee_id', '=', attendance.employee_id.id),
                    ('check_out', '=', False),
                    ('id', '!=', attendance.id),
                ])
                if no_check_out_attendances:
                    raise exceptions.ValidationError(
                        _("Cannot create new attendance record for %(empl_name)s, the employee hasn't checked out since %(datetime)s"
                          ) % {
                              'empl_name':
                              attendance.employee_id.name,
                              'datetime':
                              fields.Datetime.to_string(
                                  fields.Datetime.context_timestamp(
                                      self,
                                      fields.Datetime.from_string(
                                          no_check_out_attendances.check_in))),
                          })
            else:
                # we verify that the latest attendance with check_in time before our check_out time
                # is the same as the one before our check_in time computed before, otherwise it overlaps
                last_attendance_before_check_out = self.env[
                    'hr.attendance'].search([
                        ('employee_id', '=', attendance.employee_id.id),
                        ('check_in', '<', attendance.check_out),
                        ('id', '!=', attendance.id),
                    ],
                                            order='check_in desc',
                                            limit=1)
                if last_attendance_before_check_out and last_attendance_before_check_in != last_attendance_before_check_out:
                    raise exceptions.ValidationError(
                        _("Cannot create new attendance record for %(empl_name)s, the employee was already checked in on %(datetime)s"
                          ) % {
                              'empl_name':
                              attendance.employee_id.name,
                              'datetime':
                              fields.Datetime.to_string(
                                  fields.Datetime.context_timestamp(
                                      self,
                                      fields.Datetime.from_string(
                                          last_attendance_before_check_out.
                                          check_in))),
                          })

    @api.multi
    def copy(self):
        raise exceptions.UserError(_('You cannot duplicate an attendance.'))
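
# A minimal, illustrative sketch (the `employee` record and the datetimes are
# assumptions, not part of the module) of what _check_validity() above rejects:
#
#   env['hr.attendance'].create({
#       'employee_id': employee.id,
#       'check_in': '2021-01-04 08:00:00',   # "open" attendance, no check_out
#   })
#   env['hr.attendance'].create({
#       'employee_id': employee.id,          # raises ValidationError: the employee
#       'check_in': '2021-01-04 09:00:00',   # hasn't checked out since 08:00
#   })
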
class PaymentTransaction(models.Model):
    """ Transaction Model. Each specific acquirer can extend the model by adding
    its own fields.

    Methods that can be added in an acquirer-specific implementation:

     - ``<name>_create``: method receiving values used when creating a new
       transaction and that returns a dictionary that will update those values.
       This method can be used to tweak some transaction values.

    Methods defined by convention, depending on your controllers:

     - ``<name>_form_feedback(self, data)``: method that handles the data coming
       back from the acquirer after the transaction; it generally receives the
       data posted by the acquirer once the transaction is complete.
       (An illustrative acquirer-specific sketch follows this class.)
    """
    _name = 'payment.transaction'
    _description = 'Payment Transaction'
    _order = 'id desc'
    _rec_name = 'reference'

    @api.model
    def _lang_get(self):
        return self.env['res.lang'].get_installed()

    @api.model
    def _get_default_partner_country_id(self):
        return self.env['res.company']._company_default_get(
            'payment.transaction').country_id.id

    create_date = fields.Datetime('Creation Date', readonly=True)
    date_validate = fields.Datetime('Validation Date')
    acquirer_id = fields.Many2one('payment.acquirer',
                                  'Acquirer',
                                  required=True)
    provider = fields.Selection(string='Provider',
                                related='acquirer_id.provider')
    type = fields.Selection([('validation', 'Validation of the bank card'),
                             ('server2server', 'Server To Server'),
                             ('form', 'Form'),
                             ('form_save', 'Form with tokenization')],
                            'Type',
                            default='form',
                            required=True)
    state = fields.Selection([('draft', 'Draft'), ('pending', 'Pending'),
                              ('authorized', 'Authorized'), ('done', 'Done'),
                              ('refunding', 'Refunding'),
                              ('refunded', 'Refunded'), ('error', 'Error'),
                              ('cancel', 'Canceled')],
                             'Status',
                             copy=False,
                             default='draft',
                             required=True,
                             track_visibility='onchange')
    state_message = fields.Text(
        'Message',
        help=
        'Field used to store error and/or validation messages for information')
    # payment
    amount = fields.Float('Amount',
                          digits=(16, 2),
                          required=True,
                          track_visibility='always',
                          help='Amount')
    fees = fields.Float(
        'Fees',
        digits=(16, 2),
        track_visibility='always',
        help='Fees amount; set by the system because depends on the acquirer')
    currency_id = fields.Many2one('res.currency', 'Currency', required=True)
    reference = fields.Char('Reference',
                            default=lambda self: self.env['ir.sequence'].
                            next_by_code('payment.transaction'),
                            required=True,
                            help='Internal reference of the TX')
    acquirer_reference = fields.Char(
        'Acquirer Reference',
        help='Reference of the TX as stored in the acquirer database')
    # duplicate partner / transaction data to store the values at transaction time
    partner_id = fields.Many2one('res.partner',
                                 'Customer',
                                 track_visibility='onchange')
    partner_name = fields.Char('Partner Name')
    partner_lang = fields.Selection(_lang_get,
                                    'Language',
                                    default=lambda self: self.env.lang)
    partner_email = fields.Char('Email')
    partner_zip = fields.Char('Zip')
    partner_address = fields.Char('Address')
    partner_city = fields.Char('City')
    partner_country_id = fields.Many2one(
        'res.country',
        'Country',
        default=_get_default_partner_country_id,
        required=True)
    partner_phone = fields.Char('Phone')
    html_3ds = fields.Char('3D Secure HTML')

    callback_model_id = fields.Many2one('ir.model',
                                        'Callback Document Model',
                                        groups="base.group_system")
    callback_res_id = fields.Integer('Callback Document ID',
                                     groups="base.group_system")
    callback_method = fields.Char('Callback Method',
                                  groups="base.group_system")
    callback_hash = fields.Char('Callback Hash', groups="base.group_system")

    payment_token_id = fields.Many2one(
        'payment.token',
        'Payment Token',
        domain="[('acquirer_id', '=', acquirer_id)]")

    @api.onchange('partner_id')
    def _onchange_partner_id(self):
        onchange_vals = self.on_change_partner_id(self.partner_id.id).get(
            'value', {})
        self.update(onchange_vals)

    @api.multi
    def on_change_partner_id(self, partner_id):
        partner = None
        if partner_id:
            partner = self.env['res.partner'].browse(partner_id)
            return {
                'value': {
                    'partner_name':
                    partner and partner.name or False,
                    'partner_lang':
                    partner and partner.lang or 'en_US',
                    'partner_email':
                    partner and partner.email or False,
                    'partner_zip':
                    partner and partner.zip or False,
                    'partner_address':
                    _partner_format_address(partner and partner.street or '',
                                            partner and partner.street2 or ''),
                    'partner_city':
                    partner and partner.city or False,
                    'partner_country_id':
                    partner and partner.country_id.id
                    or self._get_default_partner_country_id(),
                    'partner_phone':
                    partner and partner.phone or False,
                }
            }
        return {}

    @api.constrains('reference', 'state')
    def _check_reference(self):
        for transaction in self.filtered(lambda tx: tx.state not in
                                         ('cancel', 'error')):
            if self.search_count([('reference', '=', transaction.reference)
                                  ]) != 1:
                raise exceptions.ValidationError(
                    _('The payment transaction reference must be unique!'))
        return True

    @api.constrains('state', 'acquirer_id')
    def _check_authorize_state(self):
        failed_tx = self.filtered(
            lambda tx: tx.state == 'authorized' and tx.acquirer_id.
            provider not in self.env['payment.acquirer']._get_feature_support(
            )['authorize'])
        if failed_tx:
            raise exceptions.ValidationError(
                _('The %s payment acquirers do not support manual capture mode!')
                % failed_tx.mapped('acquirer_id.name'))

    @api.model
    def create(self, values):
        if values.get('partner_id'):  # @TDENOTE: not sure
            values.update(
                self.on_change_partner_id(values['partner_id'])['value'])

        # call custom create method if defined (i.e. ogone_create for ogone)
        if values.get('acquirer_id'):
            acquirer = self.env['payment.acquirer'].browse(
                values['acquirer_id'])

            # compute fees
            custom_method_name = '%s_compute_fees' % acquirer.provider
            if hasattr(acquirer, custom_method_name):
                fees = getattr(acquirer, custom_method_name)(
                    values.get('amount', 0.0), values.get('currency_id'),
                    values.get('partner_country_id'))
                values['fees'] = float_round(fees, 2)

            # custom create
            custom_method_name = '%s_create' % acquirer.provider
            if hasattr(acquirer, custom_method_name):
                values.update(getattr(self, custom_method_name)(values))

        # The default value of reference is the id of the newly created transaction
        tx = super(PaymentTransaction, self).create(values)
        if not values.get('reference'):
            tx.write({'reference': str(tx.id)})

        # Generate callback hash if it is configured on the tx; avoid generating unnecessary stuff
        # (limited sudo env for checking callback presence, must work for manual transactions too)
        tx_sudo = tx.sudo()
        if tx_sudo.callback_model_id and tx_sudo.callback_res_id and tx_sudo.callback_method:
            tx.write({'callback_hash': tx._generate_callback_hash()})

        return tx

    @api.multi
    def write(self, values):
        if ('acquirer_id' in values
                or 'amount' in values) and 'fees' not in values:
            # The acquirer or the amount has changed, and the fees are not explicitly forced. Fees must be recomputed.
            acquirer = None
            if values.get('acquirer_id'):
                acquirer = self.env['payment.acquirer'].browse(
                    values['acquirer_id'])
            for tx in self:
                vals = dict(values, fees=0.0)
                if not acquirer:
                    acquirer = tx.acquirer_id
                custom_method_name = '%s_compute_fees' % acquirer.provider
                # TDE FIXME: shouldn't we use fee_implemented ?
                if hasattr(acquirer, custom_method_name):
                    fees = getattr(acquirer, custom_method_name)(
                        (values['amount'] if 'amount' in values else tx.amount)
                        or 0.0, values.get('currency_id') or tx.currency_id.id,
                        values.get('partner_country_id')
                        or tx.partner_country_id.id)
                    vals['fees'] = float_round(fees, 2)
                res = super(PaymentTransaction, tx).write(vals)
            return res
        return super(PaymentTransaction, self).write(values)

    @api.model
    def get_next_reference(self, reference):
        return self._get_next_reference(reference)

    @api.model
    def _get_next_reference(self, reference, acquirer=None):
        ref_suffix = 1
        init_ref = reference
        while self.env['payment.transaction'].sudo().search_count([
            ('reference', '=', reference)
        ]):
            reference = init_ref + 'x' + str(ref_suffix)
            ref_suffix += 1
        return reference

    def _generate_callback_hash(self):
        self.ensure_one()
        secret = self.env['ir.config_parameter'].sudo().get_param(
            'database.secret')
        token = '%s%s%s' % (self.callback_model_id.model, self.callback_res_id,
                            self.sudo().callback_method)
        return hmac.new(secret.encode('utf-8'), token.encode('utf-8'),
                        hashlib.sha256).hexdigest()

    # --------------------------------------------------
    # FORM RELATED METHODS
    # --------------------------------------------------

    @api.multi
    def render(self):
        values = {
            'reference': self.reference,
            'amount': self.amount,
            'currency_id': self.currency_id.id,
            'currency': self.currency_id,
            'partner': self.partner_id,
            'partner_name': self.partner_name,
            'partner_lang': self.partner_lang,
            'partner_email': self.partner_email,
            'partner_zip': self.partner_zip,
            'partner_address': self.partner_address,
            'partner_city': self.partner_city,
            'partner_country_id': self.partner_country_id.id,
            'partner_country': self.partner_country_id,
            'partner_phone': self.partner_phone,
            'partner_state': None,
        }
        return self.acquirer_id.render(None, None, None, values=values)

    @api.model
    def form_feedback(self, data, acquirer_name):
        invalid_parameters, tx = None, None

        tx_find_method_name = '_%s_form_get_tx_from_data' % acquirer_name
        if hasattr(self, tx_find_method_name):
            tx = getattr(self, tx_find_method_name)(data)

        # TDE TODO: form_get_invalid_parameters from model to multi
        invalid_param_method_name = '_%s_form_get_invalid_parameters' % acquirer_name
        if hasattr(self, invalid_param_method_name):
            invalid_parameters = getattr(tx, invalid_param_method_name)(data)

        if invalid_parameters:
            _error_message = '%s: incorrect tx data:\n' % (acquirer_name)
            for item in invalid_parameters:
                _error_message += '\t%s: received %s instead of %s\n' % (
                    item[0], item[1], item[2])
            _logger.error(_error_message)
            return False

        # TDE TODO: form_validate from model to multi
        feedback_method_name = '_%s_form_validate' % acquirer_name
        if hasattr(self, feedback_method_name):
            return getattr(tx, feedback_method_name)(data)

        return True

    @api.multi
    def _post_process_after_done(self, **kwargs):
        return True

    # --------------------------------------------------
    # SERVER2SERVER RELATED METHODS
    # --------------------------------------------------

    @api.multi
    def s2s_do_transaction(self, **kwargs):
        custom_method_name = '%s_s2s_do_transaction' % self.acquirer_id.provider
        if hasattr(self, custom_method_name):
            return getattr(self, custom_method_name)(**kwargs)

    @api.multi
    def s2s_do_refund(self, **kwargs):
        custom_method_name = '%s_s2s_do_refund' % self.acquirer_id.provider
        if hasattr(self, custom_method_name):
            return getattr(self, custom_method_name)(**kwargs)

    @api.multi
    def s2s_capture_transaction(self, **kwargs):
        custom_method_name = '%s_s2s_capture_transaction' % self.acquirer_id.provider
        if hasattr(self, custom_method_name):
            return getattr(self, custom_method_name)(**kwargs)

    @api.multi
    def s2s_void_transaction(self, **kwargs):
        custom_method_name = '%s_s2s_void_transaction' % self.acquirer_id.provider
        if hasattr(self, custom_method_name):
            return getattr(self, custom_method_name)(**kwargs)

    @api.multi
    def s2s_get_tx_status(self):
        """ Get the tx status. """
        invalid_param_method_name = '_%s_s2s_get_tx_status' % self.acquirer_id.provider
        if hasattr(self, invalid_param_method_name):
            return getattr(self, invalid_param_method_name)()
        return True

    @api.multi
    def execute_callback(self):
        res = None
        for transaction in self:
            # limited sudo env, only for checking callback presence, not for running it!
            # manual transactions have no callback, and can pass without being run by admin user
            tx_sudo = transaction.sudo()
            if not (tx_sudo.callback_model_id and tx_sudo.callback_res_id
                    and tx_sudo.callback_method):
                continue

            valid_token = transaction._generate_callback_hash()
            if not consteq(ustr(valid_token), transaction.callback_hash):
                _logger.warning(
                    "Invalid callback signature for transaction %d" %
                    (transaction.id))
                continue

            record = self.env[transaction.callback_model_id.model].browse(
                transaction.callback_res_id).exists()
            if record:
                res = getattr(record, transaction.callback_method)(transaction)
            else:
                _logger.warning(
                    "Did not found record %s.%s for callback of transaction %d"
                    % (transaction.callback_model_id.model,
                       transaction.callback_res_id, transaction.id))
        return res

    @api.multi
    def action_capture(self):
        if any(self.mapped(lambda tx: tx.state != 'authorized')):
            raise ValidationError(
                _('Only transactions in the Authorized status can be captured.'
                  ))
        for tx in self:
            tx.s2s_capture_transaction()

    @api.multi
    def action_void(self):
        if any(self.mapped(lambda tx: tx.state != 'authorized')):
            raise ValidationError(
                _('Only transactions in the Authorized status can be voided.'))
        for tx in self:
            tx.s2s_void_transaction()
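

# A hedged, illustrative sketch of an acquirer-specific extension following
# the hook convention documented in the class docstring above and dispatched
# to by create() and form_feedback(). The provider name 'dummy' and every
# value below are assumptions, not a real acquirer module.
class PaymentTransactionDummy(models.Model):
    _inherit = 'payment.transaction'

    def dummy_create(self, values):
        # Called from create() through '<provider>_create'; the returned dict
        # updates the values used to create the transaction.
        return {'state_message': 'Transaction created through the dummy acquirer'}

    def _dummy_form_get_tx_from_data(self, data):
        # Called from form_feedback() to find the transaction matching the
        # data posted back by the acquirer.
        return self.search([('reference', '=', data.get('reference'))], limit=1)

    def _dummy_form_get_invalid_parameters(self, data):
        # Return a list of (parameter, received, expected) tuples; an empty
        # list means the posted data is consistent with the transaction.
        invalid_parameters = []
        if data.get('reference') != self.reference:
            invalid_parameters.append(
                ('reference', data.get('reference'), self.reference))
        return invalid_parameters

    def _dummy_form_validate(self, data):
        # Map the acquirer status to the transaction state.
        if data.get('status') == 'done':
            return self.write({'state': 'done',
                               'date_validate': fields.Datetime.now()})
        return self.write({'state': 'error',
                           'state_message': data.get('status')})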
Example #28
0
class HrWorkEntry(models.Model):
    _name = 'hr.work.entry'
    _description = 'HR Work Entry'
    _order = 'conflict desc,state,date_start'

    name = fields.Char(required=True)
    active = fields.Boolean(default=True)
    employee_id = fields.Many2one(
        'hr.employee',
        required=True,
        domain=
        "['|', ('company_id', '=', False), ('company_id', '=', company_id)]")
    date_start = fields.Datetime(required=True, string='From')
    date_stop = fields.Datetime(compute='_compute_date_stop',
                                store=True,
                                readonly=False,
                                string='To')
    duration = fields.Float(compute='_compute_duration',
                            store=True,
                            string="Period")
    work_entry_type_id = fields.Many2one('hr.work.entry.type')
    color = fields.Integer(related='work_entry_type_id.color', readonly=True)
    state = fields.Selection([('draft', 'Draft'), ('validated', 'Validated'),
                              ('conflict', 'Conflict'),
                              ('cancelled', 'Cancelled')],
                             default='draft')
    company_id = fields.Many2one('res.company',
                                 string='Company',
                                 readonly=True,
                                 required=True,
                                 default=lambda self: self.env.company)
    conflict = fields.Boolean(
        'Conflicts', compute='_compute_conflict',
        store=True)  # Used to show conflicting work entries first

    _sql_constraints = [
        ('_work_entry_has_end', 'check (date_stop IS NOT NULL)',
         'Work entry must end. Please define an end date or a duration.'),
        ('_work_entry_start_before_end', 'check (date_stop > date_start)',
         'Starting time should be before end time.')
    ]

    @api.depends('state')
    def _compute_conflict(self):
        for rec in self:
            rec.conflict = rec.state == 'conflict'

    @api.depends('date_stop', 'date_start')
    def _compute_duration(self):
        for work_entry in self:
            work_entry.duration = work_entry._get_duration(
                work_entry.date_start, work_entry.date_stop)

    @api.depends('date_start', 'duration')
    def _compute_date_stop(self):
        for work_entry in self.filtered(lambda w: w.date_start and w.duration):
            work_entry.date_stop = work_entry.date_start + relativedelta(
                hours=work_entry.duration)

    def _get_duration(self, date_start, date_stop):
        if not date_start or not date_stop:
            return 0
        dt = date_stop - date_start
        return dt.days * 24 + dt.seconds / 3600  # Number of hours

    def action_validate(self):
        """
        Try to validate work entries.
        If some errors are found, set `state` to conflict for conflicting work entries
        and validation fails.
        :return: True if validation succeded
        """
        work_entries = self.filtered(
            lambda work_entry: work_entry.state != 'validated')
        if not work_entries._check_if_error():
            work_entries.write({'state': 'validated'})
            return True
        return False

    def _check_if_error(self):
        if not self:
            return False
        undefined_type = self.filtered(lambda b: not b.work_entry_type_id)
        undefined_type.write({'state': 'conflict'})
        conflict = self._mark_conflicting_work_entries(
            min(self.mapped('date_start')), max(self.mapped('date_stop')))
        return undefined_type or conflict

    @api.model
    def _mark_conflicting_work_entries(self, start, stop):
        """
        Set `state` to `conflict` for overlapping work entries
        between two dates.
        Return True if overlapping work entries were detected.
        """
        # Use the postgresql range type `tsrange`, which is a range of timestamps.
        # It supports the overlap operator (&&), useful to detect overlaps.
        # Use '()' to exclude the lower and upper bounds of the range.
        # Filter on date_start and date_stop (both indexed) in the EXISTS clause to
        # limit the resulting set size and speed up the query.
        self.flush(['date_start', 'date_stop', 'employee_id', 'active'])
        query = """
            SELECT b1.id
            FROM hr_work_entry b1
            WHERE
            b1.date_start <= %s
            AND b1.date_stop >= %s
            AND active = TRUE
            AND EXISTS (
                SELECT 1
                FROM hr_work_entry b2
                WHERE
                    b2.date_start <= %s
                    AND b2.date_stop >= %s
                    AND active = TRUE
                    AND tsrange(b1.date_start, b1.date_stop, '()') && tsrange(b2.date_start, b2.date_stop, '()')
                    AND b1.id <> b2.id
                    AND b1.employee_id = b2.employee_id
            );
        """
        self.env.cr.execute(query, (stop, start, stop, start))
        conflicts = [res.get('id') for res in self.env.cr.dictfetchall()]
        self.browse(conflicts).write({
            'state': 'conflict',
        })
        return bool(conflicts)

    @api.model_create_multi
    def create(self, vals_list):
        work_entries = super().create(vals_list)
        work_entries._check_if_error()
        return work_entries

    def write(self, vals):
        skip_check = not bool({
            'date_start', 'date_stop', 'employee_id', 'work_entry_type_id',
            'active'
        } & vals.keys())
        if 'state' in vals:
            if vals['state'] == 'draft':
                vals['active'] = True
            elif vals['state'] == 'cancelled':
                vals['active'] = False
                skip_check &= all(self.mapped(lambda w: w.state != 'conflict'))

        if 'active' in vals:
            vals['state'] = 'draft' if vals['active'] else 'cancelled'

        with self._error_checking(skip=skip_check):
            return super(HrWorkEntry, self).write(vals)

    def unlink(self):
        with self._error_checking():
            return super().unlink()

    def _reset_conflicting_state(self):
        self.filtered(lambda w: w.state == 'conflict').write(
            {'state': 'draft'})

    @contextmanager
    def _error_checking(self, start=None, stop=None, skip=False):
        """
        Context manager used for conflicts checking.
        When exiting the context manager, conflicts are checked
        for all work entries within a date range. By default, the start and end dates are
        computed according to `self` (min and max respectively) but it can be overwritten by providing
        other values as parameter.
        :param start: datetime to overwrite the default behaviour
        :param stop: datetime to overwrite the default behaviour
        :param skip: If True, no error checking is done
        """
        try:
            skip = skip or self.env.context.get('hr_work_entry_no_check',
                                                False)
            start = start or min(self.mapped('date_start'), default=False)
            stop = stop or max(self.mapped('date_stop'), default=False)
            if not skip and start and stop:
                work_entries = self.sudo().with_context(
                    hr_work_entry_no_check=True).search([
                        ('date_start', '<', stop),
                        ('date_stop', '>', start),
                        ('state', 'not in', ('validated', 'cancelled')),
                    ])
                work_entries._reset_conflicting_state()
            yield
        except OperationalError:
            # the cursor is dead, do not attempt to use it or we will shadow the root exception
            # with a "psycopg2.InternalError: current transaction is aborted, ..."
            skip = True
            raise
        finally:
            if not skip and start and stop:
                # New work entries are handled in the create method,
                # no need to reload work entries.
                work_entries.exists()._check_if_error()
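
# A hedged usage sketch of the _error_checking() context manager defined
# above: entering resets previously conflicting entries in the date range to
# draft, and exiting re-runs the overlap / undefined-type checks on them.
# `env`, `employee`, `start` and `stop` below are illustrative assumptions.
def _example_regenerate(env, employee, start, stop):
    with env['hr.work.entry']._error_checking(start=start, stop=stop):
        old_entries = env['hr.work.entry'].search([
            ('employee_id', '=', employee.id),
            ('date_start', '<', stop),
            ('date_stop', '>', start),
            ('state', 'not in', ('validated', 'cancelled')),
        ])
        old_entries.write({'active': False})  # archiving cancels the entries
        env['hr.work.entry'].create({
            'name': 'Regenerated entry',
            'employee_id': employee.id,
            'date_start': start,
            'date_stop': stop,
        })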
Example #29
0
class PurchaseOrder(models.Model):
    _inherit = 'purchase.order'

    @api.model
    def _default_picking_type(self):
        return self._get_picking_type(
            self.env.context.get('company_id') or self.env.company.id)

    incoterm_id = fields.Many2one(
        'account.incoterms',
        'Incoterm',
        states={'done': [('readonly', True)]},
        help=
        "International Commercial Terms are a series of predefined commercial terms used in international transactions."
    )

    picking_count = fields.Integer(compute='_compute_picking',
                                   string='Picking count',
                                   default=0,
                                   store=True)
    picking_ids = fields.Many2many('stock.picking',
                                   compute='_compute_picking',
                                   string='Receptions',
                                   copy=False,
                                   store=True)

    picking_type_id = fields.Many2one(
        'stock.picking.type',
        'Deliver To',
        states=Purchase.READONLY_STATES,
        required=True,
        default=_default_picking_type,
        domain=
        "['|', ('warehouse_id', '=', False), ('warehouse_id.company_id', '=', company_id)]",
        help="This will determine operation type of incoming shipment")
    default_location_dest_id_usage = fields.Selection(
        related='picking_type_id.default_location_dest_id.usage',
        string='Destination Location Type',
        help="Technical field used to display the Drop Ship Address",
        readonly=True)
    group_id = fields.Many2one('procurement.group',
                               string="Procurement Group",
                               copy=False)
    is_shipped = fields.Boolean(compute="_compute_is_shipped")
    effective_date = fields.Datetime(
        "Effective Date",
        compute='_compute_effective_date',
        store=True,
        copy=False,
        help="Completion date of the first receipt order.")
    on_time_rate = fields.Float(related='partner_id.on_time_rate',
                                compute_sudo=False)

    @api.depends('order_line.move_ids.returned_move_ids',
                 'order_line.move_ids.state', 'order_line.move_ids.picking_id')
    def _compute_picking(self):
        for order in self:
            pickings = self.env['stock.picking']
            for line in order.order_line:
                # We keep a limited scope on purpose. Ideally, we should also use move_orig_ids and
                # do some recursive search, but that could be prohibitive if not done correctly.
                moves = line.move_ids | line.move_ids.mapped(
                    'returned_move_ids')
                pickings |= moves.mapped('picking_id')
            order.picking_ids = pickings
            order.picking_count = len(pickings)

    @api.depends('picking_ids.date_done')
    def _compute_effective_date(self):
        for order in self:
            pickings = order.picking_ids.filtered(
                lambda x: x.state == 'done' and x.location_dest_id.usage ==
                'internal' and x.date_done)
            order.effective_date = min(pickings.mapped('date_done'),
                                       default=False)

    @api.depends('picking_ids', 'picking_ids.state')
    def _compute_is_shipped(self):
        for order in self:
            if order.picking_ids and all(x.state in ['done', 'cancel']
                                         for x in order.picking_ids):
                order.is_shipped = True
            else:
                order.is_shipped = False

    @api.onchange('picking_type_id')
    def _onchange_picking_type_id(self):
        if self.picking_type_id.default_location_dest_id.usage != 'customer':
            self.dest_address_id = False

    @api.onchange('company_id')
    def _onchange_company_id(self):
        p_type = self.picking_type_id
        if not (p_type and p_type.code == 'incoming' and
                (p_type.warehouse_id.company_id == self.company_id
                 or not p_type.warehouse_id)):
            self.picking_type_id = self._get_picking_type(self.company_id.id)

    # --------------------------------------------------
    # CRUD
    # --------------------------------------------------

    def write(self, vals):
        if vals.get('order_line') and self.state == 'purchase':
            for order in self:
                pre_order_line_qty = {
                    order_line: order_line.product_qty
                    for order_line in order.mapped('order_line')
                }
        res = super(PurchaseOrder, self).write(vals)
        if vals.get('order_line') and self.state == 'purchase':
            for order in self:
                to_log = {}
                for order_line in order.order_line:
                    if pre_order_line_qty.get(
                            order_line, False) and float_compare(
                                pre_order_line_qty[order_line],
                                order_line.product_qty,
                                precision_rounding=order_line.product_uom.
                                rounding) > 0:
                        to_log[order_line] = (order_line.product_qty,
                                              pre_order_line_qty[order_line])
                if to_log:
                    order._log_decrease_ordered_quantity(to_log)
        return res

    # --------------------------------------------------
    # Actions
    # --------------------------------------------------

    def button_approve(self, force=False):
        result = super(PurchaseOrder, self).button_approve(force=force)
        self._create_picking()
        return result

    def button_cancel(self):
        for order in self:
            for move in order.order_line.mapped('move_ids'):
                if move.state == 'done':
                    raise UserError(
                        _('Unable to cancel purchase order %s as some receptions have already been done.'
                          ) % (order.name))
            # If the product is MTO, change the procure_method of the closest move to purchase to MTS.
            # The purpose is to link the PO that the user will manually generate to the existing moves' chain.
            if order.state in ('draft', 'sent', 'to approve', 'purchase'):
                for order_line in order.order_line:
                    order_line.move_ids._action_cancel()
                    if order_line.move_dest_ids:
                        move_dest_ids = order_line.move_dest_ids
                        if order_line.propagate_cancel:
                            move_dest_ids._action_cancel()
                        else:
                            move_dest_ids.write(
                                {'procure_method': 'make_to_stock'})
                            move_dest_ids._recompute_state()

            for pick in order.picking_ids.filtered(
                    lambda r: r.state != 'cancel'):
                pick.action_cancel()

            order.order_line.write({'move_dest_ids': [(5, 0, 0)]})

        return super(PurchaseOrder, self).button_cancel()

    def action_view_picking(self):
        """ This function returns an action that display existing picking orders of given purchase order ids. When only one found, show the picking immediately.
        """
        result = self.env["ir.actions.actions"]._for_xml_id(
            'stock.action_picking_tree_all')
        # override the context to get rid of the default filtering on operation type
        result['context'] = {
            'default_partner_id': self.partner_id.id,
            'default_origin': self.name,
            'default_picking_type_id': self.picking_type_id.id
        }
        pick_ids = self.mapped('picking_ids')
        # choose the view_mode accordingly
        if not pick_ids or len(pick_ids) > 1:
            result['domain'] = "[('id','in',%s)]" % (pick_ids.ids)
        elif len(pick_ids) == 1:
            res = self.env.ref('stock.view_picking_form', False)
            form_view = [(res and res.id or False, 'form')]
            if 'views' in result:
                result['views'] = form_view + [
                    (state, view)
                    for state, view in result['views'] if view != 'form'
                ]
            else:
                result['views'] = form_view
            result['res_id'] = pick_ids.id
        return result

    def _prepare_invoice(self):
        invoice_vals = super()._prepare_invoice()
        invoice_vals['invoice_incoterm_id'] = self.incoterm_id.id
        return invoice_vals

    # --------------------------------------------------
    # Business methods
    # --------------------------------------------------

    def _log_decrease_ordered_quantity(self, purchase_order_lines_quantities):
        def _keys_in_sorted(move):
            """ sort by picking and the responsible for the product the
            move.
            """
            return (move.picking_id.id, move.product_id.responsible_id.id)

        def _keys_in_groupby(move):
            """ group by picking and the responsible for the product the
            move.
            """
            return (move.picking_id, move.product_id.responsible_id)

        def _render_note_exception_quantity_po(order_exceptions):
            order_line_ids = self.env['purchase.order.line'].browse([
                order_line.id for order in order_exceptions.values()
                for order_line in order[0]
            ])
            purchase_order_ids = order_line_ids.mapped('order_id')
            move_ids = self.env['stock.move'].concat(*rendering_context.keys())
            impacted_pickings = move_ids.mapped(
                'picking_id')._get_impacted_pickings(
                    move_ids) - move_ids.mapped('picking_id')
            values = {
                'purchase_order_ids': purchase_order_ids,
                'order_exceptions': order_exceptions.values(),
                'impacted_pickings': impacted_pickings,
            }
            return self.env.ref('purchase_stock.exception_on_po')._render(
                values=values)

        documents = self.env['stock.picking']._log_activity_get_documents(
            purchase_order_lines_quantities, 'move_ids', 'DOWN',
            _keys_in_sorted, _keys_in_groupby)
        filtered_documents = {}
        for (parent, responsible), rendering_context in documents.items():
            if parent._name == 'stock.picking':
                if parent.state == 'cancel':
                    continue
            filtered_documents[(parent, responsible)] = rendering_context
        self.env['stock.picking']._log_activity(
            _render_note_exception_quantity_po, filtered_documents)

    def _get_destination_location(self):
        self.ensure_one()
        if self.dest_address_id:
            return self.dest_address_id.property_stock_customer.id
        return self.picking_type_id.default_location_dest_id.id

    @api.model
    def _get_picking_type(self, company_id):
        picking_type = self.env['stock.picking.type'].search([
            ('code', '=', 'incoming'),
            ('warehouse_id.company_id', '=', company_id)
        ])
        if not picking_type:
            picking_type = self.env['stock.picking.type'].search([
                ('code', '=', 'incoming'), ('warehouse_id', '=', False)
            ])
        return picking_type[:1]

    def _prepare_picking(self):
        if not self.group_id:
            self.group_id = self.group_id.create({
                'name':
                self.name,
                'partner_id':
                self.partner_id.id
            })
        if not self.partner_id.property_stock_supplier.id:
            raise UserError(
                _("You must set a Vendor Location for this partner %s",
                  self.partner_id.name))
        return {
            'picking_type_id': self.picking_type_id.id,
            'partner_id': self.partner_id.id,
            'user_id': False,
            'date': self.date_order,
            'origin': self.name,
            'location_dest_id': self._get_destination_location(),
            'location_id': self.partner_id.property_stock_supplier.id,
            'company_id': self.company_id.id,
        }

    def _create_picking(self):
        StockPicking = self.env['stock.picking']
        for order in self.filtered(lambda po: po.state in
                                   ('purchase', 'done')):
            if any(product.type in ['product', 'consu']
                   for product in order.order_line.product_id):
                order = order.with_company(order.company_id)
                pickings = order.picking_ids.filtered(lambda x: x.state not in
                                                      ('done', 'cancel'))
                if not pickings:
                    res = order._prepare_picking()
                    picking = StockPicking.with_user(SUPERUSER_ID).create(res)
                else:
                    picking = pickings[0]
                moves = order.order_line._create_stock_moves(picking)
                moves = moves.filtered(lambda x: x.state not in
                                       ('done', 'cancel'))._action_confirm()
                seq = 0
                for move in sorted(moves, key=lambda move: move.date):
                    seq += 5
                    move.sequence = seq
                moves._action_assign()
                picking.message_post_with_view(
                    'mail.message_origin_link',
                    values={
                        'self': picking,
                        'origin': order
                    },
                    subtype_id=self.env.ref('mail.mt_note').id)
        return True

    def _add_picking_info(self, activity):
        """Helper method to add picking info to the Date Updated activity when
        vender updates date_planned of the po lines.
        """
        validated_picking = self.picking_ids.filtered(
            lambda p: p.state == 'done')
        if validated_picking:
            activity.note += _(
                "<p>Those dates couldn’t be modified accordingly on the receipt %s which had already been validated.</p>"
            ) % validated_picking[0].name
        elif not self.picking_ids:
            activity.note += _("<p>Corresponding receipt not found.</p>")
        else:
            activity.note += _(
                "<p>Those dates have been updated accordingly on the receipt %s.</p>"
            ) % self.picking_ids[0].name

    def _create_update_date_activity(self, updated_dates):
        activity = super()._create_update_date_activity(updated_dates)
        self._add_picking_info(activity)

    def _update_update_date_activity(self, updated_dates, activity):
        # remove old picking info to update it
        note_lines = activity.note.split('<p>')
        note_lines.pop()
        activity.note = '<p>'.join(note_lines)
        super()._update_update_date_activity(updated_dates, activity)
        self._add_picking_info(activity)

    @api.model
    def _get_orders_to_remind(self):
        """When auto sending reminder mails, don't send for purchase order with
        validated receipts."""
        return super()._get_orders_to_remind().filtered(
            lambda p: not p.effective_date)
Example #30
0
class Users(models.Model):
    _inherit = 'res.users'

    def __init__(self, pool, cr):
        init_res = super(Users, self).__init__(pool, cr)
        type(self).SELF_WRITEABLE_FIELDS = list(
            set(self.SELF_WRITEABLE_FIELDS + [
                'country_id', 'city', 'website', 'website_description',
                'website_published'
            ]))
        return init_res

    create_date = fields.Datetime('Create Date',
                                  readonly=True,
                                  copy=False,
                                  index=True)
    karma = fields.Integer('Karma', default=0)
    badge_ids = fields.One2many('gamification.badge.user',
                                'user_id',
                                string='Badges',
                                copy=False)
    gold_badge = fields.Integer('Gold badges count',
                                compute="_get_user_badge_level")
    silver_badge = fields.Integer('Silver badges count',
                                  compute="_get_user_badge_level")
    bronze_badge = fields.Integer('Bronze badges count',
                                  compute="_get_user_badge_level")
    forum_waiting_posts_count = fields.Integer(
        'Waiting post', compute="_get_user_waiting_post")

    @api.multi
    @api.depends('badge_ids')
    def _get_user_badge_level(self):
        """ Return total badge per level of users
        TDE CLEANME: shouldn't check type is forum ? """
        for user in self:
            user.gold_badge = 0
            user.silver_badge = 0
            user.bronze_badge = 0

        self.env.cr.execute(
            """
            SELECT bu.user_id, b.level, count(1)
            FROM gamification_badge_user bu, gamification_badge b
            WHERE bu.user_id IN %s
              AND bu.badge_id = b.id
              AND b.level IS NOT NULL
            GROUP BY bu.user_id, b.level
            ORDER BY bu.user_id;
        """, [tuple(self.ids)])

        for (user_id, level, count) in self.env.cr.fetchall():
            # levels are gold, silver, bronze but fields have _badge postfix
            self.browse(user_id)['{}_badge'.format(level)] = count

    @api.multi
    def _get_user_waiting_post(self):
        for user in self:
            Post = self.env['forum.post']
            domain = [('parent_id', '=', False), ('state', '=', 'pending'),
                      ('create_uid', '=', user.id)]
            user.forum_waiting_posts_count = Post.search_count(domain)

    @api.model
    def _generate_forum_token(self, user_id, email):
        """Return a token for email validation. This token is valid for the day
        and is a hash based on a (secret) uuid generated by the forum module,
        the user_id, the email and currently the day (to be updated if necessary). """
        forum_uuid = self.env['ir.config_parameter'].sudo().get_param(
            'website_forum.uuid')
        return hashlib.sha256((
            u'%s-%s-%s-%s' %
            (datetime.now().replace(hour=0, minute=0, second=0, microsecond=0),
             forum_uuid, user_id, email)).encode('utf-8')).hexdigest()

    @api.one
    def send_forum_validation_email(self, forum_id=None):
        if not self.email:
            return False
        token = self._generate_forum_token(self.id, self.email)
        activation_template = self.env.ref('website_forum.validation_email')
        if activation_template:
            params = {'token': token, 'id': self.id, 'email': self.email}
            if forum_id:
                params['forum_id'] = forum_id
            base_url = self.env['ir.config_parameter'].sudo().get_param(
                'web.base.url')
            token_url = base_url + '/forum/validate_email?%s' % urls.url_encode(
                params)
            activation_template.sudo().with_context(
                token_url=token_url).send_mail(self.id, force_send=True)
        return True

    @api.one
    def process_forum_validation_token(self,
                                       token,
                                       email,
                                       forum_id=None,
                                       context=None):
        validation_token = self._generate_forum_token(self.id, email)
        if token == validation_token and self.karma == 0:
            karma = 3
            forum = None
            if forum_id:
                forum = self.env['forum.forum'].browse(forum_id)
            else:
                forum_ids = self.env['forum.forum'].search([], limit=1)
                if forum_ids:
                    forum = forum_ids[0]
            if forum:
                # karma gained: karma to ask a question and have 2 downvotes
                karma = forum.karma_ask + (-2 *
                                           forum.karma_gen_question_downvote)
            return self.write({'karma': karma})
        return False

    @api.multi
    def add_karma(self, karma):
        for user in self:
            user.karma += karma
        return True

    # Wrapper for call_kw with inherits
    @api.multi
    def open_website_url(self):
        return self.mapped('partner_id').open_website_url()
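

# A hedged sketch (not part of this model) of how the '/forum/validate_email'
# link built in send_forum_validation_email() above is expected to be consumed.
# The controller class name, route options and redirect targets are assumptions
# made for illustration only.
from odoo import http
from odoo.http import request


class ForumValidationController(http.Controller):

    @http.route('/forum/validate_email', type='http', auth='public', website=True)
    def validate_email(self, token, id, email, forum_id=None, **kwargs):
        # Recompute the daily token server-side; if it matches, the user gets
        # the initial karma through process_forum_validation_token().
        user = request.env['res.users'].sudo().browse(int(id))
        done = user.process_forum_validation_token(
            token, email, forum_id=forum_id and int(forum_id))
        return request.redirect('/forum' if done else '/')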