Example #1
class FleetVehicleModelBrand(models.Model):
    _name = 'fleet.vehicle.model.brand'
    _description = 'Brand of the vehicle'
    _order = 'name asc'

    name = fields.Char('Make', required=True)
    image = fields.Binary(
        "Logo",
        attachment=True,
        help="This field holds the image used as logo for the brand, limited to 1024x1024px."
    )
    image_medium = fields.Binary(
        "Medium-sized image",
        attachment=True,
        help="Medium-sized logo of the brand. It is automatically "
        "resized as a 128x128px image, with aspect ratio preserved. "
        "Use this field in form views or some kanban views.")
    image_small = fields.Binary(
        "Small-sized image",
        attachment=True,
        help="Small-sized logo of the brand. It is automatically "
        "resized as a 64x64px image, with aspect ratio preserved. "
        "Use this field anywhere a small image is required.")

    @api.model
    def create(self, vals):
        tools.image_resize_images(vals)
        return super(FleetVehicleModelBrand, self).create(vals)

    @api.multi
    def write(self, vals):
        tools.image_resize_images(vals)
        return super(FleetVehicleModelBrand, self).write(vals)
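
A minimal usage sketch, assuming an Odoo shell environment (`env`) and a hypothetical logo.png: the create() override runs the values through tools.image_resize_images, which derives image_medium and image_small from the uploaded image.

import base64

with open('logo.png', 'rb') as f:                       # hypothetical logo file
    logo_b64 = base64.b64encode(f.read())

brand = env['fleet.vehicle.model.brand'].create({
    'name': 'Audi',
    'image': logo_b64,
})
# image_resize_images() has filled the derived fields from 'image':
# brand.image_medium -> 128x128 variant, brand.image_small -> 64x64 variant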
Example #2
File: ir_actions.py Project: vituocgia/izi
class IrActionsActClient(models.Model):
    _name = 'ir.actions.client'
    _inherit = 'ir.actions.actions'
    _table = 'ir_act_client'
    _sequence = 'ir_actions_id_seq'
    _order = 'name'

    name = fields.Char(string='Action Name', translate=True)
    type = fields.Char(default='ir.actions.client')

    tag = fields.Char(string='Client action tag',
                      required=True,
                      help="An arbitrary string, interpreted by the client"
                      " according to its own needs and wishes. There "
                      "is no central tag repository across clients.")
    target = fields.Selection([('current', 'Current Window'),
                               ('new', 'New Window'),
                               ('fullscreen', 'Full Screen'),
                               ('main', 'Main action of Current Window')],
                              default="current",
                              string='Target Window')
    res_model = fields.Char(
        string='Destination Model',
        help="Optional model, mostly used for needactions.")
    context = fields.Char(
        string='Context Value',
        default="{}",
        required=True,
        help="Context dictionary as Python expression, empty by default (Default: {})"
    )
    params = fields.Binary(compute='_compute_params',
                           inverse='_inverse_params',
                           string='Supplementary arguments',
                           help="Arguments sent to the client along with "
                           "the view tag")
    params_store = fields.Binary(string='Params storage', readonly=True)

    @api.depends('params_store')
    def _compute_params(self):
        self_bin = self.with_context(bin_size=False,
                                     bin_size_params_store=False)
        for record, record_bin in pycompat.izip(self, self_bin):
            record.params = record_bin.params_store and safe_eval(
                record_bin.params_store, {'uid': self._uid})

    def _inverse_params(self):
        for record in self:
            params = record.params
            record.params_store = repr(params) if isinstance(params, dict) else params
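
A hedged sketch of how params round-trips through params_store, assuming an Odoo shell environment (`env`); the tag and values are illustrative. The inverse stores the dict as its repr(), and the compute rebuilds it with safe_eval on read.

action = env['ir.actions.client'].create({
    'name': 'Demo Client Action',        # illustrative values
    'tag': 'reload',
    'params': {'menu_id': 1},
})
# action.params_store == "{'menu_id': 1}"  (repr of the dict)
# action.params       == {'menu_id': 1}    (safe_eval of params_store)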
Example #3
File: product.py Project: vituocgia/izi
class ProductPublicCategory(models.Model):
    _name = "product.public.category"
    _inherit = ["website.seo.metadata"]
    _description = "Website Product Category"
    _order = "sequence, name"

    name = fields.Char(required=True, translate=True)
    parent_id = fields.Many2one('product.public.category', string='Parent Category', index=True)
    child_id = fields.One2many('product.public.category', 'parent_id', string='Children Categories')
    sequence = fields.Integer(help="Gives the sequence order when displaying a list of product categories.")
    # NOTE: there is no 'default image', because by default we don't show
    # thumbnails for categories. However, if we have a thumbnail for at least one
    # category, then we display a default image on the others, so that the
    # buttons have consistent styling.
    # In this case, the default image is set by the JS code.
    image = fields.Binary(attachment=True, help="This field holds the image used as image for the category, limited to 1024x1024px.")
    image_medium = fields.Binary(string='Medium-sized image', attachment=True,
                                 help="Medium-sized image of the category. It is automatically "
                                 "resized as a 128x128px image, with aspect ratio preserved. "
                                 "Use this field in form views or some kanban views.")
    image_small = fields.Binary(string='Small-sized image', attachment=True,
                                help="Small-sized image of the category. It is automatically "
                                "resized as a 64x64px image, with aspect ratio preserved. "
                                "Use this field anywhere a small image is required.")

    @api.model
    def create(self, vals):
        tools.image_resize_images(vals)
        return super(ProductPublicCategory, self).create(vals)

    @api.multi
    def write(self, vals):
        tools.image_resize_images(vals)
        return super(ProductPublicCategory, self).write(vals)

    @api.constrains('parent_id')
    def check_parent_id(self):
        if not self._check_recursion():
            raise ValueError(_('Error ! You cannot create recursive categories.'))

    @api.multi
    def name_get(self):
        res = []
        for category in self:
            names = [category.name]
            parent_category = category.parent_id
            while parent_category:
                names.append(parent_category.name)
                parent_category = parent_category.parent_id
            res.append((category.id, ' / '.join(reversed(names))))
        return res
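
A small sketch of the hierarchical display name built by name_get(), with illustrative records and assuming an Odoo shell environment (`env`):

parent = env['product.public.category'].create({'name': 'Furniture'})
child = env['product.public.category'].create({'name': 'Chairs', 'parent_id': parent.id})
# child.name_get() -> [(child.id, 'Furniture / Chairs')]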
Example #4
class ConverterTest(models.Model):
    _name = 'web_editor.converter.test'

    # disable translation export for those brilliant field labels and values
    _translate = False

    char = fields.Char()
    integer = fields.Integer()
    float = fields.Float()
    numeric = fields.Float(digits=(16, 2))
    many2one = fields.Many2one('web_editor.converter.test.sub')
    binary = fields.Binary()
    date = fields.Date()
    datetime = fields.Datetime()
    selection = fields.Selection([
        (1, "réponse A"),
        (2, "réponse B"),
        (3, "réponse C"),
        (4, "réponse <D>"),
    ])
    selection_str = fields.Selection(
        [
            ('A', "Qu'il n'est pas arrivé à Toronto"),
            ('B', "Qu'il était supposé arriver à Toronto"),
            ('C', "Qu'est-ce qu'il fout ce maudit pancake, tabernacle ?"),
            ('D', "La réponse D"),
        ],
        string=u"Lorsqu'un pancake prend l'avion à destination de Toronto et "
        u"qu'il fait une escale technique à St Claude, on dit:")
    html = fields.Html()
    text = fields.Text()
Example #5
class RestaurantFloor(models.Model):

    _name = 'restaurant.floor'

    name = fields.Char('Floor Name', required=True, help='An internal identification of the restaurant floor')
    pos_config_id = fields.Many2one('pos.config', string='Point of Sale')
    background_image = fields.Binary('Background Image', attachment=True, help='A background image used to display a floor layout in the point of sale interface')
    background_color = fields.Char('Background Color', help='The background color of the floor layout (must be specified in an HTML-compatible format)', default='rgb(210, 210, 210)')
    table_ids = fields.One2many('restaurant.table', 'floor_id', string='Tables', help='The list of tables in this floor')
    sequence = fields.Integer('Sequence', help='Used to sort Floors', default=1)
Example #6
class Sponsor(models.Model):
    _name = "event.sponsor"
    _description = 'Event Sponsor'
    _order = "sequence"

    event_id = fields.Many2one('event.event', 'Event', required=True)
    sponsor_type_id = fields.Many2one('event.sponsor.type', 'Sponsoring Type', required=True)
    partner_id = fields.Many2one('res.partner', 'Sponsor/Customer', required=True)
    url = fields.Char('Sponsor Website')
    sequence = fields.Integer('Sequence', store=True, related='sponsor_type_id.sequence')
    image_medium = fields.Binary(string='Logo', related='partner_id.image_medium', store=True, attachment=True)
Example #7
class BaseImportModule(models.TransientModel):
    """ Import Module """
    _name = "base.import.module"
    _description = "Import Module"

    module_file = fields.Binary(string='Module .ZIP file', required=True)
    state = fields.Selection([('init', 'init'), ('done', 'done')],
                             string='Status',
                             readonly=True,
                             default='init')
    import_message = fields.Char()
    force = fields.Boolean(
        string='Force init',
        help="Force init mode even if installed. (will update `noupdate='1'` records)"
    )

    @api.multi
    def import_module(self):
        self.ensure_one()
        IrModule = self.env['ir.module.module']
        zip_data = base64.decodestring(self.module_file)
        fp = BytesIO()
        fp.write(zip_data)
        res = IrModule.import_zipfile(fp, force=self.force)
        self.write({'state': 'done', 'import_message': res[0]})
        context = dict(self.env.context, module_name=res[1])
        # Return the wizard, otherwise it would close and the result message would not be shown to the user.
        return {
            'name': 'Import Module',
            'view_type': 'form',
            'view_mode': 'form',
            'target': 'new',
            'res_id': self.id,
            'res_model': 'base.import.module',
            'type': 'ir.actions.act_window',
            'context': context,
        }

    @api.multi
    def action_module_open(self):
        self.ensure_one()
        return {
            'domain': [('name', 'in', self.env.context.get('module_name',
                                                           []))],
            'name': 'Modules',
            'view_type': 'form',
            'view_mode': 'tree,form',
            'res_model': 'ir.module.module',
            'view_id': False,
            'type': 'ir.actions.act_window',
        }
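
A hedged usage sketch of the wizard, assuming an Odoo shell environment (`env`) and a hypothetical my_module.zip: the archive is passed base64-encoded, and import_module() returns an act_window that re-opens the wizard so the result message stays visible.

import base64

with open('my_module.zip', 'rb') as f:                  # hypothetical module archive
    zip_b64 = base64.b64encode(f.read())

wizard = env['base.import.module'].create({'module_file': zip_b64})
action = wizard.import_module()
# wizard.state == 'done'; wizard.import_message holds the import result text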
Example #8
class FleetVehicleModel(models.Model):
    _name = 'fleet.vehicle.model'
    _description = 'Model of a vehicle'
    _order = 'name asc'

    name = fields.Char('Model name', required=True)
    brand_id = fields.Many2one('fleet.vehicle.model.brand',
                               'Make',
                               required=True,
                               help='Make of the vehicle')
    vendors = fields.Many2many('res.partner',
                               'fleet_vehicle_model_vendors',
                               'model_id',
                               'partner_id',
                               string='Vendors')
    image = fields.Binary(related='brand_id.image', string="Logo")
    image_medium = fields.Binary(related='brand_id.image_medium',
                                 string="Logo (medium)")
    image_small = fields.Binary(related='brand_id.image_small',
                                string="Logo (small)")

    @api.multi
    @api.depends('name', 'brand_id')
    def name_get(self):
        res = []
        for record in self:
            name = record.name
            if record.brand_id.name:
                name = record.brand_id.name + '/' + name
            res.append((record.id, name))
        return res

    @api.onchange('brand_id')
    def _onchange_brand(self):
        if self.brand_id:
            self.image_medium = self.brand_id.image
        else:
            self.image_medium = False
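
A minimal sketch of the '<make>/<model>' display name, with illustrative records and assuming an Odoo shell environment (`env`):

brand = env['fleet.vehicle.model.brand'].create({'name': 'Audi'})
model = env['fleet.vehicle.model'].create({'name': 'A4', 'brand_id': brand.id})
# model.name_get() -> [(model.id, 'Audi/A4')]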
Example #9
File: pos_cache.py Project: vituocgia/izi
class pos_cache(models.Model):
    _name = 'pos.cache'

    cache = fields.Binary(attachment=True)
    product_domain = fields.Text(required=True)
    product_fields = fields.Text(required=True)

    config_id = fields.Many2one('pos.config',
                                ondelete='cascade',
                                required=True)
    compute_user_id = fields.Many2one('res.users',
                                      'Cache compute user',
                                      required=True)

    @api.model
    def refresh_all_caches(self):
        self.env['pos.cache'].search([]).refresh_cache()

    @api.one
    def refresh_cache(self):
        Product = self.env['product.product'].sudo(self.compute_user_id.id)
        products = Product.search(self.get_product_domain())
        prod_ctx = products.with_context(
            pricelist=self.config_id.pricelist_id.id,
            display_default_code=False,
            lang=self.compute_user_id.lang)
        res = prod_ctx.read(self.get_product_fields())
        datas = {
            'cache': base64.encodestring(json.dumps(res).encode('utf-8')),
        }

        self.write(datas)

    @api.model
    def get_product_domain(self):
        return literal_eval(self.product_domain)

    @api.model
    def get_product_fields(self):
        return literal_eval(self.product_fields)

    @api.model
    def get_cache(self, domain, fields):
        if (not self.cache or domain != self.get_product_domain()
                or fields != self.get_product_fields()):
            self.product_domain = str(domain)
            self.product_fields = str(fields)
            self.refresh_cache()

        return json.loads(base64.decodestring(self.cache).decode('utf-8'))
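
A hedged sketch of how the cache is consumed, assuming an Odoo shell environment (`env`); the config xmlid, domain and fields are illustrative. get_cache() refreshes the stored JSON blob whenever the requested domain or field list differs from what was cached, then returns the decoded product rows.

cache = env['pos.cache'].create({
    'config_id': env.ref('point_of_sale.pos_config_main').id,   # illustrative POS config
    'compute_user_id': env.uid,
    'product_domain': "[('available_in_pos', '=', True)]",
    'product_fields': "['display_name', 'lst_price']",
})
rows = cache.get_cache([('available_in_pos', '=', True)], ['display_name', 'lst_price'])
# rows is the list of dicts produced by product.product.read() and cached as JSON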
Example #10
class PaymentIcon(models.Model):
    _name = 'payment.icon'
    _description = 'Payment Icon'

    name = fields.Char(string='Name')
    acquirer_ids = fields.Many2many(
        'payment.acquirer',
        string="Acquirers",
        help="List of Acquirers supporting this payment icon.")
    image = fields.Binary(
        "Image",
        attachment=True,
        help="This field holds the image used for this payment icon, limited to 1024x1024px"
    )

    image_payment_form = fields.Binary("Image displayed on the payment form",
                                       attachment=True)

    @api.model
    def create(self, vals):
        if 'image' in vals:
            image = ustr(vals['image'] or '').encode('utf-8')
            vals['image_payment_form'] = image_resize_image(image,
                                                            size=(45, 30))
            vals['image'] = image_resize_image(image, size=(64, 64))
        return super(PaymentIcon, self).create(vals)

    @api.multi
    def write(self, vals):
        if 'image' in vals:
            image = ustr(vals['image'] or '').encode('utf-8')
            vals['image_payment_form'] = image_resize_image(image,
                                                            size=(45, 30))
            vals['image'] = image_resize_image(image, size=(64, 64))
        return super(PaymentIcon, self).write(vals)
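
A small sketch of the resize-on-create behaviour, assuming an Odoo shell environment (`env`) and a hypothetical visa.png: the override shrinks the uploaded icon to 64x64 and derives a 45x30 variant for the payment form.

import base64

with open('visa.png', 'rb') as f:                        # hypothetical icon file
    icon_b64 = base64.b64encode(f.read())

icon = env['payment.icon'].create({'name': 'VISA', 'image': icon_b64})
# icon.image              -> 64x64 version of the upload
# icon.image_payment_form -> 45x30 version shown on the payment form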
Example #11
class BaseLanguageExport(models.TransientModel):
    _name = "base.language.export"

    @api.model
    def _get_languages(self):
        langs = self.env['res.lang'].search([('translatable', '=', True)])
        return [(NEW_LANG_KEY, _('New Language (Empty translation template)'))] + \
               [(lang.code, lang.name) for lang in langs]
   
    name = fields.Char('File Name', readonly=True)
    lang = fields.Selection(_get_languages, string='Language', required=True, default=NEW_LANG_KEY)
    format = fields.Selection([('csv','CSV File'), ('po','PO File'), ('tgz', 'TGZ Archive')],
                              string='File Format', required=True, default='csv')
    modules = fields.Many2many('ir.module.module', 'rel_modules_langexport', 'wiz_id', 'module_id',
                               string='Apps To Export', domain=[('state','=','installed')])
    data = fields.Binary('File', readonly=True)
    state = fields.Selection([('choose', 'choose'), ('get', 'get')], # choose language or get the file
                             default='choose')

    @api.multi
    def act_getfile(self):
        this = self[0]
        lang = this.lang if this.lang != NEW_LANG_KEY else False
        mods = sorted(this.mapped('modules.name')) or ['all']

        with contextlib.closing(io.BytesIO()) as buf:
            tools.trans_export(lang, mods, buf, this.format, self._cr)
            out = base64.encodestring(buf.getvalue())

        filename = 'new'
        if lang:
            filename = tools.get_iso_codes(lang)
        elif len(mods) == 1:
            filename = mods[0]
        extension = this.format
        if not lang and extension == 'po':
            extension = 'pot'
        name = "%s.%s" % (filename, extension)
        this.write({'state': 'get', 'data': out, 'name': name})
        return {
            'type': 'ir.actions.act_window',
            'res_model': 'base.language.export',
            'view_mode': 'form',
            'view_type': 'form',
            'res_id': this.id,
            'views': [(False, 'form')],
            'target': 'new',
        }
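
A hedged sketch of exporting a translation file, assuming an Odoo shell environment (`env`) and that fr_FR is an installed language: act_getfile() serializes the selected translations and stores them base64-encoded in data, named '<iso code>.<format>'.

import base64

wizard = env['base.language.export'].create({'lang': 'fr_FR', 'format': 'po'})
wizard.act_getfile()
po_bytes = base64.b64decode(wizard.data)    # wizard.name would be 'fr.po' here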
Example #12
class BaseLanguageImport(models.TransientModel):
    _name = "base.language.import"
    _description = "Language Import"

    name = fields.Char('Language Name', required=True)
    code = fields.Char('ISO Code',
                       size=5,
                       required=True,
                       help="ISO Language and Country code, e.g. en_US")
    data = fields.Binary('File', required=True)
    filename = fields.Char('File Name', required=True)
    overwrite = fields.Boolean(
        'Overwrite Existing Terms',
        help="If you enable this option, existing translations (including custom ones) "
             "will be overwritten and replaced by those in this file")

    @api.multi
    def import_lang(self):
        this = self[0]
        this = this.with_context(overwrite=this.overwrite)
        with TemporaryFile('wb+') as buf:
            try:
                buf.write(base64.decodestring(this.data))

                # now we determine the file format
                buf.seek(0)
                fileformat = os.path.splitext(this.filename)[-1][1:].lower()

                tools.trans_load_data(this._cr,
                                      buf,
                                      fileformat,
                                      this.code,
                                      lang_name=this.name,
                                      context=this._context)
            except Exception as e:
                _logger.exception(
                    'File unsuccessfully imported, due to format mismatch.')
                raise UserError(
                    _('File not imported due to format mismatch or a malformed file. (Valid formats are .csv, .po, .pot)\n\nTechnical Details:\n%s'
                      ) % tools.ustr(e))
        return True
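
A hedged sketch of the matching import wizard, assuming an Odoo shell environment (`env`); po_bytes stands for hypothetical .po file content. The payload goes in data as base64 and the extension of filename selects the loader.

import base64

wizard = env['base.language.import'].create({
    'name': 'French',
    'code': 'fr_FR',
    'filename': 'fr_FR.po',
    'data': base64.b64encode(po_bytes),     # po_bytes: hypothetical .po content
    'overwrite': True,
})
wizard.import_lang()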
Example #13
File: models.py Project: vituocgia/izi
class test_model(models.Model):
    _name = 'test_converter.test_model'

    char = fields.Char()
    integer = fields.Integer()
    float = fields.Float()
    numeric = fields.Float(digits=(16, 2))
    many2one = fields.Many2one('test_converter.test_model.sub',
                               group_expand='_gbf_m2o')
    binary = fields.Binary()
    date = fields.Date()
    datetime = fields.Datetime()
    selection = fields.Selection([
        (1, "réponse A"),
        (2, "réponse B"),
        (3, "réponse C"),
        (4, "réponse <D>"),
    ])
    selection_str = fields.Selection(
        [
            ('A', u"Qu'il n'est pas arrivé à Toronto"),
            ('B', u"Qu'il était supposé arriver à Toronto"),
            ('C', u"Qu'est-ce qu'il fout ce maudit pancake, tabernacle ?"),
            ('D', u"La réponse D"),
        ],
        string=u"Lorsqu'un pancake prend l'avion à destination de Toronto et "
        u"qu'il fait une escale technique à St Claude, on dit:")
    html = fields.Html()
    text = fields.Text()

    # the `base` module does not contain any model that implements the
    # `group_expand` functionality; test this feature here...

    @api.model
    def _gbf_m2o(self, subs, domain, order):
        sub_ids = subs._search([], order=order, access_rights_uid=SUPERUSER_ID)
        return subs.browse(sub_ids)
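
A short sketch of what the group_expand hook changes, assuming an Odoo shell environment (`env`): grouping by the many2one returns one group per test_converter.test_model.sub record, including subs that have no test_model rows.

groups = env['test_converter.test_model'].read_group(
    domain=[], fields=['many2one'], groupby=['many2one'])
# one entry per sub record; empty groups are present because of _gbf_m2o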
Example #14
class Country(models.Model):
    _name = 'res.country'
    _description = 'Country'
    _order = 'name'

    name = fields.Char(
        string='Country Name', required=True, translate=True, help='The full name of the country.')
    code = fields.Char(
        string='Country Code', size=2,
        help='The ISO country code in two chars. \nYou can use this field for quick search.')
    address_format = fields.Text(string="Layout in Reports",
        help="Display format to use for addresses belonging to this country.\n\n"
             "You can use python-style string pattern with all the fields of the address "
             "(for example, use '%(street)s' to display the field 'street') plus"
             "\n%(state_name)s: the name of the state"
             "\n%(state_code)s: the code of the state"
             "\n%(country_name)s: the name of the country"
             "\n%(country_code)s: the code of the country",
        default='%(street)s\n%(street2)s\n%(city)s %(state_code)s %(zip)s\n%(country_name)s')
    address_view_id = fields.Many2one(
        comodel_name='ir.ui.view', string="Input View",
        domain=[('model', '=', 'res.partner'), ('type', '=', 'form')],
        help="Use this field if you want to replace the usual way to encode a complete address. "
             "Note that the address_format field is used to modify the way to display addresses "
             "(in reports for example), while this field is used to modify the input form for "
             "addresses.")
    currency_id = fields.Many2one('res.currency', string='Currency')
    image = fields.Binary(attachment=True)
    phone_code = fields.Integer(string='Country Calling Code')
    country_group_ids = fields.Many2many('res.country.group', 'res_country_res_country_group_rel',
                         'res_country_id', 'res_country_group_id', string='Country Groups')
    state_ids = fields.One2many('res.country.state', 'country_id', string='States')
    name_position = fields.Selection([
            ('before', 'Before Address'),
            ('after', 'After Address'),
        ], string="Customer Name Position", default="before",
        help="Determines where the customer/company name should be placed, i.e. after or before the address.")
    vat_label = fields.Char(string='Vat Label', translate=True, help="Use this field if you want to change the VAT label.")

    _sql_constraints = [
        ('name_uniq', 'unique (name)',
            'The name of the country must be unique !'),
        ('code_uniq', 'unique (code)',
            'The code of the country must be unique !')
    ]

    name_search = location_name_search

    @api.model
    def create(self, vals):
        if vals.get('code'):
            vals['code'] = vals['code'].upper()
        return super(Country, self).create(vals)

    @api.multi
    def write(self, vals):
        if vals.get('code'):
            vals['code'] = vals['code'].upper()
        return super(Country, self).write(vals)

    @api.multi
    def get_address_fields(self):
        self.ensure_one()
        return re.findall(r'\((.+?)\)', self.address_format)
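
A minimal sketch of get_address_fields() on a country using the default address_format above, assuming an Odoo shell environment (`env`); base.us is the United States record shipped with the base module.

fields_used = env.ref('base.us').get_address_fields()
# -> ['street', 'street2', 'city', 'state_code', 'zip', 'country_name']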
Example #15
class Message(models.Model):
    """ Messages model: system notification (replacing res.log notifications),
        comments (OpenChatter discussion) and incoming emails. """
    _name = 'mail.message'
    _description = 'Message'
    _order = 'id desc'
    _rec_name = 'record_name'

    _message_read_limit = 30

    @api.model
    def _get_default_from(self):
        if self.env.user.email:
            return formataddr((self.env.user.name, self.env.user.email))
        raise UserError(_("Unable to send email, please configure the sender's email address."))

    @api.model
    def _get_default_author(self):
        return self.env.user.partner_id

    # content
    subject = fields.Char('Subject')
    date = fields.Datetime('Date', default=fields.Datetime.now)
    body = fields.Html('Contents', default='', sanitize_style=True, strip_classes=True)
    attachment_ids = fields.Many2many(
        'ir.attachment', 'message_attachment_rel',
        'message_id', 'attachment_id',
        string='Attachments',
        help='Attachments are linked to a document through model / res_id and to the message '
             'through this field.')
    parent_id = fields.Many2one(
        'mail.message', 'Parent Message', index=True, ondelete='set null',
        help="Initial thread message.")
    child_ids = fields.One2many('mail.message', 'parent_id', 'Child Messages')
    # related document
    model = fields.Char('Related Document Model', index=True)
    res_id = fields.Integer('Related Document ID', index=True)
    record_name = fields.Char('Message Record Name', help="Name get of the related document.")
    # characteristics
    message_type = fields.Selection([
        ('email', 'Email'),
        ('comment', 'Comment'),
        ('notification', 'System notification')],
        'Type', required=True, default='email',
        help="Message type: email for email message, notification for system "
             "message, comment for other messages such as user replies",
        oldname='type')
    subtype_id = fields.Many2one('mail.message.subtype', 'Subtype', ondelete='set null', index=True)
    mail_activity_type_id = fields.Many2one(
        'mail.activity.type', 'Mail Activity Type',
        index=True, ondelete='set null')
    # origin
    email_from = fields.Char(
        'From', default=_get_default_from,
        help="Email address of the sender. This field is set when no matching partner is found and replaces the author_id field in the chatter.")
    author_id = fields.Many2one(
        'res.partner', 'Author', index=True,
        ondelete='set null', default=_get_default_author,
        help="Author of the message. If not set, email_from may hold an email address that did not match any partner.")
    author_avatar = fields.Binary("Author's avatar", related='author_id.image_small')
    # recipients
    partner_ids = fields.Many2many('res.partner', string='Recipients')
    needaction_partner_ids = fields.Many2many(
        'res.partner', 'mail_message_res_partner_needaction_rel', string='Partners with Need Action')
    needaction = fields.Boolean(
        'Need Action', compute='_get_needaction', search='_search_needaction',
        help='Need Action')
    channel_ids = fields.Many2many(
        'mail.channel', 'mail_message_mail_channel_rel', string='Channels')
    # notifications
    notification_ids = fields.One2many(
        'mail.notification', 'mail_message_id', 'Notifications',
        auto_join=True, copy=False)
    # user interface
    starred_partner_ids = fields.Many2many(
        'res.partner', 'mail_message_res_partner_starred_rel', string='Favorited By')
    starred = fields.Boolean(
        'Starred', compute='_get_starred', search='_search_starred',
        help='Current user has a starred notification linked to this message')
    # tracking
    tracking_value_ids = fields.One2many(
        'mail.tracking.value', 'mail_message_id',
        string='Tracking values',
        groups="base.group_no_one",
        help='Tracked values are stored in a separate model. This field allow to reconstruct '
             'the tracking and to generate statistics on the model.')
    # mail gateway
    no_auto_thread = fields.Boolean(
        'No threading for answers',
        help='Answers do not go in the original document discussion thread. This has an impact on the generated message-id.')
    message_id = fields.Char('Message-Id', help='Message unique identifier', index=True, readonly=1, copy=False)
    reply_to = fields.Char('Reply-To', help='Reply email address. Setting the reply_to bypasses the automatic thread creation.')
    mail_server_id = fields.Many2one('ir.mail_server', 'Outgoing mail server')

    @api.multi
    def _get_needaction(self):
        """ Need action on a mail.message = notified on my channel """
        my_messages = self.env['mail.notification'].sudo().search([
            ('mail_message_id', 'in', self.ids),
            ('res_partner_id', '=', self.env.user.partner_id.id),
            ('is_read', '=', False)]).mapped('mail_message_id')
        for message in self:
            message.needaction = message in my_messages

    @api.model
    def _search_needaction(self, operator, operand):
        if operator == '=' and operand:
            return ['&', ('notification_ids.res_partner_id', '=', self.env.user.partner_id.id), ('notification_ids.is_read', '=', False)]
        return ['&', ('notification_ids.res_partner_id', '=', self.env.user.partner_id.id), ('notification_ids.is_read', '=', True)]

    @api.depends('starred_partner_ids')
    def _get_starred(self):
        """ Compute if the message is starred by the current user. """
        # TDE FIXME: use SQL
        starred = self.sudo().filtered(lambda msg: self.env.user.partner_id in msg.starred_partner_ids)
        for message in self:
            message.starred = message in starred

    @api.model
    def _search_starred(self, operator, operand):
        if operator == '=' and operand:
            return [('starred_partner_ids', 'in', [self.env.user.partner_id.id])]
        return [('starred_partner_ids', 'not in', [self.env.user.partner_id.id])]

    #------------------------------------------------------
    # Notification API
    #------------------------------------------------------

    @api.model
    def mark_all_as_read(self, channel_ids=None, domain=None):
        """ Remove all needactions of the current partner. If channel_ids is
            given, restrict to messages written in one of those channels. """
        partner_id = self.env.user.partner_id.id
        delete_mode = not self.env.user.share  # delete employee notifs, keep customer ones
        if not domain and delete_mode:
            query = "DELETE FROM mail_message_res_partner_needaction_rel WHERE res_partner_id IN %s"
            args = [(partner_id,)]
            if channel_ids:
                query += """
                    AND mail_message_id in
                        (SELECT mail_message_id
                        FROM mail_message_mail_channel_rel
                        WHERE mail_channel_id in %s)"""
                args += [tuple(channel_ids)]
            query += " RETURNING mail_message_id as id"
            self._cr.execute(query, args)
            self.invalidate_cache()

            ids = [m['id'] for m in self._cr.dictfetchall()]
        else:
            # not a really efficient method: it does one db request for the
            # search, and one for each message in the result set to remove the
            # current user from the relation.
            msg_domain = [('needaction_partner_ids', 'in', partner_id)]
            if channel_ids:
                msg_domain += [('channel_ids', 'in', channel_ids)]
            unread_messages = self.search(expression.AND([msg_domain, domain]))
            notifications = self.env['mail.notification'].sudo().search([
                ('mail_message_id', 'in', unread_messages.ids),
                ('res_partner_id', '=', self.env.user.partner_id.id),
                ('is_read', '=', False)])
            if delete_mode:
                notifications.unlink()
            else:
                notifications.write({'is_read': True})
            ids = unread_messages.mapped('id')

        notification = {'type': 'mark_as_read', 'message_ids': ids, 'channel_ids': channel_ids}
        self.env['bus.bus'].sendone((self._cr.dbname, 'res.partner', self.env.user.partner_id.id), notification)

        return ids

    @api.multi
    def mark_as_unread(self, channel_ids=None):
        """ Add needactions to messages for the current partner. """
        partner_id = self.env.user.partner_id.id
        for message in self:
            message.write({'needaction_partner_ids': [(4, partner_id)]})

        ids = [m.id for m in self]
        notification = {'type': 'mark_as_unread', 'message_ids': ids, 'channel_ids': channel_ids}
        self.env['bus.bus'].sendone((self._cr.dbname, 'res.partner', self.env.user.partner_id.id), notification)

    @api.multi
    def set_message_done(self):
        """ Remove the needaction from messages for the current partner. """
        partner_id = self.env.user.partner_id
        delete_mode = not self.env.user.share  # delete employee notifs, keep customer ones

        notifications = self.env['mail.notification'].sudo().search([
            ('mail_message_id', 'in', self.ids),
            ('res_partner_id', '=', partner_id.id),
            ('is_read', '=', False)])

        if not notifications:
            return

        # Notify message changes through the bus. To minimize the number of
        # notifications, we group the messages by their channel_ids.
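        # A sketch of the same grouping with itertools.groupby (equivalent intent,
        # not this module's code): consecutive messages sharing channel_ids collapse
        # into one (ids, channels) pair, so one bus notification is sent per group:
        #     from itertools import groupby
        #     groups = [([m.id for m in msgs], channels)
        #               for channels, msgs in groupby(messages, key=lambda m: m.channel_ids)]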
        groups = []
        messages = notifications.mapped('mail_message_id')
        current_channel_ids = messages[0].channel_ids
        current_group = []
        for record in messages:
            if record.channel_ids == current_channel_ids:
                current_group.append(record.id)
            else:
                groups.append((current_group, current_channel_ids))
                current_group = [record.id]
                current_channel_ids = record.channel_ids

        groups.append((current_group, current_channel_ids))  # flush the last group

        if delete_mode:
            notifications.unlink()
        else:
            notifications.write({'is_read': True})

        for (msg_ids, channel_ids) in groups:
            notification = {'type': 'mark_as_read', 'message_ids': msg_ids, 'channel_ids': [c.id for c in channel_ids]}
            self.env['bus.bus'].sendone((self._cr.dbname, 'res.partner', partner_id.id), notification)

    @api.model
    def unstar_all(self):
        """ Unstar messages for the current partner. """
        partner_id = self.env.user.partner_id.id

        starred_messages = self.search([('starred_partner_ids', 'in', partner_id)])
        starred_messages.write({'starred_partner_ids': [(3, partner_id)]})

        ids = [m.id for m in starred_messages]
        notification = {'type': 'toggle_star', 'message_ids': ids, 'starred': False}
        self.env['bus.bus'].sendone((self._cr.dbname, 'res.partner', self.env.user.partner_id.id), notification)

    @api.multi
    def toggle_message_starred(self):
        """ Toggle messages as (un)starred. Technically, the notifications related
            to uid are set to (un)starred.
        """
        # a user should always be able to star a message he can read
        self.check_access_rule('read')
        starred = not self.starred
        if starred:
            self.sudo().write({'starred_partner_ids': [(4, self.env.user.partner_id.id)]})
        else:
            self.sudo().write({'starred_partner_ids': [(3, self.env.user.partner_id.id)]})

        notification = {'type': 'toggle_star', 'message_ids': [self.id], 'starred': starred}
        self.env['bus.bus'].sendone((self._cr.dbname, 'res.partner', self.env.user.partner_id.id), notification)

    #------------------------------------------------------
    # Message loading for web interface
    #------------------------------------------------------

    @api.model
    def _message_read_dict_postprocess(self, messages, message_tree):
        """ Post-processing on values given by message_read. This method will
            handle partners in batch to avoid doing numerous queries.

            :param list messages: list of message, as get_dict result
            :param dict message_tree: {[msg.id]: msg browse record as super user}
        """
        # 1. Aggregate partners (author_id and partner_ids), attachments and tracking values
        partners = self.env['res.partner'].sudo()
        attachments = self.env['ir.attachment']
        message_ids = list(message_tree.keys())
        for message in message_tree.values():
            if message.author_id:
                partners |= message.author_id
            if message.subtype_id and message.partner_ids:  # take notified people of message with a subtype
                partners |= message.partner_ids
            elif not message.subtype_id and message.partner_ids:  # take specified people of message without a subtype (log)
                partners |= message.partner_ids
            if message.needaction_partner_ids:  # notified
                partners |= message.needaction_partner_ids
            if message.attachment_ids:
                attachments |= message.attachment_ids
        # Read partners as SUPERUSER -> the messages are already browsed as SUPERUSER, so this is already the case
        partners_names = partners.name_get()
        partner_tree = dict((partner[0], partner) for partner in partners_names)

        # 2. Read attachments as SUPERUSER, because the user could receive a message with attachments for a document uid cannot see
        attachments_data = attachments.sudo().read(['id', 'datas_fname', 'name', 'mimetype'])
        attachments_tree = dict((attachment['id'], {
            'id': attachment['id'],
            'filename': attachment['datas_fname'],
            'name': attachment['name'],
            'mimetype': attachment['mimetype'],
        }) for attachment in attachments_data)

        # 3. Tracking values
        tracking_values = self.env['mail.tracking.value'].sudo().search([('mail_message_id', 'in', message_ids)])
        message_to_tracking = dict()
        tracking_tree = dict.fromkeys(tracking_values.ids, False)
        for tracking in tracking_values:
            message_to_tracking.setdefault(tracking.mail_message_id.id, list()).append(tracking.id)
            tracking_tree[tracking.id] = {
                'id': tracking.id,
                'changed_field': tracking.field_desc,
                'old_value': tracking.get_old_display_value()[0],
                'new_value': tracking.get_new_display_value()[0],
                'field_type': tracking.field_type,
            }

        # 4. Update message dictionaries
        for message_dict in messages:
            message_id = message_dict.get('id')
            message = message_tree[message_id]
            if message.author_id:
                author = partner_tree[message.author_id.id]
            else:
                author = (0, message.email_from)
            # recipients known in partner_tree (same whether or not the message has a subtype)
            partner_ids = [partner_tree[partner.id] for partner in message.partner_ids
                           if partner.id in partner_tree]

            customer_email_data = []
            for notification in message.notification_ids.filtered(lambda notif: notif.res_partner_id.partner_share and notif.res_partner_id.active):
                customer_email_data.append((partner_tree[notification.res_partner_id.id][0], partner_tree[notification.res_partner_id.id][1], notification.email_status))

            attachment_ids = []
            for attachment in message.attachment_ids:
                if attachment.id in attachments_tree:
                    attachment_ids.append(attachments_tree[attachment.id])
            tracking_value_ids = []
            for tracking_value_id in message_to_tracking.get(message_id, list()):
                if tracking_value_id in tracking_tree:
                    tracking_value_ids.append(tracking_tree[tracking_value_id])

            message_dict.update({
                'author_id': author,
                'partner_ids': partner_ids,
                'customer_email_status': (all(d[2] == 'sent' for d in customer_email_data) and 'sent') or
                                        (any(d[2] == 'exception' for d in customer_email_data) and 'exception') or 
                                        (any(d[2] == 'bounce' for d in customer_email_data) and 'bounce') or 'ready',
                'customer_email_data': customer_email_data,
                'attachment_ids': attachment_ids,
                'tracking_value_ids': tracking_value_ids,
            })

        return True

    @api.model
    def message_fetch(self, domain, limit=20):
        return self.search(domain, limit=limit).message_format()

    @api.multi
    def message_format(self):
        """ Get the message values in the format for web client. Since message values can be broadcasted,
            computed fields MUST NOT BE READ and broadcasted.
            :returns list(dict).
             Example :
                {
                    'body': HTML content of the message
                    'model': u'res.partner',
                    'record_name': u'Agrolait',
                    'attachment_ids': [
                        {
                            'file_type_icon': u'webimage',
                            'id': 45,
                            'name': u'sample.png',
                            'filename': u'sample.png'
                        }
                    ],
                    'needaction_partner_ids': [], # list of partner ids
                    'res_id': 7,
                    'tracking_value_ids': [
                        {
                            'old_value': "",
                            'changed_field': "Customer",
                            'id': 2965,
                            'new_value': "Axelor"
                        }
                    ],
                    'author_id': (3, u'Administrator'),
                    'email_from': '*****@*****.**' # email address or False
                    'subtype_id': (1, u'Discussions'),
                    'channel_ids': [], # list of channel ids
                    'date': '2015-06-30 08:22:33',
                    'partner_ids': [[7, "Sacha Du Bourg-Palette"]], # list of partner name_get
                    'message_type': u'comment',
                    'id': 59,
                    'subject': False
                    'is_note': True # only if the subtype is internal
                }
        """
        message_values = self.read([
            'id', 'body', 'date', 'author_id', 'email_from',  # base message fields
            'message_type', 'subtype_id', 'subject',  # message specific
            'model', 'res_id', 'record_name',  # document related
            'channel_ids', 'partner_ids',  # recipients
            'starred_partner_ids',  # list of partner ids for whom the message is starred
        ])
        message_tree = dict((m.id, m) for m in self.sudo())
        self._message_read_dict_postprocess(message_values, message_tree)

        # add subtype data (is_note flag, subtype_description). Do it as sudo
        # because portal / public may have to look for internal subtypes
        subtype_ids = [msg['subtype_id'][0] for msg in message_values if msg['subtype_id']]
        subtypes = self.env['mail.message.subtype'].sudo().browse(subtype_ids).read(['internal', 'description'])
        subtypes_dict = dict((subtype['id'], subtype) for subtype in subtypes)

        # fetch notification status
        notif_dict = {}
        notifs = self.env['mail.notification'].sudo().search([('mail_message_id', 'in', list(mid for mid in message_tree)), ('is_read', '=', False)])
        for notif in notifs:
            mid = notif.mail_message_id.id
            if not notif_dict.get(mid):
                notif_dict[mid] = {'partner_id': list()}
            notif_dict[mid]['partner_id'].append(notif.res_partner_id.id)

        for message in message_values:
            message['needaction_partner_ids'] = notif_dict.get(message['id'], dict()).get('partner_id', [])
            message['is_note'] = message['subtype_id'] and subtypes_dict[message['subtype_id'][0]]['internal']
            message['subtype_description'] = message['subtype_id'] and subtypes_dict[message['subtype_id'][0]]['description']
            if message['model'] and self.env[message['model']]._original_module:
                message['module_icon'] = modules.module.get_module_icon(self.env[message['model']]._original_module)
        return message_values

    #------------------------------------------------------
    # mail_message internals
    #------------------------------------------------------

    @api.model_cr
    def init(self):
        self._cr.execute("""SELECT indexname FROM pg_indexes WHERE indexname = 'mail_message_model_res_id_idx'""")
        if not self._cr.fetchone():
            self._cr.execute("""CREATE INDEX mail_message_model_res_id_idx ON mail_message (model, res_id)""")

    @api.model
    def _find_allowed_model_wise(self, doc_model, doc_dict):
        doc_ids = list(doc_dict)
        allowed_doc_ids = self.env[doc_model].with_context(active_test=False).search([('id', 'in', doc_ids)]).ids
        return set([message_id for allowed_doc_id in allowed_doc_ids for message_id in doc_dict[allowed_doc_id]])

    @api.model
    def _find_allowed_doc_ids(self, model_ids):
        IrModelAccess = self.env['ir.model.access']
        allowed_ids = set()
        for doc_model, doc_dict in model_ids.items():
            if not IrModelAccess.check(doc_model, 'read', False):
                continue
            allowed_ids |= self._find_allowed_model_wise(doc_model, doc_dict)
        return allowed_ids

    @api.model
    def _search(self, args, offset=0, limit=None, order=None, count=False, access_rights_uid=None):
        """ Override that adds specific access rights of mail.message, to remove
        ids uid could not see according to our custom rules. Please refer to
        check_access_rule for more details about those rules.

        Non-employee users see only messages with a subtype (i.e. they do not see
        internal logs).

        After having received ids of a classic search, keep only:
        - if author_id == pid, uid is the author, OR
        - uid belongs to a notified channel, OR
        - uid is in the specified recipients, OR
        - uid has a notification on the message, OR
        - uid has read access to the related document if model, res_id
        - otherwise: remove the id
        """
        # Rules do not apply to administrator
        if self._uid == SUPERUSER_ID:
            return super(Message, self)._search(
                args, offset=offset, limit=limit, order=order,
                count=count, access_rights_uid=access_rights_uid)
        # Non-employees see only messages with a subtype (aka, no internal logs)
        if not self.env['res.users'].has_group('base.group_user'):
            args = ['&', '&', ('subtype_id', '!=', False), ('subtype_id.internal', '=', False)] + list(args)
        # Perform a super with count as False, to have the ids, not a counter
        ids = super(Message, self)._search(
            args, offset=offset, limit=limit, order=order,
            count=False, access_rights_uid=access_rights_uid)
        if not ids and count:
            return 0
        elif not ids:
            return ids

        pid = self.env.user.partner_id.id
        author_ids, partner_ids, channel_ids, allowed_ids = set([]), set([]), set([]), set([])
        model_ids = {}

        # check read access rights before checking the actual rules on the given ids
        super(Message, self.sudo(access_rights_uid or self._uid)).check_access_rights('read')

        self._cr.execute("""
            SELECT DISTINCT m.id, m.model, m.res_id, m.author_id,
                            COALESCE(partner_rel.res_partner_id, needaction_rel.res_partner_id),
                            channel_partner.channel_id as channel_id
            FROM "%s" m
            LEFT JOIN "mail_message_res_partner_rel" partner_rel
            ON partner_rel.mail_message_id = m.id AND partner_rel.res_partner_id = %%(pid)s
            LEFT JOIN "mail_message_res_partner_needaction_rel" needaction_rel
            ON needaction_rel.mail_message_id = m.id AND needaction_rel.res_partner_id = %%(pid)s
            LEFT JOIN "mail_message_mail_channel_rel" channel_rel
            ON channel_rel.mail_message_id = m.id
            LEFT JOIN "mail_channel" channel
            ON channel.id = channel_rel.mail_channel_id
            LEFT JOIN "mail_channel_partner" channel_partner
            ON channel_partner.channel_id = channel.id AND channel_partner.partner_id = %%(pid)s
            WHERE m.id = ANY (%%(ids)s)""" % self._table, dict(pid=pid, ids=ids))
        for id, rmod, rid, author_id, partner_id, channel_id in self._cr.fetchall():
            if author_id == pid:
                author_ids.add(id)
            elif partner_id == pid:
                partner_ids.add(id)
            elif channel_id:
                channel_ids.add(id)
            elif rmod and rid:
                model_ids.setdefault(rmod, {}).setdefault(rid, set()).add(id)

        allowed_ids = self._find_allowed_doc_ids(model_ids)

        final_ids = author_ids | partner_ids | channel_ids | allowed_ids

        if count:
            return len(final_ids)
        else:
            # re-construct a list based on ids, because set did not keep the original order
            id_list = [id for id in ids if id in final_ids]
            return id_list

    @api.multi
    def check_access_rule(self, operation):
        """ Access rules of mail.message:
            - read: if
                - author_id == pid, uid is the author OR
                - uid is in the recipients (partner_ids) OR
                - uid has been notified (needaction) OR
                - uid is a member of a listened channel (channel_ids.partner_ids) OR
                - uid has read access to the related document if model, res_id
                - otherwise: raise
            - create: if
                - no model, no res_id (private message) OR
                - pid in message_follower_ids if model, res_id OR
                - uid can read the parent OR
                - uid has write or create access on the related document if model, res_id, OR
                - otherwise: raise
            - write: if
                - author_id == pid, uid is the author, OR
                - uid is in the recipients (partner_ids) OR
                - uid has write or create access on the related document if model, res_id
                - otherwise: raise
            - unlink: if
                - uid has write or create access on the related document if model, res_id
                - otherwise: raise

        Specific case: non-employee users see only messages with a subtype (i.e. they
        do not see internal logs).
        """
        def _generate_model_record_ids(msg_val, msg_ids):
            """ :param model_record_ids: {'model': {'res_id': (msg_id, msg_id)}, ... }
                :param message_values: {'msg_id': {'model': .., 'res_id': .., 'author_id': ..}}
            """
            model_record_ids = {}
            for id in msg_ids:
                vals = msg_val.get(id, {})
                if vals.get('model') and vals.get('res_id'):
                    model_record_ids.setdefault(vals['model'], set()).add(vals['res_id'])
            return model_record_ids

        if self._uid == SUPERUSER_ID:
            return
        # Non-employees see only messages with a subtype (aka, no internal logs)
        if not self.env['res.users'].has_group('base.group_user'):
            self._cr.execute('''SELECT DISTINCT message.id, message.subtype_id, subtype.internal
                                FROM "%s" AS message
                                LEFT JOIN "mail_message_subtype" as subtype
                                ON message.subtype_id = subtype.id
                                WHERE message.message_type = %%s AND (message.subtype_id IS NULL OR subtype.internal IS TRUE) AND message.id = ANY (%%s)''' % (self._table), ('comment', self.ids,))
            if self._cr.fetchall():
                raise AccessError(
                    _('The requested operation cannot be completed due to security restrictions. Please contact your system administrator.\n\n(Document type: %s, Operation: %s)') %
                    (self._description, operation))

        # Read mail_message.ids to have their values
        message_values = dict((res_id, {}) for res_id in self.ids)

        if operation in ['read', 'write']:
            self._cr.execute("""
                SELECT DISTINCT m.id, m.model, m.res_id, m.author_id, m.parent_id,
                                COALESCE(partner_rel.res_partner_id, needaction_rel.res_partner_id),
                                channel_partner.channel_id as channel_id
                FROM "%s" m
                LEFT JOIN "mail_message_res_partner_rel" partner_rel
                ON partner_rel.mail_message_id = m.id AND partner_rel.res_partner_id = %%(pid)s
                LEFT JOIN "mail_message_res_partner_needaction_rel" needaction_rel
                ON needaction_rel.mail_message_id = m.id AND needaction_rel.res_partner_id = %%(pid)s
                LEFT JOIN "mail_message_mail_channel_rel" channel_rel
                ON channel_rel.mail_message_id = m.id
                LEFT JOIN "mail_channel" channel
                ON channel.id = channel_rel.mail_channel_id
                LEFT JOIN "mail_channel_partner" channel_partner
                ON channel_partner.channel_id = channel.id AND channel_partner.partner_id = %%(pid)s
                WHERE m.id = ANY (%%(ids)s)""" % self._table, dict(pid=self.env.user.partner_id.id, ids=self.ids))
            for mid, rmod, rid, author_id, parent_id, partner_id, channel_id in self._cr.fetchall():
                message_values[mid] = {
                    'model': rmod,
                    'res_id': rid,
                    'author_id': author_id,
                    'parent_id': parent_id,
                    'notified': any((message_values[mid].get('notified'), partner_id, channel_id))
                }
        else:
            self._cr.execute("""SELECT DISTINCT id, model, res_id, author_id, parent_id FROM "%s" WHERE id = ANY (%%s)""" % self._table, (self.ids,))
            for mid, rmod, rid, author_id, parent_id in self._cr.fetchall():
                message_values[mid] = {'model': rmod, 'res_id': rid, 'author_id': author_id, 'parent_id': parent_id}

        # Author condition (READ, WRITE, CREATE (private))
        author_ids = []
        if operation == 'read' or operation == 'write':
            author_ids = [mid for mid, message in message_values.items()
                          if message.get('author_id') and message.get('author_id') == self.env.user.partner_id.id]
        elif operation == 'create':
            author_ids = [mid for mid, message in message_values.items()
                          if not message.get('model') and not message.get('res_id')]

        # Parent condition, for create (check for received notifications for the created message parent)
        notified_ids = []
        if operation == 'create':
            # TDE: probably clean me
            parent_ids = [message.get('parent_id') for message in message_values.values()
                          if message.get('parent_id')]
            self._cr.execute("""SELECT DISTINCT m.id, partner_rel.res_partner_id, channel_partner.partner_id FROM "%s" m
                LEFT JOIN "mail_message_res_partner_rel" partner_rel
                ON partner_rel.mail_message_id = m.id AND partner_rel.res_partner_id = (%%s)
                LEFT JOIN "mail_message_mail_channel_rel" channel_rel
                ON channel_rel.mail_message_id = m.id
                LEFT JOIN "mail_channel" channel
                ON channel.id = channel_rel.mail_channel_id
                LEFT JOIN "mail_channel_partner" channel_partner
                ON channel_partner.channel_id = channel.id AND channel_partner.partner_id = (%%s)
                WHERE m.id = ANY (%%s)""" % self._table, (self.env.user.partner_id.id, self.env.user.partner_id.id, parent_ids,))
            not_parent_ids = [mid[0] for mid in self._cr.fetchall() if any([mid[1], mid[2]])]
            notified_ids += [mid for mid, message in message_values.items()
                             if message.get('parent_id') in not_parent_ids]

        # Recipients condition, for read and write (partner_ids) and create (message_follower_ids)
        other_ids = set(self.ids).difference(set(author_ids), set(notified_ids))
        model_record_ids = _generate_model_record_ids(message_values, other_ids)
        if operation in ['read', 'write']:
            notified_ids = [mid for mid, message in message_values.items() if message.get('notified')]
        elif operation == 'create':
            for doc_model, doc_ids in model_record_ids.items():
                followers = self.env['mail.followers'].sudo().search([
                    ('res_model', '=', doc_model),
                    ('res_id', 'in', list(doc_ids)),
                    ('partner_id', '=', self.env.user.partner_id.id),
                    ])
                fol_mids = [follower.res_id for follower in followers]
                notified_ids += [mid for mid, message in message_values.items()
                                 if message.get('model') == doc_model and message.get('res_id') in fol_mids]

        # CRUD: Access rights related to the document
        other_ids = other_ids.difference(set(notified_ids))
        model_record_ids = _generate_model_record_ids(message_values, other_ids)
        document_related_ids = []
        for model, doc_ids in model_record_ids.items():
            DocumentModel = self.env[model]
            mids = DocumentModel.browse(doc_ids).exists()
            if hasattr(DocumentModel, 'check_mail_message_access'):
                DocumentModel.check_mail_message_access(mids.ids, operation)  # ?? mids ?
            else:
                self.env['mail.thread'].check_mail_message_access(mids.ids, operation, model_name=model)
            document_related_ids += [mid for mid, message in message_values.items()
                                     if message.get('model') == model and message.get('res_id') in mids.ids]

        # Calculate remaining ids: if not void, raise an error
        other_ids = other_ids.difference(set(document_related_ids))
        if not (other_ids and self.browse(other_ids).exists()):
            return
        raise AccessError(
            _('The requested operation cannot be completed due to security restrictions. Please contact your system administrator.\n\n(Document type: %s, Operation: %s)') %
            (self._description, operation))

    @api.model
    def _get_record_name(self, values):
        """ Return the related document name, using name_get. It is done using
            SUPERUSER_ID, to be sure to have the record name correctly stored. """
        model = values.get('model', self.env.context.get('default_model'))
        res_id = values.get('res_id', self.env.context.get('default_res_id'))
        if not model or not res_id or model not in self.env:
            return False
        return self.env[model].sudo().browse(res_id).name_get()[0][1]

    @api.model
    def _get_reply_to(self, values):
        """ Return a specific reply_to: alias of the document through
        message_get_reply_to or take the email_from """
        model, res_id, email_from = values.get('model', self._context.get('default_model')), values.get('res_id', self._context.get('default_res_id')), values.get('email_from')  # ctx values / defualt_get res ?
        if model and hasattr(self.env[model], 'message_get_reply_to'):
            # return self.env[model].browse(res_id).message_get_reply_to([res_id], default=email_from)[res_id]
            return self.env[model].message_get_reply_to([res_id], default=email_from)[res_id]
        else:
            # return self.env['mail.thread'].message_get_reply_to(default=email_from)[None]
            return self.env['mail.thread'].message_get_reply_to([None], default=email_from)[None]

    @api.model
    def _get_message_id(self, values):
        if values.get('no_auto_thread', False) is True:
            message_id = tools.generate_tracking_message_id('reply_to')
        elif values.get('res_id') and values.get('model'):
            message_id = tools.generate_tracking_message_id('%(res_id)s-%(model)s' % values)
        else:
            message_id = tools.generate_tracking_message_id('private')
        return message_id

    @api.multi
    def _invalidate_documents(self):
        """ Invalidate the cache of the documents followed by ``self``. """
        for record in self:
            if record.model and record.res_id:
                self.env[record.model].invalidate_cache(ids=[record.res_id])

    @api.model
    def create(self, values):
        # coming from mail.js that does not have pid in its values
        if self.env.context.get('default_starred'):
            self = self.with_context({'default_starred_partner_ids': [(4, self.env.user.partner_id.id)]})

        if 'email_from' not in values:  # needed to compute reply_to
            values['email_from'] = self._get_default_from()
        if not values.get('message_id'):
            values['message_id'] = self._get_message_id(values)
        if 'reply_to' not in values:
            values['reply_to'] = self._get_reply_to(values)
        if 'record_name' not in values and 'default_record_name' not in self.env.context:
            values['record_name'] = self._get_record_name(values)

        if 'attachment_ids' not in values:
            values.setdefault('attachment_ids', [])

        # extract base64 images
        if 'body' in values:
            Attachments = self.env['ir.attachment']
            data_to_url = {}
            def base64_to_boundary(match):
                key = match.group(2)
                if not data_to_url.get(key):
                    name = 'image%s' % len(data_to_url)
                    attachment = Attachments.create({
                        'name': name,
                        'datas': match.group(2),
                        'datas_fname': name,
                        'res_model': 'mail.message',
                    })
                    attachment.generate_access_token()
                    values['attachment_ids'].append((4, attachment.id))
                    data_to_url[key] = ['/web/image/%s?access_token=%s' % (attachment.id, attachment.access_token), name]
                return '%s%s alt="%s"' % (data_to_url[key][0], match.group(3), data_to_url[key][1])
            values['body'] = _image_dataurl.sub(base64_to_boundary, tools.ustr(values['body']))

        # delegate creation of tracking after the create as sudo to avoid access rights issues
        tracking_values_cmd = values.pop('tracking_value_ids', False)
        message = super(Message, self).create(values)
        if tracking_values_cmd:
            message.sudo().write({'tracking_value_ids': tracking_values_cmd})

        message._invalidate_documents()

        if not self.env.context.get('message_create_from_mail_mail'):
            message._notify(force_send=self.env.context.get('mail_notify_force_send', True),
                            user_signature=self.env.context.get('mail_notify_user_signature', True))
        return message

    @api.multi
    def read(self, fields=None, load='_classic_read'):
        """ Override to explicitely call check_access_rule, that is not called
            by the ORM. It instead directly fetches ir.rules and apply them. """
        self.check_access_rule('read')
        return super(Message, self).read(fields=fields, load=load)

    @api.multi
    def write(self, vals):
        if 'model' in vals or 'res_id' in vals:
            self._invalidate_documents()
        res = super(Message, self).write(vals)
        self._invalidate_documents()
        return res

    @api.multi
    def unlink(self):
        # cascade-delete attachments that are directly attached to the message (should only happen
        # for mail.messages that act as parent for a standalone mail.mail record).
        self.check_access_rule('unlink')
        self.mapped('attachment_ids').filtered(
            lambda attach: attach.res_model == self._name and (attach.res_id in self.ids or attach.res_id == 0)
        ).unlink()
        self._invalidate_documents()
        return super(Message, self).unlink()

    #------------------------------------------------------
    # Messaging API
    #------------------------------------------------------

    @api.multi
    def _notify(self, force_send=False, send_after_commit=True, user_signature=True):
        """ Compute recipients to notify based on specified recipients and document
        followers. Delegate notification to partners to send emails and bus notifications
        and to channels to broadcast messages on channels """
        group_user = self.env.ref('base.group_user')
        # have a sudoed copy to manipulate partners (public can go here with website modules like forum / blog / ... )
        self_sudo = self.sudo()

        self.ensure_one()
        partners_sudo = self_sudo.partner_ids
        channels_sudo = self_sudo.channel_ids

        # all followers of the mail.message document have to be added as partners and notified
        # and filter to employees only if the subtype is internal
        if self_sudo.subtype_id and self.model and self.res_id:
            followers = self_sudo.env['mail.followers'].search([
                ('res_model', '=', self.model),
                ('res_id', '=', self.res_id),
                ('subtype_ids', 'in', self_sudo.subtype_id.id),
            ])
            if self_sudo.subtype_id.internal:
                followers = followers.filtered(lambda fol: fol.channel_id or (fol.partner_id.user_ids and group_user in fol.partner_id.user_ids[0].mapped('groups_id')))
            channels_sudo |= followers.mapped('channel_id')
            partners_sudo |= followers.mapped('partner_id')

        # remove author from notified partners
        if not self._context.get('mail_notify_author', False) and self_sudo.author_id:
            partners_sudo = partners_sudo - self_sudo.author_id

        # update message, with maybe custom values
        message_values = {}
        if channels_sudo:
            message_values['channel_ids'] = [(6, 0, channels_sudo.ids)]
        if partners_sudo:
            message_values['needaction_partner_ids'] = [(6, 0, partners_sudo.ids)]
        if self.model and self.res_id and hasattr(self.env[self.model], 'message_get_message_notify_values'):
            message_values.update(self.env[self.model].browse(self.res_id).message_get_message_notify_values(self, message_values))
        if message_values:
            self.write(message_values)

        # notify partners and channels
        # those methods are called as SUPERUSER because portal users posting messages
        # have no access to partner model. Maybe propagating a real uid could be necessary.
        email_channels = channels_sudo.filtered(lambda channel: channel.email_send)
        notif_partners = partners_sudo.filtered(lambda partner: 'inbox' in partner.mapped('user_ids.notification_type'))
        if email_channels or partners_sudo - notif_partners:
            partners_sudo.search([
                '|',
                ('id', 'in', (partners_sudo - notif_partners).ids),
                ('channel_ids', 'in', email_channels.ids),
                ('email', '!=', self_sudo.author_id.email or self_sudo.email_from),
            ])._notify(self, force_send=force_send, send_after_commit=send_after_commit, user_signature=user_signature)

        notif_partners._notify_by_chat(self)

        channels_sudo._notify(self)

        # Discard cache, because child / parent allow reading and therefore
        # change access rights.
        if self.parent_id:
            self.parent_id.invalidate_cache()

        return True
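
A short usage sketch for the create()/_notify() pair above; it only exercises the context keys this snippet itself reads ('mail_notify_force_send' and 'mail_notify_author'). The environment handle, the partner record and the posted values are hypothetical.

# Minimal sketch, assuming `env` and `partner` exist (e.g. in an Odoo shell session).
message = env['mail.message'].with_context(
    mail_notify_force_send=False,  # read in create(): let notification emails be queued instead of sent immediately
    mail_notify_author=True,       # read in _notify(): keep the author among the notified partners
).create({
    'body': '<p>Status update</p>',
    'model': 'res.partner',
    'res_id': partner.id,
    'partner_ids': [(4, partner.id)],
})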
Example #16
class AccountBankStatementImport(models.TransientModel):
    _name = 'account.bank.statement.import'
    _description = 'Import Bank Statement'

    data_file = fields.Binary(
        string='Bank Statement File',
        required=True,
        help=
        'Get your bank statements in electronic format from your bank and select them here.'
    )
    filename = fields.Char()

    @api.multi
    def import_file(self):
        """ Process the file chosen in the wizard, create bank statement(s) and go to reconciliation. """
        self.ensure_one()
        # Let the appropriate implementation module parse the file and return the required data
        # The active_id is passed in context in case an implementation module requires information about the wizard state (see QIF)
        currency_code, account_number, stmts_vals = self.with_context(
            active_id=self.ids[0])._parse_file(base64.b64decode(
                self.data_file))
        # Check raw data
        self._check_parsed_data(stmts_vals)
        # Try to find the currency and journal in izi
        currency, journal = self._find_additional_data(currency_code,
                                                       account_number)
        # If no journal found, ask the user about creating one
        if not journal:
            # The active_id is passed in context so the wizard can call import_file again once the journal is created
            return self.with_context(
                active_id=self.ids[0])._journal_creation_wizard(
                    currency, account_number)
        if not journal.default_debit_account_id or not journal.default_credit_account_id:
            raise UserError(
                _('You have to set a Default Debit Account and a Default Credit Account for the journal: %s'
                  ) % (journal.name, ))
        # Prepare statement data to be used for bank statements creation
        stmts_vals = self._complete_stmts_vals(stmts_vals, journal,
                                               account_number)
        # Create the bank statements
        statement_ids, notifications = self._create_bank_statements(stmts_vals)
        # Now that the import worked out, set it as the bank_statements_source of the journal
        journal.bank_statements_source = 'file_import'
        # Finally dispatch to reconciliation interface
        action = self.env.ref('account.action_bank_reconcile_bank_statements')
        return {
            'name': action.name,
            'tag': action.tag,
            'context': {
                'statement_ids': statement_ids,
                'notifications': notifications
            },
            'type': 'ir.actions.client',
        }

    def _journal_creation_wizard(self, currency, account_number):
        """ Calls a wizard that allows the user to carry on with journal creation """
        return {
            'name': _('Journal Creation'),
            'type': 'ir.actions.act_window',
            'res_model': 'account.bank.statement.import.journal.creation',
            'view_type': 'form',
            'view_mode': 'form',
            'target': 'new',
            'context': {
                'statement_import_transient_id': self.env.context['active_id'],
                'default_bank_acc_number': account_number,
                'default_name': _('Bank') + ' ' + account_number,
                'default_currency_id': currency and currency.id or False,
                'default_type': 'bank',
            }
        }

    def _parse_file(self, data_file):
        """ Each module adding a file support must extends this method. It processes the file if it can, returns super otherwise, resulting in a chain of responsability.
            This method parses the given file and returns the data required by the bank statement import process, as specified below.
            rtype: triplet (if a value can't be retrieved, use None)
                - currency code: string (e.g: 'EUR')
                    The ISO 4217 currency code, case insensitive
                - account number: string (e.g: 'BE1234567890')
                    The number of the bank account which the statement belongs to
                - bank statements data: list of dict containing (optional items marked by o) :
                    - 'name': string (e.g: '000000123')
                    - 'date': date (e.g: 2013-06-26)
                    -o 'balance_start': float (e.g: 8368.56)
                    -o 'balance_end_real': float (e.g: 8888.88)
                    - 'transactions': list of dict containing :
                        - 'name': string (e.g: 'KBC-INVESTERINGSKREDIET 787-5562831-01')
                        - 'date': date
                        - 'amount': float
                        - 'unique_import_id': string
                        -o 'account_number': string
                            Will be used to find/create the res.partner.bank in izi
                        -o 'note': string
                        -o 'partner_name': string
                        -o 'ref': string
        """
        raise UserError(
            _('Could not make sense of the given file.\nDid you install the module to support this type of file ?'
              ))
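
    # Illustrative only (not from the original module): a return value matching the
    # triplet documented in _parse_file() above could look like:
    #   ('EUR', 'BE1234567890', [{
    #       'name': '000000123',
    #       'date': '2013-06-26',
    #       'balance_start': 8368.56,
    #       'balance_end_real': 8888.88,
    #       'transactions': [{
    #           'name': 'KBC-INVESTERINGSKREDIET 787-5562831-01',
    #           'date': '2013-06-26',
    #           'amount': 100.0,
    #           'unique_import_id': 'BE1234567890-000000123-1',  # format is up to the importer
    #       }],
    #   }])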

    def _check_parsed_data(self, stmts_vals):
        """ Basic and structural verifications """
        if len(stmts_vals) == 0:
            raise UserError(_('This file doesn\'t contain any statement.'))

        no_st_line = True
        for vals in stmts_vals:
            if vals['transactions'] and len(vals['transactions']) > 0:
                no_st_line = False
                break
        if no_st_line:
            raise UserError(_('This file doesn\'t contain any transaction.'))

    def _check_journal_bank_account(self, journal, account_number):
        return journal.bank_account_id.sanitized_acc_number == account_number

    def _find_additional_data(self, currency_code, account_number):
        """ Look for a res.currency and account.journal using values extracted from the
            statement and make sure it's consistent.
        """
        company_currency = self.env.user.company_id.currency_id
        journal_obj = self.env['account.journal']
        currency = None
        sanitized_account_number = sanitize_account_number(account_number)

        if currency_code:
            currency = self.env['res.currency'].search(
                [('name', '=ilike', currency_code)], limit=1)
            if not currency:
                raise UserError(
                    _("No currency found matching '%s'.") % currency_code)
            if currency == company_currency:
                currency = False

        journal = journal_obj.browse(self.env.context.get('journal_id', []))
        if account_number:
            # No bank account on the journal : create one from the account number of the statement
            if journal and not journal.bank_account_id:
                journal.set_bank_account(account_number)
            # No journal passed to the wizard : try to find one using the account number of the statement
            elif not journal:
                journal = journal_obj.search([
                    ('bank_account_id.sanitized_acc_number', '=',
                     sanitized_account_number)
                ])
            # Already a bank account on the journal : check it's the same as on the statement
            else:
                if not self._check_journal_bank_account(
                        journal, sanitized_account_number):
                    raise UserError(
                        _('The account of this statement (%s) is not the same as the journal (%s).'
                          ) %
                        (account_number, journal.bank_account_id.acc_number))

        # If importing into an existing journal, its currency must be the same as the bank statement
        if journal:
            journal_currency = journal.currency_id
            if currency is None:
                currency = journal_currency
            if currency and currency != journal_currency:
                statement_cur_code = not currency and company_currency.name or currency.name
                journal_cur_code = not journal_currency and company_currency.name or journal_currency.name
                raise UserError(
                    _('The currency of the bank statement (%s) is not the same as the currency of the journal (%s) !'
                      ) % (statement_cur_code, journal_cur_code))

        # If we couldn't find / can't create a journal, everything is lost
        if not journal and not account_number:
            raise UserError(
                _('Cannot find in which journal to import this statement. Please manually select a journal.'
                  ))

        return currency, journal

    def _complete_stmts_vals(self, stmts_vals, journal, account_number):
        for st_vals in stmts_vals:
            st_vals['journal_id'] = journal.id
            if not st_vals.get('reference'):
                st_vals['reference'] = self.filename
            if st_vals.get('number'):
                #build the full name like BNK/2016/00135 by just giving the number '135'
                st_vals['name'] = journal.sequence_id.with_context(
                    ir_sequence_date=st_vals.get('date')).get_next_char(
                        st_vals['number'])
                del (st_vals['number'])
            for line_vals in st_vals['transactions']:
                unique_import_id = line_vals.get('unique_import_id')
                if unique_import_id:
                    sanitized_account_number = sanitize_account_number(
                        account_number)
                    line_vals['unique_import_id'] = (
                        sanitized_account_number and sanitized_account_number +
                        '-' or '') + str(journal.id) + '-' + unique_import_id

                if not line_vals.get('bank_account_id'):
                    # Find the partner and his bank account or create the bank account. The partner selected during the
                    # reconciliation process will be linked to the bank when the statement is closed.
                    partner_id = False
                    bank_account_id = False
                    identifying_string = line_vals.get('account_number')
                    if identifying_string:
                        partner_bank = self.env['res.partner.bank'].search(
                            [('acc_number', '=', identifying_string)], limit=1)
                        if partner_bank:
                            bank_account_id = partner_bank.id
                            partner_id = partner_bank.partner_id.id
                        else:
                            bank_account_id = self.env[
                                'res.partner.bank'].create({
                                    'acc_number':
                                    line_vals['account_number'],
                                    'partner_id':
                                    False,
                                }).id
                    line_vals['partner_id'] = partner_id
                    line_vals['bank_account_id'] = bank_account_id

        return stmts_vals

    def _create_bank_statements(self, stmts_vals):
        """ Create new bank statements from imported values, filtering out already imported transactions, and returns data used by the reconciliation widget """
        BankStatement = self.env['account.bank.statement']
        BankStatementLine = self.env['account.bank.statement.line']

        # Filter out already imported transactions and create statements
        statement_ids = []
        ignored_statement_lines_import_ids = []
        for st_vals in stmts_vals:
            filtered_st_lines = []
            for line_vals in st_vals['transactions']:
                if 'unique_import_id' not in line_vals \
                   or not line_vals['unique_import_id'] \
                   or not bool(BankStatementLine.sudo().search([('unique_import_id', '=', line_vals['unique_import_id'])], limit=1)):
                    filtered_st_lines.append(line_vals)
                else:
                    ignored_statement_lines_import_ids.append(
                        line_vals['unique_import_id'])
                    if 'balance_start' in st_vals:
                        st_vals['balance_start'] += float(line_vals['amount'])

            if len(filtered_st_lines) > 0:
                # Remove values that won't be used to create records
                st_vals.pop('transactions', None)
                for line_vals in filtered_st_lines:
                    line_vals.pop('account_number', None)
                # Create the statement
                st_vals['line_ids'] = [[0, False, line]
                                       for line in filtered_st_lines]
                statement_ids.append(BankStatement.create(st_vals).id)
        if len(statement_ids) == 0:
            raise UserError(_('You have already imported that file.'))

        # Prepare import feedback
        notifications = []
        num_ignored = len(ignored_statement_lines_import_ids)
        if num_ignored > 0:
            notifications += [{
                'type':
                'warning',
                'message':
                _("%d transactions had already been imported and were ignored."
                  ) % num_ignored if num_ignored > 1 else
                _("1 transaction had already been imported and was ignored."),
                'details': {
                    'name':
                    _('Already imported items'),
                    'model':
                    'account.bank.statement.line',
                    'ids':
                    BankStatementLine.search([
                        ('unique_import_id', 'in',
                         ignored_statement_lines_import_ids)
                    ]).ids
                }
            }]
        return statement_ids, notifications
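
A hedged sketch of how a format-specific importer module might plug into the wizard above by extending _parse_file(); the 'date;amount;label' CSV layout, the class name and the resulting statement values are assumptions for illustration, not an existing izi module. `models` is assumed to be imported as in the snippet above.

import csv
import io


class AccountBankStatementImportCsvSketch(models.TransientModel):
    _inherit = 'account.bank.statement.import'

    def _parse_file(self, data_file):
        try:
            # try to read the raw bytes as a 'date;amount;label' CSV
            rows = [r for r in csv.reader(io.StringIO(data_file.decode('utf-8')), delimiter=';') if r]
            transactions = [{
                'name': label,
                'date': date,
                'amount': float(amount),
                'unique_import_id': '%s-%s-%s' % (date, amount, label),
            } for date, amount, label in rows]
        except (UnicodeDecodeError, csv.Error, ValueError):
            # not our format: defer to the next implementation in the chain of responsibility
            return super(AccountBankStatementImportCsvSketch, self)._parse_file(data_file)
        if not transactions:
            return super(AccountBankStatementImportCsvSketch, self)._parse_file(data_file)
        # the currency code and account number cannot be guessed from this layout
        return None, None, [{
            'name': self.filename or 'CSV import',
            'date': transactions[0]['date'],
            'transactions': transactions,
        }]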
Example #17
class Channel(models.Model):
    """ A mail.channel is a discussion group that may behave like a listener
    on documents. """
    _description = 'Discussion channel'
    _name = 'mail.channel'
    _mail_flat_thread = False
    _mail_post_access = 'read'
    _inherit = ['mail.thread', 'mail.alias.mixin']

    MAX_BOUNCE_LIMIT = 10

    def _get_default_image(self):
        image_path = modules.get_module_resource('mail', 'static/src/img', 'groupdefault.png')
        return tools.image_resize_image_big(base64.b64encode(open(image_path, 'rb').read()))

    @api.model
    def default_get(self, fields):
        res = super(Channel, self).default_get(fields)
        if not res.get('alias_contact') and (not fields or 'alias_contact' in fields):
            res['alias_contact'] = 'everyone' if res.get('public', 'private') == 'public' else 'followers'
        return res

    name = fields.Char('Name', required=True, translate=True)
    channel_type = fields.Selection([
        ('chat', 'Chat Discussion'),
        ('channel', 'Channel')],
        'Channel Type', default='channel')
    description = fields.Text('Description')
    uuid = fields.Char('UUID', size=50, index=True, default=lambda self: '%s' % uuid.uuid4(), copy=False)
    email_send = fields.Boolean('Send messages by email', default=False)
    # multi users channel
    channel_last_seen_partner_ids = fields.One2many('mail.channel.partner', 'channel_id', string='Last Seen')
    channel_partner_ids = fields.Many2many('res.partner', 'mail_channel_partner', 'channel_id', 'partner_id', string='Listeners')
    channel_message_ids = fields.Many2many('mail.message', 'mail_message_mail_channel_rel')
    is_member = fields.Boolean('Is a member', compute='_compute_is_member')
    # access
    public = fields.Selection([
        ('public', 'Everyone'),
        ('private', 'Invited people only'),
        ('groups', 'Selected group of users')],
        'Privacy', required=True, default='groups',
        help='This group is visible to non-members. Invisible groups can add members through the invite button.')
    group_public_id = fields.Many2one('res.groups', string='Authorized Group',
                                      default=lambda self: self.env.ref('base.group_user'))
    group_ids = fields.Many2many(
        'res.groups', string='Auto Subscription',
        help="Members of those groups will automatically added as followers. "
             "Note that they will be able to manage their subscription manually "
             "if necessary.")
    # image: all image fields are base64 encoded and PIL-supported
    image = fields.Binary("Photo", default=_get_default_image, attachment=True,
        help="This field holds the image used as photo for the group, limited to 1024x1024px.")
    image_medium = fields.Binary('Medium-sized photo', attachment=True,
        help="Medium-sized photo of the group. It is automatically "
             "resized as a 128x128px image, with aspect ratio preserved. "
             "Use this field in form views or some kanban views.")
    image_small = fields.Binary('Small-sized photo', attachment=True,
        help="Small-sized photo of the group. It is automatically "
             "resized as a 64x64px image, with aspect ratio preserved. "
             "Use this field anywhere a small image is required.")
    is_subscribed = fields.Boolean(
        'Is Subscribed', compute='_compute_is_subscribed')

    @api.one
    @api.depends('channel_partner_ids')
    def _compute_is_subscribed(self):
        self.is_subscribed = self.env.user.partner_id in self.channel_partner_ids

    @api.multi
    def _compute_is_member(self):
        memberships = self.env['mail.channel.partner'].sudo().search([
            ('channel_id', 'in', self.ids),
            ('partner_id', '=', self.env.user.partner_id.id),
            ])
        membership_ids = memberships.mapped('channel_id')
        for record in self:
            record.is_member = record in membership_ids

    @api.onchange('public')
    def _onchange_public(self):
        if self.public == 'public':
            self.alias_contact = 'everyone'
        else:
            self.alias_contact = 'followers'

    @api.model
    def create(self, vals):
        tools.image_resize_images(vals)
        # Create channel and alias
        channel = super(Channel, self.with_context(
            alias_model_name=self._name, alias_parent_model_name=self._name, mail_create_nolog=True, mail_create_nosubscribe=True)
        ).create(vals)
        channel.alias_id.write({"alias_force_thread_id": channel.id, 'alias_parent_thread_id': channel.id})

        if vals.get('group_ids'):
            channel._subscribe_users()

        # make channel listen itself: posting on a channel notifies the channel
        if not self._context.get('mail_channel_noautofollow'):
            channel.message_subscribe(channel_ids=[channel.id])

        return channel

    @api.multi
    def unlink(self):
        aliases = self.mapped('alias_id')

        # Delete mail.channel
        try:
            all_emp_group = self.env.ref('mail.channel_all_employees')
        except ValueError:
            all_emp_group = None
        if all_emp_group and all_emp_group in self:
            raise UserError(_('You cannot delete those groups, as the Whole Company group is required by other modules.'))
        res = super(Channel, self).unlink()
        # Cascade-delete mail aliases as well, as they should not exist without the mail.channel.
        aliases.sudo().unlink()
        return res

    @api.multi
    def write(self, vals):
        tools.image_resize_images(vals)
        result = super(Channel, self).write(vals)
        if vals.get('group_ids'):
            self._subscribe_users()
        return result

    def get_alias_model_name(self, vals):
        return vals.get('alias_model', 'mail.channel')

    def _subscribe_users(self):
        for mail_channel in self:
            mail_channel.write({'channel_partner_ids': [(4, pid) for pid in mail_channel.mapped('group_ids').mapped('users').mapped('partner_id').ids]})

    @api.multi
    def action_follow(self):
        self.ensure_one()
        channel_partner = self.mapped('channel_last_seen_partner_ids').filtered(lambda cp: cp.partner_id == self.env.user.partner_id)
        if not channel_partner:
            return self.write({'channel_last_seen_partner_ids': [(0, 0, {'partner_id': self.env.user.partner_id.id})]})

    @api.multi
    def action_unfollow(self):
        return self._action_unfollow(self.env.user.partner_id)

    @api.multi
    def _action_unfollow(self, partner):
        channel_info = self.channel_info('unsubscribe')[0]  # must be computed before leaving the channel (access rights)
        result = self.write({'channel_partner_ids': [(3, partner.id)]})
        self.env['bus.bus'].sendone((self._cr.dbname, 'res.partner', partner.id), channel_info)
        if not self.email_send:
            notification = _('<div class="o_mail_notification">left <a href="#" class="o_channel_redirect" data-oe-id="%s">#%s</a></div>') % (self.id, self.name,)
            # post 'channel left' message as root since the partner just unsubscribed from the channel
            self.sudo().message_post(body=notification, message_type="notification", subtype="mail.mt_comment", author_id=partner.id)
        return result

    @api.multi
    def _notification_recipients(self, message, groups):
        """ All recipients of a message on a channel are considered as partners.
        This means they will receive a minimal email, without a link to access
        in the backend. Mailing lists should indeed send minimal emails to avoid
        the noise. """
        groups = super(Channel, self)._notification_recipients(message, groups)
        for (index, (group_name, group_func, group_data)) in enumerate(groups):
            if group_name != 'customer':
                groups[index] = (group_name, lambda partner: False, group_data)
        return groups

    @api.multi
    def message_get_email_values(self, notif_mail=None):
        self.ensure_one()
        res = super(Channel, self).message_get_email_values(notif_mail=notif_mail)
        headers = {}
        if res.get('headers'):
            try:
                headers.update(safe_eval(res['headers']))
            except Exception:
                pass
        headers['Precedence'] = 'list'
        # avoid out-of-office replies from MS Exchange
        # http://blogs.technet.com/b/exchange/archive/2006/10/06/3395024.aspx
        headers['X-Auto-Response-Suppress'] = 'OOF'
        if self.alias_domain and self.alias_name:
            headers['List-Id'] = '<%s.%s>' % (self.alias_name, self.alias_domain)
            headers['List-Post'] = '<mailto:%s@%s>' % (self.alias_name, self.alias_domain)
            # Avoid users thinking it was a personal message
            # X-Forge-To: will replace To: after SMTP envelope is determined by ir.mail.server
            list_to = '"%s" <%s@%s>' % (self.name, self.alias_name, self.alias_domain)
            headers['X-Forge-To'] = list_to
        res['headers'] = repr(headers)
        return res

    @api.multi
    def message_receive_bounce(self, email, partner, mail_id=None):
        """ Override bounce management to unsubscribe bouncing addresses """
        for p in partner:
            if p.message_bounce >= self.MAX_BOUNCE_LIMIT:
                self._action_unfollow(p)
        return super(Channel, self).message_receive_bounce(email, partner, mail_id=mail_id)

    @api.multi
    def message_get_recipient_values(self, notif_message=None, recipient_ids=None):
        # real mailing list: multiple recipients (hidden by X-Forge-To)
        if self.alias_domain and self.alias_name:
            return {
                'email_to': ','.join(formataddr((partner.name, partner.email)) for partner in self.env['res.partner'].sudo().browse(recipient_ids)),
                'recipient_ids': [],
            }
        return super(Channel, self).message_get_recipient_values(notif_message=notif_message, recipient_ids=recipient_ids)

    @api.multi
    @api.returns('self', lambda value: value.id)
    def message_post(self, body='', subject=None, message_type='notification', subtype=None, parent_id=False, attachments=None, content_subtype='html', **kwargs):
        # auto pin 'direct_message' channel partner
        self.filtered(lambda channel: channel.channel_type == 'chat').mapped('channel_last_seen_partner_ids').write({'is_pinned': True})
        message = super(Channel, self.with_context(mail_create_nosubscribe=True)).message_post(body=body, subject=subject, message_type=message_type, subtype=subtype, parent_id=parent_id, attachments=attachments, content_subtype=content_subtype, **kwargs)
        return message

    def _alias_check_contact(self, message, message_dict, alias):
        if alias.alias_contact == 'followers' and self.ids:
            author = self.env['res.partner'].browse(message_dict.get('author_id', False))
            if not author or author not in self.channel_partner_ids:
                return {
                    'error_message': _('restricted to channel members'),
                }
            return True
        return super(Channel, self)._alias_check_contact(message, message_dict, alias)

    @api.model_cr
    def init(self):
        self._cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = %s', ('mail_channel_partner_seen_message_id_idx',))
        if not self._cr.fetchone():
            self._cr.execute('CREATE INDEX mail_channel_partner_seen_message_id_idx ON mail_channel_partner (channel_id,partner_id,seen_message_id)')

    #------------------------------------------------------
    # Instant Messaging API
    #------------------------------------------------------
    # A channel header should be broadcast:
    #   - when adding a user to a channel (only to the newly added partners)
    #   - when folding/minimizing a channel (only to the user making the action)
    # A message should be broadcast:
    #   - when a message is posted on a channel (to the channel, using the _notify() method)

    # Anonymous method
    @api.multi
    def _broadcast(self, partner_ids):
        """ Broadcast the current channel header to the given partner ids
            :param partner_ids : the partners to notify
        """
        notifications = self._channel_channel_notifications(partner_ids)
        self.env['bus.bus'].sendmany(notifications)

    @api.multi
    def _channel_channel_notifications(self, partner_ids):
        """ Generate the bus notifications of current channel for the given partner ids
            :param partner_ids : the partner to send the current channel header
            :returns list of bus notifications (tuple (bus_channe, message_content))
        """
        notifications = []
        for partner in self.env['res.partner'].browse(partner_ids):
            user_id = partner.user_ids and partner.user_ids[0] or False
            if user_id:
                for channel_info in self.sudo(user_id).channel_info():
                    notifications.append([(self._cr.dbname, 'res.partner', partner.id), channel_info])
        return notifications

    @api.multi
    def _notify(self, message):
        """ Broadcast the given message on the current channels.
            Send the message on the bus channel (the uuid for a public mail.channel, and the partner's private bus channel (the tuple)).
            A partner will receive only one message on its bus channel, even if this message belongs to multiple mail channels. The 'channel_ids' field
            of the received message then indicates on which mail channel the message should be displayed.
            :param message : the mail.message to broadcast
        """
        if not self:
            return
        message.ensure_one()
        notifications = self._channel_message_notifications(message)
        self.env['bus.bus'].sendmany(notifications)

    @api.multi
    def _channel_message_notifications(self, message):
        """ Generate the bus notifications for the given message
            :param message : the mail.message to send
            :returns list of bus notifications (tuple (bus_channel, message_content))
        """
        message_values = message.message_format()[0]
        notifications = []
        for channel in self:
            notifications.append([(self._cr.dbname, 'mail.channel', channel.id), dict(message_values)])
            # add uuid to allow anonymous to listen
            if channel.public == 'public':
                notifications.append([channel.uuid, dict(message_values)])
        return notifications

    @api.multi
    def channel_info(self, extra_info=False):
        """ Get the information header for the current channels
            :returns a list of channel values
            :rtype : list(dict)
        """
        channel_infos = []
        partner_channels = self.env['mail.channel.partner']
        # find the channel partner state, if logged user
        if self.env.user and self.env.user.partner_id:
            partner_channels = self.env['mail.channel.partner'].search([('partner_id', '=', self.env.user.partner_id.id), ('channel_id', 'in', self.ids)])
        # for each channel, build the information header and include the logged partner information
        for channel in self:
            info = {
                'id': channel.id,
                'name': channel.name,
                'uuid': channel.uuid,
                'state': 'open',
                'is_minimized': False,
                'channel_type': channel.channel_type,
                'public': channel.public,
                'mass_mailing': channel.email_send,
                'group_based_subscription': bool(channel.group_ids),
            }
            if extra_info:
                info['info'] = extra_info
            # add the partner for 'direct message' channel
            if channel.channel_type == 'chat':
                info['direct_partner'] = (channel.sudo()
                                          .with_context(active_test=False)
                                          .channel_partner_ids
                                          .filtered(lambda p: p.id != self.env.user.partner_id.id)
                                          .read(['id', 'name', 'im_status']))

            # add last message preview (only used in mobile)
            if self._context.get('isMobile', False):
                last_message = channel.channel_fetch_preview()
                if last_message:
                    info['last_message'] = last_message[0].get('last_message')

            # add user session state, if available and if user is logged
            if partner_channels.ids:
                partner_channel = partner_channels.filtered(lambda c: channel.id == c.channel_id.id)
                if len(partner_channel) >= 1:
                    partner_channel = partner_channel[0]
                    info['state'] = partner_channel.fold_state or 'open'
                    info['is_minimized'] = partner_channel.is_minimized
                    info['seen_message_id'] = partner_channel.seen_message_id.id
                # add needaction and unread counter, since the user is logged
                info['message_needaction_counter'] = channel.message_needaction_counter
                info['message_unread_counter'] = channel.message_unread_counter
            channel_infos.append(info)
        return channel_infos

    @api.multi
    def channel_fetch_message(self, last_id=False, limit=20):
        """ Return message values of the current channel.
            :param last_id : last message id to start the search from
            :param limit : maximum number of messages to fetch
            :returns list of message values
            :rtype : list(dict)
        """
        self.ensure_one()
        domain = [("channel_ids", "in", self.ids)]
        if last_id:
            domain.append(("id", "<", last_id))
        return self.env['mail.message'].message_fetch(domain=domain, limit=limit)

    # User methods
    @api.model
    def channel_get(self, partners_to, pin=True):
        """ Get the canonical private channel between some partners, create it if needed.
            To reuse an old channel (conversation), it must be private and contain
            only the given partners.
            :param partners_to : list of res.partner ids to add to the conversation
            :param pin : True if getting the channel should pin it for the current user
            :returns a channel header, or False if partners_to was empty
            :rtype : dict
        """
        if partners_to:
            partners_to.append(self.env.user.partner_id.id)
            # determine type according to the number of partner in the channel
            self.env.cr.execute("""
                SELECT P.channel_id as channel_id
                FROM mail_channel C, mail_channel_partner P
                WHERE P.channel_id = C.id
                    AND C.public LIKE 'private'
                    AND P.partner_id IN %s
                    AND channel_type LIKE 'chat'
                GROUP BY P.channel_id
                HAVING COUNT(P.partner_id) = %s
            """, (tuple(partners_to), len(partners_to),))
            result = self.env.cr.dictfetchall()
            if result:
                # get the existing channel between the given partners
                channel = self.browse(result[0].get('channel_id'))
                # pin up the channel for the current partner
                if pin:
                    self.env['mail.channel.partner'].search([('partner_id', '=', self.env.user.partner_id.id), ('channel_id', '=', channel.id)]).write({'is_pinned': True})
            else:
                # create a new one
                channel = self.create({
                    'channel_partner_ids': [(4, partner_id) for partner_id in partners_to],
                    'public': 'private',
                    'channel_type': 'chat',
                    'email_send': False,
                    'name': ', '.join(self.env['res.partner'].sudo().browse(partners_to).mapped('name')),
                })
                # broadcast the channel header to the other partner (not me)
                channel._broadcast(partners_to)
            return channel.channel_info()[0]
        return False
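
    # Usage sketch (comments only, ids are hypothetical): channel_get() returns a channel
    # header dict, so a typical caller does something like
    #   header = self.env['mail.channel'].channel_get([other_partner.id])
    #   channel = header and self.env['mail.channel'].browse(header['id'])
    # which reuses the existing private chat with that partner or creates a new one.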

    @api.model
    def channel_get_and_minimize(self, partners_to):
        channel = self.channel_get(partners_to)
        if channel:
            self.channel_minimize(channel['uuid'])
        return channel

    @api.model
    def channel_fold(self, uuid, state=None):
        """ Update the fold_state of the given session. In order to syncronize web browser
            tabs, the change will be broadcast to himself (the current user channel).
            Note: the user need to be logged
            :param state : the new status of the session for the current user.
        """
        domain = [('partner_id', '=', self.env.user.partner_id.id), ('channel_id.uuid', '=', uuid)]
        for session_state in self.env['mail.channel.partner'].search(domain):
            if not state:
                state = session_state.fold_state
                if session_state.fold_state == 'open':
                    state = 'folded'
                else:
                    state = 'open'
            session_state.write({
                'fold_state': state,
                'is_minimized': bool(state != 'closed'),
            })
            self.env['bus.bus'].sendone((self._cr.dbname, 'res.partner', self.env.user.partner_id.id), session_state.channel_id.channel_info()[0])

    @api.model
    def channel_minimize(self, uuid, minimized=True):
        values = {
            'fold_state': minimized and 'open' or 'closed',
            'is_minimized': minimized
        }
        domain = [('partner_id', '=', self.env.user.partner_id.id), ('channel_id.uuid', '=', uuid)]
        channel_partners = self.env['mail.channel.partner'].search(domain)
        channel_partners.write(values)
        self.env['bus.bus'].sendone((self._cr.dbname, 'res.partner', self.env.user.partner_id.id), channel_partners.channel_id.channel_info()[0])

    @api.model
    def channel_pin(self, uuid, pinned=False):
        # add the person in the channel, and pin it (or unpin it)
        channel = self.search([('uuid', '=', uuid)])
        channel_partners = self.env['mail.channel.partner'].search([('partner_id', '=', self.env.user.partner_id.id), ('channel_id', '=', channel.id)])
        if not pinned:
            self.env['bus.bus'].sendone((self._cr.dbname, 'res.partner', self.env.user.partner_id.id), channel.channel_info('unsubscribe')[0])
        if channel_partners:
            channel_partners.write({'is_pinned': pinned})

    @api.multi
    def channel_seen(self):
        self.ensure_one()
        if self.channel_message_ids.ids:
            last_message_id = self.channel_message_ids.ids[0] # zero is the index of the last message
            self.env['mail.channel.partner'].search([('channel_id', 'in', self.ids), ('partner_id', '=', self.env.user.partner_id.id)]).write({'seen_message_id': last_message_id})
            self.env['bus.bus'].sendone((self._cr.dbname, 'res.partner', self.env.user.partner_id.id), {'info': 'channel_seen', 'id': self.id, 'last_message_id': last_message_id})
            return last_message_id

    @api.multi
    def channel_invite(self, partner_ids):
        """ Add the given partner_ids to the current channels and broadcast the channel header to them.
            :param partner_ids : list of partner id to add
        """
        partners = self.env['res.partner'].browse(partner_ids)
        # add the partner
        for channel in self:
            partners_to_add = partners - channel.channel_partner_ids
            channel.write({'channel_last_seen_partner_ids': [(0, 0, {'partner_id': partner_id}) for partner_id in partners_to_add.ids]})
            for partner in partners_to_add:
                if partner.id != self.env.user.partner_id.id:
                    notification = _('<div class="o_mail_notification">%(author)s invited %(new_partner)s to <a href="#" class="o_channel_redirect" data-oe-id="%(channel_id)s">#%(channel_name)s</a></div>') % {
                        'author': self.env.user.display_name,
                        'new_partner': partner.display_name,
                        'channel_id': channel.id,
                        'channel_name': channel.name,
                    }
                else:
                    notification = _('<div class="o_mail_notification">joined <a href="#" class="o_channel_redirect" data-oe-id="%s">#%s</a></div>') % (channel.id, channel.name,)
                self.message_post(body=notification, message_type="notification", subtype="mail.mt_comment", author_id=partner.id)

        # broadcast the channel header to the added partner
        self._broadcast(partner_ids)

    #------------------------------------------------------
    # Instant Messaging View Specific (Slack Client Action)
    #------------------------------------------------------
    @api.model
    def channel_fetch_slot(self):
        """ Return the channels of the user grouped by 'slot' (channel, direct_message or private_group), and
            the mapping between partner_id/channel_id for direct_message channels.
            :returns dict : the grouped channels and the mapping
        """
        values = {}
        my_partner_id = self.env.user.partner_id.id
        pinned_channels = self.env['mail.channel.partner'].search([('partner_id', '=', my_partner_id), ('is_pinned', '=', True)]).mapped('channel_id')

        # get the group/public channels
        values['channel_channel'] = self.search([('channel_type', '=', 'channel'), ('public', 'in', ['public', 'groups']), ('channel_partner_ids', 'in', [my_partner_id])]).channel_info()

        # get the pinned 'direct message' channel
        direct_message_channels = self.search([('channel_type', '=', 'chat'), ('id', 'in', pinned_channels.ids)])
        values['channel_direct_message'] = direct_message_channels.channel_info()

        # get the private group
        values['channel_private_group'] = self.search([('channel_type', '=', 'channel'), ('public', '=', 'private'), ('channel_partner_ids', 'in', [my_partner_id])]).channel_info()
        return values

    @api.model
    def channel_search_to_join(self, name=None, domain=None):
        """ Return the channel info of the channel the current partner can join
            :param name : the name of the researched channels
            :param domain : the base domain of the research
            :returns dict : channel dict
        """
        if not domain:
            domain = []
        domain = expression.AND([
            [('channel_type', '=', 'channel')],
            [('channel_partner_ids', 'not in', [self.env.user.partner_id.id])],
            [('public', '!=', 'private')],
            domain
        ])
        if name:
            domain = expression.AND([domain, [('name', 'ilike', '%'+name+'%')]])
        return self.search(domain).read(['name', 'public', 'uuid', 'channel_type'])

    @api.multi
    def channel_join_and_get_info(self):
        self.ensure_one()
        if self.channel_type == 'channel' and not self.email_send:
            notification = _('<div class="o_mail_notification">joined <a href="#" class="o_channel_redirect" data-oe-id="%s">#%s</a></div>') % (self.id, self.name,)
            self.message_post(body=notification, message_type="notification", subtype="mail.mt_comment")
        self.action_follow()

        channel_info = self.channel_info()[0]
        self.env['bus.bus'].sendone((self._cr.dbname, 'res.partner', self.env.user.partner_id.id), channel_info)
        return channel_info

    @api.model
    def channel_create(self, name, privacy='public'):
        """ Create a channel and add the current partner, broadcast it (to make the user directly
            listen to it when polling)
            :param name : the name of the channel to create
            :param privacy : privacy of the channel. Should be 'public' or 'private'.
            :return dict : channel header
        """
        # create the channel
        new_channel = self.create({
            'name': name,
            'public': privacy,
            'email_send': False,
            'channel_partner_ids': [(4, self.env.user.partner_id.id)]
        })
        notification = _('<div class="o_mail_notification">created <a href="#" class="o_channel_redirect" data-oe-id="%s">#%s</a></div>') % (new_channel.id, new_channel.name,)
        new_channel.message_post(body=notification, message_type="notification", subtype="mail.mt_comment")
        channel_info = new_channel.channel_info('creation')[0]
        self.env['bus.bus'].sendone((self._cr.dbname, 'res.partner', self.env.user.partner_id.id), channel_info)
        return channel_info

    @api.model
    def get_mention_suggestions(self, search, limit=8):
        """ Return 'limit'-first channels' id, name and public fields such that the name matches a
            'search' string. Exclude channels of type chat (DM), and private channels the current
            user isn't registered to. """
        domain = expression.AND([
                        [('name', 'ilike', search)],
                        [('channel_type', '=', 'channel')],
                        expression.OR([
                            [('public', '!=', 'private')],
                            [('channel_partner_ids', 'in', [self.env.user.partner_id.id])]
                        ])
                    ])
        return self.search_read(domain, ['id', 'name', 'public'], limit=limit)

    @api.model
    def channel_fetch_listeners(self, uuid):
        """ Return the id, name and email of partners listening to the given channel """
        self._cr.execute("""
            SELECT P.id, P.name, P.email
            FROM mail_channel_partner CP
                INNER JOIN res_partner P ON CP.partner_id = P.id
                INNER JOIN mail_channel C ON CP.channel_id = C.id
            WHERE C.uuid = %s""", (uuid,))
        return self._cr.dictfetchall()

    @api.multi
    def channel_fetch_preview(self):
        """ Return the last message of the given channels """
        self._cr.execute("""
            SELECT mail_channel_id AS id, MAX(mail_message_id) AS message_id
            FROM mail_message_mail_channel_rel
            WHERE mail_channel_id IN %s
            GROUP BY mail_channel_id
            """, (tuple(self.ids),))
        channels_preview = dict((r['message_id'], r) for r in self._cr.dictfetchall())
        last_messages = self.env['mail.message'].browse(channels_preview).message_format()
        for message in last_messages:
            channel = channels_preview[message['id']]
            del(channel['message_id'])
            channel['last_message'] = message
        return list(channels_preview.values())

    #------------------------------------------------------
    # Commands
    #------------------------------------------------------
    @api.model
    @ormcache()
    def get_mention_commands(self):
        """ Returns the allowed commands in channels """
        commands = []
        for n in dir(self):
            match = re.search('^_define_command_(.+?)$', n)
            if match:
                command = getattr(self, n)()
                command['name'] = match.group(1)
                commands.append(command)
        return commands

    @api.multi
    def execute_command(self, command='', **kwargs):
        """ Executes a given command """
        self.ensure_one()
        command_callback = getattr(self, '_execute_command_' + command, False)
        if command_callback:
            command_callback(**kwargs)

    def _send_transient_message(self, partner_to, content):
        """ Notifies partner_to that a message (not stored in DB) has been
            written in this channel """
        self.env['bus.bus'].sendone((self._cr.dbname, 'res.partner', partner_to.id), {
            'body': "<span class='o_mail_notification'>" + content + "</span>",
            'channel_ids': [self.id],
            'info': 'transient_message',
        })

    def _define_command_help(self):
        return {'help': _("Show an helper message")}

    def _execute_command_help(self, **kwargs):
        partner = self.env.user.partner_id
        if self.channel_type == 'channel':
            msg = _("You are in channel <b>#%s</b>.") % self.name
            if self.public == 'private':
                msg += _(" This channel is private. People must be invited to join it.")
        else:
            channel_partners = self.env['mail.channel.partner'].search([('partner_id', '!=', partner.id), ('channel_id', '=', self.id)])
            msg = _("You are in a private conversation with <b>@%s</b>.") % (channel_partners[0].partner_id.name if channel_partners else _('Anonymous'))
        msg += _("""<br><br>
            You can mention someone by typing <b>@username</b>; this will grab their attention.<br>
            You can mention a channel by typing <b>#channel</b>.<br>
            You can execute a command by typing <b>/command</b>.<br>
            You can insert canned responses in your message by typing <b>:shortcut</b>.<br>""")

        self._send_transient_message(partner, msg)

    def _define_command_leave(self):
        return {'help': _("Leave this channel")}

    def _execute_command_leave(self, **kwargs):
        if self.channel_type == 'channel':
            self.action_unfollow()
        else:
            self.channel_pin(self.uuid, False)

    def _define_command_who(self):
        return {
            'channel_types': ['channel', 'chat'],
            'help': _("List users in the current channel")
        }

    def _execute_command_who(self, **kwargs):
        partner = self.env.user.partner_id
        members = [
            '<a href="#" data-oe-id='+str(p.id)+' data-oe-model="res.partner">@'+p.name+'</a>'
            for p in self.channel_partner_ids[:30] if p != partner
        ]
        if len(members) == 0:
            msg = _("You are alone in this channel.")
        else:
            dots = "..." if len(members) != len(self.channel_partner_ids) - 1 else ""
            msg = _("Users in this channel: %s %s and you.") % (", ".join(members), dots)

        self._send_transient_message(partner, msg)
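
The command handling above is purely convention-based: get_mention_commands() discovers every method named _define_command_<name> through dir() and a regex, and execute_command() dispatches to the matching _execute_command_<name> with getattr(). Below is a minimal, framework-free sketch of the same reflection pattern; the CommandRegistry class and its methods are illustrative stand-ins, not Odoo API.

import re

class CommandRegistry:
    """Standalone sketch of the _define_command_* / _execute_command_* convention."""

    def _define_command_help(self):
        # Metadata describing the /help command.
        return {'help': "Show a helper message"}

    def _execute_command_help(self, **kwargs):
        # Implementation of the /help command.
        print("help requested with", kwargs)

    def get_commands(self):
        # Collect every _define_command_<name> method, like get_mention_commands().
        commands = []
        for attr in dir(self):
            match = re.match(r'^_define_command_(.+)$', attr)
            if match:
                command = getattr(self, attr)()
                command['name'] = match.group(1)
                commands.append(command)
        return commands

    def execute(self, command='', **kwargs):
        # Dispatch to _execute_command_<name> if it exists, mirroring execute_command().
        callback = getattr(self, '_execute_command_' + command, None)
        if callback:
            callback(**kwargs)

registry = CommandRegistry()
print(registry.get_commands())  # [{'help': 'Show a helper message', 'name': 'help'}]
registry.execute('help', body='/help')
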
Example #18
0
File: xml_decl.py Project: vituocgia/izi
class XmlDeclaration(models.TransientModel):
    """
    Intrastat XML Declaration
    """
    _name = "l10n_be_intrastat_xml.xml_decl"
    _description = 'Intrastat XML Declaration'

    def _default_get_month(self):
        return fields.Date.from_string(
            fields.Date.context_today(self)).strftime('%m')

    def _default_get_year(self):
        return fields.Date.from_string(
            fields.Date.context_today(self)).strftime('%Y')

    name = fields.Char(string='File Name', default='intrastat.xml')
    month = fields.Selection([('01', 'January'), ('02', 'February'),
                              ('03', 'March'), ('04', 'April'), ('05', 'May'),
                              ('06', 'June'), ('07', 'July'), ('08', 'August'),
                              ('09', 'September'), ('10', 'October'),
                              ('11', 'November'), ('12', 'December')],
                             string='Month',
                             required=True,
                             default=_default_get_month)
    year = fields.Char(size=4, required=True, default=_default_get_year)
    company_id = fields.Many2one('res.company',
                                 string='Company',
                                 required=True,
                                 default=lambda self: self.env.user.company_id)
    arrivals = fields.Selection([('be-exempt', 'Exempt'),
                                 ('be-standard', 'Standard'),
                                 ('be-extended', 'Extended')],
                                required=True,
                                default='be-standard')
    dispatches = fields.Selection([('be-exempt', 'Exempt'),
                                   ('be-standard', 'Standard'),
                                   ('be-extended', 'Extended')],
                                  required=True,
                                  default='be-standard')
    file_save = fields.Binary(string='Intrastat Report File', readonly=True)
    state = fields.Selection([('draft', 'Draft'), ('download', 'Download')],
                             default='draft')

    @api.model
    def _company_warning(self, translated_msg):
        """ Raise a error with custom message, asking user to configure company settings """
        raise exceptions.RedirectWarning(
            translated_msg,
            self.env.ref('base.action_res_company_form').id,
            _('Go to company configuration screen'))

    @api.multi
    def create_xml(self):
        """Creates xml that is to be exported and sent to estate for partner vat intra.
        :return: Value for next action.
        :rtype: dict
        """
        self.ensure_one()
        company = self.company_id
        if not (company.partner_id and company.partner_id.country_id
                and company.partner_id.country_id.id):
            self._company_warning(
                _('The country of your company is not set, '
                  'please make sure to configure it first.'))
        if not company.company_registry:
            self._company_warning(
                _('The registry number of your company is not set, '
                  'please make sure to configure it first.'))
        if len(self.year) != 4:
            raise exceptions.Warning(_('Year must be a 4-digit number (YYYY)'))

        #Create root declaration
        decl = ET.Element('DeclarationReport')
        decl.set('xmlns', INTRASTAT_XMLNS)

        #Add Administration elements
        admin = ET.SubElement(decl, 'Administration')
        fromtag = ET.SubElement(admin, 'From')
        fromtag.text = company.company_registry
        fromtag.set('declarerType', 'KBO')
        ET.SubElement(admin, 'To').text = "NBB"
        ET.SubElement(admin, 'Domain').text = "SXX"
        if self.arrivals == 'be-standard':
            decl.append(self.sudo()._get_lines(dispatchmode=False,
                                               extendedmode=False))
        elif self.arrivals == 'be-extended':
            decl.append(self.sudo()._get_lines(dispatchmode=False,
                                               extendedmode=True))
        if self.dispatches == 'be-standard':
            decl.append(self.sudo()._get_lines(dispatchmode=True,
                                               extendedmode=False))
        elif self.dispatches == 'be-extended':
            decl.append(self.sudo()._get_lines(dispatchmode=True,
                                               extendedmode=True))

        #Get xml string with declaration
        data_file = ET.tostring(decl, encoding='UTF-8', method='xml')

        #change state of the wizard
        self.write({
            'name': 'intrastat_%s%s.xml' % (self.year, self.month),
            'file_save': base64.encodestring(data_file),
            'state': 'download'
        })
        return {
            'name': _('Save'),
            'view_type': 'form',
            'view_mode': 'form',
            'res_model': 'l10n_be_intrastat_xml.xml_decl',
            'type': 'ir.actions.act_window',
            'target': 'new',
            'res_id': self.id,
        }

    @api.multi
    def _get_lines(self, dispatchmode=False, extendedmode=False):
        company = self.company_id
        IntrastatRegion = self.env['l10n_be_intrastat.region']

        if dispatchmode:
            mode1 = 'out_invoice'
            mode2 = 'in_refund'
            declcode = "29"
        else:
            mode1 = 'in_invoice'
            mode2 = 'out_refund'
            declcode = "19"

        decl = ET.Element('Report')
        if not extendedmode:
            decl.set('code', 'EX%sS' % declcode)
        else:
            decl.set('code', 'EX%sE' % declcode)
        decl.set('date', '%s-%s' % (self.year, self.month))
        datas = ET.SubElement(decl, 'Data')
        if not extendedmode:
            datas.set('form', 'EXF%sS' % declcode)
        else:
            datas.set('form', 'EXF%sE' % declcode)
        datas.set('close', 'true')
        intrastatkey = namedtuple(
            "intrastatkey",
            ['EXTRF', 'EXCNT', 'EXTTA', 'EXREG', 'EXGO', 'EXTPC', 'EXDELTRM'])
        entries = {}

        query = """
            SELECT
                inv_line.id
            FROM
                account_invoice_line inv_line
                JOIN account_invoice inv ON inv_line.invoice_id=inv.id
                LEFT JOIN res_country ON res_country.id = inv.intrastat_country_id
                LEFT JOIN res_partner ON res_partner.id = inv.partner_id
                LEFT JOIN res_country countrypartner ON countrypartner.id = res_partner.country_id
                JOIN product_product ON inv_line.product_id=product_product.id
                JOIN product_template ON product_product.product_tmpl_id=product_template.id
            WHERE
                inv.state IN ('open','paid')
                AND inv.company_id=%s
                AND not product_template.type='service'
                AND (res_country.intrastat=true OR (inv.intrastat_country_id is NULL
                                                    AND countrypartner.intrastat=true))
                AND ((res_country.code IS NOT NULL AND not res_country.code=%s)
                     OR (res_country.code is NULL AND countrypartner.code IS NOT NULL
                     AND not countrypartner.code=%s))
                AND inv.type IN (%s, %s)
                AND to_char(inv.date_invoice, 'YYYY')=%s
                AND to_char(inv.date_invoice, 'MM')=%s
            """

        self.env.cr.execute(query,
                            (company.id, company.partner_id.country_id.code,
                             company.partner_id.country_id.code, mode1, mode2,
                             self.year, self.month))
        lines = self.env.cr.fetchall()
        invoicelines_ids = [rec[0] for rec in lines]
        invoicelines = self.env['account.invoice.line'].browse(
            invoicelines_ids)

        for inv_line in invoicelines:

            #Check type of transaction
            if inv_line.intrastat_transaction_id:
                extta = inv_line.intrastat_transaction_id.code
            else:
                extta = "1"
            #Check country
            if inv_line.invoice_id.intrastat_country_id:
                excnt = inv_line.invoice_id.intrastat_country_id.code
            else:
                excnt = inv_line.invoice_id.partner_id.country_id.code

            #Check region
            #If purchase, comes from purchase order, linked to a location,
            #which is linked to the warehouse
            #if sales, the sales order is linked to the warehouse
            #if sales, from a delivery order, linked to a location,
            #which is linked to the warehouse
            #If none found, get the company one.
            exreg = None
            if inv_line.invoice_id.type in ('in_invoice', 'in_refund'):
                #comes from purchase
                po_lines = self.env['purchase.order.line'].search(
                    [('invoice_lines', 'in', inv_line.id)], limit=1)
                if po_lines:
                    if self._is_situation_triangular(company,
                                                     po_line=po_lines):
                        continue
                    location = self.env['stock.location'].browse(
                        po_lines.order_id._get_destination_location())
                    region_id = self.env[
                        'stock.warehouse'].get_regionid_from_locationid(
                            location)
                    if region_id:
                        exreg = IntrastatRegion.browse(region_id).code
            elif inv_line.invoice_id.type in ('out_invoice', 'out_refund'):
                #comes from sales
                so_lines = self.env['sale.order.line'].search(
                    [('invoice_lines', 'in', inv_line.id)], limit=1)
                if so_lines:
                    if self._is_situation_triangular(company,
                                                     so_line=so_lines):
                        continue
                    saleorder = so_lines.order_id
                    if saleorder and saleorder.warehouse_id and saleorder.warehouse_id.region_id:
                        exreg = IntrastatRegion.browse(
                            saleorder.warehouse_id.region_id.id).code

            if not exreg:
                if company.region_id:
                    exreg = company.region_id.code
                else:
                    self._company_warning(
                        _('The Intrastat Region of the selected company is not set, '
                          'please make sure to configure it first.'))

            #Check commodity codes
            intrastat_id = inv_line.product_id.get_intrastat_recursively()
            if intrastat_id:
                exgo = self.env['report.intrastat.code'].browse(
                    intrastat_id).name
            else:
                raise exceptions.Warning(
                    _('Product "%s" has no intrastat code, please configure it'
                      ) % inv_line.product_id.display_name)

            #In extended mode, 2 more fields required
            if extendedmode:
                #Check means of transport
                if inv_line.invoice_id.transport_mode_id:
                    extpc = inv_line.invoice_id.transport_mode_id.code
                elif company.transport_mode_id:
                    extpc = company.transport_mode_id.code
                else:
                    self._company_warning(
                        _('The default Intrastat transport mode of your company '
                          'is not set, please make sure to configure it first.'
                          ))

                #Check incoterm
                if inv_line.invoice_id.incoterm_id:
                    exdeltrm = inv_line.invoice_id.incoterm_id.code
                elif company.incoterm_id:
                    exdeltrm = company.incoterm_id.code
                else:
                    self._company_warning(
                        _('The default Incoterm of your company is not set, '
                          'please make sure to configure it first.'))
            else:
                extpc = ""
                exdeltrm = ""
            linekey = intrastatkey(EXTRF=declcode,
                                   EXCNT=excnt,
                                   EXTTA=extta,
                                   EXREG=exreg,
                                   EXGO=exgo,
                                   EXTPC=extpc,
                                   EXDELTRM=exdeltrm)
            #We have the key
            #calculate amounts
            if inv_line.price_unit and inv_line.quantity:
                amount = inv_line.price_unit * inv_line.quantity
            else:
                amount = 0
            weight = (inv_line.product_id.weight or 0.0) * \
                inv_line.uom_id._compute_quantity(inv_line.quantity, inv_line.product_id.uom_id)
            if not inv_line.product_id.uom_id.category_id:
                supply_units = inv_line.quantity
            else:
                supply_units = inv_line.quantity * inv_line.uom_id.factor
            amounts = entries.setdefault(linekey, (0, 0, 0))
            amounts = (amounts[0] + amount, amounts[1] + weight,
                       amounts[2] + supply_units)
            entries[linekey] = amounts

        numlgn = 0
        for linekey in entries:
            amounts = entries[linekey]
            if round(amounts[0], 0) == 0:
                continue
            numlgn += 1
            item = ET.SubElement(datas, 'Item')
            self._set_Dim(item, 'EXSEQCODE', text_type(numlgn))
            self._set_Dim(item, 'EXTRF', text_type(linekey.EXTRF))
            self._set_Dim(item, 'EXCNT', text_type(linekey.EXCNT))
            self._set_Dim(item, 'EXTTA', text_type(linekey.EXTTA))
            self._set_Dim(item, 'EXREG', text_type(linekey.EXREG))
            self._set_Dim(item, 'EXTGO', text_type(linekey.EXGO))
            if extendedmode:
                self._set_Dim(item, 'EXTPC', text_type(linekey.EXTPC))
                self._set_Dim(item, 'EXDELTRM', text_type(linekey.EXDELTRM))
            self._set_Dim(item, 'EXTXVAL',
                          text_type(round(amounts[0], 0)).replace(".", ","))
            self._set_Dim(item, 'EXWEIGHT',
                          text_type(round(amounts[1], 0)).replace(".", ","))
            self._set_Dim(item, 'EXUNITS',
                          text_type(round(amounts[2], 0)).replace(".", ","))

        if numlgn == 0:
            #no data
            datas.set('action', 'nihil')
        return decl

    def _set_Dim(self, item, prop, value):
        dim = ET.SubElement(item, 'Dim')
        dim.set('prop', prop)
        dim.text = value

    def _is_situation_triangular(self, company, po_line=False, so_line=False):
        # Ignoring what is purchased and sold by us with a dropshipping route
        # outside of our country, or completely within it
        # https://www.nbb.be/doc/dq/f_pdf_ex/intra2017fr.pdf (§ 4.x)
        dropship_pick_type = self.env.ref(
            'stock_dropshipping.picking_type_dropship',
            raise_if_not_found=False)
        if not dropship_pick_type:
            return False
        stock_move_domain = [('picking_type_id', '=', dropship_pick_type.id)]

        if po_line:
            stock_move_domain.append(('purchase_line_id', '=', po_line.id))
        if so_line:
            stock_move_domain.append(
                ('procurement_id.sale_line_id', '=', so_line.id))

        stock_move = self.env['stock.move'].search(stock_move_domain, limit=1)
        return stock_move and (
            (stock_move.partner_id.country_id.code != company.country_id.code
             and stock_move.picking_partner_id.country_id.code !=
             company.country_id.code) or
            (stock_move.partner_id.country_id.code == company.country_id.code
             and stock_move.picking_partner_id.country_id.code
             == company.country_id.code))
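
In _get_lines() above, invoice lines are aggregated into a dict mapping an intrastat key to a (value, weight, supplementary units) tuple, and each non-zero entry is serialized as an <Item> element whose measures are <Dim prop="..."> children; when no entry survives, the Data element is flagged with action="nihil". The self-contained sketch below reproduces that serialization step outside Odoo; the aggregated entries and the country-code key are made up for illustration.

import xml.etree.ElementTree as ET

def set_dim(item, prop, value):
    # Same idea as _set_Dim(): one <Dim prop="..."> child per measure.
    dim = ET.SubElement(item, 'Dim')
    dim.set('prop', prop)
    dim.text = value

def build_report(entries, declcode='19', extended=False):
    decl = ET.Element('Report', code='EX%s%s' % (declcode, 'E' if extended else 'S'))
    datas = ET.SubElement(decl, 'Data', close='true')
    seq = 0
    for key, (value, weight, units) in entries.items():
        if round(value, 0) == 0:
            continue
        seq += 1
        item = ET.SubElement(datas, 'Item')
        set_dim(item, 'EXSEQCODE', str(seq))
        set_dim(item, 'EXCNT', key)
        set_dim(item, 'EXTXVAL', str(round(value, 0)).replace('.', ','))
        set_dim(item, 'EXWEIGHT', str(round(weight, 0)).replace('.', ','))
        set_dim(item, 'EXUNITS', str(round(units, 0)).replace('.', ','))
    if seq == 0:
        # No data to declare for the period.
        datas.set('action', 'nihil')
    return ET.tostring(decl, encoding='unicode')

# Hypothetical entries keyed by partner country code only, for brevity.
print(build_report({'DE': (1250.0, 37.5, 10), 'FR': (0.0, 0, 0)}))
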
Example #19
0
File: rating.py Project: vituocgia/izi
class RatingMixin(models.AbstractModel):
    _name = 'rating.mixin'
    _description = "Rating Mixin"

    rating_ids = fields.One2many(
        'rating.rating',
        'res_id',
        string='Rating',
        domain=lambda self: [('res_model', '=', self._name)],
        auto_join=True)
    rating_last_value = fields.Float('Rating Last Value',
                                     compute='_compute_rating_last_value',
                                     compute_sudo=True,
                                     store=True)
    rating_last_feedback = fields.Text('Rating Last Feedback',
                                       related='rating_ids.feedback')
    rating_last_image = fields.Binary('Rating Last Image',
                                      related='rating_ids.rating_image')
    rating_count = fields.Integer('Rating count',
                                  compute="_compute_rating_count")

    @api.multi
    @api.depends('rating_ids.rating')
    def _compute_rating_last_value(self):
        for record in self:
            ratings = self.env['rating.rating'].search(
                [('res_model', '=', self._name), ('res_id', '=', record.id)],
                limit=1)
            if ratings:
                record.rating_last_value = ratings.rating

    @api.multi
    def _compute_rating_count(self):
        read_group_res = self.env['rating.rating'].read_group(
            [('res_model', '=', self._name), ('res_id', 'in', self.ids),
             ('consumed', '=', True)], ['res_id'],
            groupby=['res_id'])
        result = dict.fromkeys(self.ids, 0)
        for data in read_group_res:
            result[data['res_id']] += data['res_id_count']
        for record in self:
            record.rating_count = result.get(record.id)

    def write(self, values):
        """ If the rated ressource name is modified, we should update the rating res_name too. """
        result = super(RatingMixin, self).write(values)
        if self._rec_name in values:
            self.rating_ids._compute_res_name()
        return result

    def unlink(self):
        """ When removing a record, its rating should be deleted too. """
        record_ids = self.ids
        result = super(RatingMixin, self).unlink()
        self.env['rating.rating'].sudo().search([
            ('res_model', '=', self._name), ('res_id', 'in', record_ids)
        ]).unlink()
        return result

    def rating_get_parent_model_name(self, vals):
        """ Return the parent model name """
        return None

    def rating_get_parent_id(self):
        """ Return the parent record id """
        return None

    def rating_get_partner_id(self):
        if hasattr(self, 'partner_id') and self.partner_id:
            return self.partner_id
        return self.env['res.partner']

    def rating_get_rated_partner_id(self):
        if hasattr(self, 'user_id') and self.user_id.partner_id:
            return self.user_id.partner_id
        return self.env['res.partner']

    def rating_get_access_token(self, partner=None):
        if not partner:
            partner = self.rating_get_partner_id()
        rated_partner = self.rating_get_rated_partner_id()
        ratings = self.rating_ids.filtered(
            lambda x: x.partner_id.id == partner.id and not x.consumed)
        if not ratings:
            record_model_id = self.env['ir.model'].sudo().search(
                [('model', '=', self._name)], limit=1).id
            rating = self.env['rating.rating'].create({
                'partner_id':
                partner.id,
                'rated_partner_id':
                rated_partner.id,
                'res_model_id':
                record_model_id,
                'res_id':
                self.id
            })
        else:
            rating = ratings[0]
        return rating.access_token

    @api.multi
    def rating_send_request(self, template, lang=False, force_send=True):
        """ This method send rating request by email, using a template given
        in parameter. """
        lang = lang or 'en_US'
        for record in self:
            template.with_context(lang=lang).send_mail(record.id,
                                                       force_send=force_send)

    @api.multi
    def rating_apply(self, rate, token=None, feedback=None, subtype=None):
        """ Apply a rating given a token. If the current model inherits from
        mail.thread mixing, a message is posted on its chatter.
        :param rate : the rating value to apply
        :type rate : float
        :param token : access token
        :param feedback : additional feedback
        :type feedback : string
        :param subtype : subtype for mail
        :type subtype : string
        :returns rating.rating record
        """
        Rating, rating = self.env['rating.rating'], None
        if token:
            rating = self.env['rating.rating'].search(
                [('access_token', '=', token)], limit=1)
        else:
            rating = Rating.search([('res_model', '=', self._name),
                                    ('res_id', '=', self.ids[0])],
                                   limit=1)
        if rating:
            rating.write({
                'rating': rate,
                'feedback': feedback,
                'consumed': True
            })
            if hasattr(self, 'message_post'):
                feedback = tools.plaintext2html(feedback or '')
                self.message_post(
                    body=
                    "<img src='/rating/static/src/img/rating_%s.png' alt=':rating_%s' style='width:20px;height:20px;float:left;margin-right: 5px;'/>%s"
                    % (rate, rate, feedback),
                    subtype=subtype or "mail.mt_comment",
                    author_id=rating.partner_id and rating.partner_id.id or
                    None  # None will set the default author in mail_thread.py
                )
            if hasattr(self, 'stage_id') and self.stage_id and hasattr(
                    self.stage_id, 'auto_validation_kanban_state'
            ) and self.stage_id.auto_validation_kanban_state:
                if rating.rating > 5:
                    self.write({'kanban_state': 'done'})
                if rating.rating < 5:
                    self.write({'kanban_state': 'blocked'})
        return rating

    @api.multi
    def rating_get_repartition(self, add_stats=False, domain=None):
        """ get the repatition of rating grade for the given res_ids.
            :param add_stats : flag to add stat to the result
            :type add_stats : boolean
            :param domain : optional extra domain of the rating to include/exclude in repartition
            :return dictionnary
                if not add_stats, the dict is like
                    - key is the rating value (integer)
                    - value is the number of object (res_model, res_id) having the value
                otherwise, key is the value of the information (string) : either stat name (avg, total, ...) or 'repartition'
                containing the same dict if add_stats was False.
        """
        base_domain = [('res_model', '=', self._name),
                       ('res_id', 'in', self.ids), ('rating', '>=', 1),
                       ('consumed', '=', True)]
        if domain:
            base_domain += domain
        data = self.env['rating.rating'].read_group(base_domain, ['rating'],
                                                    ['rating', 'res_id'])
        # init dict with all possible rate values, except 0 (no value for the rating)
        values = dict.fromkeys(range(1, 11), 0)
        values.update((d['rating'], d['rating_count']) for d in data)
        # add other stats
        if add_stats:
            rating_number = sum(values.values())
            result = {
                'repartition':
                values,
                'avg':
                sum(float(key * values[key]) for key in values) /
                rating_number if rating_number > 0 else 0,
                'total':
                sum(it['rating_count'] for it in data),
            }
            return result
        return values

    @api.multi
    def rating_get_grades(self, domain=None):
        """ get the repatition of rating grade for the given res_ids.
            :param domain : optional domain of the rating to include/exclude in grades computation
            :return dictionnary where the key is the grade (great, okay, bad), and the value, the number of object (res_model, res_id) having the grade
                    the grade are compute as    0-30% : Bad
                                                31-69%: Okay
                                                70-100%: Great
        """
        data = self.rating_get_repartition(domain=domain)
        res = dict.fromkeys(['great', 'okay', 'bad'], 0)
        for key in data:
            if key >= RATING_LIMIT_SATISFIED:
                res['great'] += data[key]
            elif key > RATING_LIMIT_OK:
                res['okay'] += data[key]
            else:
                res['bad'] += data[key]
        return res

    @api.multi
    def rating_get_stats(self, domain=None):
        """ get the statistics of the rating repatition
            :param domain : optional domain of the rating to include/exclude in statistic computation
            :return dictionnary where
                - key is the the name of the information (stat name)
                - value is statistic value : 'percent' contains the repartition in percentage, 'avg' is the average rate
                  and 'total' is the number of rating
        """
        data = self.rating_get_repartition(domain=domain, add_stats=True)
        result = {
            'avg': data['avg'],
            'total': data['total'],
            'percent': dict.fromkeys(range(1, 11), 0),
        }
        for rate in data['repartition']:
            result['percent'][rate] = (
                data['repartition'][rate] *
                100) / data['total'] if data['total'] > 0 else 0
        return result
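
rating_get_repartition(), rating_get_grades() and rating_get_stats() above are all plain aggregations over a {rate: count} mapping. The short sketch below reproduces that arithmetic without the ORM; the threshold values are assumptions consistent with the 0-30% / 31-69% / 70-100% split described in the docstring, standing in for RATING_LIMIT_OK and RATING_LIMIT_SATISFIED.

# Assumed thresholds on the 0-10 scale, matching the documented Bad/Okay/Great split.
RATING_LIMIT_OK = 3
RATING_LIMIT_SATISFIED = 7

def get_grades(repartition):
    """repartition maps a rate (1..10) to the number of ratings with that rate."""
    res = dict.fromkeys(['great', 'okay', 'bad'], 0)
    for rate, count in repartition.items():
        if rate >= RATING_LIMIT_SATISFIED:
            res['great'] += count
        elif rate > RATING_LIMIT_OK:
            res['okay'] += count
        else:
            res['bad'] += count
    return res

def get_stats(repartition):
    # Average rate, total count and per-rate percentage, like rating_get_stats().
    total = sum(repartition.values())
    avg = sum(rate * count for rate, count in repartition.items()) / total if total else 0
    percent = {rate: (count * 100) / total if total else 0
               for rate, count in repartition.items()}
    return {'avg': avg, 'total': total, 'percent': percent}

sample = dict.fromkeys(range(1, 11), 0)
sample.update({10: 6, 5: 3, 1: 1})
print(get_grades(sample))        # {'great': 6, 'okay': 3, 'bad': 1}
print(get_stats(sample)['avg'])  # 7.6
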
Example #20
0
File: rating.py Project: vituocgia/izi
class Rating(models.Model):

    _name = "rating.rating"
    _description = "Rating"
    _order = 'write_date desc'
    _rec_name = 'res_name'
    _sql_constraints = [
        ('rating_range', 'check(rating >= 0 and rating <= 10)',
         'Rating should be between 0 to 10'),
    ]

    @api.one
    @api.depends('res_model', 'res_id')
    def _compute_res_name(self):
        name = self.env[self.res_model].sudo().browse(self.res_id).name_get()
        self.res_name = name and name[0][1] or ('%s/%s') % (self.res_model,
                                                            self.res_id)

    @api.model
    def new_access_token(self):
        return uuid.uuid4().hex

    res_name = fields.Char(string='Resource name',
                           compute='_compute_res_name',
                           store=True,
                           help="The name of the rated resource.")
    res_model_id = fields.Many2one('ir.model',
                                   'Related Document Model',
                                   index=True,
                                   ondelete='cascade',
                                   help='Model of the followed resource')
    res_model = fields.Char(string='Document Model',
                            related='res_model_id.model',
                            store=True,
                            index=True,
                            readonly=True)
    res_id = fields.Integer(string='Document',
                            required=True,
                            help="Identifier of the rated object",
                            index=True)
    parent_res_name = fields.Char('Parent Document Name',
                                  compute='_compute_parent_res_name',
                                  store=True)
    parent_res_model_id = fields.Many2one('ir.model',
                                          'Parent Related Document Model',
                                          index=True)
    parent_res_model = fields.Char('Parent Document Model',
                                   store=True,
                                   related='parent_res_model_id.model',
                                   index=True)
    parent_res_id = fields.Integer('Parent Document', index=True)
    rated_partner_id = fields.Many2one('res.partner',
                                       string="Rated person",
                                       help="Owner of the rated resource")
    partner_id = fields.Many2one('res.partner',
                                 string='Customer',
                                 help="Author of the rating")
    rating = fields.Float(string="Rating",
                          group_operator="avg",
                          default=0,
                          help="Rating value: 0=Unhappy, 10=Happy")
    rating_image = fields.Binary('Image', compute='_compute_rating_image')
    rating_text = fields.Selection(
        [('satisfied', 'Satisfied'), ('not_satisfied', 'Not satisfied'),
         ('highly_dissatisfied', 'Highly dissatisfied'),
         ('no_rating', 'No Rating yet')],
        string='Rating',
        store=True,
        compute='_compute_rating_text',
        readonly=True)
    feedback = fields.Text('Comment', help="Reason of the rating")
    message_id = fields.Many2one(
        'mail.message',
        string="Linked message",
        help=
        "Associated message when posting a review. Mainly used in website addons.",
        index=True)
    access_token = fields.Char(
        'Security Token',
        default=new_access_token,
        help="Access token to set the rating of the value")
    consumed = fields.Boolean(string="Filled Rating",
                              help="Enabled if the rating has been filled.")

    @api.depends('parent_res_model', 'parent_res_id')
    def _compute_parent_res_name(self):
        for rating in self:
            name = False
            if rating.parent_res_model and rating.parent_res_id:
                name = self.env[rating.parent_res_model].sudo().browse(
                    rating.parent_res_id).name_get()
                name = name and name[0][1] or ('%s/%s') % (
                    rating.parent_res_model, rating.parent_res_id)
            rating.parent_res_name = name

    @api.multi
    @api.depends('rating')
    def _compute_rating_image(self):
        for rating in self:
            try:
                image_path = get_resource_path(
                    'rating', 'static/src/img',
                    'rating_%s.png' % (int(rating.rating), ))
                rating.rating_image = base64.b64encode(
                    open(image_path, 'rb').read())
            except (IOError, OSError):
                rating.rating_image = False

    @api.depends('rating')
    def _compute_rating_text(self):
        for rating in self:
            if rating.rating >= RATING_LIMIT_SATISFIED:
                rating.rating_text = 'satisfied'
            elif rating.rating > RATING_LIMIT_OK:
                rating.rating_text = 'not_satisfied'
            elif rating.rating >= RATING_LIMIT_MIN:
                rating.rating_text = 'highly_dissatisfied'
            else:
                rating.rating_text = 'no_rating'

    @api.model
    def create(self, values):
        if values.get('res_model_id') and values.get('res_id'):
            values.update(self._find_parent_data(values))
        return super(Rating, self).create(values)

    @api.multi
    def write(self, values):
        if values.get('res_model_id') and values.get('res_id'):
            values.update(self._find_parent_data(values))
        return super(Rating, self).write(values)

    def _find_parent_data(self, values):
        """ Determine the parent res_model/res_id, based on the values to create or write """
        current_model_name = self.env['ir.model'].sudo().browse(
            values['res_model_id']).model
        current_record = self.env[current_model_name].browse(values['res_id'])
        data = {
            'parent_res_model_id': False,
            'parent_res_id': False,
        }
        if hasattr(current_record, 'rating_get_parent_model_name'):
            parent_res_model = current_record.rating_get_parent_model_name(
                values)
            data['parent_res_model_id'] = self.env['ir.model']._get(
                parent_res_model).id
            data['parent_res_id'] = current_record.rating_get_parent_id()
        return data

    @api.multi
    def reset(self):
        for record in self:
            record.write({
                'rating': 0,
                'access_token': record.new_access_token(),
                'feedback': False,
                'consumed': False,
            })

    def action_open_rated_object(self):
        self.ensure_one()
        return {
            'type': 'ir.actions.act_window',
            'res_model': self.res_model,
            'res_id': self.res_id,
            'views': [[False, 'form']]
        }
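
The access-token flow of rating.rating boils down to: generate a token per rating (new_access_token), look the rating up by that token when the customer answers (as rating_apply does on the mixin), and rotate the token on reset(). The toy sketch below keeps ratings in a plain dict keyed by token to illustrate that lifecycle; RatingStore and its methods are hypothetical, not Odoo API.

import uuid

class RatingStore:
    """Toy stand-in for rating.rating, keyed by access token."""

    def __init__(self):
        self._by_token = {}

    def create(self, res_id, partner_id):
        token = uuid.uuid4().hex  # same idea as new_access_token()
        self._by_token[token] = {
            'res_id': res_id, 'partner_id': partner_id,
            'rating': 0, 'feedback': False, 'consumed': False,
        }
        return token

    def apply(self, token, rate, feedback=None):
        # Find the rating by token, store the value and mark it consumed.
        rating = self._by_token.get(token)
        if rating and not rating['consumed']:
            rating.update({'rating': rate, 'feedback': feedback, 'consumed': True})
        return rating

    def reset(self, token):
        # Clear the value and rotate the token so the old link stops working.
        rating = self._by_token.pop(token)
        rating.update({'rating': 0, 'feedback': False, 'consumed': False})
        new_token = uuid.uuid4().hex
        self._by_token[new_token] = rating
        return new_token

store = RatingStore()
token = store.create(res_id=42, partner_id=7)
print(store.apply(token, 9, feedback="great support"))
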
Example #21
0
class ProductProduct(models.Model):
    _name = "product.product"
    _description = "Product"
    _inherits = {'product.template': 'product_tmpl_id'}
    _inherit = ['mail.thread', 'mail.activity.mixin']
    _order = 'default_code, name, id'

    price = fields.Float('Price',
                         compute='_compute_product_price',
                         digits=dp.get_precision('Product Price'),
                         inverse='_set_product_price')
    price_extra = fields.Float(
        'Variant Price Extra',
        compute='_compute_product_price_extra',
        digits=dp.get_precision('Product Price'),
        help="This is the sum of the extra price of all attributes")
    lst_price = fields.Float(
        'Sale Price',
        compute='_compute_product_lst_price',
        digits=dp.get_precision('Product Price'),
        inverse='_set_product_lst_price',
        help=
        "The sale price is managed from the product template. Click on the 'Variant Prices' button to set the extra attribute prices."
    )

    default_code = fields.Char('Internal Reference', index=True)
    code = fields.Char('Reference', compute='_compute_product_code')
    partner_ref = fields.Char('Customer Ref', compute='_compute_partner_ref')

    active = fields.Boolean(
        'Active',
        default=True,
        help=
        "If unchecked, it will allow you to hide the product without removing it."
    )
    product_tmpl_id = fields.Many2one('product.template',
                                      'Product Template',
                                      auto_join=True,
                                      index=True,
                                      ondelete="cascade",
                                      required=True)
    barcode = fields.Char(
        'Barcode',
        copy=False,
        oldname='ean13',
        help="International Article Number used for product identification.")
    attribute_value_ids = fields.Many2many('product.attribute.value',
                                           string='Attributes',
                                           ondelete='restrict')
    # image: all image fields are base64 encoded and PIL-supported
    image_variant = fields.Binary(
        "Variant Image",
        attachment=True,
        help=
        "This field holds the image used as image for the product variant, limited to 1024x1024px."
    )
    image = fields.Binary(
        "Big-sized image",
        compute='_compute_images',
        inverse='_set_image',
        help=
        "Image of the product variant (Big-sized image of product template if false). It is automatically "
        "resized as a 1024x1024px image, with aspect ratio preserved.")
    image_small = fields.Binary(
        "Small-sized image",
        compute='_compute_images',
        inverse='_set_image_small',
        help=
        "Image of the product variant (Small-sized image of product template if false)."
    )
    image_medium = fields.Binary(
        "Medium-sized image",
        compute='_compute_images',
        inverse='_set_image_medium',
        help=
        "Image of the product variant (Medium-sized image of product template if false)."
    )

    standard_price = fields.Float(
        'Cost',
        company_dependent=True,
        digits=dp.get_precision('Product Price'),
        groups="base.group_user",
        help=
        "Cost used for stock valuation in standard price and as a first price to set in average/fifo. "
        "Also used as a base price for pricelists. "
        "Expressed in the default unit of measure of the product.")
    volume = fields.Float('Volume', help="The volume in m3.")
    weight = fields.Float(
        'Weight',
        digits=dp.get_precision('Stock Weight'),
        help=
        "The weight of the contents in Kg, not including any packaging, etc.")

    pricelist_item_ids = fields.Many2many('product.pricelist.item',
                                          'Pricelist Items',
                                          compute='_get_pricelist_items')

    packaging_ids = fields.One2many(
        'product.packaging',
        'product_id',
        'Product Packages',
        help="Gives the different ways to package the same product.")

    _sql_constraints = [
        ('barcode_uniq', 'unique(barcode)',
         "A barcode can only be assigned to one product !"),
    ]

    def _get_invoice_policy(self):
        return False

    def _compute_product_price(self):
        prices = {}
        pricelist_id_or_name = self._context.get('pricelist')
        if pricelist_id_or_name:
            pricelist = None
            partner = self._context.get('partner', False)
            quantity = self._context.get('quantity', 1.0)

            # Support context pricelists specified as display_name or ID for compatibility
            if isinstance(pricelist_id_or_name, pycompat.string_types):
                pricelist_name_search = self.env[
                    'product.pricelist'].name_search(pricelist_id_or_name,
                                                     operator='=',
                                                     limit=1)
                if pricelist_name_search:
                    pricelist = self.env['product.pricelist'].browse(
                        [pricelist_name_search[0][0]])
            elif isinstance(pricelist_id_or_name, pycompat.integer_types):
                pricelist = self.env['product.pricelist'].browse(
                    pricelist_id_or_name)

            if pricelist:
                quantities = [quantity] * len(self)
                partners = [partner] * len(self)
                prices = pricelist.get_products_price(self, quantities,
                                                      partners)

        for product in self:
            product.price = prices.get(product.id, 0.0)

    def _set_product_price(self):
        for product in self:
            if self._context.get('uom'):
                value = self.env['product.uom'].browse(
                    self._context['uom'])._compute_price(
                        product.price, product.uom_id)
            else:
                value = product.price
            value -= product.price_extra
            product.write({'list_price': value})

    def _set_product_lst_price(self):
        for product in self:
            if self._context.get('uom'):
                value = self.env['product.uom'].browse(
                    self._context['uom'])._compute_price(
                        product.lst_price, product.uom_id)
            else:
                value = product.lst_price
            value -= product.price_extra
            product.write({'list_price': value})

    @api.depends('attribute_value_ids.price_ids.price_extra',
                 'attribute_value_ids.price_ids.product_tmpl_id')
    def _compute_product_price_extra(self):
        # TDE FIXME: do a real multi and optimize a bit ?
        for product in self:
            price_extra = 0.0
            for attribute_price in product.mapped(
                    'attribute_value_ids.price_ids'):
                if attribute_price.product_tmpl_id == product.product_tmpl_id:
                    price_extra += attribute_price.price_extra
            product.price_extra = price_extra

    @api.depends('list_price', 'price_extra')
    def _compute_product_lst_price(self):
        to_uom = None
        if 'uom' in self._context:
            to_uom = self.env['product.uom'].browse([self._context['uom']])

        for product in self:
            if to_uom:
                list_price = product.uom_id._compute_price(
                    product.list_price, to_uom)
            else:
                list_price = product.list_price
            product.lst_price = list_price + product.price_extra

    @api.one
    def _compute_product_code(self):
        for supplier_info in self.seller_ids:
            if supplier_info.name.id == self._context.get('partner_id'):
                self.code = supplier_info.product_code or self.default_code
                break
        else:
            self.code = self.default_code

    @api.one
    def _compute_partner_ref(self):
        for supplier_info in self.seller_ids:
            if supplier_info.name.id == self._context.get('partner_id'):
                product_name = supplier_info.product_name or self.default_code
                self.partner_ref = '%s%s' % (self.code and '[%s] ' % self.code
                                             or '', product_name)
                break
        else:
            self.partner_ref = self.name_get()[0][1]

    @api.one
    @api.depends('image_variant', 'product_tmpl_id.image')
    def _compute_images(self):
        if self._context.get('bin_size'):
            self.image_medium = self.image_variant
            self.image_small = self.image_variant
            self.image = self.image_variant
        else:
            resized_images = tools.image_get_resized_images(
                self.image_variant, return_big=True, avoid_resize_medium=True)
            self.image_medium = resized_images['image_medium']
            self.image_small = resized_images['image_small']
            self.image = resized_images['image']
        if not self.image_medium:
            self.image_medium = self.product_tmpl_id.image_medium
        if not self.image_small:
            self.image_small = self.product_tmpl_id.image_small
        if not self.image:
            self.image = self.product_tmpl_id.image

    @api.one
    def _set_image(self):
        self._set_image_value(self.image)

    @api.one
    def _set_image_medium(self):
        self._set_image_value(self.image_medium)

    @api.one
    def _set_image_small(self):
        self._set_image_value(self.image_small)

    @api.one
    def _set_image_value(self, value):
        if isinstance(value, pycompat.text_type):
            value = value.encode('ascii')
        image = tools.image_resize_image_big(value)
        if self.product_tmpl_id.image:
            self.image_variant = image
        else:
            self.product_tmpl_id.image = image

    @api.one
    def _get_pricelist_items(self):
        self.pricelist_item_ids = self.env['product.pricelist.item'].search([
            '|', ('product_id', '=', self.id),
            ('product_tmpl_id', '=', self.product_tmpl_id.id)
        ]).ids

    @api.constrains('attribute_value_ids')
    def _check_attribute_value_ids(self):
        for product in self:
            attributes = self.env['product.attribute']
            for value in product.attribute_value_ids:
                if value.attribute_id in attributes:
                    raise ValidationError(
                        _('Error! It is not allowed to choose more than one value for a given attribute.'
                          ))
                if value.attribute_id.create_variant:
                    attributes |= value.attribute_id
        return True

    @api.onchange('uom_id', 'uom_po_id')
    def _onchange_uom(self):
        if self.uom_id and self.uom_po_id and self.uom_id.category_id != self.uom_po_id.category_id:
            self.uom_po_id = self.uom_id

    @api.model
    def create(self, vals):
        product = super(
            ProductProduct,
            self.with_context(create_product_product=True)).create(vals)
        # When a unique variant is created from tmpl then the standard price is set by _set_standard_price
        if not (self.env.context.get('create_from_tmpl')
                and len(product.product_tmpl_id.product_variant_ids) == 1):
            product._set_standard_price(vals.get('standard_price') or 0.0)
        return product

    @api.multi
    def write(self, values):
        ''' Store the standard price change in order to be able to retrieve the cost of a product for a given date'''
        res = super(ProductProduct, self).write(values)
        if 'standard_price' in values:
            self._set_standard_price(values['standard_price'])
        return res

    @api.multi
    def unlink(self):
        unlink_products = self.env['product.product']
        unlink_templates = self.env['product.template']
        for product in self:
            # Check if product still exists, in case it has been unlinked by unlinking its template
            if not product.exists():
                continue
            # Check if the product is last product of this template
            other_products = self.search([('product_tmpl_id', '=',
                                           product.product_tmpl_id.id),
                                          ('id', '!=', product.id)])
            if not other_products:
                unlink_templates |= product.product_tmpl_id
            unlink_products |= product
        res = super(ProductProduct, unlink_products).unlink()
        # delete templates after calling super, as deleting template could lead to deleting
        # products due to ondelete='cascade'
        unlink_templates.unlink()
        return res

    @api.multi
    def copy(self, default=None):
        # TDE FIXME: clean context / variant brol
        if default is None:
            default = {}
        if self._context.get('variant'):
            # if we copy a variant or create one, we keep the same template
            default['product_tmpl_id'] = self.product_tmpl_id.id
        elif 'name' not in default:
            default['name'] = self.name

        return super(ProductProduct, self).copy(default=default)

    @api.model
    def search(self, args, offset=0, limit=None, order=None, count=False):
        # TDE FIXME: strange
        if self._context.get('search_default_categ_id'):
            args.append((('categ_id', 'child_of',
                          self._context['search_default_categ_id'])))
        return super(ProductProduct, self).search(args,
                                                  offset=offset,
                                                  limit=limit,
                                                  order=order,
                                                  count=count)

    @api.multi
    def name_get(self):
        # TDE: this could be cleaned a bit I think

        def _name_get(d):
            name = d.get('name', '')
            code = self._context.get('display_default_code', True) and d.get(
                'default_code', False) or False
            if code:
                name = '[%s] %s' % (code, name)
            return (d['id'], name)

        partner_id = self._context.get('partner_id')
        if partner_id:
            partner_ids = [
                partner_id, self.env['res.partner'].browse(
                    partner_id).commercial_partner_id.id
            ]
        else:
            partner_ids = []

        # not all users have access to seller and partner data
        # check access and use superuser
        self.check_access_rights("read")
        self.check_access_rule("read")

        result = []
        for product in self.sudo():
            # display only the attributes with multiple possible values on the template
            variable_attributes = product.attribute_line_ids.filtered(
                lambda l: len(l.value_ids) > 1).mapped('attribute_id')
            variant = product.attribute_value_ids._variant_name(
                variable_attributes)

            name = variant and "%s (%s)" % (product.name,
                                            variant) or product.name
            sellers = []
            if partner_ids:
                sellers = [
                    x for x in product.seller_ids
                    if (x.name.id in partner_ids) and (x.product_id == product)
                ]
                if not sellers:
                    sellers = [
                        x for x in product.seller_ids
                        if (x.name.id in partner_ids) and not x.product_id
                    ]
            if sellers:
                for s in sellers:
                    seller_variant = s.product_name and (
                        variant and "%s (%s)" %
                        (s.product_name, variant) or s.product_name) or False
                    mydict = {
                        'id': product.id,
                        'name': seller_variant or name,
                        'default_code': s.product_code or product.default_code,
                    }
                    temp = _name_get(mydict)
                    if temp not in result:
                        result.append(temp)
            else:
                mydict = {
                    'id': product.id,
                    'name': name,
                    'default_code': product.default_code,
                }
                result.append(_name_get(mydict))
        return result

    @api.model
    def name_search(self, name='', args=None, operator='ilike', limit=100):
        if not args:
            args = []
        if name:
            positive_operators = ['=', 'ilike', '=ilike', 'like', '=like']
            products = self.env['product.product']
            if operator in positive_operators:
                products = self.search([('default_code', '=', name)] + args,
                                       limit=limit)
                if not products:
                    products = self.search([('barcode', '=', name)] + args,
                                           limit=limit)
            if not products and operator not in expression.NEGATIVE_TERM_OPERATORS:
                # Do not merge the 2 next lines into one single search, SQL search performance would be abysmal
                # on a database with thousands of matching products, due to the huge merge+unique needed for the
                # OR operator (and given the fact that the 'name' lookup results come from the ir.translation table).
                # Performing a quick memory merge of ids in Python will give much better performance
                products = self.search(args +
                                       [('default_code', operator, name)],
                                       limit=limit)
                if not limit or len(products) < limit:
                    # we may underrun the limit because of dupes in the results, that's fine
                    limit2 = (limit - len(products)) if limit else False
                    products += self.search(args +
                                            [('name', operator, name),
                                             ('id', 'not in', products.ids)],
                                            limit=limit2)
            elif not products and operator in expression.NEGATIVE_TERM_OPERATORS:
                domain = expression.OR([
                    [
                        '&', ('default_code', operator, name),
                        ('name', operator, name)
                    ],
                    [
                        '&', ('default_code', '=', False),
                        ('name', operator, name)
                    ],
                ])
                domain = expression.AND([args, domain])
                products = self.search(domain, limit=limit)
            if not products and operator in positive_operators:
                ptrn = re.compile(r'(\[(.*?)\])')
                res = ptrn.search(name)
                if res:
                    products = self.search(
                        [('default_code', '=', res.group(2))] + args,
                        limit=limit)
            # still no results, partner in context: search on supplier info as last hope to find something
            if not products and self._context.get('partner_id'):
                suppliers = self.env['product.supplierinfo'].search([
                    ('name', '=', self._context.get('partner_id')), '|',
                    ('product_code', operator, name),
                    ('product_name', operator, name)
                ])
                if suppliers:
                    products = self.search(
                        [('product_tmpl_id.seller_ids', 'in', suppliers.ids)],
                        limit=limit)
        else:
            products = self.search(args, limit=limit)
        return products.name_get()

    @api.model
    def view_header_get(self, view_id, view_type):
        res = super(ProductProduct, self).view_header_get(view_id, view_type)
        if self._context.get('categ_id'):
            return _('Products: ') + self.env['product.category'].browse(
                self._context['categ_id']).name
        return res

    @api.multi
    def open_product_template(self):
        """ Utility method used to add an "Open Template" button in product views """
        self.ensure_one()
        return {
            'type': 'ir.actions.act_window',
            'res_model': 'product.template',
            'view_mode': 'form',
            'res_id': self.product_tmpl_id.id,
            'target': 'new'
        }

    @api.multi
    def _select_seller(self,
                       partner_id=False,
                       quantity=0.0,
                       date=None,
                       uom_id=False):
        self.ensure_one()
        if date is None:
            date = fields.Date.context_today(self)
        precision = self.env['decimal.precision'].precision_get(
            'Product Unit of Measure')

        res = self.env['product.supplierinfo']
        for seller in self.seller_ids:
            # Set quantity in UoM of seller
            quantity_uom_seller = quantity
            if quantity_uom_seller and uom_id and uom_id != seller.product_uom:
                quantity_uom_seller = uom_id._compute_quantity(
                    quantity_uom_seller, seller.product_uom)

            if seller.date_start and seller.date_start > date:
                continue
            if seller.date_end and seller.date_end < date:
                continue
            if partner_id and seller.name not in [
                    partner_id, partner_id.parent_id
            ]:
                continue
            if float_compare(quantity_uom_seller,
                             seller.min_qty,
                             precision_digits=precision) == -1:
                continue
            if seller.product_id and seller.product_id != self:
                continue

            res |= seller
            break
        return res
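
    # Illustrative usage (not part of the upstream source); `po_line` is a
    # hypothetical purchase order line:
    #   seller = po_line.product_id._select_seller(
    #       partner_id=po_line.order_id.partner_id,
    #       quantity=po_line.product_qty,
    #       uom_id=po_line.product_uom)
    # The first vendor pricelist line that passes the date window, partner,
    # minimum quantity (converted to the seller's UoM) and variant checks is
    # returned; an empty recordset means no vendor matched.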

    @api.multi
    def price_compute(self,
                      price_type,
                      uom=False,
                      currency=False,
                      company=False):
        # TDE FIXME: delegate to template or not ? fields are reencoded here ...
        # Compatibility with the context keys used in various places in the code
        if not uom and self._context.get('uom'):
            uom = self.env['product.uom'].browse(self._context['uom'])
        if not currency and self._context.get('currency'):
            currency = self.env['res.currency'].browse(
                self._context['currency'])

        products = self
        if price_type == 'standard_price':
            # standard_price field can only be seen by users in base.group_user
            # Thus, in order to compute the sale price from the cost for users not in this group
            # We fetch the standard price as the superuser
            products = self.with_context(
                force_company=company and company.id or self._context.get(
                    'force_company', self.env.user.company_id.id)).sudo()

        prices = dict.fromkeys(self.ids, 0.0)
        for product in products:
            prices[product.id] = product[price_type] or 0.0
            if price_type == 'list_price':
                prices[product.id] += product.price_extra

            if uom:
                prices[product.id] = product.uom_id._compute_price(
                    prices[product.id], uom)

            # Convert from current user company currency to asked one
            # This is correct because a field cannot be expressed in more than one currency
            if currency:
                prices[product.id] = product.currency_id.compute(
                    prices[product.id], currency)

        return prices
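
    # Illustrative usage (not part of the upstream source): price_compute()
    # returns {product_id: price}. For example, with hypothetical `uom_dozen`
    # and `currency_eur` records:
    #   prices = products.price_compute('list_price', uom=uom_dozen,
    #                                   currency=currency_eur)
    # With price_type='standard_price' the cost is read as superuser, so users
    # outside base.group_user can still derive sale prices from it.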

    # compatibility to remove after v10 - DEPRECATED
    @api.multi
    def price_get(self, ptype='list_price'):
        return self.price_compute(ptype)

    @api.multi
    def _set_standard_price(self, value):
        ''' Store the standard price change in order to be able to retrieve the cost of a product for a given date'''
        PriceHistory = self.env['product.price.history']
        for product in self:
            PriceHistory.create({
                'product_id': product.id,
                'cost': value,
                'company_id': self._context.get('force_company',
                                                self.env.user.company_id.id),
            })

    @api.multi
    def get_history_price(self, company_id, date=None):
        history = self.env['product.price.history'].search(
            [('company_id', '=', company_id), ('product_id', 'in', self.ids),
             ('datetime', '<=', date or fields.Datetime.now())],
            order='datetime desc,id desc',
            limit=1)
        return history.cost or 0.0
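
The two methods above form a small audit trail for product cost: every cost change is logged in product.price.history, and get_history_price() replays the log for a given company and date. A minimal usage sketch, assuming an Odoo 11 shell where `product` is a product.product record and `company` a res.company record (both hypothetical):

cost_now = product.get_history_price(company.id)
cost_end_2017 = product.get_history_price(company.id,
                                           date='2017-12-31 23:59:59')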
예제 #22
0
class Property(models.Model):
    _name = 'ir.property'

    name = fields.Char(index=True)
    res_id = fields.Char(
        string='Resource',
        index=True,
        help="If not set, acts as a default value for new resources",
    )
    company_id = fields.Many2one('res.company', string='Company', index=True)
    fields_id = fields.Many2one('ir.model.fields',
                                string='Field',
                                ondelete='cascade',
                                required=True,
                                index=True)
    value_float = fields.Float()
    value_integer = fields.Integer()
    value_text = fields.Text()  # will contain (char, text)
    value_binary = fields.Binary()
    value_reference = fields.Char()
    value_datetime = fields.Datetime()
    type = fields.Selection([
        ('char', 'Char'),
        ('float', 'Float'),
        ('boolean', 'Boolean'),
        ('integer', 'Integer'),
        ('text', 'Text'),
        ('binary', 'Binary'),
        ('many2one', 'Many2One'),
        ('date', 'Date'),
        ('datetime', 'DateTime'),
        ('selection', 'Selection'),
    ],
                            required=True,
                            default='many2one',
                            index=True)

    @api.multi
    def _update_values(self, values):
        value = values.pop('value', None)
        if not value:
            return values

        prop = None
        type_ = values.get('type')
        if not type_:
            if self:
                prop = self[0]
                type_ = prop.type
            else:
                type_ = self._fields['type'].default(self)

        field = TYPE2FIELD.get(type_)
        if not field:
            raise UserError(_('Invalid type'))

        if field == 'value_reference':
            if isinstance(value, models.BaseModel):
                value = '%s,%d' % (value._name, value.id)
            elif isinstance(value, pycompat.integer_types):
                field_id = values.get('fields_id')
                if not field_id:
                    if not prop:
                        raise ValueError()
                    field_id = prop.fields_id
                else:
                    field_id = self.env['ir.model.fields'].browse(field_id)

                value = '%s,%d' % (field_id.sudo().relation, value)

        values[field] = value
        return values

    @api.multi
    def write(self, values):
        return super(Property, self).write(self._update_values(values))

    @api.model
    def create(self, values):
        return super(Property, self).create(self._update_values(values))

    @api.multi
    def get_by_record(self):
        self.ensure_one()
        if self.type in ('char', 'text', 'selection'):
            return self.value_text
        elif self.type == 'float':
            return self.value_float
        elif self.type == 'boolean':
            return bool(self.value_integer)
        elif self.type == 'integer':
            return self.value_integer
        elif self.type == 'binary':
            return self.value_binary
        elif self.type == 'many2one':
            if not self.value_reference:
                return False
            model, resource_id = self.value_reference.split(',')
            return self.env[model].browse(int(resource_id)).exists()
        elif self.type == 'datetime':
            return self.value_datetime
        elif self.type == 'date':
            if not self.value_datetime:
                return False
            return fields.Date.to_string(
                fields.Datetime.from_string(self.value_datetime))
        return False

    @api.model
    def get(self, name, model, res_id=False):
        domain = self._get_domain(name, model)
        if domain is not None:
            domain = [('res_id', '=', res_id)] + domain
            # Search with company_id asc so that company-specific properties come before the default (company_id = False)
            prop = self.search(domain, limit=1, order='company_id')
            if prop:
                return prop.get_by_record()
        return False

    def _get_domain(self, prop_name, model):
        self._cr.execute(
            "SELECT id FROM ir_model_fields WHERE name=%s AND model=%s",
            (prop_name, model))
        res = self._cr.fetchone()
        if not res:
            return None
        company_id = self._context.get(
            'force_company') or self.env['res.company']._company_default_get(
                model, res[0]).id
        return [('fields_id', '=', res[0]),
                ('company_id', 'in', [company_id, False])]

    @api.model
    def get_multi(self, name, model, ids):
        """ Read the property field `name` for the records of model `model` with
            the given `ids`, and return a dictionary mapping `ids` to their
            corresponding value.
        """
        if not ids:
            return {}

        domain = self._get_domain(name, model)
        if domain is None:
            return dict.fromkeys(ids, False)

        # retrieve the values for the given ids and the default value, too
        refs = {('%s,%s' % (model, id)): id for id in ids}
        refs[False] = False
        domain += [('res_id', 'in', list(refs))]

        # note: order by 'company_id asc' will return non-null values first
        props = self.search(domain, order='company_id asc')
        result = {}
        for prop in props:
            # for a given res_id, take the first property only
            id = refs.pop(prop.res_id, None)
            if id is not None:
                result[id] = prop.get_by_record()

        # set the default value to the ids that are not in result
        default_value = result.pop(False, False)
        for id in ids:
            result.setdefault(id, default_value)

        return result
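
    # Illustrative usage (not part of the upstream source): read a
    # company-dependent field for many records at once; the property field
    # name below is an example only.
    #   values = self.env['ir.property'].get_multi(
    #       'property_account_payable_id', 'res.partner', partners.ids)
    #   # -> {partner_id: stored value or the company default}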

    @api.model
    def set_multi(self, name, model, values, default_value=None):
        """ Assign the property field `name` for the records of model `model`
            with `values` (dictionary mapping record ids to their value).
            If the value for a given record is the same as the default
            value, the property entry will not be stored, to avoid bloating
            the database.
            If `default_value` is provided, that value will be used instead
            of the computed default value, to determine whether the value
            for a record should be stored or not.
        """
        def clean(value):
            return value.id if isinstance(value, models.BaseModel) else value

        if not values:
            return

        if not default_value:
            domain = self._get_domain(name, model)
            if domain is None:
                raise Exception()
            # retrieve the default value for the field
            default_value = clean(self.get(name, model))

        # retrieve the properties corresponding to the given record ids
        self._cr.execute(
            "SELECT id FROM ir_model_fields WHERE name=%s AND model=%s",
            (name, model))
        field_id = self._cr.fetchone()[0]
        company_id = self.env.context.get(
            'force_company') or self.env['res.company']._company_default_get(
                model, field_id).id
        refs = {('%s,%s' % (model, id)): id for id in values}
        props = self.search([
            ('fields_id', '=', field_id),
            ('company_id', '=', company_id),
            ('res_id', 'in', list(refs)),
        ])

        # modify existing properties
        for prop in props:
            id = refs.pop(prop.res_id)
            value = clean(values[id])
            if value == default_value:
                # avoid prop.unlink(), as it clears the record cache that can
                # contain the value of other properties to set on record!
                prop.check_access_rights('unlink')
                prop.check_access_rule('unlink')
                self._cr.execute("DELETE FROM ir_property WHERE id=%s",
                                 [prop.id])
            elif value != clean(prop.get_by_record()):
                prop.write({'value': value})

        # create new properties for records that do not have one yet
        for ref, id in refs.items():
            value = clean(values[id])
            if value != default_value:
                self.create({
                    'fields_id': field_id,
                    'company_id': company_id,
                    'res_id': ref,
                    'name': name,
                    'value': value,
                    'type': self.env[model]._fields[name].type,
                })
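
    # Illustrative usage (not part of the upstream source): write the same
    # company-dependent field for several records in one call; values equal to
    # the default are simply not stored.
    #   self.env['ir.property'].set_multi(
    #       'property_account_payable_id', 'res.partner',
    #       {pid: account.id for pid in partners.ids})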

    @api.model
    def search_multi(self, name, model, operator, value):
        """ Return a domain for the records that match the given condition. """
        default_matches = False
        include_zero = False

        field = self.env[model]._fields[name]
        if field.type == 'many2one':
            comodel = field.comodel_name

            def makeref(value):
                return value and '%s,%s' % (comodel, value)

            if operator == "=":
                value = makeref(value)
                # searching for an unset property: also match records that have no property set
                if value is False:
                    default_matches = True
            elif operator in ('!=', '<=', '<', '>', '>='):
                value = makeref(value)
            elif operator in ('in', 'not in'):
                value = [makeref(v) for v in value]
            elif operator in ('=like', '=ilike', 'like', 'not like', 'ilike',
                              'not ilike'):
                # most probably inefficient... but correct
                target = self.env[comodel]
                target_names = target.name_search(value,
                                                  operator=operator,
                                                  limit=None)
                target_ids = [n[0] for n in target_names]
                operator, value = 'in', [makeref(v) for v in target_ids]
        elif field.type in ('integer', 'float'):
            # No ir.property record is created when an integer/float field is set to 0,
            # so records whose value is 0 have no property row at all. To match them we
            # search with the negated operator to collect the records that do NOT match,
            # and return the opposite domain ('id' not in those records) below.
            if value == 0 and operator == '=':
                operator = '!='
                include_zero = True
            elif value <= 0 and operator == '>=':
                operator = '<'
                include_zero = True
            elif value < 0 and operator == '>':
                operator = '<='
                include_zero = True
            elif value >= 0 and operator == '<=':
                operator = '>'
                include_zero = True
            elif value > 0 and operator == '<':
                operator = '>='
                include_zero = True

        # retrieve the properties that match the condition
        domain = self._get_domain(name, model)
        if domain is None:
            raise Exception()
        props = self.search(domain +
                            [(TYPE2FIELD[field.type], operator, value)])

        # retrieve the records corresponding to the properties that match
        good_ids = []
        for prop in props:
            if prop.res_id:
                res_model, res_id = prop.res_id.split(',')
                good_ids.append(int(res_id))
            else:
                default_matches = True

        if include_zero:
            return [('id', 'not in', good_ids)]
        elif default_matches:
            # exclude all records with a property that does not match
            all_ids = []
            props = self.search(domain + [('res_id', '!=', False)])
            for prop in props:
                res_model, res_id = prop.res_id.split(',')
                all_ids.append(int(res_id))
            bad_ids = list(set(all_ids) - set(good_ids))
            return [('id', 'not in', bad_ids)]
        else:
            return [('id', 'in', good_ids)]
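
The class above reads and writes its value_* columns through the module-level TYPE2FIELD mapping, which this excerpt does not show. Reconstructed here for readability (verify against the upstream ir_property.py), it maps each property type to the column that stores it:

TYPE2FIELD = {
    'char': 'value_text',
    'float': 'value_float',
    'boolean': 'value_integer',
    'integer': 'value_integer',
    'text': 'value_text',
    'binary': 'value_binary',
    'many2one': 'value_reference',
    'date': 'value_datetime',
    'datetime': 'value_datetime',
    'selection': 'value_text',
}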
예제 #23
0
class IrAttachment(models.Model):
    """Attachments are used to link binary files or url to any openerp document.

    External attachment storage
    ---------------------------

    The computed field ``datas`` is implemented using ``_file_read``,
    ``_file_write`` and ``_file_delete``, which can be overridden to implement
    other storage engines. Such methods should check for other location pseudo
    uri (example: hdfs://hadoopserver).

    The default implementation is the file:dirname location that stores files
    on the local filesystem using name based on their sha1 hash
    """
    _name = 'ir.attachment'
    _order = 'id desc'

    @api.depends('res_model', 'res_id')
    def _compute_res_name(self):
        for attachment in self:
            if attachment.res_model and attachment.res_id:
                record = self.env[attachment.res_model].browse(
                    attachment.res_id)
                attachment.res_name = record.display_name

    @api.model
    def _storage(self):
        return self.env['ir.config_parameter'].sudo().get_param(
            'ir_attachment.location', 'file')

    @api.model
    def _filestore(self):
        return config.filestore(self._cr.dbname)

    @api.model
    def force_storage(self):
        """Force all attachments to be stored in the currently configured storage"""
        if not self.env.user._is_admin():
            raise AccessError(
                _('Only administrators can execute this action.'))

        # domain to retrieve the attachments to migrate
        domain = {
            'db': [('store_fname', '!=', False)],
            'file': [('db_datas', '!=', False)],
        }[self._storage()]

        for attach in self.search(domain):
            attach.write({'datas': attach.datas})
        return True

    @api.model
    def _full_path(self, path):
        # sanitize path
        path = re.sub('[.]', '', path)
        path = path.strip('/\\')
        return os.path.join(self._filestore(), path)

    @api.model
    def _get_path(self, bin_data, sha):
        # backward compatibility: files may still live under a 3-character prefix
        fname = sha[:3] + '/' + sha
        full_path = self._full_path(fname)
        if os.path.isfile(full_path):
            return fname, full_path  # keep existing path

        # scatter files across 256 dirs
        # we use '/' in the db (even on windows)
        fname = sha[:2] + '/' + sha
        full_path = self._full_path(fname)
        dirname = os.path.dirname(full_path)
        if not os.path.isdir(dirname):
            os.makedirs(dirname)
        return fname, full_path
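
    # Illustrative note (not part of the upstream source): for a checksum such
    # as 'ab34...', _get_path() keeps the legacy 'ab3/ab34...' path if that
    # file already exists, and otherwise returns 'ab/ab34...' so files end up
    # scattered across at most 256 two-character directories.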

    @api.model
    def _file_read(self, fname, bin_size=False):
        full_path = self._full_path(fname)
        r = ''
        try:
            if bin_size:
                r = human_size(os.path.getsize(full_path))
            else:
                r = base64.b64encode(open(full_path, 'rb').read())
        except (IOError, OSError):
            _logger.info("_read_file reading %s", full_path, exc_info=True)
        return r

    @api.model
    def _file_write(self, value, checksum):
        bin_value = base64.b64decode(value)
        fname, full_path = self._get_path(bin_value, checksum)
        if not os.path.exists(full_path):
            try:
                with open(full_path, 'wb') as fp:
                    fp.write(bin_value)
                # add fname to checklist, in case the transaction aborts
                self._mark_for_gc(fname)
            except IOError:
                _logger.info("_file_write writing %s",
                             full_path,
                             exc_info=True)
        return fname

    @api.model
    def _file_delete(self, fname):
        # simply add fname to checklist, it will be garbage-collected later
        self._mark_for_gc(fname)

    def _mark_for_gc(self, fname):
        """ Add ``fname`` in a checklist for the filestore garbage collection. """
        # we use a spooldir: add an empty file in the subdirectory 'checklist'
        full_path = os.path.join(self._full_path('checklist'), fname)
        if not os.path.exists(full_path):
            dirname = os.path.dirname(full_path)
            if not os.path.isdir(dirname):
                with tools.ignore(OSError):
                    os.makedirs(dirname)
            open(full_path, 'ab').close()

    @api.model
    def _file_gc(self):
        """ Perform the garbage collection of the filestore. """
        if self._storage() != 'file':
            return

        # Continue in a new transaction. The LOCK statement below must be the
        # first one in the current transaction, otherwise the database snapshot
        # used by it may not contain the most recent changes made to the table
        # ir_attachment! Indeed, if concurrent transactions create attachments,
        # the LOCK statement will wait until those concurrent transactions end.
        # But this transaction will not see the new attachments if it has done
        # other requests before the LOCK (like the method _storage() above).
        cr = self._cr
        cr.commit()

        # prevent all concurrent updates on ir_attachment while collecting!
        cr.execute("LOCK ir_attachment IN SHARE MODE")

        # retrieve the file names from the checklist
        checklist = {}
        for dirpath, _, filenames in os.walk(self._full_path('checklist')):
            dirname = os.path.basename(dirpath)
            for filename in filenames:
                fname = "%s/%s" % (dirname, filename)
                checklist[fname] = os.path.join(dirpath, filename)

        # determine which files to keep among the checklist
        whitelist = set()
        for names in cr.split_for_in_conditions(checklist):
            cr.execute(
                "SELECT store_fname FROM ir_attachment WHERE store_fname IN %s",
                [names])
            whitelist.update(row[0] for row in cr.fetchall())

        # remove garbage files, and clean up checklist
        removed = 0
        for fname, filepath in checklist.items():
            if fname not in whitelist:
                try:
                    os.unlink(self._full_path(fname))
                    removed += 1
                except (OSError, IOError):
                    _logger.info("_file_gc could not unlink %s",
                                 self._full_path(fname),
                                 exc_info=True)
            with tools.ignore(OSError):
                os.unlink(filepath)

        # commit to release the lock
        cr.commit()
        _logger.info("filestore gc %d checked, %d removed", len(checklist),
                     removed)

    @api.depends('store_fname', 'db_datas')
    def _compute_datas(self):
        bin_size = self._context.get('bin_size')
        for attach in self:
            if attach.store_fname:
                attach.datas = self._file_read(attach.store_fname, bin_size)
            else:
                attach.datas = attach.db_datas

    def _inverse_datas(self):
        location = self._storage()
        for attach in self:
            # compute the fields that depend on datas
            value = attach.datas
            bin_data = base64.b64decode(value) if value else b''
            vals = {
                'file_size': len(bin_data),
                'checksum': self._compute_checksum(bin_data),
                'index_content': self._index(bin_data, attach.datas_fname,
                                             attach.mimetype),
                'store_fname': False,
                'db_datas': value,
            }
            if value and location != 'db':
                # save it to the filestore
                vals['store_fname'] = self._file_write(value, vals['checksum'])
                vals['db_datas'] = False

            # take current location in filestore to possibly garbage-collect it
            fname = attach.store_fname
            # write as superuser, as user probably does not have write access
            super(IrAttachment, attach.sudo()).write(vals)
            if fname:
                self._file_delete(fname)

    def _compute_checksum(self, bin_data):
        """ compute the checksum for the given datas
            :param bin_data : datas in its binary form
        """
        # an empty file has a checksum too (for caching)
        return hashlib.sha1(bin_data or b'').hexdigest()

    def _compute_mimetype(self, values):
        """ compute the mimetype of the given values
            :param values : dict of values to create or write an ir_attachment
            :return mime : string indicating the mimetype, or application/octet-stream by default
        """
        mimetype = None
        if values.get('mimetype'):
            mimetype = values['mimetype']
        if not mimetype and values.get('datas_fname'):
            mimetype = mimetypes.guess_type(values['datas_fname'])[0]
        if not mimetype and values.get('url'):
            mimetype = mimetypes.guess_type(values['url'])[0]
        if values.get('datas') and (not mimetype
                                    or mimetype == 'application/octet-stream'):
            mimetype = guess_mimetype(base64.b64decode(values['datas']))
        return mimetype or 'application/octet-stream'

    def _check_contents(self, values):
        mimetype = values['mimetype'] = self._compute_mimetype(values)
        xml_like = 'ht' in mimetype or 'xml' in mimetype  # hta, html, xhtml, etc.
        force_text = (xml_like and
                      (not self.env.user._is_admin()
                       or self.env.context.get('attachments_mime_plainxml')))
        if force_text:
            values['mimetype'] = 'text/plain'
        return values

    @api.model
    def _index(self, bin_data, datas_fname, file_type):
        """ compute the index content of the given filename, or binary data.
            This is a python implementation of the unix command 'strings'.
            :param bin_data : datas in binary form
            :return index_content : string containing all the printable character of the binary data
        """
        index_content = False
        if file_type:
            index_content = file_type.split('/')[0]
            if index_content == 'text':  # compute index_content only for text type
                words = re.findall(b"[\x20-\x7E]{4,}", bin_data)
                index_content = b"\n".join(words).decode('ascii')
        return index_content

    name = fields.Char('Attachment Name', required=True)
    datas_fname = fields.Char('File Name')
    description = fields.Text('Description')
    res_name = fields.Char('Resource Name',
                           compute='_compute_res_name',
                           store=True)
    res_model = fields.Char(
        'Resource Model',
        readonly=True,
        help="The database object this attachment will be attached to.")
    res_field = fields.Char('Resource Field', readonly=True)
    res_id = fields.Integer('Resource ID',
                            readonly=True,
                            help="The record id this is attached to.")
    create_date = fields.Datetime('Date Created', readonly=True)
    create_uid = fields.Many2one('res.users', string='Owner', readonly=True)
    company_id = fields.Many2one('res.company',
                                 string='Company',
                                 change_default=True,
                                 default=lambda self: self.env['res.company'].
                                 _company_default_get('ir.attachment'))
    type = fields.Selection(
        [('url', 'URL'), ('binary', 'File')],
        string='Type',
        required=True,
        default='binary',
        change_default=True,
        help=
        "You can either upload a file from your computer or copy/paste an internet link to your file."
    )
    url = fields.Char('Url', index=True, size=1024)
    public = fields.Boolean('Is public document')

    # for external access
    access_token = fields.Char('Access Token', groups="base.group_user")

    # the field 'datas' is computed and may use the other fields below
    datas = fields.Binary(string='File Content',
                          compute='_compute_datas',
                          inverse='_inverse_datas')
    db_datas = fields.Binary('Database Data')
    store_fname = fields.Char('Stored Filename')
    file_size = fields.Integer('File Size', readonly=True)
    checksum = fields.Char("Checksum/SHA1", size=40, index=True, readonly=True)
    mimetype = fields.Char('Mime Type', readonly=True)
    index_content = fields.Text('Indexed Content',
                                readonly=True,
                                prefetch=False)

    @api.model_cr_context
    def _auto_init(self):
        res = super(IrAttachment, self)._auto_init()
        tools.create_index(self._cr, 'ir_attachment_res_idx', self._table,
                           ['res_model', 'res_id'])
        return res

    @api.model
    def check(self, mode, values=None):
        """Restricts the access to an ir.attachment, according to referred model
        In the 'document' module, it is overriden to relax this hard rule, since
        more complex ones apply there.
        """
        # collect the records to check (by model)
        model_ids = defaultdict(set)  # {model_name: set(ids)}
        require_employee = False
        if self:
            self._cr.execute(
                'SELECT res_model, res_id, create_uid, public FROM ir_attachment WHERE id IN %s',
                [tuple(self.ids)])
            for res_model, res_id, create_uid, public in self._cr.fetchall():
                if public and mode == 'read':
                    continue
                if not (res_model and res_id):
                    if create_uid != self._uid:
                        require_employee = True
                    continue
                model_ids[res_model].add(res_id)
        if values and values.get('res_model') and values.get('res_id'):
            model_ids[values['res_model']].add(values['res_id'])

        # check access rights on the records
        for res_model, res_ids in model_ids.items():
            # ignore attachments that are not attached to a resource anymore
            # when checking access rights (resource was deleted but attachment
            # was not)
            if res_model not in self.env:
                require_employee = True
                continue
            records = self.env[res_model].browse(res_ids).exists()
            if len(records) < len(res_ids):
                require_employee = True
            # For related models, check if we can write to the model, as unlinking
            # and creating attachments can be seen as an update to the model
            records.check_access_rights('write' if mode in (
                'create', 'unlink') else mode)
            records.check_access_rule(mode)

        if require_employee:
            if not (self.env.user._is_admin()
                    or self.env.user.has_group('base.group_user')):
                raise AccessError(
                    _("Sorry, you are not allowed to access this document."))

    @api.model
    def _search(self,
                args,
                offset=0,
                limit=None,
                order=None,
                count=False,
                access_rights_uid=None):
        # add res_field=False in domain if not present; the arg[0] trick below
        # works for domain items and '&'/'|'/'!' operators too
        if not any(arg[0] in ('id', 'res_field') for arg in args):
            args.insert(0, ('res_field', '=', False))

        ids = super(IrAttachment,
                    self)._search(args,
                                  offset=offset,
                                  limit=limit,
                                  order=order,
                                  count=False,
                                  access_rights_uid=access_rights_uid)

        if self._uid == SUPERUSER_ID:
            # rules do not apply for the superuser
            return len(ids) if count else ids

        if not ids:
            return 0 if count else []

        # Work with a set, as list.remove() is prohibitive for large lists of documents
        # (takes 20+ seconds on a db with 100k docs during search_count()!)
        orig_ids = ids
        ids = set(ids)

        # For attachments, the permissions of the document they are attached to
        # apply, so we must remove attachments for which the user cannot access
        # the linked document.
        # Use pure SQL rather than read() as it is about 50% faster for large dbs (100k+ docs),
        # and the permissions are checked in super() and below anyway.
        model_attachments = defaultdict(
            lambda: defaultdict(set))  # {res_model: {res_id: set(ids)}}
        self._cr.execute(
            """SELECT id, res_model, res_id, public FROM ir_attachment WHERE id IN %s""",
            [tuple(ids)])
        for row in self._cr.dictfetchall():
            if not row['res_model'] or row['public']:
                continue
            # model_attachments = {res_model: {res_id: set(ids)}}
            model_attachments[row['res_model']][row['res_id']].add(row['id'])

        # To avoid multiple queries for each attachment found, checks are
        # performed in batch as much as possible.
        for res_model, targets in model_attachments.items():
            if res_model not in self.env:
                continue
            if not self.env[res_model].check_access_rights('read', False):
                # remove all corresponding attachment ids
                ids.difference_update(itertools.chain(*targets.values()))
                continue
            # filter ids according to what access rules permit
            target_ids = list(targets)
            allowed = self.env[res_model].with_context(
                active_test=False).search([('id', 'in', target_ids)])
            for res_id in set(target_ids).difference(allowed.ids):
                ids.difference_update(targets[res_id])

        # sort result according to the original sort ordering
        result = [id for id in orig_ids if id in ids]
        return len(result) if count else list(result)

    @api.multi
    def read(self, fields=None, load='_classic_read'):
        self.check('read')
        return super(IrAttachment, self).read(fields, load=load)

    @api.multi
    def write(self, vals):
        self.check('write', values=vals)
        # remove computed field depending of datas
        for field in ('file_size', 'checksum'):
            vals.pop(field, False)
        if 'mimetype' in vals or 'datas' in vals:
            vals = self._check_contents(vals)
        return super(IrAttachment, self).write(vals)

    @api.multi
    def copy(self, default=None):
        self.check('write')
        return super(IrAttachment, self).copy(default)

    @api.multi
    def unlink(self):
        self.check('unlink')

        # First delete in the database, *then* in the filesystem if the
        # database allowed it. Helps avoid errors when concurrent transactions
        # are deleting the same file, and some of the transactions are
        # rolled back by PostgreSQL (due to concurrent updates detection).
        to_delete = set(attach.store_fname for attach in self
                        if attach.store_fname)
        res = super(IrAttachment, self).unlink()
        for file_path in to_delete:
            self._file_delete(file_path)

        return res

    @api.model
    def create(self, values):
        # remove computed field depending of datas
        for field in ('file_size', 'checksum'):
            values.pop(field, False)
        values = self._check_contents(values)
        self.browse().check('write', values=values)
        return super(IrAttachment, self).create(values)

    @api.one
    def generate_access_token(self):
        if self.access_token:
            return self.access_token
        access_token = str(uuid.uuid4())
        self.write({'access_token': access_token})
        return access_token

    @api.model
    def action_get(self):
        return self.env['ir.actions.act_window'].for_xml_id(
            'base', 'action_attachment')
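
The class docstring points at _file_read, _file_write and _file_delete as the hooks for alternative storage engines. A hedged sketch of such an override, assuming a hypothetical 's3://' pseudo-URI and a hypothetical _s3_read() helper:

from odoo import api, models


class IrAttachmentS3(models.Model):
    _inherit = 'ir.attachment'

    @api.model
    def _file_read(self, fname, bin_size=False):
        # Delegate only our pseudo-URIs; everything else keeps the default
        # filestore behaviour.
        if fname.startswith('s3://'):
            return self._s3_read(fname, bin_size=bin_size)  # hypothetical helper
        return super(IrAttachmentS3, self)._file_read(fname, bin_size=bin_size)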
예제 #24
0
class MrpWorkorder(models.Model):
    _name = 'mrp.workorder'
    _description = 'Work Order'
    _inherit = ['mail.thread']

    name = fields.Char('Work Order',
                       required=True,
                       states={
                           'done': [('readonly', True)],
                           'cancel': [('readonly', True)]
                       })

    workcenter_id = fields.Many2one('mrp.workcenter',
                                    'Work Center',
                                    required=True,
                                    states={
                                        'done': [('readonly', True)],
                                        'cancel': [('readonly', True)]
                                    })
    working_state = fields.Selection('Workcenter Status',
                                     related='workcenter_id.working_state',
                                     help='Technical: used in views only')

    production_id = fields.Many2one('mrp.production',
                                    'Manufacturing Order',
                                    index=True,
                                    ondelete='cascade',
                                    required=True,
                                    track_visibility='onchange',
                                    states={
                                        'done': [('readonly', True)],
                                        'cancel': [('readonly', True)]
                                    })
    product_id = fields.Many2one('product.product',
                                 'Product',
                                 related='production_id.product_id',
                                 readonly=True,
                                 help='Technical: used in views only.',
                                 store=True)
    product_uom_id = fields.Many2one('product.uom',
                                     'Unit of Measure',
                                     related='production_id.product_uom_id',
                                     readonly=True,
                                     help='Technical: used in views only.')
    production_availability = fields.Selection(
        'Stock Availability',
        readonly=True,
        related='production_id.availability',
        store=True,
        help='Technical: used in views and domains only.')
    production_state = fields.Selection('Production State',
                                        readonly=True,
                                        related='production_id.state',
                                        help='Technical: used in views only.')
    product_tracking = fields.Selection(
        'Product Tracking',
        related='production_id.product_id.tracking',
        help='Technical: used in views only.')
    qty_production = fields.Float('Original Production Quantity',
                                  readonly=True,
                                  related='production_id.product_qty')
    qty_remaining = fields.Float(
        'Quantity To Be Produced',
        compute='_compute_qty_remaining',
        digits=dp.get_precision('Product Unit of Measure'))
    qty_produced = fields.Float(
        'Quantity',
        default=0.0,
        readonly=True,
        digits=dp.get_precision('Product Unit of Measure'),
        help="The number of products already handled by this work order")
    qty_producing = fields.Float(
        'Currently Produced Quantity',
        default=1.0,
        digits=dp.get_precision('Product Unit of Measure'),
        states={
            'done': [('readonly', True)],
            'cancel': [('readonly', True)]
        })
    is_produced = fields.Boolean(string="Has Been Produced",
                                 compute='_compute_is_produced')

    state = fields.Selection([('pending', 'Pending'), ('ready', 'Ready'),
                              ('progress', 'In Progress'),
                              ('done', 'Finished'), ('cancel', 'Cancelled')],
                             string='Status',
                             default='pending')
    date_planned_start = fields.Datetime('Scheduled Date Start',
                                         states={
                                             'done': [('readonly', True)],
                                             'cancel': [('readonly', True)]
                                         })
    date_planned_finished = fields.Datetime('Scheduled Date Finished',
                                            states={
                                                'done': [('readonly', True)],
                                                'cancel': [('readonly', True)]
                                            })
    date_start = fields.Datetime('Effective Start Date',
                                 states={
                                     'done': [('readonly', True)],
                                     'cancel': [('readonly', True)]
                                 })
    date_finished = fields.Datetime('Effective End Date',
                                    states={
                                        'done': [('readonly', True)],
                                        'cancel': [('readonly', True)]
                                    })

    duration_expected = fields.Float('Expected Duration',
                                     digits=(16, 2),
                                     states={
                                         'done': [('readonly', True)],
                                         'cancel': [('readonly', True)]
                                     },
                                     help="Expected duration (in minutes)")
    duration = fields.Float('Real Duration',
                            compute='_compute_duration',
                            readonly=True,
                            store=True)
    duration_unit = fields.Float('Duration Per Unit',
                                 compute='_compute_duration',
                                 readonly=True,
                                 store=True)
    duration_percent = fields.Integer('Duration Deviation (%)',
                                      compute='_compute_duration',
                                      group_operator="avg",
                                      readonly=True,
                                      store=True)

    operation_id = fields.Many2one(
        'mrp.routing.workcenter', 'Operation'
    )  # Should be used differently as BoM can change in the meantime
    worksheet = fields.Binary('Worksheet',
                              related='operation_id.worksheet',
                              readonly=True)
    move_raw_ids = fields.One2many('stock.move', 'workorder_id', 'Moves')
    move_line_ids = fields.One2many(
        'stock.move.line',
        'workorder_id',
        'Moves to Track',
        domain=[('done_wo', '=', True)],
        help=
        "Inventory moves for which you must scan a lot number at this work order"
    )
    active_move_line_ids = fields.One2many('stock.move.line',
                                           'workorder_id',
                                           domain=[('done_wo', '=', False)])
    final_lot_id = fields.Many2one('stock.production.lot',
                                   'Lot/Serial Number',
                                   domain="[('product_id', '=', product_id)]",
                                   states={
                                       'done': [('readonly', True)],
                                       'cancel': [('readonly', True)]
                                   })
    tracking = fields.Selection(related='production_id.product_id.tracking')
    time_ids = fields.One2many('mrp.workcenter.productivity', 'workorder_id')
    is_user_working = fields.Boolean(
        'Is the Current User Working',
        compute='_compute_is_user_working',
        help="Technical field indicating whether the current user is working. "
    )
    production_messages = fields.Html('Workorder Message',
                                      compute='_compute_production_messages')

    next_work_order_id = fields.Many2one('mrp.workorder', "Next Work Order")
    scrap_ids = fields.One2many('stock.scrap', 'workorder_id')
    scrap_count = fields.Integer(compute='_compute_scrap_move_count',
                                 string='Scrap Move')
    production_date = fields.Datetime(
        'Production Date',
        related='production_id.date_planned_start',
        store=True)
    color = fields.Integer('Color', compute='_compute_color')
    capacity = fields.Float(
        'Capacity',
        default=1.0,
        help="Number of pieces that can be produced in parallel.")

    @api.multi
    def name_get(self):
        return [(wo.id, "%s - %s - %s" %
                 (wo.production_id.name, wo.product_id.name, wo.name))
                for wo in self]

    @api.one
    @api.depends('production_id.product_qty', 'qty_produced')
    def _compute_is_produced(self):
        rounding = self.production_id.product_uom_id.rounding
        self.is_produced = float_compare(self.qty_produced,
                                         self.production_id.product_qty,
                                         precision_rounding=rounding) >= 0

    @api.one
    @api.depends('time_ids.duration', 'qty_produced')
    def _compute_duration(self):
        self.duration = sum(self.time_ids.mapped('duration'))
        self.duration_unit = round(self.duration / max(self.qty_produced, 1),
                                   2)  # rounding 2 because it is a time
        if self.duration_expected:
            self.duration_percent = 100 * (
                self.duration_expected -
                self.duration) / self.duration_expected
        else:
            self.duration_percent = 0

    def _compute_is_user_working(self):
        """ Checks whether the current user is working """
        for order in self:
            if order.time_ids.filtered(
                    lambda x: (x.user_id.id == self.env.user.id) and
                (not x.date_end) and (x.loss_type in
                                      ('productive', 'performance'))):
                order.is_user_working = True
            else:
                order.is_user_working = False

    @api.depends('production_id', 'workcenter_id', 'production_id.bom_id')
    def _compute_production_messages(self):
        ProductionMessage = self.env['mrp.message']
        for workorder in self:
            domain = [('valid_until', '>=', fields.Date.today()), '|',
                      ('workcenter_id', '=', False),
                      ('workcenter_id', '=', workorder.workcenter_id.id),
                      '|', '|', '|',
                      ('product_id', '=', workorder.product_id.id), '&',
                      ('product_id', '=', False),
                      ('product_tmpl_id', '=',
                       workorder.product_id.product_tmpl_id.id),
                      ('bom_id', '=', workorder.production_id.bom_id.id),
                      ('routing_id', '=',
                       workorder.operation_id.routing_id.id)]
            messages = ProductionMessage.search(domain).mapped('message')
            workorder.production_messages = "<br/>".join(messages) or False

    @api.multi
    def _compute_scrap_move_count(self):
        data = self.env['stock.scrap'].read_group(
            [('workorder_id', 'in', self.ids)], ['workorder_id'],
            ['workorder_id'])
        count_data = dict((item['workorder_id'][0], item['workorder_id_count'])
                          for item in data)
        for workorder in self:
            workorder.scrap_count = count_data.get(workorder.id, 0)

    @api.multi
    @api.depends('date_planned_finished',
                 'production_id.date_planned_finished')
    def _compute_color(self):
        late_orders = self.filtered(
            lambda x: x.production_id.date_planned_finished and x.
            date_planned_finished > x.production_id.date_planned_finished)
        for order in late_orders:
            order.color = 4
        for order in (self - late_orders):
            order.color = 2

    @api.onchange('qty_producing')
    def _onchange_qty_producing(self):
        """ Update stock.move.lot records, according to the new qty currently
        produced. """
        moves = self.move_raw_ids.filtered(
            lambda move: move.state not in
            ('done', 'cancel') and move.product_id.tracking != 'none' and move.
            product_id.id != self.production_id.product_id.id)
        for move in moves:
            move_lots = self.active_move_line_ids.filtered(
                lambda move_lot: move_lot.move_id == move)
            if not move_lots:
                continue
            rounding = move.product_uom.rounding
            new_qty = float_round(move.unit_factor * self.qty_producing,
                                  precision_rounding=rounding)
            if move.product_id.tracking == 'lot':
                move_lots[0].product_qty = new_qty
                move_lots[0].qty_done = new_qty
            elif move.product_id.tracking == 'serial':
                # Create extra pseudo record
                qty_todo = float_round(new_qty -
                                       sum(move_lots.mapped('qty_done')),
                                       precision_rounding=rounding)
                if float_compare(qty_todo, 0.0,
                                 precision_rounding=rounding) > 0:
                    while float_compare(
                            qty_todo, 0.0, precision_rounding=rounding) > 0:
                        self.active_move_line_ids += self.env['stock.move.line'].new({
                            'move_id': move.id,
                            'product_id': move.product_id.id,
                            'lot_id': False,
                            'product_uom_qty': 0.0,
                            'product_uom_id': move.product_uom.id,
                            'qty_done': min(1.0, qty_todo),
                            'workorder_id': self.id,
                            'done_wo': False,
                            'location_id': move.location_id.id,
                            'location_dest_id': move.location_dest_id.id,
                            'date': move.date,
                        })
                        qty_todo -= 1
                elif float_compare(qty_todo, 0.0,
                                   precision_rounding=rounding) < 0:
                    qty_todo = abs(qty_todo)
                    for move_lot in move_lots:
                        if float_compare(
                                qty_todo, 0, precision_rounding=rounding) <= 0:
                            break
                        if not move_lot.lot_id and float_compare(
                                qty_todo,
                                move_lot.qty_done,
                                precision_rounding=rounding) >= 0:
                            qty_todo = float_round(qty_todo -
                                                   move_lot.qty_done,
                                                   precision_rounding=rounding)
                            self.active_move_line_ids -= move_lot  # Difference operator
                        else:
                            #move_lot.product_qty = move_lot.product_qty - qty_todo
                            if float_compare(move_lot.qty_done - qty_todo,
                                             0,
                                             precision_rounding=rounding) == 1:
                                move_lot.qty_done = move_lot.qty_done - qty_todo
                            else:
                                move_lot.qty_done = 0
                            qty_todo = 0
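
    # Illustrative note (not part of the upstream source): for lot-tracked
    # components the single pending line is resized to the new quantity, while
    # for serial-tracked components one-unit pseudo lines are added or removed
    # so that each serial number can still be scanned individually.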

    @api.multi
    def write(self, values):
        if ('date_planned_start' in values or 'date_planned_finished'
                in values) and any(workorder.state == 'done'
                                   for workorder in self):
            raise UserError(_('You can not change the finished work order.'))
        return super(MrpWorkorder, self).write(values)

    def _generate_lot_ids(self):
        """ Generate stock move lines """
        self.ensure_one()
        MoveLine = self.env['stock.move.line']
        tracked_moves = self.move_raw_ids.filtered(
            lambda move: move.state not in
            ('done', 'cancel') and move.product_id.tracking != 'none' and move.
            product_id != self.production_id.product_id and move.bom_line_id)
        for move in tracked_moves:
            qty = move.unit_factor * self.qty_producing
            if move.product_id.tracking == 'serial':
                while float_compare(
                        qty, 0.0,
                        precision_rounding=move.product_uom.rounding) > 0:
                    MoveLine.create({
                        'move_id': move.id,
                        'product_uom_qty': 0,
                        'product_uom_id': move.product_uom.id,
                        'qty_done': min(1, qty),
                        'production_id': self.production_id.id,
                        'workorder_id': self.id,
                        'product_id': move.product_id.id,
                        'done_wo': False,
                        'location_id': move.location_id.id,
                        'location_dest_id': move.location_dest_id.id,
                    })
                    qty -= 1
            else:
                MoveLine.create({
                    'move_id': move.id,
                    'product_uom_qty': 0,
                    'product_uom_id': move.product_uom.id,
                    'qty_done': qty,
                    'product_id': move.product_id.id,
                    'production_id': self.production_id.id,
                    'workorder_id': self.id,
                    'done_wo': False,
                    'location_id': move.location_id.id,
                    'location_dest_id': move.location_dest_id.id,
                })

    def _assign_default_final_lot_id(self):
        self.final_lot_id = self.env['stock.production.lot'].search(
            [('use_next_on_work_order_id', '=', self.id)],
            order='create_date, id',
            limit=1)

    def _get_byproduct_move_line(self, by_product_move, quantity):
        return {
            'move_id': by_product_move.id,
            'product_id': by_product_move.product_id.id,
            'product_uom_qty': quantity,
            'product_uom_id': by_product_move.product_uom.id,
            'qty_done': quantity,
            'workorder_id': self.id,
            'location_id': by_product_move.location_id.id,
            'location_dest_id': by_product_move.location_dest_id.id,
        }

    @api.multi
    def record_production(self):
        self.ensure_one()
        if self.qty_producing <= 0:
            raise UserError(
                _('Please set the quantity you are currently producing. It should be different from zero.'
                  ))

        if (self.production_id.product_id.tracking !=
                'none') and not self.final_lot_id and self.move_raw_ids:
            raise UserError(
                _('You should provide a lot/serial number for the final product'
                  ))

        # Update quantities done on each raw material line
        # For each untracked component without any 'temporary' move lines,
        # (the new workorder tablet view allows registering consumed quantities for untracked components)
        # we assume that only the theoretical quantity was used
        for move in self.move_raw_ids:
            if move.has_tracking == 'none' and (move.state not in ('done', 'cancel')) and move.bom_line_id\
                        and move.unit_factor and not move.move_line_ids.filtered(lambda ml: not ml.done_wo):
                rounding = move.product_uom.rounding
                if self.product_id.tracking != 'none':
                    qty_to_add = float_round(self.qty_producing *
                                             move.unit_factor,
                                             precision_rounding=rounding)
                    move._generate_consumed_move_line(qty_to_add,
                                                      self.final_lot_id)
                else:
                    move.quantity_done += float_round(
                        self.qty_producing * move.unit_factor,
                        precision_rounding=rounding)

        # Transfer quantities from temporary to final move lots or make them final
        for move_line in self.active_move_line_ids:
            # Check if move_line already exists
            if move_line.qty_done <= 0:  # rounding...
                move_line.sudo().unlink()
                continue
            if move_line.product_id.tracking != 'none' and not move_line.lot_id:
                raise UserError(
                    _('You should provide a lot/serial number for a component')
                )
            # Search other move_line where it could be added:
            lots = self.move_line_ids.filtered(
                lambda x: (x.lot_id.id == move_line.lot_id.id) and
                (not x.lot_produced_id) and (not x.done_move) and
                (x.product_id == move_line.product_id))
            if lots:
                lots[0].qty_done += move_line.qty_done
                lots[0].lot_produced_id = self.final_lot_id.id
                move_line.sudo().unlink()
            else:
                move_line.lot_produced_id = self.final_lot_id.id
                move_line.done_wo = True

        # Once a piece is produced, you can launch the next work order
        if self.next_work_order_id.state == 'pending':
            self.next_work_order_id.state = 'ready'

        self.move_line_ids.filtered(
            lambda move_line: not move_line.done_move
            and not move_line.lot_produced_id
            and move_line.qty_done > 0
        ).write({
            'lot_produced_id': self.final_lot_id.id,
            'lot_produced_qty': self.qty_producing,
        })

        # If last work order, then post lots used
        # TODO: should be same as checking if for every workorder something has been done?
        if not self.next_work_order_id:
            production_move = self.production_id.move_finished_ids.filtered(
                lambda x: x.product_id.id == self.production_id.product_id.id
                and x.state not in ('done', 'cancel'))
            if production_move.product_id.tracking != 'none':
                move_line = production_move.move_line_ids.filtered(
                    lambda x: x.lot_id.id == self.final_lot_id.id)
                if move_line:
                    move_line.product_uom_qty += self.qty_producing
                else:
                    move_line.create({
                        'move_id': production_move.id,
                        'product_id': production_move.product_id.id,
                        'lot_id': self.final_lot_id.id,
                        'product_uom_qty': self.qty_producing,
                        'product_uom_id': production_move.product_uom.id,
                        'qty_done': self.qty_producing,
                        'workorder_id': self.id,
                        'location_id': production_move.location_id.id,
                        'location_dest_id': production_move.location_dest_id.id,
                    })
            else:
                production_move.quantity_done += self.qty_producing

        if not self.next_work_order_id:
            for by_product_move in self.production_id.move_finished_ids.filtered(
                    lambda x: x.product_id.id != self.production_id.product_id.id
                    and x.state not in ('done', 'cancel')):
                if by_product_move.has_tracking != 'serial':
                    values = self._get_byproduct_move_line(
                        by_product_move,
                        self.qty_producing * by_product_move.unit_factor)
                    self.env['stock.move.line'].create(values)
                elif by_product_move.has_tracking == 'serial':
                    qty_todo = by_product_move.product_uom._compute_quantity(
                        self.qty_producing * by_product_move.unit_factor,
                        by_product_move.product_id.uom_id)
                    for i in range(
                            0, int(float_round(qty_todo, precision_digits=0))):
                        values = self._get_byproduct_move_line(
                            by_product_move, 1)
                        self.env['stock.move.line'].create(values)

        # Update workorder quantity produced
        self.qty_produced += self.qty_producing

        if self.final_lot_id:
            self.final_lot_id.use_next_on_work_order_id = self.next_work_order_id
            self.final_lot_id = False

        # Set the quantity to produce for the next iteration
        rounding = self.production_id.product_uom_id.rounding
        if float_compare(self.qty_produced,
                         self.production_id.product_qty,
                         precision_rounding=rounding) >= 0:
            self.qty_producing = 0
        elif self.production_id.product_id.tracking == 'serial':
            self._assign_default_final_lot_id()
            self.qty_producing = 1.0
            self._generate_lot_ids()
        else:
            self.qty_producing = float_round(self.production_id.product_qty -
                                             self.qty_produced,
                                             precision_rounding=rounding)
            self._generate_lot_ids()

        if self.next_work_order_id and self.production_id.product_id.tracking != 'none':
            self.next_work_order_id._assign_default_final_lot_id()

        if float_compare(self.qty_produced,
                         self.production_id.product_qty,
                         precision_rounding=rounding) >= 0:
            self.button_finish()
        return True
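
    # Worked example (illustrative figures only): for a production of 5.0 units
    # where qty_producing is 2.0, record_production() adds 2.0 to qty_produced,
    # then resets qty_producing to the remainder 3.0 (or to 1.0 when the finished
    # product is serial-tracked) and regenerates the pending move lines; once
    # qty_produced reaches 5.0, qty_producing is set to 0 and button_finish()
    # is called.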

    @api.multi
    def button_start(self):
        self.ensure_one()
        # As button_start is automatically called in the new view
        if self.state in ('done', 'cancel'):
            return True

        # A productivity loss is needed in case the real time exceeds the expected time
        timeline = self.env['mrp.workcenter.productivity']
        if self.duration < self.duration_expected:
            loss_id = self.env['mrp.workcenter.productivity.loss'].search(
                [('loss_type', '=', 'productive')], limit=1)
            if not len(loss_id):
                raise UserError(
                    _("You need to define at least one productivity loss in the category 'Productivity'. Create one from the Manufacturing app, menu: Configuration / Productivity Losses."
                      ))
        else:
            loss_id = self.env['mrp.workcenter.productivity.loss'].search(
                [('loss_type', '=', 'performance')], limit=1)
            if not len(loss_id):
                raise UserError(
                    _("You need to define at least one productivity loss in the category 'Performance'. Create one from the Manufacturing app, menu: Configuration / Productivity Losses."
                      ))
        for workorder in self:
            if workorder.production_id.state != 'progress':
                workorder.production_id.write({
                    'state': 'progress',
                    'date_start': datetime.now(),
                })
            timeline.create({
                'workorder_id': workorder.id,
                'workcenter_id': workorder.workcenter_id.id,
                'description': _('Time Tracking: ') + self.env.user.name,
                'loss_id': loss_id[0].id,
                'date_start': datetime.now(),
                'user_id': self.env.user.id,
            })
        return self.write({
            'state': 'progress',
            'date_start': datetime.now(),
        })

    @api.multi
    def button_finish(self):
        self.ensure_one()
        self.end_all()
        return self.write({
            'state': 'done',
            'date_finished': fields.Datetime.now()
        })

    @api.multi
    def end_previous(self, doall=False):
        """
        @param doall: when True, close all open time lines on the given work orders;
        otherwise close only the one belonging to the current user
        """
        # TDE CLEANME
        timeline_obj = self.env['mrp.workcenter.productivity']
        domain = [('workorder_id', 'in', self.ids), ('date_end', '=', False)]
        if not doall:
            domain.append(('user_id', '=', self.env.user.id))
        not_productive_timelines = timeline_obj.browse()
        for timeline in timeline_obj.search(domain,
                                            limit=None if doall else 1):
            wo = timeline.workorder_id
            if wo.duration_expected <= wo.duration:
                if timeline.loss_type == 'productive':
                    not_productive_timelines += timeline
                timeline.write({'date_end': fields.Datetime.now()})
            else:
                maxdate = fields.Datetime.from_string(
                    timeline.date_start) + relativedelta(
                        minutes=wo.duration_expected - wo.duration)
                enddate = datetime.now()
                if maxdate > enddate:
                    timeline.write({'date_end': enddate})
                else:
                    timeline.write({'date_end': maxdate})
                    not_productive_timelines += timeline.copy({
                        'date_start': maxdate,
                        'date_end': enddate,
                    })
        if not_productive_timelines:
            loss_id = self.env['mrp.workcenter.productivity.loss'].search(
                [('loss_type', '=', 'performance')], limit=1)
            if not len(loss_id):
                raise UserError(
                    _("You need to define at least one productivity loss in the category 'Performance'. Create one from the Manufacturing app, menu: Configuration / Productivity Losses."))
            not_productive_timelines.write({'loss_id': loss_id.id})
        return True
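
    # Worked example (hypothetical values): if a time line started at 08:00 on a
    # work order with duration_expected = 60.0 and duration = 45.0 minutes, then
    # maxdate = 08:00 + relativedelta(minutes=15) = 08:15; closing it at 08:40
    # writes date_end = 08:15 and copies the remaining 08:15-08:40 span into a
    # new line, which then receives the 'performance' loss.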

    @api.multi
    def end_all(self):
        return self.end_previous(doall=True)

    @api.multi
    def button_pending(self):
        self.end_previous()
        return True

    @api.multi
    def button_unblock(self):
        for order in self:
            order.workcenter_id.unblock()
        return True

    @api.multi
    def action_cancel(self):
        return self.write({'state': 'cancel'})

    @api.multi
    def button_done(self):
        if any([x.state in ('done', 'cancel') for x in self]):
            raise UserError(
                _('A Manufacturing Order is already done or cancelled!'))
        self.end_all()
        return self.write({'state': 'done', 'date_finished': datetime.now()})

    @api.multi
    def button_scrap(self):
        self.ensure_one()
        return {
            'name': _('Scrap'),
            'view_type': 'form',
            'view_mode': 'form',
            'res_model': 'stock.scrap',
            'view_id': self.env.ref('stock.stock_scrap_form_view2').id,
            'type': 'ir.actions.act_window',
            'context': {
                'default_workorder_id': self.id,
                'default_production_id': self.production_id.id,
                'product_ids': (
                    self.production_id.move_raw_ids.filtered(
                        lambda x: x.state not in ('done', 'cancel'))
                    | self.production_id.move_finished_ids.filtered(
                        lambda x: x.state == 'done')
                ).mapped('product_id').ids,
            },
            # 'context': {'product_ids': self.move_raw_ids.filtered(lambda x: x.state not in ('done', 'cancel')).mapped('product_id').ids + [self.production_id.product_id.id]},
            'target': 'new',
        }

    @api.multi
    def action_see_move_scrap(self):
        self.ensure_one()
        action = self.env.ref('stock.action_stock_scrap').read()[0]
        action['domain'] = [('workorder_id', '=', self.id)]
        return action

    @api.multi
    @api.depends('qty_production', 'qty_produced')
    def _compute_qty_remaining(self):
        for wo in self:
            wo.qty_remaining = float_round(
                wo.qty_production - wo.qty_produced,
                precision_rounding=wo.production_id.product_uom_id.rounding)
예제 #25
0
class Import(models.TransientModel):

    _name = 'base_import.import'

    # allow imports to survive for 12h in case user is slow
    _transient_max_hours = 12.0

    res_model = fields.Char('Model')
    file = fields.Binary(
        'File', help="File to check and/or import, raw binary (not base64)")
    file_name = fields.Char('File Name')
    file_type = fields.Char('File Type')

    @api.model
    def get_fields(self, model, depth=FIELDS_RECURSION_LIMIT):
        """ Recursively get fields for the provided model (through
        fields_get) and filter them according to importability

        The output format is a list of ``Field``, with ``Field``
        defined as:

        .. class:: Field

            .. attribute:: id (str)

                A non-unique identifier for the field, used to compute
                the span of the ``required`` attribute: if multiple
                ``required`` fields have the same id, only one of them
                is necessary.

            .. attribute:: name (str)

                The field's logical (izi) name within the scope of
                its parent.

            .. attribute:: string (str)

                The field's human-readable name (``@string``)

            .. attribute:: required (bool)

                Whether the field is marked as required in the
                model. Clients must provide non-empty import values
                for all required fields or the import will error out.

            .. attribute:: fields (list(Field))

                The current field's subfields. The database and
                external identifiers for m2o and m2m fields; a
                filtered and transformed fields_get for o2m fields (to
                a variable depth defined by ``depth``).

                Fields with no sub-fields will have an empty list of
                sub-fields.

        :param str model: name of the model to get fields from
        :param int depth: depth of recursion into o2m fields
        """
        Model = self.env[model]
        importable_fields = [{
            'id': 'id',
            'name': 'id',
            'string': _("External ID"),
            'required': False,
            'fields': [],
            'type': 'id',
        }]
        model_fields = Model.fields_get()
        blacklist = models.MAGIC_COLUMNS + [Model.CONCURRENCY_CHECK_FIELD]
        for name, field in model_fields.items():
            if name in blacklist:
                continue
            # an empty string means the field is deprecated, @deprecated must
            # be absent or False to mean not-deprecated
            if field.get('deprecated', False) is not False:
                continue
            if field.get('readonly'):
                states = field.get('states')
                if not states:
                    continue
                # states = {state: [(attr, value), (attr2, value2)], state2:...}
                if not any(attr == 'readonly' and value is False
                           for attr, value in itertools.chain.from_iterable(
                               states.values())):
                    continue
            field_value = {
                'id': name,
                'name': name,
                'string': field['string'],
                # Y U NO ALWAYS HAS REQUIRED
                'required': bool(field.get('required')),
                'fields': [],
                'type': field['type'],
            }

            if field['type'] in ('many2many', 'many2one'):
                field_value['fields'] = [
                    dict(field_value,
                         name='id',
                         string=_("External ID"),
                         type='id'),
                    dict(field_value,
                         name='.id',
                         string=_("Database ID"),
                         type='id'),
                ]
            elif field['type'] == 'one2many' and depth:
                field_value['fields'] = self.get_fields(field['relation'],
                                                        depth=depth - 1)
                if self.user_has_groups('base.group_no_one'):
                    field_value['fields'].append({
                        'id': '.id',
                        'name': '.id',
                        'string': _("Database ID"),
                        'required': False,
                        'fields': [],
                        'type': 'id'
                    })

            importable_fields.append(field_value)

        # TODO: cache on model?
        return importable_fields
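
    # Usage sketch (assumes an Odoo environment ``env``, e.g. from an odoo shell;
    # the model name is only an example):
    #
    #     fields = env['base_import.import'].get_fields('res.partner')
    #     fields[0]
    #     # -> {'id': 'id', 'name': 'id', 'string': 'External ID',
    #     #     'required': False, 'fields': [], 'type': 'id'}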

    @api.multi
    def _read_file(self, options):
        """ Dispatch to specific method to read file content, according to its mimetype or file type
            :param options : dict of reading options (quoting, separator, ...)
        """
        self.ensure_one()
        # guess mimetype from file content
        mimetype = guess_mimetype(self.file)
        (file_extension, handler,
         req) = FILE_TYPE_DICT.get(mimetype, (None, None, None))
        if handler:
            try:
                return getattr(self, '_read_' + file_extension)(options)
            except Exception:
                _logger.warn(
                    "Failed to read file '%s' (transient id %d) using guessed mimetype %s",
                    self.file_name or '<unknown>', self.id, mimetype)

        # try reading with user-provided mimetype
        (file_extension, handler,
         req) = FILE_TYPE_DICT.get(self.file_type, (None, None, None))
        if handler:
            try:
                return getattr(self, '_read_' + file_extension)(options)
            except Exception:
                _logger.warn(
                    "Failed to read file '%s' (transient id %d) using user-provided mimetype %s",
                    self.file_name or '<unknown>', self.id, self.file_type)

        # fallback on file extensions as mime types can be unreliable (e.g.
        # software setting incorrect mime types, or non-installed software
        # leading to browser not sending mime types)
        if self.file_name:
            p, ext = os.path.splitext(self.file_name)
            if ext in EXTENSIONS:
                try:
                    return getattr(self, '_read_' + ext[1:])(options)
                except Exception:
                    _logger.warn(
                        "Failed to read file '%s' (transient id %s) using file extension",
                        self.file_name, self.id)

        if req:
            raise ImportError(
                _("Unable to load \"{extension}\" file: requires Python module \"{modname}\""
                  ).format(extension=file_extension, modname=req))
        raise ValueError(
            _("Unsupported file format \"{}\", import only supports CSV, ODS, XLS and XLSX"
              ).format(self.file_type))

    @api.multi
    def _read_xls(self, options):
        """ Read file content, using xlrd lib """
        book = xlrd.open_workbook(file_contents=self.file)
        return self._read_xls_book(book)

    def _read_xls_book(self, book):
        sheet = book.sheet_by_index(0)
        # emulate Sheet.get_rows for pre-0.9.4
        for row in pycompat.imap(sheet.row, range(sheet.nrows)):
            values = []
            for cell in row:
                if cell.ctype is xlrd.XL_CELL_NUMBER:
                    is_float = cell.value % 1 != 0.0
                    values.append(
                        pycompat.text_type(cell.value)
                        if is_float else pycompat.text_type(int(cell.value)))
                elif cell.ctype is xlrd.XL_CELL_DATE:
                    is_datetime = cell.value % 1 != 0.0
                    # emulate xldate_as_datetime for pre-0.9.3
                    dt = datetime.datetime(*xlrd.xldate.xldate_as_tuple(
                        cell.value, book.datemode))
                    values.append(
                        dt.strftime(DEFAULT_SERVER_DATETIME_FORMAT)
                        if is_datetime
                        else dt.strftime(DEFAULT_SERVER_DATE_FORMAT))
                elif cell.ctype is xlrd.XL_CELL_BOOLEAN:
                    values.append(u'True' if cell.value else u'False')
                elif cell.ctype is xlrd.XL_CELL_ERROR:
                    raise ValueError(
                        _("Error cell found while reading XLS/XLSX file: %s") %
                        xlrd.error_text_from_code.get(
                            cell.value, "unknown error code %s" % cell.value))
                else:
                    values.append(cell.value)
            if any(x for x in values if x.strip()):
                yield values

    # use the same method for xlsx and xls files
    _read_xlsx = _read_xls

    @api.multi
    def _read_ods(self, options):
        """ Read file content using ODSReader custom lib """
        doc = odf_ods_reader.ODSReader(file=io.BytesIO(self.file))

        return (row for row in doc.getFirstSheet()
                if any(x for x in row if x.strip()))

    @api.multi
    def _read_csv(self, options):
        """ Returns a CSV-parsed iterator of all non-empty lines in the file
            :throws csv.Error: if an error is detected during CSV parsing
            :throws UnicodeDecodeError: if ``options.encoding`` is incorrect
        """
        csv_data = self.file

        # TODO: guess encoding with chardet? Or https://github.com/aadsm/jschardet
        encoding = options.get('encoding', 'utf-8')
        if encoding != 'utf-8':
            # the csv module expects utf-8, see http://docs.python.org/2/library/csv.html
            csv_data = csv_data.decode(encoding).encode('utf-8')

        csv_iterator = pycompat.csv_reader(io.BytesIO(csv_data),
                                           quotechar=str(options['quoting']),
                                           delimiter=str(options['separator']))

        return (row for row in csv_iterator
                if any(x for x in row if x.strip()))
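
    # Usage sketch (illustrative option values): ``options`` is the dict passed
    # from the import screen, so a typical call looks like
    #
    #     rows = wizard._read_csv({'quoting': '"', 'separator': ',',
    #                              'encoding': 'utf-8'})
    #     next(rows)  # -> first non-empty row as a list of strings
    #
    # where ``wizard`` is a base_import.import record whose ``file`` field holds
    # the raw CSV bytes.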

    @api.model
    def _try_match_column(self, preview_values, options):
        """ Returns the potential field types, based on the preview values, using heuristics
            :param preview_values : list of value for the column to determine
            :param options : parsing options
        """
        # If all values are empty in the preview, the column can be any field
        if all([v == '' for v in preview_values]):
            return ['all']
        # If all values start with __export__, this is probably an id
        if all(v.startswith('__export__') for v in preview_values):
            return ['id', 'many2many', 'many2one', 'one2many']
        # If all values can be cast to int type is either id, float or monetary
        # Exception: if we only have 1 and 0, it can also be a boolean
        try:
            field_type = [
                'id', 'integer', 'char', 'float', 'monetary', 'many2one',
                'many2many', 'one2many'
            ]
            res = set(int(v) for v in preview_values if v)
            if {0, 1}.issuperset(res):
                field_type.append('boolean')
            return field_type
        except ValueError:
            pass
        # If all values are either True or False, type is boolean
        if all(val.lower() in ('true', 'false', 't', 'f', '')
               for val in preview_values):
            return ['boolean']
        # If all values can be cast to float, type is either float or monetary
        # Or a date/datetime if it matches the pattern
        results = []
        try:
            thousand_separator = decimal_separator = False
            for val in preview_values:
                val = val.strip()
                if not val:
                    continue
                # the value might have the currency symbol to the left or right of it
                val = self._remove_currency_symbol(val)
                if val:
                    if options.get('float_thousand_separator') and options.get(
                            'float_decimal_separator'):
                        val = val.replace(
                            options['float_thousand_separator'],
                            '').replace(options['float_decimal_separator'],
                                        '.')
                    # We are now sure that this is a float, but we still need to find the
                    # thousand and decimal separator
                    else:
                        if val.count('.') > 1:
                            options['float_thousand_separator'] = '.'
                            options['float_decimal_separator'] = ','
                        elif val.count(',') > 1:
                            options['float_thousand_separator'] = ','
                            options['float_decimal_separator'] = '.'
                        elif val.find('.') > val.find(','):
                            thousand_separator = ','
                            decimal_separator = '.'
                        elif val.find(',') > val.find('.'):
                            thousand_separator = '.'
                            decimal_separator = ','
                else:
                    # This is not a float so exit this try
                    float('a')
            if thousand_separator and not options.get(
                    'float_decimal_separator'):
                options['float_thousand_separator'] = thousand_separator
                options['float_decimal_separator'] = decimal_separator
            results = ['float', 'monetary']
        except ValueError:
            pass
        # Try to see if all values are a date or datetime
        dt = datetime.datetime
        separator = [' ', '/', '-']
        date_format = ['%mr%dr%Y', '%dr%mr%Y', '%Yr%mr%d', '%Yr%dr%m']
        date_patterns = ([options['date_format']]
                         if options.get('date_format') else [])
        if not date_patterns:
            date_patterns = [
                pattern.replace('r', sep) for sep in separator
                for pattern in date_format
            ]
            date_patterns.extend([p.replace('Y', 'y') for p in date_patterns])
        datetime_patterns = ([options['datetime_format']]
                             if options.get('datetime_format') else [])
        if not datetime_patterns:
            datetime_patterns = [
                pattern + ' %H:%M:%S' for pattern in date_patterns
            ]

        current_date_pattern = False
        current_datetime_pattern = False

        def check_patterns(patterns, preview_values):
            for pattern in patterns:
                match = True
                for val in preview_values:
                    if not val:
                        continue
                    try:
                        dt.strptime(val, pattern)
                    except ValueError:
                        match = False
                        break
                if match:
                    return pattern
            return False

        current_date_pattern = check_patterns(date_patterns, preview_values)
        if current_date_pattern:
            options['date_format'] = current_date_pattern
            results += ['date']

        current_datetime_pattern = check_patterns(datetime_patterns,
                                                  preview_values)
        if current_datetime_pattern:
            options['datetime_format'] = current_datetime_pattern
            results += ['datetime']

        if results:
            return results
        return [
            'id', 'text', 'char', 'datetime', 'selection', 'many2one',
            'one2many', 'many2many', 'html'
        ]
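
    # Illustrative outcomes of the heuristics above (hypothetical preview data,
    # called with an empty options dict):
    #
    #     self._try_match_column(['1', '0', ''], {})
    #     # -> ['id', 'integer', 'char', 'float', 'monetary', 'many2one',
    #     #     'many2many', 'one2many', 'boolean']  (values are only 0/1)
    #     self._try_match_column(['12.5', '3.0'], {})
    #     # -> ['float', 'monetary']
    #     self._try_match_column(['2017-01-31'], {})
    #     # -> ['date'] (matches the generated '%Y-%m-%d' pattern)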

    @api.model
    def _find_type_from_preview(self, options, preview):
        type_fields = []
        if preview:
            for column in range(0, len(preview[0])):
                preview_values = [value[column].strip() for value in preview]
                type_field = self._try_match_column(preview_values, options)
                type_fields.append(type_field)
        return type_fields

    def _match_header(self, header, fields, options):
        """ Attempts to match a given header to a field of the
            imported model.

            :param str header: header name from the CSV file
            :param fields:
            :param dict options:
            :returns: an empty list if the header couldn't be matched, or
                      all the fields to traverse
            :rtype: list(Field)
        """
        string_match = None
        for field in fields:
            # FIXME: should match all translations & original
            # TODO: use string distance (levenshtein? hamming?)
            if header.lower() == field['name'].lower():
                return [field]
            if header.lower() == field['string'].lower():
                # matching on the string is not reliable because
                # strings have no unique constraint
                string_match = field
        if string_match:
            # this behavior is only applied if there is no matching field['name']
            return [string_match]

        if '/' not in header:
            return []

        # relational field path
        traversal = []
        subfields = fields
        # Iteratively dive into fields tree
        for section in header.split('/'):
            # Strip section in case spaces are added around '/' for
            # readability of paths
            match = self._match_header(section.strip(), subfields, options)
            # Any match failure, exit
            if not match:
                return []
            # prep subfields for next iteration within match[0]
            field = match[0]
            subfields = field['fields']
            traversal.append(field)
        return traversal
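
    # Illustrative behaviour (hypothetical header and field names): given the
    # field tree returned by get_fields(), a header such as
    # 'partner_id/country_id/id' is split on '/' and matched level by level,
    # returning the list of traversed Field dicts, while an unknown header
    # returns [].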

    def _match_headers(self, rows, fields, options):
        """ Attempts to match the imported model's fields to the
            titles of the parsed CSV file, if the file is supposed to have
            headers.

            Will consume the first line of the ``rows`` iterator.

            Returns a pair of ([], {}) if headers were not requested,
            otherwise the list of headers and a dict mapping cell indices
            to key paths in the ``fields`` tree

            :param Iterator rows:
            :param dict fields:
            :param dict options:
            :rtype: (list(str), dict(int: list(str)))
        """
        if not options.get('headers'):
            return [], {}

        headers = next(rows)
        return headers, {
            index: [
                field['name']
                for field in self._match_header(header, fields, options)
            ] or None
            for index, header in enumerate(headers)
        }

    @api.multi
    def parse_preview(self, options, count=10):
        """ Generates a preview of the uploaded file, and performs
            fields-matching between the import's file data and the model's
            columns.

            If the headers are not requested (not options.headers),
            ``matches`` and ``headers`` are both ``False``.

            :param int count: number of preview lines to generate
            :param options: format-specific options.
                            CSV: {encoding, quoting, separator, headers}
            :type options: {str, str, str, bool}
            :returns: {fields, matches, headers, preview} | {error, preview}
            :rtype: {dict(str: dict(...)), dict(int, list(str)), list(str), list(list(str))} | {str, str}
        """
        self.ensure_one()
        fields = self.get_fields(self.res_model)
        try:
            rows = self._read_file(options)
            headers, matches = self._match_headers(rows, fields, options)
            # Matching should have consumed the first row (if headers), get
            # the ``count`` next rows for preview
            preview = list(itertools.islice(rows, count))
            assert preview, "CSV file seems to have no content"
            header_types = self._find_type_from_preview(options, preview)
            if options.get('keep_matches', False) and len(
                    options.get('fields', [])):
                matches = {}
                for index, match in enumerate(options.get('fields')):
                    if match:
                        matches[index] = match.split('/')

            return {
                'fields': fields,
                'matches': matches or False,
                'headers': headers or False,
                'headers_type': header_types or False,
                'preview': preview,
                'options': options,
                'debug': self.user_has_groups('base.group_no_one'),
            }
        except Exception as error:
            # Due to lazy generators, UnicodeDecodeError (for
            # instance) may only be raised when serializing the
            # preview to a list in the return.
            _logger.debug("Error during parsing preview", exc_info=True)
            preview = None
            if self.file_type == 'text/csv':
                preview = self.file[:ERROR_PREVIEW_BYTES].decode('iso-8859-1')
            return {
                'error': str(error),
                # iso-8859-1 ensures decoding will always succeed,
                # even if it yields non-printable characters. This is
                # in case of UnicodeDecodeError (or csv.Error
                # compounded with UnicodeDecodeError)
                'preview': preview,
            }
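
    # Usage sketch (assumes an Odoo environment ``env`` and example values; the
    # model name and ``csv_bytes`` variable are placeholders):
    #
    #     wizard = env['base_import.import'].create({
    #         'res_model': 'res.partner',
    #         'file': csv_bytes,            # raw file content, not base64
    #         'file_type': 'text/csv',
    #     })
    #     result = wizard.parse_preview({'quoting': '"', 'separator': ',',
    #                                    'headers': True})
    #     # result contains 'fields', 'matches', 'headers', 'headers_type',
    #     # 'preview', 'options' and 'debug', or 'error'/'preview' on failure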

    @api.model
    def _convert_import_data(self, fields, options):
        """ Extracts the input BaseModel and fields list (with
            ``False``-y placeholders for fields to *not* import) into a
            format Model.import_data can use: a fields list without holes
            and the precisely matching data matrix

            :param list(str|bool) fields:
            :returns: (data, fields)
            :rtype: (list(list(str)), list(str))
            :raises ValueError: in case the import data could not be converted
        """
        # Get indices for non-empty fields
        indices = [index for index, field in enumerate(fields) if field]
        if not indices:
            raise ValueError(
                _("You must configure at least one field to import"))
        # If only one index, itemgetter will return an atom rather
        # than a 1-tuple
        if len(indices) == 1:
            mapper = lambda row: [row[indices[0]]]
        else:
            mapper = operator.itemgetter(*indices)
        # Get only list of actually imported fields
        import_fields = [f for f in fields if f]

        rows_to_import = self._read_file(options)
        if options.get('headers'):
            rows_to_import = itertools.islice(rows_to_import, 1, None)
        data = [
            list(row) for row in pycompat.imap(mapper, rows_to_import)
            # don't try inserting completely empty rows (e.g. from
            # filtering out o2m fields)
            if any(row)
        ]

        return data, import_fields

    @api.model
    def _remove_currency_symbol(self, value):
        value = value.strip()
        negative = False
        # Careful: some countries use () for negative amounts, so replace them with a - sign
        if value.startswith('(') and value.endswith(')'):
            value = value[1:-1]
            negative = True
        float_regex = re.compile(r'([-]?[0-9.,]+)')
        split_value = [g for g in float_regex.split(value) if g]
        if len(split_value) > 2:
            # This is probably not a float
            return False
        if len(split_value) == 1:
            if float_regex.search(split_value[0]) is not None:
                return split_value[0] if not negative else '-' + split_value[0]
            return False
        else:
            # String has been split in 2, locate which index contains the float and which does not
            currency_index = 0
            if float_regex.search(split_value[0]) is not None:
                currency_index = 1
            # Check that currency exists
            currency = self.env['res.currency'].search([
                ('symbol', '=', split_value[currency_index].strip())
            ])
            if len(currency):
                amount = split_value[(currency_index + 1) % 2]
                return amount if not negative else '-' + amount
            # Otherwise it is not a float with a currency symbol
            return False
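
    # Illustrative results (the last case depends on a res.currency record with
    # the matching symbol existing in the database):
    #
    #     self._remove_currency_symbol('(100.00)')    # -> '-100.00'
    #     self._remove_currency_symbol('10-20-30')    # -> False (not a float)
    #     self._remove_currency_symbol('$ 1,234.56')  # -> '1,234.56' if a
    #                                                 #    currency with symbol
    #                                                 #    '$' exists, else False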

    @api.model
    def _parse_float_from_data(self, data, index, name, options):
        thousand_separator = options.get('float_thousand_separator', ' ')
        decimal_separator = options.get('float_decimal_separator', '.')
        for line in data:
            line[index] = line[index].strip()
            if not line[index]:
                continue
            line[index] = line[index].replace(thousand_separator, '').replace(
                decimal_separator, '.')
            old_value = line[index]
            line[index] = self._remove_currency_symbol(line[index])
            if line[index] is False:
                raise ValueError(
                    _("Column %s contains incorrect values (value: %s)" %
                      (name, old_value)))

    @api.multi
    def _parse_import_data(self, data, import_fields, options):
        """ Launch the first call to _parse_import_data_recursive with an
        empty prefix. _parse_import_data_recursive will be run
        recursively for each relational field.
        """
        return self._parse_import_data_recursive(self.res_model, '', data,
                                                 import_fields, options)

    @api.multi
    def _parse_import_data_recursive(self, model, prefix, data, import_fields,
                                     options):
        # Get fields of type date/datetime
        all_fields = self.env[model].fields_get()
        for name, field in all_fields.items():
            name = prefix + name
            if field['type'] in ('date', 'datetime') and name in import_fields:
                # Parse date
                index = import_fields.index(name)
                dt = datetime.datetime
                server_format = (DEFAULT_SERVER_DATE_FORMAT
                                 if field['type'] == 'date'
                                 else DEFAULT_SERVER_DATETIME_FORMAT)

                if options.get('%s_format' % field['type'],
                               server_format) != server_format:
                    # datetime.str[fp]time takes *native strings* in both
                    # versions, for both data and pattern
                    user_format = pycompat.to_native(
                        options.get('%s_format' % field['type']))
                    for num, line in enumerate(data):
                        if line[index]:
                            line[index] = line[index].strip()
                        if line[index]:
                            try:
                                line[index] = dt.strftime(
                                    dt.strptime(
                                        pycompat.to_native(line[index]),
                                        user_format), server_format)
                            except ValueError as e:
                                raise ValueError(
                                    _("Column %s contains incorrect values. Error in line %d: %s"
                                      ) % (name, num + 1, e))
                            except Exception as e:
                                raise ValueError(
                                    _("Error Parsing Date [%s:L%d]: %s") %
                                    (name, num + 1, e))
            # Check if the field is in import_field and is a relational (followed by /)
            # Also verify that the field name exactly match the import_field at the correct level.
            elif any(name + '/' in import_field
                     and name == import_field.split('/')[prefix.count('/')]
                     for import_field in import_fields):
                # Recursive call with the relational as new model and add the field name to the prefix
                self._parse_import_data_recursive(field['relation'],
                                                  name + '/', data,
                                                  import_fields, options)
            elif field['type'] in ('float',
                                   'monetary') and name in import_fields:
                # Parse floats: values from the file sometimes carry a currency symbol
                # or use () to denote a negative value. We should handle both cases.
                index = import_fields.index(name)
                self._parse_float_from_data(data, index, name, options)
        return data

    @api.multi
    def do(self, fields, options, dryrun=False):
        """ Actual execution of the import

        :param fields: import mapping: maps each column to a field,
                       ``False`` for the columns to ignore
        :type fields: list(str|bool)
        :param dict options:
        :param bool dryrun: performs all import operations (and
                            validations) but rolls back the writes,
                            allowing to get as many errors as possible
                            without the risk of clobbering the database.
        :returns: A list of errors. If the list is empty the import
                  executed fully and correctly. If the list is
                  non-empty it contains dicts with 3 keys ``type`` the
                  type of error (``error|warning``); ``message`` the
                  error message associated with the error (a string)
                  and ``record`` the data which failed to import (or
                  ``false`` if that data isn't available or provided)
        :rtype: list({type, message, record})
        """
        self.ensure_one()
        self._cr.execute('SAVEPOINT import')

        try:
            data, import_fields = self._convert_import_data(fields, options)
            # Parse date and float field
            data = self._parse_import_data(data, import_fields, options)
        except ValueError as error:
            return [{
                'type': 'error',
                'message': pycompat.text_type(error),
                'record': False,
            }]

        _logger.info('importing %d rows...', len(data))

        model = self.env[self.res_model].with_context(import_file=True)
        defer_parent_store = self.env.context.get(
            'defer_parent_store_computation', True)
        if defer_parent_store and model._parent_store:
            model = model.with_context(defer_parent_store_computation=True)

        import_result = model.load(import_fields, data)
        _logger.info('done')

        # If transaction aborted, RELEASE SAVEPOINT is going to raise
        # an InternalError (ROLLBACK should work, maybe). Ignore that.
        # TODO: to handle multiple errors, create savepoint around
        #       write and release it in case of write error (after
        #       adding error to errors array) => can keep on trying to
        #       import stuff, and rollback at the end if there is any
        #       error in the results.
        try:
            if dryrun:
                self._cr.execute('ROLLBACK TO SAVEPOINT import')
            else:
                self._cr.execute('RELEASE SAVEPOINT import')
        except psycopg2.InternalError:
            pass

        return import_result['messages']
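
    # Usage sketch (illustrative column mapping, continuing the parse_preview
    # example above): ``fields`` mirrors the file columns, with False for the
    # columns to skip.
    #
    #     messages = wizard.do(['name', False, 'email'],
    #                          {'quoting': '"', 'separator': ',',
    #                           'headers': True},
    #                          dryrun=True)
    #     # -> [] on success, otherwise a list of
    #     #    {'type': ..., 'message': ..., 'record': ...} dicts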
예제 #26
0
class ImLivechatChannel(models.Model):
    """ Livechat Channel
        Define a communication channel, which can be accessed with 'script_external' (script tag to put on an
        external website), 'script_internal' (code to be integrated with the izi website) or via the 'web_page' link.
        It provides rating tools, and access rules for anonymous people.
    """

    _name = 'im_livechat.channel'
    _description = 'Livechat Channel'

    def _default_image(self):
        image_path = modules.get_module_resource('im_livechat',
                                                 'static/src/img',
                                                 'default.png')
        return tools.image_resize_image_big(
            base64.b64encode(open(image_path, 'rb').read()))

    def _default_user_ids(self):
        return [(6, 0, [self._uid])]

    # attribute fields
    name = fields.Char('Name', required=True, help="The name of the channel")
    button_text = fields.Char(
        'Text of the Button',
        default='Have a Question? Chat with us.',
        help="Default text displayed on the Livechat Support Button")
    default_message = fields.Char(
        'Welcome Message',
        default='How may I help you?',
        help=
        "This is an automated 'welcome' message that your visitor will see when they initiate a new conversation."
    )
    input_placeholder = fields.Char('Chat Input Placeholder')

    # computed fields
    web_page = fields.Char(
        'Web Page',
        compute='_compute_web_page_link',
        store=False,
        readonly=True,
        help="URL to a static page where your client can discuss with the operator of the channel.")
    are_you_inside = fields.Boolean(string='Are you inside the matrix?',
                                    compute='_are_you_inside',
                                    store=False,
                                    readonly=True)
    script_external = fields.Text('Script (external)',
                                  compute='_compute_script_external',
                                  store=False,
                                  readonly=True)
    nbr_channel = fields.Integer('Number of Conversations',
                                 compute='_compute_nbr_channel',
                                 store=False,
                                 readonly=True)
    rating_percentage_satisfaction = fields.Integer(
        '% Happy',
        compute='_compute_percentage_satisfaction',
        store=False,
        default=-1,
        help="Percentage of happy ratings over the past 7 days")

    # images fields
    image = fields.Binary(
        'Image',
        default=_default_image,
        attachment=True,
        help=
        "This field holds the image used as photo for the group, limited to 1024x1024px."
    )
    image_medium = fields.Binary('Medium', attachment=True,
        help="Medium-sized photo of the group. It is automatically "\
             "resized as a 128x128px image, with aspect ratio preserved. "\
             "Use this field in form views or some kanban views.")
    image_small = fields.Binary('Thumbnail', attachment=True,
        help="Small-sized photo of the group. It is automatically "\
             "resized as a 64x64px image, with aspect ratio preserved. "\
             "Use this field anywhere a small image is required.")

    # relational fields
    user_ids = fields.Many2many('res.users',
                                'im_livechat_channel_im_user',
                                'channel_id',
                                'user_id',
                                string='Operators',
                                default=_default_user_ids)
    channel_ids = fields.One2many('mail.channel', 'livechat_channel_id',
                                  'Sessions')
    rule_ids = fields.One2many('im_livechat.channel.rule', 'channel_id',
                               'Rules')

    @api.one
    def _are_you_inside(self):
        self.are_you_inside = bool(
            self.env.uid in [u.id for u in self.user_ids])

    @api.multi
    def _compute_script_external(self):
        view = self.env['ir.model.data'].get_object('im_livechat',
                                                    'external_loader')
        values = {
            "url": self.env['ir.config_parameter'].sudo().get_param('web.base.url'),
            "dbname": self._cr.dbname,
        }
        for record in self:
            values["channel_id"] = record.id
            record.script_external = view.render(values)

    @api.multi
    def _compute_web_page_link(self):
        base_url = self.env['ir.config_parameter'].sudo().get_param(
            'web.base.url')
        for record in self:
            record.web_page = "%s/im_livechat/support/%i" % (base_url,
                                                             record.id)

    @api.multi
    @api.depends('channel_ids')
    def _compute_nbr_channel(self):
        for record in self:
            record.nbr_channel = len(record.channel_ids)

    @api.multi
    @api.depends('channel_ids.rating_ids')
    def _compute_percentage_satisfaction(self):
        for record in self:
            dt = fields.Datetime.to_string(datetime.utcnow() -
                                           timedelta(days=7))
            repartition = record.channel_ids.rating_get_grades([('create_date',
                                                                 '>=', dt)])
            total = sum(repartition.values())
            if total > 0:
                happy = repartition['great']
                record.rating_percentage_satisfaction = (
                    (happy * 100) / total) if happy > 0 else 0
            else:
                record.rating_percentage_satisfaction = -1
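
    # Worked example (hypothetical figures): with a 7-day repartition such as
    # {'great': 8, 'okay': 1, 'bad': 1}, total = 10 and the field is set to
    # (8 * 100) / 10 = 80; with no rating at all it stays at -1.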

    @api.model
    def create(self, vals):
        tools.image_resize_images(vals)
        return super(ImLivechatChannel, self).create(vals)

    @api.multi
    def write(self, vals):
        tools.image_resize_images(vals)
        return super(ImLivechatChannel, self).write(vals)

    # --------------------------
    # Action Methods
    # --------------------------
    @api.multi
    def action_join(self):
        self.ensure_one()
        return self.write({'user_ids': [(4, self._uid)]})

    @api.multi
    def action_quit(self):
        self.ensure_one()
        return self.write({'user_ids': [(3, self._uid)]})

    @api.multi
    def action_view_rating(self):
        """ Action to display the ratings related to the channel, i.e. all
            ratings of the sessions of the current channel
            :returns: the ir.action 'action_view_rating' with the correct domain
        """
        self.ensure_one()
        action = self.env['ir.actions.act_window'].for_xml_id(
            'im_livechat', 'rating_rating_action_view_livechat_rating')
        action['domain'] = [('parent_res_id', '=', self.id),
                            ('parent_res_model', '=', 'im_livechat.channel')]
        return action

    # --------------------------
    # Channel Methods
    # --------------------------
    @api.multi
    def get_available_users(self):
        """ Get the available users of a given channel
            :returns: the res.users whose im_status is 'online'
        """
        self.ensure_one()
        return self.sudo().user_ids.filtered(
            lambda user: user.im_status == 'online')

    @api.model
    def get_mail_channel(self, livechat_channel_id, anonymous_name):
        """ Return a mail.channel given a livechat channel. It creates one with a connected operator, or returns False otherwise.
            :param livechat_channel_id : the identifier of the im_livechat.channel
            :param anonymous_name : the name of the anonymous person of the channel
            :type livechat_channel_id : int
            :type anonymous_name : str
            :return : channel header
            :rtype : dict
        """
        # get the available users of the channel
        users = self.sudo().browse(livechat_channel_id).get_available_users()
        if len(users) == 0:
            return False
        # choose the res.users operator and get its partner id
        user = random.choice(users)
        operator_partner_id = user.partner_id.id
        # partner to add to the mail.channel
        channel_partner_to_add = [(4, operator_partner_id)]
        if self.env.user and self.env.user.active:  # valid session user (not public)
            channel_partner_to_add.append((4, self.env.user.partner_id.id))
        # create the session, and add the link with the given channel
        mail_channel = self.env["mail.channel"].with_context(
            mail_create_nosubscribe=False).sudo().create({
                'channel_partner_ids': channel_partner_to_add,
                'livechat_channel_id': livechat_channel_id,
                'anonymous_name': anonymous_name,
                'channel_type': 'livechat',
                'name': ', '.join([anonymous_name, user.name]),
                'public': 'private',
                'email_send': False,
            })
        return mail_channel.sudo().with_context(
            im_livechat_operator_partner_id=operator_partner_id
        ).channel_info()[0]

    @api.model
    def get_channel_infos(self, channel_id):
        channel = self.browse(channel_id)
        return {
            'button_text': channel.button_text,
            'input_placeholder': channel.input_placeholder,
            'default_message': channel.default_message,
            "channel_name": channel.name,
            "channel_id": channel.id,
        }

    @api.model
    def get_livechat_info(self, channel_id, username='******'):
        info = {}
        info['available'] = len(
            self.browse(channel_id).get_available_users()) > 0
        info['server_url'] = self.env['ir.config_parameter'].sudo().get_param(
            'web.base.url')
        if info['available']:
            info['options'] = self.sudo().get_channel_infos(channel_id)
            info['options']["default_username"] = username
        return info
예제 #27
0
class BlogPost(models.Model):
    _name = "blog.post"
    _description = "Blog Post"
    _inherit = [
        'mail.thread', 'website.seo.metadata', 'website.published.mixin'
    ]
    _order = 'id DESC'
    _mail_post_access = 'read'

    @api.multi
    def _compute_website_url(self):
        super(BlogPost, self)._compute_website_url()
        for blog_post in self:
            blog_post.website_url = "/blog/%s/post/%s" % (
                slug(blog_post.blog_id), slug(blog_post))

    @api.multi
    @api.depends('post_date', 'visits')
    def _compute_ranking(self):
        res = {}
        for blog_post in self:
            if blog_post.id:  # avoid ranking a post not yet saved (and so without post_date) in case of an onchange
                age = datetime.now() - fields.Datetime.from_string(
                    blog_post.post_date)
                res[blog_post.id] = blog_post.visits * (
                    0.5 + random.random()) / max(3, age.days)
        return res
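
The formula above amounts to: visits, scaled by a random factor in [0.5, 1.5), and damped by the post age in days (never dividing by less than 3). A standalone pure-Python sketch of the same shape, outside Odoo:

import random
from datetime import datetime

def blog_ranking(visits, post_date, now=None):
    # same shape as _compute_ranking above: popularity / age, with jitter
    now = now or datetime.now()
    age_days = (now - post_date).days
    return visits * (0.5 + random.random()) / max(3, age_days)

print(blog_ranking(120, datetime(2018, 1, 1)))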

    def _default_content(self):
        return '''
            <section class="s_text_block">
                <div class="container">
                    <div class="row">
                        <div class="col-md-12 mb16 mt16">
                            <p class="o_default_snippet_text">''' + _(
            "Start writing here...") + '''</p>
                        </div>
                    </div>
                </div>
            </section>
        '''

    name = fields.Char('Title', required=True, translate=True, default='')
    subtitle = fields.Char('Sub Title', translate=True)
    author_id = fields.Many2one('res.partner',
                                'Author',
                                default=lambda self: self.env.user.partner_id)
    active = fields.Boolean('Active', default=True)
    cover_properties = fields.Text(
        'Cover Properties',
        default=
        '{"background-image": "none", "background-color": "oe_black", "opacity": "0.2", "resize_class": ""}'
    )
    blog_id = fields.Many2one('blog.blog',
                              'Blog',
                              required=True,
                              ondelete='cascade')
    tag_ids = fields.Many2many('blog.tag', string='Tags')
    content = fields.Html('Content',
                          default=_default_content,
                          translate=html_translate,
                          sanitize=False)
    teaser = fields.Text('Teaser',
                         compute='_compute_teaser',
                         inverse='_set_teaser')
    teaser_manual = fields.Text(string='Teaser Content')

    website_message_ids = fields.One2many(
        domain=lambda self: [('model', '=', self._name),
                             ('message_type', '=', 'comment'),
                             ('path', '=', False)])

    # creation / update stuff
    create_date = fields.Datetime('Created on', index=True, readonly=True)
    published_date = fields.Datetime('Published Date')
    post_date = fields.Datetime(
        'Publishing date',
        compute='_compute_post_date',
        inverse='_set_post_date',
        store=True,
        help=
        "The blog post will be visible for your visitors as of this date on the website if it is set as published."
    )
    create_uid = fields.Many2one('res.users',
                                 'Created by',
                                 index=True,
                                 readonly=True)
    write_date = fields.Datetime('Last Modified on', index=True, readonly=True)
    write_uid = fields.Many2one('res.users',
                                'Last Contributor',
                                index=True,
                                readonly=True)
    author_avatar = fields.Binary(related='author_id.image_small',
                                  string="Avatar")
    visits = fields.Integer('No of Views', copy=False)
    ranking = fields.Float(compute='_compute_ranking', string='Ranking')

    @api.multi
    @api.depends('content', 'teaser_manual')
    def _compute_teaser(self):
        for blog_post in self:
            if blog_post.teaser_manual:
                blog_post.teaser = blog_post.teaser_manual
            else:
                content = html2plaintext(blog_post.content).replace('\n', ' ')
                blog_post.teaser = content[:150] + '...'

    @api.multi
    def _set_teaser(self):
        for blog_post in self:
            blog_post.teaser_manual = blog_post.teaser
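
A sketch of how the compute/inverse pair behaves in practice; it assumes an odoo shell session where env is available and at least one blog.post record exists.

post = env['blog.post'].search([], limit=1)
post.write({'teaser': "Hand-written teaser"})  # writing the computed field runs _set_teaser()
print(post.teaser_manual)                      # "Hand-written teaser"
print(post.teaser)                             # _compute_teaser now prefers the manual text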

    @api.multi
    @api.depends('create_date', 'published_date')
    def _compute_post_date(self):
        for blog_post in self:
            if blog_post.published_date:
                blog_post.post_date = blog_post.published_date
            else:
                blog_post.post_date = blog_post.create_date

    @api.multi
    def _set_post_date(self):
        for blog_post in self:
            blog_post.published_date = blog_post.post_date
            if not blog_post.published_date:
                blog_post._write({'post_date': blog_post.create_date})  # don't trigger the inverse function

    def _check_for_publication(self, vals):
        if vals.get('website_published'):
            for post in self:
                post.blog_id.message_post_with_view(
                    'website_blog.blog_post_template_new_post',
                    subject=post.name,
                    values={'post': post},
                    subtype_id=self.env['ir.model.data'].xmlid_to_res_id(
                        'website_blog.mt_blog_blog_published'))
            return True
        return False

    @api.model
    def create(self, vals):
        post_id = super(BlogPost,
                        self.with_context(mail_create_nolog=True)).create(vals)
        post_id._check_for_publication(vals)
        return post_id

    @api.multi
    def write(self, vals):
        result = True
        for post in self:
            copy_vals = dict(vals)
            if 'website_published' in vals and 'published_date' not in vals and (
                    post.published_date or '') <= fields.Datetime.now():
                copy_vals['published_date'] = vals[
                    'website_published'] and fields.Datetime.now() or False
            result &= super(BlogPost, self).write(copy_vals)
        self._check_for_publication(vals)
        return result
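
A sketch of what this write() override does when a post gets published; it assumes an odoo shell session and an existing unpublished blog.post.

draft = env['blog.post'].search([('website_published', '=', False)], limit=1)
draft.write({'website_published': True})
# 'published_date' was not in the vals, so the override filled it with the current
# time, and _check_for_publication() posted the "new post" message on the blog.
print(draft.published_date)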

    @api.multi
    def get_access_action(self, access_uid=None):
        """ Instead of the classic form view, redirect to the post on website
        directly if user is an employee or if the post is published. """
        self.ensure_one()
        user = access_uid and self.env['res.users'].sudo().browse(
            access_uid) or self.env.user
        if user.share and not self.sudo().website_published:
            return super(BlogPost, self).get_access_action(access_uid)
        return {
            'type': 'ir.actions.act_url',
            'url': self.url,
            'target': 'self',
            'target_type': 'public',
            'res_id': self.id,
        }

    @api.multi
    def _notification_recipients(self, message, groups):
        groups = super(BlogPost,
                       self)._notification_recipients(message, groups)

        for group_name, group_method, group_data in groups:
            group_data['has_button_access'] = True

        return groups

    @api.multi
    def message_get_message_notify_values(self, message, message_values):
        """ Override to avoid keeping all notified recipients of a comment.
        We avoid tracking needaction on post comments. Only emails should be
        sufficient. """
        if message.message_type == 'comment':
            return {
                'needaction_partner_ids': [],
            }
        return {}
Example #28
class Company(models.Model):
    _name = "res.company"
    _description = 'Companies'
    _order = 'sequence, name'

    @api.multi
    def copy(self, default=None):
        raise UserError(_('Duplicating a company is not allowed. Please create a new company instead.'))

    def _get_logo(self):
        return base64.b64encode(open(os.path.join(tools.config['root_path'], 'addons', 'base', 'res', 'res_company_logo.png'), 'rb').read())

    @api.model
    def _get_euro(self):
        return self.env['res.currency.rate'].search([('rate', '=', 1)], limit=1).currency_id

    @api.model
    def _get_user_currency(self):
        currency_id = self.env['res.users'].browse(self._uid).company_id.currency_id
        return currency_id or self._get_euro()

    name = fields.Char(related='partner_id.name', string='Company Name', required=True, store=True)
    sequence = fields.Integer(help='Used to order Companies in the company switcher', default=10)
    parent_id = fields.Many2one('res.company', string='Parent Company', index=True)
    child_ids = fields.One2many('res.company', 'parent_id', string='Child Companies')
    partner_id = fields.Many2one('res.partner', string='Partner', required=True)
    report_header = fields.Text(string='Company Tagline', help="Appears by default on the top right corner of your printed documents (report header).")
    report_footer = fields.Text(string='Report Footer', translate=True, help="Footer text displayed at the bottom of all reports.")
    logo = fields.Binary(related='partner_id.image', default=_get_logo, string="Company Logo")
    # logo_web: do not store in attachments, since the image is retrieved in SQL for
    # performance reasons (see addons/web/controllers/main.py, Binary.company_logo)
    logo_web = fields.Binary(compute='_compute_logo_web', store=True)
    currency_id = fields.Many2one('res.currency', string='Currency', required=True, default=lambda self: self._get_user_currency())
    user_ids = fields.Many2many('res.users', 'res_company_users_rel', 'cid', 'user_id', string='Accepted Users')
    account_no = fields.Char(string='Account No.')
    street = fields.Char(compute='_compute_address', inverse='_inverse_street')
    street2 = fields.Char(compute='_compute_address', inverse='_inverse_street2')
    zip = fields.Char(compute='_compute_address', inverse='_inverse_zip')
    city = fields.Char(compute='_compute_address', inverse='_inverse_city')
    state_id = fields.Many2one('res.country.state', compute='_compute_address', inverse='_inverse_state', string="Fed. State")
    bank_ids = fields.One2many('res.partner.bank', 'company_id', string='Bank Accounts', help='Bank accounts related to this company')
    country_id = fields.Many2one('res.country', compute='_compute_address', inverse='_inverse_country', string="Country")
    email = fields.Char(related='partner_id.email', store=True)
    phone = fields.Char(related='partner_id.phone', store=True)
    website = fields.Char(related='partner_id.website')
    vat = fields.Char(related='partner_id.vat', string="TIN")
    company_registry = fields.Char()
    paperformat_id = fields.Many2one('report.paperformat', 'Paper format', default=lambda self: self.env.ref('base.paperformat_euro', raise_if_not_found=False))
    external_report_layout = fields.Selection([
        ('background', 'Background'),
        ('boxed', 'Boxed'),
        ('clean', 'Clean'),
        ('standard', 'Standard'),
    ], string='Document Template')

    _sql_constraints = [
        ('name_uniq', 'unique (name)', 'The company name must be unique !')
    ]

    @api.model_cr
    def init(self):
        for company in self.search([('paperformat_id', '=', False)]):
            paperformat_euro = self.env.ref('base.paperformat_euro', False)
            if paperformat_euro:
                company.write({'paperformat_id': paperformat_euro.id})
        sup = super(Company, self)
        if hasattr(sup, 'init'):
            sup.init()

    def _get_company_address_fields(self, partner):
        return {
            'street'     : partner.street,
            'street2'    : partner.street2,
            'city'       : partner.city,
            'zip'        : partner.zip,
            'state_id'   : partner.state_id,
            'country_id' : partner.country_id,
        }

    # TODO @api.depends(): currently no way to formulate the dependency on the
    # partner's contact address
    def _compute_address(self):
        for company in self.filtered(lambda company: company.partner_id):
            address_data = company.partner_id.sudo().address_get(adr_pref=['contact'])
            if address_data['contact']:
                partner = company.partner_id.browse(address_data['contact']).sudo()
                company.update(company._get_company_address_fields(partner))

    def _inverse_street(self):
        for company in self:
            company.partner_id.street = company.street

    def _inverse_street2(self):
        for company in self:
            company.partner_id.street2 = company.street2

    def _inverse_zip(self):
        for company in self:
            company.partner_id.zip = company.zip

    def _inverse_city(self):
        for company in self:
            company.partner_id.city = company.city

    def _inverse_state(self):
        for company in self:
            company.partner_id.state_id = company.state_id

    def _inverse_country(self):
        for company in self:
            company.partner_id.country_id = company.country_id
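
The address fields above are thin proxies over the company's partner; a sketch of the round trip, assuming an odoo shell session.

company = env.user.company_id
company.street = '42 Example Street'   # _inverse_street() writes through to the partner
print(company.partner_id.street)       # '42 Example Street'
print(company.city)                    # _compute_address() reads the partner's address back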

    @api.depends('partner_id', 'partner_id.image')
    def _compute_logo_web(self):
        for company in self:
            company.logo_web = tools.image_resize_image(company.partner_id.image, (180, None))

    @api.onchange('state_id')
    def _onchange_state(self):
        self.country_id = self.state_id.country_id

    @api.multi
    def on_change_country(self, country_id):
        # This function is called from account/models/chart_template.py, hence decorated with `multi`.
        self.ensure_one()
        currency_id = self._get_user_currency()
        if country_id:
            currency_id = self.env['res.country'].browse(country_id).currency_id
        return {'value': {'currency_id': currency_id.id}}

    @api.onchange('country_id')
    def _onchange_country_id_wrapper(self):
        res = {'domain': {'state_id': []}}
        if self.country_id:
            res['domain']['state_id'] = [('country_id', '=', self.country_id.id)]
        values = self.on_change_country(self.country_id.id)['value']
        for fname, value in values.items():
            setattr(self, fname, value)
        return res

    @api.model
    def name_search(self, name='', args=None, operator='ilike', limit=100):
        context = dict(self.env.context)
        newself = self
        if context.pop('user_preference', None):
            # We browse as superuser. Otherwise, the user would only be able to
            # select the currently visible companies (according to record rules,
            # which probably only expose the child companies), even if
            # they belong to some other companies.
            companies = self.env.user.company_id + self.env.user.company_ids
            args = (args or []) + [('id', 'in', companies.ids)]
            newself = newself.sudo()
        return super(Company, newself.with_context(context)).name_search(name=name, args=args, operator=operator, limit=limit)
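
A sketch of the user_preference switch, assuming an odoo shell session.

Company = env['res.company']
print(Company.name_search(''))  # restricted by the record rules of the current user
# With the flag set, the search runs as superuser but is limited to the companies
# the user may actually switch to (company_id + company_ids).
print(Company.with_context(user_preference=True).name_search(''))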

    @api.model
    @api.returns('self', lambda value: value.id)
    def _company_default_get(self, object=False, field=False):
        """ Returns the default company (usually the user's company).
        The 'object' and 'field' arguments are ignored but left here for
        backward compatibility and potential override.
        """
        return self.env['res.users']._get_company()

    @api.model
    @tools.ormcache('self.env.uid', 'company')
    def _get_company_children(self, company=None):
        if not company:
            return []
        return self.search([('parent_id', 'child_of', [company])]).ids
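
A sketch of how the ormcache above interacts with the cache clearing done in create() and write() further down; it assumes an odoo shell session, and the company name is made up.

Company = env['res.company']
root = env.user.company_id
before = Company._get_company_children(root.id)   # result is now cached per (uid, company)
Company.create({'name': 'Cached Subsidiary (demo)', 'parent_id': root.id})
after = Company._get_company_children(root.id)    # create() called clear_caches()
print(len(after) == len(before) + 1)              # True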

    @api.multi
    def _get_partner_hierarchy(self):
        self.ensure_one()
        parent = self.parent_id
        if parent:
            return parent._get_partner_hierarchy()
        else:
            return self._get_partner_descendance([])

    @api.multi
    def _get_partner_descendance(self, descendance):
        self.ensure_one()
        descendance.append(self.partner_id.id)
        for child_id in self._get_company_children(self.id):
            if child_id != self.id:
                descendance = self.browse(child_id)._get_partner_descendance(descendance)
        return descendance

    # deprecated, use clear_caches() instead
    def cache_restart(self):
        self.clear_caches()

    @api.model
    def create(self, vals):
        if not vals.get('name') or vals.get('partner_id'):
            self.clear_caches()
            return super(Company, self).create(vals)
        partner = self.env['res.partner'].create({
            'name': vals['name'],
            'is_company': True,
            'image': vals.get('logo'),
            'customer': False,
            'email': vals.get('email'),
            'phone': vals.get('phone'),
            'website': vals.get('website'),
            'vat': vals.get('vat'),
        })
        vals['partner_id'] = partner.id
        self.clear_caches()
        company = super(Company, self).create(vals)
        partner.write({'company_id': company.id})
        return company
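
A sketch of the partner auto-creation performed by create(); it assumes an odoo shell session, and the company name is made up.

company = env['res.company'].create({'name': 'Acme Widgets (demo)'})
print(company.partner_id.name)                   # 'Acme Widgets (demo)'
print(company.partner_id.is_company)             # True
print(company.partner_id.company_id == company)  # True, linked back after creation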

    @api.multi
    def write(self, values):
        self.clear_caches()
        return super(Company, self).write(values)

    @api.constrains('parent_id')
    def _check_parent_id(self):
        if not self._check_recursion():
            raise ValidationError(_('Error ! You cannot create recursive companies.'))

    @api.multi
    def open_company_edit_report(self):
        self.ensure_one()
        return self.env['res.config.settings'].open_company()

    @api.multi
    def write_company_and_print_report(self, values):
        res = self.write(values)

        report_name = values.get('default_report_name')
        active_ids = values.get('active_ids')
        active_model = values.get('active_model')
        if report_name and active_ids and active_model:
            docids = self.env[active_model].browse(active_ids)
            return (self.env['ir.actions.report'].search([('report_name', '=', report_name)], limit=1)
                        .with_context(values)
                        .report_action(docids))
        else:
            return res
Example #29
File: badge.py Project: vituocgia/izi
class GamificationBadge(models.Model):
    """Badge object that users can send and receive"""

    CAN_GRANT = 1
    NOBODY_CAN_GRANT = 2
    USER_NOT_VIP = 3
    BADGE_REQUIRED = 4
    TOO_MANY = 5

    _name = 'gamification.badge'
    _description = 'Gamification badge'
    _inherit = ['mail.thread']

    name = fields.Char('Badge', required=True, translate=True)
    active = fields.Boolean('Active', default=True)
    description = fields.Text('Description', translate=True)
    image = fields.Binary(
        "Image",
        attachment=True,
        help="This field holds the image used for the badge, limited to 256x256"
    )

    rule_auth = fields.Selection([
        ('everyone', 'Everyone'),
        ('users', 'A selected list of users'),
        ('having', 'People having some badges'),
        ('nobody', 'No one, assigned through challenges'),
    ],
                                 default='everyone',
                                 string="Allowance to Grant",
                                 help="Who can grant this badge",
                                 required=True)
    rule_auth_user_ids = fields.Many2many(
        'res.users',
        'rel_badge_auth_users',
        string='Authorized Users',
        help="Only these people can give this badge")
    rule_auth_badge_ids = fields.Many2many(
        'gamification.badge',
        'gamification_badge_rule_badge_rel',
        'badge1_id',
        'badge2_id',
        string='Required Badges',
        help="Only the people having these badges can give this badge")

    rule_max = fields.Boolean(
        'Monthly Limited Sending',
        help="Check to set a monthly limit per person of sending this badge")
    rule_max_number = fields.Integer(
        'Limitation Number',
        help=
        "The maximum number of times this badge can be sent per month per person."
    )
    challenge_ids = fields.One2many('gamification.challenge',
                                    'reward_id',
                                    string="Reward of Challenges")

    goal_definition_ids = fields.Many2many(
        'gamification.goal.definition',
        'badge_unlocked_definition_rel',
        string='Rewarded by',
        help=
        "The users that have succeeded at these goals will automatically receive the badge."
    )

    owner_ids = fields.One2many(
        'gamification.badge.user',
        'badge_id',
        string='Owners',
        help='The list of instances of this badge granted to users')

    stat_count = fields.Integer(
        "Total",
        compute='_get_owners_info',
        help="The number of times this badge has been received.")
    stat_count_distinct = fields.Integer(
        "Number of users",
        compute='_get_owners_info',
        help="The number of times this badge has been received by unique users."
    )
    unique_owner_ids = fields.Many2many(
        'res.users',
        string="Unique Owners",
        compute='_get_owners_info',
        help="The list of unique users having received this badge.")

    stat_this_month = fields.Integer(
        "Monthly total",
        compute='_get_badge_user_stats',
        help="The number of times this badge has been received this month.")
    stat_my = fields.Integer(
        "My Total",
        compute='_get_badge_user_stats',
        help="The number of times the current user has received this badge.")
    stat_my_this_month = fields.Integer(
        "My Monthly Total",
        compute='_get_badge_user_stats',
        help=
        "The number of times the current user has received this badge this month."
    )
    stat_my_monthly_sending = fields.Integer(
        'My Monthly Sending Total',
        compute='_get_badge_user_stats',
        help=
        "The number of times the current user has sent this badge this month.")

    remaining_sending = fields.Integer("Remaining Sending Allowed",
                                       compute='_remaining_sending_calc',
                                       help="If a maximum is set")

    @api.depends('owner_ids')
    def _get_owners_info(self):
        """Return:
            the list of unique res.users ids having received this badge
            the total number of time this badge was granted
            the total number of users this badge was granted to
        """
        self.env.cr.execute(
            """
            SELECT badge_id, count(user_id) as stat_count,
                count(distinct(user_id)) as stat_count_distinct,
                array_agg(distinct(user_id)) as unique_owner_ids
            FROM gamification_badge_user
            WHERE badge_id in %s
            GROUP BY badge_id
            """, [tuple(self.ids)])

        defaults = {
            'stat_count': 0,
            'stat_count_distinct': 0,
            'unique_owner_ids': [],
        }
        mapping = {
            badge_id: {
                'stat_count': count,
                'stat_count_distinct': distinct_count,
                'unique_owner_ids': owner_ids,
            }
            for (badge_id, count, distinct_count,
                 owner_ids) in self.env.cr._obj
        }
        for badge in self:
            badge.update(mapping.get(badge.id, defaults))
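
Reading the computed statistics, as a sketch; it assumes an odoo shell session and at least one gamification.badge record.

badge = env['gamification.badge'].search([], limit=1)
print(badge.stat_count)                       # total number of grants
print(badge.stat_count_distinct)              # number of distinct receivers
print(badge.unique_owner_ids.mapped('name'))  # the receivers themselves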

    @api.depends('owner_ids.badge_id', 'owner_ids.create_date',
                 'owner_ids.user_id')
    def _get_badge_user_stats(self):
        """Return stats related to badge users"""
        first_month_day = fields.Date.to_string(date.today().replace(day=1))

        for badge in self:
            owners = badge.owner_ids
            badge.stat_my = sum(o.user_id == self.env.user for o in owners)
            badge.stat_this_month = sum(o.create_date >= first_month_day
                                        for o in owners)
            badge.stat_my_this_month = sum(
                o.user_id == self.env.user and o.create_date >= first_month_day
                for o in owners)
            badge.stat_my_monthly_sending = sum(
                o.create_uid == self.env.user
                and o.create_date >= first_month_day for o in owners)

    @api.depends(
        'rule_auth',
        'rule_auth_user_ids',
        'rule_auth_badge_ids',
        'rule_max',
        'rule_max_number',
        'stat_my_monthly_sending',
    )
    def _remaining_sending_calc(self):
        """Computes the number of badges remaining the user can send

        0 if not allowed or no remaining
        integer if limited sending
        -1 if infinite (should not be displayed)
        """
        for badge in self:
            if badge._can_grant_badge() != self.CAN_GRANT:
                # if the user cannot grant this badge at all, result is 0
                badge.remaining_sending = 0
            elif not badge.rule_max:
                # if there is no limitation, -1 is returned which means 'infinite'
                badge.remaining_sending = -1
            else:
                badge.remaining_sending = badge.rule_max_number - badge.stat_my_monthly_sending

    def check_granting(self):
        """Check that the current user can grant this badge and raise the
        appropriate exception if not.

        The check always passes for the superuser.
        """
        status_code = self._can_grant_badge()
        if status_code == self.CAN_GRANT:
            return True
        elif status_code == self.NOBODY_CAN_GRANT:
            raise exceptions.UserError(
                _('This badge can not be sent by users.'))
        elif status_code == self.USER_NOT_VIP:
            raise exceptions.UserError(
                _('You are not in the user allowed list.'))
        elif status_code == self.BADGE_REQUIRED:
            raise exceptions.UserError(
                _('You do not have the required badges.'))
        elif status_code == self.TOO_MANY:
            raise exceptions.UserError(
                _('You have already sent this badge too many times this month.')
            )
        else:
            _logger.error("Unknown badge status code: %s" % status_code)
        return False

    def _can_grant_badge(self):
        """Check if the current user can grant this badge to another user

        :return: integer status code representing the permission (one of
            CAN_GRANT, NOBODY_CAN_GRANT, USER_NOT_VIP, BADGE_REQUIRED,
            TOO_MANY)
        """
        if self.env.user._is_admin():
            return self.CAN_GRANT

        if self.rule_auth == 'nobody':
            return self.NOBODY_CAN_GRANT
        elif self.rule_auth == 'users' and self.env.user not in self.rule_auth_user_ids:
            return self.USER_NOT_VIP
        elif self.rule_auth == 'having':
            all_user_badges = self.env['gamification.badge.user'].search([
                ('user_id', '=', self.env.uid)
            ])
            if self.rule_auth_badge_ids - all_user_badges:
                return self.BADGE_REQUIRED

        if self.rule_max and self.stat_my_monthly_sending >= self.rule_max_number:
            return self.TOO_MANY

        # badge.rule_auth == 'everyone' -> no check
        return self.CAN_GRANT
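
How a caller (for instance a grant wizard) might use these helpers; a sketch assuming an odoo shell session and at least one badge.

from odoo.exceptions import UserError

badge = env['gamification.badge'].search([], limit=1)
try:
    badge.check_granting()                               # raises UserError when not allowed
    print('remaining sends:', badge.remaining_sending)   # -1 means unlimited
except UserError as err:
    print('cannot grant this badge:', err)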
Example #30
class Employee(models.Model):
    _name = "hr.employee"
    _description = "Employee"
    _order = 'name'
    _inherit = ['mail.thread', 'resource.mixin']

    _mail_post_access = 'read'

    @api.model
    def _default_image(self):
        image_path = get_module_resource('hr', 'static/src/img',
                                         'default_image.png')
        return tools.image_resize_image_big(
            base64.b64encode(open(image_path, 'rb').read()))

    # resource and user
    # required on the resource, make sure required="True" is set in the view
    name = fields.Char(related='resource_id.name',
                       store=True,
                       oldname='name_related')
    user_id = fields.Many2one('res.users',
                              'User',
                              related='resource_id.user_id')
    active = fields.Boolean('Active',
                            related='resource_id.active',
                            default=True,
                            store=True)
    # private partner
    address_home_id = fields.Many2one(
        'res.partner',
        'Private Address',
        help=
        'Enter here the private address of the employee, not the one linked to your company.',
        groups="hr.group_hr_user")
    is_address_home_a_company = fields.Boolean(
        'The employee address has a company linked',
        compute='_compute_is_address_home_a_company',
    )
    country_id = fields.Many2one('res.country',
                                 'Nationality (Country)',
                                 groups="hr.group_hr_user")
    gender = fields.Selection([('male', 'Male'), ('female', 'Female'),
                               ('other', 'Other')],
                              groups="hr.group_hr_user",
                              default="male")
    marital = fields.Selection([('single', 'Single'), ('married', 'Married'),
                                ('cohabitant', 'Legal Cohabitant'),
                                ('widower', 'Widower'),
                                ('divorced', 'Divorced')],
                               string='Marital Status',
                               groups="hr.group_hr_user",
                               default='single')
    birthday = fields.Date('Date of Birth', groups="hr.group_hr_user")
    ssnid = fields.Char('SSN No',
                        help='Social Security Number',
                        groups="hr.group_hr_user")
    sinid = fields.Char('SIN No',
                        help='Social Insurance Number',
                        groups="hr.group_hr_user")
    identification_id = fields.Char(string='Identification No',
                                    groups="hr.group_hr_user")
    passport_id = fields.Char('Passport No', groups="hr.group_hr_user")
    bank_account_id = fields.Many2one(
        'res.partner.bank',
        'Bank Account Number',
        domain="[('partner_id', '=', address_home_id)]",
        groups="hr.group_hr_user",
        help='Employee bank salary account')
    permit_no = fields.Char('Work Permit No', groups="hr.group_hr_user")
    visa_no = fields.Char('Visa No', groups="hr.group_hr_user")
    visa_expire = fields.Date('Visa Expire Date', groups="hr.group_hr_user")

    # image: all image fields are base64 encoded and PIL-supported
    image = fields.Binary(
        "Photo",
        default=_default_image,
        attachment=True,
        help=
        "This field holds the image used as photo for the employee, limited to 1024x1024px."
    )
    image_medium = fields.Binary(
        "Medium-sized photo",
        attachment=True,
        help="Medium-sized photo of the employee. It is automatically "
        "resized as a 128x128px image, with aspect ratio preserved. "
        "Use this field in form views or some kanban views.")
    image_small = fields.Binary(
        "Small-sized photo",
        attachment=True,
        help="Small-sized photo of the employee. It is automatically "
        "resized as a 64x64px image, with aspect ratio preserved. "
        "Use this field anywhere a small image is required.")
    # work
    address_id = fields.Many2one('res.partner', 'Work Address')
    work_phone = fields.Char('Work Phone')
    mobile_phone = fields.Char('Work Mobile')
    work_email = fields.Char('Work Email')
    work_location = fields.Char('Work Location')
    # employee in company
    job_id = fields.Many2one('hr.job', 'Job Position')
    department_id = fields.Many2one('hr.department', 'Department')
    parent_id = fields.Many2one('hr.employee', 'Manager')
    child_ids = fields.One2many('hr.employee',
                                'parent_id',
                                string='Subordinates')
    coach_id = fields.Many2one('hr.employee', 'Coach')
    category_ids = fields.Many2many('hr.employee.category',
                                    'employee_category_rel',
                                    'emp_id',
                                    'category_id',
                                    string='Tags')
    # misc
    notes = fields.Text('Notes')
    color = fields.Integer('Color Index', default=0)

    @api.constrains('parent_id')
    def _check_parent_id(self):
        for employee in self:
            if not employee._check_recursion():
                raise ValidationError(
                    _('Error! You cannot create recursive hierarchy of Employee(s).'
                      ))

    @api.onchange('address_id')
    def _onchange_address(self):
        self.work_phone = self.address_id.phone
        self.mobile_phone = self.address_id.mobile

    @api.onchange('company_id')
    def _onchange_company(self):
        address = self.company_id.partner_id.address_get(['default'])
        self.address_id = address['default'] if address else False

    @api.onchange('department_id')
    def _onchange_department(self):
        self.parent_id = self.department_id.manager_id

    @api.onchange('user_id')
    def _onchange_user(self):
        if self.user_id:
            self.update(self._sync_user(self.user_id))

    def _sync_user(self, user):
        return dict(
            name=user.name,
            image=user.image,
            work_email=user.email,
        )

    @api.model
    def create(self, vals):
        if vals.get('user_id'):
            vals.update(
                self._sync_user(self.env['res.users'].browse(vals['user_id'])))
        tools.image_resize_images(vals)
        return super(Employee, self).create(vals)
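
A sketch of the user synchronisation done in create(); it assumes an odoo shell session. The name passed in is deliberately overwritten by _sync_user().

user = env.user
employee = env['hr.employee'].create({'name': 'placeholder', 'user_id': user.id})
print(employee.name == user.name)          # True, copied from the user
print(employee.work_email == user.email)   # True, copied from the user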

    @api.multi
    def write(self, vals):
        if 'address_home_id' in vals:
            account_id = vals.get('bank_account_id') or self.bank_account_id.id
            if account_id:
                self.env['res.partner.bank'].browse(
                    account_id).partner_id = vals['address_home_id']
        tools.image_resize_images(vals)
        return super(Employee, self).write(vals)

    @api.multi
    def unlink(self):
        resources = self.mapped('resource_id')
        super(Employee, self).unlink()
        return resources.unlink()

    @api.multi
    def action_follow(self):
        """ Wrapper because message_subscribe_users take a user_ids=None
            that receive the context without the wrapper.
        """
        return self.message_subscribe_users()

    @api.multi
    def action_unfollow(self):
        """ Wrapper because message_unsubscribe_users take a user_ids=None
            that receive the context without the wrapper.
        """
        return self.message_unsubscribe_users()

    @api.model
    def _message_get_auto_subscribe_fields(self,
                                           updated_fields,
                                           auto_follow_fields=None):
        """ Overwrite of the original method to always follow user_id field,
            even when not track_visibility so that a user will follow it's employee
        """
        if auto_follow_fields is None:
            auto_follow_fields = ['user_id']
        user_field_lst = []
        for name, field in self._fields.items():
            if name in auto_follow_fields and name in updated_fields and field.comodel_name == 'res.users':
                user_field_lst.append(name)
        return user_field_lst

    @api.multi
    def _message_auto_subscribe_notify(self, partner_ids):
        # Do not notify the user that they have been marked as a follower of their own employee record.
        return

    @api.depends('address_home_id.parent_id')
    def _compute_is_address_home_a_company(self):
        """Checks that choosen address (res.partner) is not linked to a company.
        """
        for employee in self:
            try:
                employee.is_address_home_a_company = employee.address_home_id.parent_id.id is not False
            except AccessError:
                employee.is_address_home_a_company = False