class FleetVehicleModelBrand(models.Model):
    """Vehicle make (brand), carrying the brand logo in three stored sizes."""
    _name = 'fleet.vehicle.model.brand'
    _description = 'Brand of the vehicle'
    _order = 'name asc'

    # Brand name, e.g. "Audi".
    name = fields.Char('Make', required=True)
    image = fields.Binary(
        "Logo", attachment=True,
        help="This field holds the image used as logo for the brand, limited to 1024x1024px.")
    image_medium = fields.Binary(
        "Medium-sized image", attachment=True,
        help="Medium-sized logo of the brand. It is automatically "
             "resized as a 128x128px image, with aspect ratio preserved. "
             "Use this field in form views or some kanban views.")
    image_small = fields.Binary(
        "Small-sized image", attachment=True,
        help="Small-sized logo of the brand. It is automatically "
             "resized as a 64x64px image, with aspect ratio preserved. "
             "Use this field anywhere a small image is required.")

    @api.model
    def create(self, vals):
        # Derive the resized variants (medium/small) in-place in `vals`
        # before the record is stored.
        tools.image_resize_images(vals)
        return super(FleetVehicleModelBrand, self).create(vals)

    @api.multi
    def write(self, vals):
        # Keep the resized variants in sync whenever the logo is rewritten.
        tools.image_resize_images(vals)
        return super(FleetVehicleModelBrand, self).write(vals)
class IrActionsActClient(models.Model):
    """Client-side action: an arbitrary tag interpreted by the web client."""
    _name = 'ir.actions.client'
    _inherit = 'ir.actions.actions'
    _table = 'ir_act_client'
    _sequence = 'ir_actions_id_seq'
    _order = 'name'

    name = fields.Char(string='Action Name', translate=True)
    type = fields.Char(default='ir.actions.client')
    tag = fields.Char(string='Client action tag', required=True,
                      help="An arbitrary string, interpreted by the client"
                           " according to its own needs and wishes. There "
                           "is no central tag repository across clients.")
    target = fields.Selection([('current', 'Current Window'),
                               ('new', 'New Window'),
                               ('fullscreen', 'Full Screen'),
                               ('main', 'Main action of Current Window')],
                              default="current", string='Target Window')
    res_model = fields.Char(
        string='Destination Model',
        help="Optional model, mostly used for needactions.")
    context = fields.Char(
        string='Context Value', default="{}", required=True,
        help="Context dictionary as Python expression, empty by default (Default: {})")
    # `params` is the user-facing value; it is persisted via `params_store`
    # (dicts stored as their repr) and re-evaluated on read.
    params = fields.Binary(compute='_compute_params',
                           inverse='_inverse_params',
                           string='Supplementary arguments',
                           help="Arguments sent to the client along with "
                                "the view tag")
    params_store = fields.Binary(string='Params storage', readonly=True)

    @api.depends('params_store')
    def _compute_params(self):
        # Re-read with bin_size disabled so we get the real stored payload
        # instead of a human-readable size placeholder.
        self_bin = self.with_context(bin_size=False, bin_size_params_store=False)
        for record, record_bin in pycompat.izip(self, self_bin):
            # Evaluate the stored expression (sandboxed safe_eval), exposing
            # the current uid to the evaluated code.
            record.params = record_bin.params_store and safe_eval(
                record_bin.params_store, {'uid': self._uid})

    def _inverse_params(self):
        # Dicts are stored as their repr(); any other payload is stored verbatim.
        for record in self:
            params = record.params
            record.params_store = repr(params) if isinstance(params, dict) else params
class BaseLanguageImport(models.TransientModel):
    """Wizard that imports a translation file (.csv/.po/.pot) as a language."""
    _name = "base.language.import"
    _description = "Language Import"

    name = fields.Char('Language Name', required=True)
    code = fields.Char('ISO Code', size=5, required=True,
                       help="ISO Language and Country code, e.g. en_US")
    data = fields.Binary('File', required=True)
    filename = fields.Char('File Name', required=True)
    overwrite = fields.Boolean(
        'Overwrite Existing Terms',
        help="If you enable this option, existing translations (including custom ones) "
             "will be overwritten and replaced by those in this file")

    @api.multi
    def import_lang(self):
        """Decode the uploaded file and feed it to the translation loader.

        Raises a UserError (wrapping the original exception) when the file
        cannot be parsed in the format implied by its extension.
        """
        wizard = self[0].with_context(overwrite=self[0].overwrite)
        with TemporaryFile('wb+') as upload:
            try:
                upload.write(base64.decodestring(wizard.data))
                # Rewind: the loader reads the buffer from the start.
                upload.seek(0)
                # The loader dispatches on the lowercase file extension.
                extension = os.path.splitext(wizard.filename)[-1][1:].lower()
                tools.trans_load_data(wizard._cr, upload, extension, wizard.code,
                                      lang_name=wizard.name,
                                      context=wizard._context)
            except Exception as e:
                _logger.exception('File unsuccessfully imported, due to format mismatch.')
                raise UserError(
                    _('File not imported due to format mismatch or a malformed file. (Valid formats are .csv, .po, .pot)\n\nTechnical Details:\n%s')
                    % tools.ustr(e))
        return True
class test_model(models.Model):
    """Test model exposing one field of every basic type for converter tests."""
    _name = 'test_converter.test_model'

    char = fields.Char()
    integer = fields.Integer()
    float = fields.Float()
    numeric = fields.Float(digits=(16, 2))
    many2one = fields.Many2one('test_converter.test_model.sub',
                               group_expand='_gbf_m2o')
    binary = fields.Binary()
    date = fields.Date()
    datetime = fields.Datetime()
    selection = fields.Selection([
        (1, "réponse A"),
        (2, "réponse B"),
        (3, "réponse C"),
        (4, "réponse <D>"),
    ])
    selection_str = fields.Selection([
        ('A', u"Qu'il n'est pas arrivé à Toronto"),
        ('B', u"Qu'il était supposé arriver à Toronto"),
        ('C', u"Qu'est-ce qu'il fout ce maudit pancake, tabernacle ?"),
        ('D', u"La réponse D"),
    ], string=u"Lorsqu'un pancake prend l'avion à destination de Toronto et "
              u"qu'il fait une escale technique à St Claude, on dit:")
    html = fields.Html()
    text = fields.Text()

    # `base` module does not contains any model that implement the functionality
    # `group_expand`; test this feature here...
    @api.model
    def _gbf_m2o(self, subs, domain, order):
        # Expand the group-by to all existing sub records; record rules are
        # bypassed via SUPERUSER_ID so every group shows up.
        sub_ids = subs._search([], order=order, access_rights_uid=SUPERUSER_ID)
        return subs.browse(sub_ids)
class ConverterTest(models.Model): _name = 'web_editor.converter.test' # disable translation export for those brilliant field labels and values _translate = False char = fields.Char() integer = fields.Integer() float = fields.Float() numeric = fields.Float(digits=(16, 2)) many2one = fields.Many2one('web_editor.converter.test.sub') binary = fields.Binary() date = fields.Date() datetime = fields.Datetime() selection = fields.Selection([ (1, "réponse A"), (2, "réponse B"), (3, "réponse C"), (4, "réponse <D>"), ]) selection_str = fields.Selection( [ ('A', "Qu'il n'est pas arrivé à Toronto"), ('B', "Qu'il était supposé arriver à Toronto"), ('C', "Qu'est-ce qu'il fout ce maudit pancake, tabernacle ?"), ('D', "La réponse D"), ], string=u"Lorsqu'un pancake prend l'avion à destination de Toronto et " u"qu'il fait une escale technique à St Claude, on dit:") html = fields.Html() text = fields.Text()
class BaseLanguageExport(models.TransientModel):
    """Wizard exporting translations of selected modules to CSV/PO/TGZ."""
    _name = "base.language.export"

    @api.model
    def _get_languages(self):
        """Selection values: the 'new language' template entry followed by
        every translatable installed language."""
        langs = self.env['res.lang'].search([('translatable', '=', True)])
        return [(NEW_LANG_KEY, _('New Language (Empty translation template)'))] + \
               [(lang.code, lang.name) for lang in langs]

    name = fields.Char('File Name', readonly=True)
    lang = fields.Selection(_get_languages, string='Language', required=True,
                            default=NEW_LANG_KEY)
    format = fields.Selection([('csv', 'CSV File'), ('po', 'PO File'),
                               ('tgz', 'TGZ Archive')],
                              string='File Format', required=True, default='csv')
    modules = fields.Many2many('ir.module.module', 'rel_modules_langexport',
                               'wiz_id', 'module_id', string='Apps To Export',
                               domain=[('state', '=', 'installed')])
    data = fields.Binary('File', readonly=True)
    state = fields.Selection(
        [('choose', 'choose'), ('get', 'get')],  # choose language or get the file
        default='choose')

    @api.multi
    def act_getfile(self):
        """Generate the export file, store it on the wizard and re-open it
        in 'get' state so the user can download the result."""
        wizard = self[0]
        export_lang = False if wizard.lang == NEW_LANG_KEY else wizard.lang
        module_names = sorted(wizard.mapped('modules.name')) or ['all']
        with contextlib.closing(io.BytesIO()) as stream:
            tools.trans_export(export_lang, module_names, stream, wizard.format, self._cr)
            payload = base64.encodestring(stream.getvalue())
        # File name preference: language ISO code, then single module name,
        # falling back to 'new'.
        if export_lang:
            base_name = tools.get_iso_codes(export_lang)
        elif len(module_names) == 1:
            base_name = module_names[0]
        else:
            base_name = 'new'
        extension = wizard.format
        if extension == 'po' and not export_lang:
            # A language-less PO export is a template file.
            extension = 'pot'
        wizard.write({
            'state': 'get',
            'data': payload,
            'name': "%s.%s" % (base_name, extension),
        })
        return {
            'type': 'ir.actions.act_window',
            'res_model': 'base.language.export',
            'view_mode': 'form',
            'view_type': 'form',
            'res_id': wizard.id,
            'views': [(False, 'form')],
            'target': 'new',
        }
class ProductImage(models.Model):
    """Extra image attached to a product template."""
    _name = 'product.image'

    name = fields.Char('Name')
    image = fields.Binary('Image', attachment=True)
    # NOTE(review): copy=True looks redundant here (Many2one fields are
    # normally copied by default) — confirm before removing.
    product_tmpl_id = fields.Many2one('product.template', 'Related Product', copy=True)
class RestaurantFloor(models.Model):
    """Floor of a restaurant, grouping the tables shown in the PoS interface."""
    _name = 'restaurant.floor'

    name = fields.Char('Floor Name', required=True,
                       help='An internal identification of the restaurant floor')
    pos_config_id = fields.Many2one('pos.config', string='Point of Sale')
    background_image = fields.Binary(
        'Background Image', attachment=True,
        help='A background image used to display a floor layout in the point of sale interface')
    background_color = fields.Char(
        'Background Color',
        help='The background color of the floor layout, (must be specified in a html-compatible format)',
        default='rgb(210, 210, 210)')
    table_ids = fields.One2many('restaurant.table', 'floor_id', string='Tables',
                                help='The list of tables in this floor')
    sequence = fields.Integer('Sequence', help='Used to sort Floors', default=1)
class BaseImportModule(models.TransientModel):
    """ Import Module """
    _name = "base.import.module"
    _description = "Import Module"

    module_file = fields.Binary(string='Module .ZIP file', required=True)
    state = fields.Selection([('init', 'init'), ('done', 'done')],
                             string='Status', readonly=True, default='init')
    import_message = fields.Char()
    force = fields.Boolean(
        string='Force init',
        help="Force init mode even if installed. (will update `noupdate='1'` records)")

    @api.multi
    def import_module(self):
        """Install the uploaded ZIP archive and show the result in the wizard."""
        self.ensure_one()
        archive = BytesIO()
        archive.write(base64.decodestring(self.module_file))
        result = self.env['ir.module.module'].import_zipfile(archive, force=self.force)
        self.write({'state': 'done', 'import_message': result[0]})
        # Return wizard otherwise it will close wizard and will not show result message to user.
        return {
            'name': 'Import Module',
            'view_type': 'form',
            'view_mode': 'form',
            'target': 'new',
            'res_id': self.id,
            'res_model': 'base.import.module',
            'type': 'ir.actions.act_window',
            'context': dict(self.env.context, module_name=result[1]),
        }

    @api.multi
    def action_module_open(self):
        """Open the list of modules that were just imported."""
        self.ensure_one()
        imported_names = self.env.context.get('module_name', [])
        return {
            'domain': [('name', 'in', imported_names)],
            'name': 'Modules',
            'view_type': 'form',
            'view_mode': 'tree,form',
            'res_model': 'ir.module.module',
            'view_id': False,
            'type': 'ir.actions.act_window',
        }
class FleetVehicleModel(models.Model):
    """Model of a vehicle (make + model name); logo images are related to the brand."""
    _name = 'fleet.vehicle.model'
    _description = 'Model of a vehicle'
    _order = 'name asc'

    name = fields.Char('Model name', required=True)
    brand_id = fields.Many2one('fleet.vehicle.model.brand', 'Make',
                               required=True, help='Make of the vehicle')
    vendors = fields.Many2many('res.partner', 'fleet_vehicle_model_vendors',
                               'model_id', 'partner_id', string='Vendors')
    # Logos are not stored here: they follow the brand record.
    image = fields.Binary(related='brand_id.image', string="Logo")
    image_medium = fields.Binary(related='brand_id.image_medium', string="Logo (medium)")
    image_small = fields.Binary(related='brand_id.image_small', string="Logo (small)")

    @api.multi
    @api.depends('name', 'brand_id')
    def name_get(self):
        """Display records as "<brand>/<model name>" when a brand is set."""
        res = []
        for record in self:
            name = record.name
            if record.brand_id.name:
                name = record.brand_id.name + '/' + name
            res.append((record.id, name))
        return res

    @api.onchange('brand_id')
    def _onchange_brand(self):
        """Refresh the displayed logo when the brand changes.

        Fix: previously the full-size ``brand_id.image`` was assigned to
        ``image_medium`` (a related to ``brand_id.image_medium``), feeding an
        unresized 1024px payload into the medium-size slot; use the matching
        medium-size variant instead.
        """
        if self.brand_id:
            self.image_medium = self.brand_id.image_medium
        else:
            self.image_medium = False
class PaymentIcon(models.Model):
    """Icon of a payment method (e.g. card brand), stored in two sizes."""
    _name = 'payment.icon'
    _description = 'Payment Icon'

    name = fields.Char(string='Name')
    acquirer_ids = fields.Many2many(
        'payment.acquirer', string="Acquirers",
        help="List of Acquirers supporting this payment icon.")
    image = fields.Binary(
        "Image", attachment=True,
        help="This field holds the image used for this payment icon, limited to 1024x1024px")
    image_payment_form = fields.Binary("Image displayed on the payment form",
                                       attachment=True)

    def _resize_icon_images(self, vals):
        """If a new ``image`` is present in ``vals``, derive both stored sizes
        in place: 45x30 for the payment form and 64x64 for the main image.

        Fix: this logic was duplicated verbatim in ``create`` and ``write``;
        it is now shared by both.
        """
        if 'image' in vals:
            image = ustr(vals['image'] or '').encode('utf-8')
            vals['image_payment_form'] = image_resize_image(image, size=(45, 30))
            vals['image'] = image_resize_image(image, size=(64, 64))

    @api.model
    def create(self, vals):
        self._resize_icon_images(vals)
        return super(PaymentIcon, self).create(vals)

    @api.multi
    def write(self, vals):
        self._resize_icon_images(vals)
        return super(PaymentIcon, self).write(vals)
class Sponsor(models.Model):
    """Sponsor of an event: links a partner to an event with a sponsoring type."""
    _name = "event.sponsor"
    _description = 'Event Sponsor'
    _order = "sequence"

    event_id = fields.Many2one('event.event', 'Event', required=True)
    sponsor_type_id = fields.Many2one('event.sponsor.type', 'Sponsoring Type', required=True)
    partner_id = fields.Many2one('res.partner', 'Sponsor/Customer', required=True)
    url = fields.Char('Sponsor Website')
    # Stored related field: record ordering follows the sponsoring type's sequence.
    sequence = fields.Integer('Sequence', store=True, related='sponsor_type_id.sequence')
    # Logo comes from the partner record; stored as attachment.
    image_medium = fields.Binary(string='Logo', related='partner_id.image_medium',
                                 store=True, attachment=True)
class XmlDeclaration(models.TransientModel):
    """
    Intrastat XML Declaration

    Wizard building the Belgian Intrastat XML report (arrivals and/or
    dispatches, standard or extended form) for a given month/year and
    company, and offering the result for download.
    """
    _name = "l10n_be_intrastat_xml.xml_decl"
    _description = 'Intrastat XML Declaration'

    def _default_get_month(self):
        # Current month ('01'..'12') in the user's timezone.
        return fields.Date.from_string(
            fields.Date.context_today(self)).strftime('%m')

    def _default_get_year(self):
        # Current year as a 4-digit string.
        return fields.Date.from_string(
            fields.Date.context_today(self)).strftime('%Y')

    name = fields.Char(string='File Name', default='intrastat.xml')
    month = fields.Selection([('01', 'January'), ('02', 'February'),
                              ('03', 'March'), ('04', 'April'),
                              ('05', 'May'), ('06', 'June'), ('07', 'July'),
                              ('08', 'August'), ('09', 'September'),
                              ('10', 'October'), ('11', 'November'),
                              ('12', 'December')],
                             string='Month', required=True,
                             default=_default_get_month)
    year = fields.Char(size=4, required=True, default=_default_get_year)
    company_id = fields.Many2one('res.company', string='Company',
                                 required=True,
                                 default=lambda self: self.env.user.company_id)
    arrivals = fields.Selection([('be-exempt', 'Exempt'),
                                 ('be-standard', 'Standard'),
                                 ('be-extended', 'Extended')],
                                required=True, default='be-standard')
    dispatches = fields.Selection([('be-exempt', 'Exempt'),
                                   ('be-standard', 'Standard'),
                                   ('be-extended', 'Extended')],
                                  required=True, default='be-standard')
    file_save = fields.Binary(string='Intrastat Report File', readonly=True)
    state = fields.Selection([('draft', 'Draft'), ('download', 'Download')],
                             default='draft')

    @api.model
    def _company_warning(self, translated_msg):
        """ Raise a error with custom message, asking user to configure company settings """
        raise exceptions.RedirectWarning(
            translated_msg,
            self.env.ref('base.action_res_company_form').id,
            _('Go to company configuration screen'))

    @api.multi
    def create_xml(self):
        """Creates the xml that is to be exported and sent to the state for partner vat intra.

        :return: Value for next action.
        :rtype: dict
        """
        self.ensure_one()
        company = self.company_id
        # Sanity checks: country, registry number and a 4-digit year are
        # mandatory before anything can be generated.
        if not (company.partner_id and company.partner_id.country_id and
                company.partner_id.country_id.id):
            self._company_warning(_('The country of your company is not set, '
                                    'please make sure to configure it first.'))
        if not company.company_registry:
            self._company_warning(_('The registry number of your company is not set, '
                                    'please make sure to configure it first.'))
        if len(self.year) != 4:
            raise exceptions.Warning(_('Year must be 4 digits number (YYYY)'))

        #Create root declaration
        decl = ET.Element('DeclarationReport')
        decl.set('xmlns', INTRASTAT_XMLNS)

        #Add Administration elements
        admin = ET.SubElement(decl, 'Administration')
        fromtag = ET.SubElement(admin, 'From')
        fromtag.text = company.company_registry
        fromtag.set('declarerType', 'KBO')
        ET.SubElement(admin, 'To').text = "NBB"
        ET.SubElement(admin, 'Domain').text = "SXX"

        # One <Report> per selected flow; 'be-exempt' produces no report.
        # _get_lines is run as sudo to read all invoices regardless of rules.
        if self.arrivals == 'be-standard':
            decl.append(self.sudo()._get_lines(dispatchmode=False,
                                               extendedmode=False))
        elif self.arrivals == 'be-extended':
            decl.append(self.sudo()._get_lines(dispatchmode=False,
                                               extendedmode=True))
        if self.dispatches == 'be-standard':
            decl.append(self.sudo()._get_lines(dispatchmode=True,
                                               extendedmode=False))
        elif self.dispatches == 'be-extended':
            decl.append(self.sudo()._get_lines(dispatchmode=True,
                                               extendedmode=True))

        #Get xml string with declaration
        data_file = ET.tostring(decl, encoding='UTF-8', method='xml')

        #change state of the wizard
        self.write({
            'name': 'intrastat_%s%s.xml' % (self.year, self.month),
            'file_save': base64.encodestring(data_file),
            'state': 'download'
        })
        return {
            'name': _('Save'),
            'view_type': 'form',
            'view_mode': 'form',
            'res_model': 'l10n_be_intrastat_xml.xml_decl',
            'type': 'ir.actions.act_window',
            'target': 'new',
            'res_id': self.id,
        }

    @api.multi
    def _get_lines(self, dispatchmode=False, extendedmode=False):
        """Build and return one <Report> element for a single flow.

        :param dispatchmode: True for dispatches (code 29), False for
            arrivals (code 19)
        :param extendedmode: True for the extended form, which adds the
            transport mode (EXTPC) and incoterm (EXDELTRM) dimensions
        :return: the populated <Report> ElementTree element
        """
        company = self.company_id
        IntrastatRegion = self.env['l10n_be_intrastat.region']

        if dispatchmode:
            mode1 = 'out_invoice'
            mode2 = 'in_refund'
            declcode = "29"
        else:
            mode1 = 'in_invoice'
            mode2 = 'out_refund'
            declcode = "19"

        decl = ET.Element('Report')
        if not extendedmode:
            decl.set('code', 'EX%sS' % declcode)
        else:
            decl.set('code', 'EX%sE' % declcode)
        decl.set('date', '%s-%s' % (self.year, self.month))
        datas = ET.SubElement(decl, 'Data')
        if not extendedmode:
            datas.set('form', 'EXF%sS' % declcode)
        else:
            datas.set('form', 'EXF%sE' % declcode)
        datas.set('close', 'true')

        # Invoice lines sharing the same key are aggregated into one item.
        intrastatkey = namedtuple("intrastatkey",
                                  ['EXTRF', 'EXCNT', 'EXTTA', 'EXREG',
                                   'EXGO', 'EXTPC', 'EXDELTRM'])
        entries = {}

        # Select open/paid invoice lines for non-service products of this
        # company and period, where the relevant country is an Intrastat
        # country different from the company's own.
        query = """
            SELECT inv_line.id
            FROM account_invoice_line inv_line
                JOIN account_invoice inv ON inv_line.invoice_id=inv.id
                LEFT JOIN res_country ON res_country.id = inv.intrastat_country_id
                LEFT JOIN res_partner ON res_partner.id = inv.partner_id
                LEFT JOIN res_country countrypartner ON countrypartner.id = res_partner.country_id
                JOIN product_product ON inv_line.product_id=product_product.id
                JOIN product_template ON product_product.product_tmpl_id=product_template.id
            WHERE inv.state IN ('open','paid')
                AND inv.company_id=%s
                AND not product_template.type='service'
                AND (res_country.intrastat=true OR (inv.intrastat_country_id is NULL
                    AND countrypartner.intrastat=true))
                AND ((res_country.code IS NOT NULL AND not res_country.code=%s)
                    OR (res_country.code is NULL AND countrypartner.code IS NOT NULL
                    AND not countrypartner.code=%s))
                AND inv.type IN (%s, %s)
                AND to_char(inv.date_invoice, 'YYYY')=%s
                AND to_char(inv.date_invoice, 'MM')=%s
            """

        self.env.cr.execute(query, (company.id,
                                    company.partner_id.country_id.code,
                                    company.partner_id.country_id.code,
                                    mode1, mode2, self.year, self.month))
        lines = self.env.cr.fetchall()
        invoicelines_ids = [rec[0] for rec in lines]
        invoicelines = self.env['account.invoice.line'].browse(invoicelines_ids)

        for inv_line in invoicelines:
            #Check type of transaction
            if inv_line.intrastat_transaction_id:
                extta = inv_line.intrastat_transaction_id.code
            else:
                extta = "1"
            #Check country
            if inv_line.invoice_id.intrastat_country_id:
                excnt = inv_line.invoice_id.intrastat_country_id.code
            else:
                excnt = inv_line.invoice_id.partner_shipping_id.country_id.code or inv_line.invoice_id.partner_id.country_id.code

            #Check region
            #If purchase, comes from purchase order, linked to a location,
            #which is linked to the warehouse
            #if sales, the sales order is linked to the warehouse
            #if sales, from a delivery order, linked to a location,
            #which is linked to the warehouse
            #If none found, get the company one.
            exreg = None
            if inv_line.invoice_id.type in ('in_invoice', 'in_refund'):
                #comes from purchase
                po_lines = self.env['purchase.order.line'].search(
                    [('invoice_lines', 'in', inv_line.id)], limit=1)
                if po_lines:
                    # Triangular (dropship) flows are excluded from the report.
                    if self._is_situation_triangular(company, po_line=po_lines):
                        continue
                    location = self.env['stock.location'].browse(
                        po_lines.order_id._get_destination_location())
                    region_id = self.env['stock.warehouse'].get_regionid_from_locationid(location)
                    if region_id:
                        exreg = IntrastatRegion.browse(region_id).code
            elif inv_line.invoice_id.type in ('out_invoice', 'out_refund'):
                #comes from sales
                so_lines = self.env['sale.order.line'].search(
                    [('invoice_lines', 'in', inv_line.id)], limit=1)
                if so_lines:
                    if self._is_situation_triangular(company, so_line=so_lines):
                        continue
                    saleorder = so_lines.order_id
                    if saleorder and saleorder.warehouse_id and saleorder.warehouse_id.region_id:
                        exreg = IntrastatRegion.browse(
                            saleorder.warehouse_id.region_id.id).code

            if not exreg:
                if company.region_id:
                    exreg = company.region_id.code
                else:
                    self._company_warning(
                        _('The Intrastat Region of the selected company is not set, '
                          'please make sure to configure it first.'))

            #Check commodity codes
            intrastat_id = inv_line.product_id.get_intrastat_recursively()
            if intrastat_id:
                exgo = self.env['report.intrastat.code'].browse(intrastat_id).name
            else:
                raise exceptions.Warning(
                    _('Product "%s" has no intrastat code, please configure it')
                    % inv_line.product_id.display_name)

            #In extended mode, 2 more fields required
            if extendedmode:
                #Check means of transport
                if inv_line.invoice_id.transport_mode_id:
                    extpc = inv_line.invoice_id.transport_mode_id.code
                elif company.transport_mode_id:
                    extpc = company.transport_mode_id.code
                else:
                    self._company_warning(
                        _('The default Intrastat transport mode of your company '
                          'is not set, please make sure to configure it first.'))
                #Check incoterm
                if inv_line.invoice_id.incoterm_id:
                    exdeltrm = inv_line.invoice_id.incoterm_id.code
                elif company.incoterm_id:
                    exdeltrm = company.incoterm_id.code
                else:
                    self._company_warning(
                        _('The default Incoterm of your company is not set, '
                          'please make sure to configure it first.'))
            else:
                extpc = ""
                exdeltrm = ""

            linekey = intrastatkey(EXTRF=declcode, EXCNT=excnt, EXTTA=extta,
                                   EXREG=exreg, EXGO=exgo, EXTPC=extpc,
                                   EXDELTRM=exdeltrm)
            #We have the key
            #calculate amounts
            if inv_line.price_unit and inv_line.quantity:
                amount = inv_line.price_unit * inv_line.quantity
            else:
                amount = 0
            weight = (inv_line.product_id.weight or 0.0) * \
                inv_line.uom_id._compute_quantity(inv_line.quantity, inv_line.product_id.uom_id)
            if not inv_line.product_id.uom_id.category_id:
                supply_units = inv_line.quantity
            else:
                supply_units = inv_line.quantity * inv_line.uom_id.factor
            # Accumulate (amount, weight, supply_units) per aggregation key.
            amounts = entries.setdefault(linekey, (0, 0, 0))
            amounts = (amounts[0] + amount, amounts[1] + weight,
                       amounts[2] + supply_units)
            entries[linekey] = amounts

        # Emit one <Item> per aggregated key; zero-amount items are skipped.
        numlgn = 0
        for linekey in entries:
            amounts = entries[linekey]
            if round(amounts[0], 0) == 0:
                continue
            numlgn += 1
            item = ET.SubElement(datas, 'Item')
            self._set_Dim(item, 'EXSEQCODE', text_type(numlgn))
            self._set_Dim(item, 'EXTRF', text_type(linekey.EXTRF))
            self._set_Dim(item, 'EXCNT', text_type(linekey.EXCNT))
            self._set_Dim(item, 'EXTTA', text_type(linekey.EXTTA))
            self._set_Dim(item, 'EXREG', text_type(linekey.EXREG))
            self._set_Dim(item, 'EXTGO', text_type(linekey.EXGO))
            if extendedmode:
                self._set_Dim(item, 'EXTPC', text_type(linekey.EXTPC))
                self._set_Dim(item, 'EXDELTRM', text_type(linekey.EXDELTRM))
            # Decimal separator must be a comma in the NBB format.
            self._set_Dim(item, 'EXTXVAL', text_type(round(amounts[0], 0)).replace(".", ","))
            self._set_Dim(item, 'EXWEIGHT', text_type(round(amounts[1], 0)).replace(".", ","))
            self._set_Dim(item, 'EXUNITS', text_type(round(amounts[2], 0)).replace(".", ","))

        if numlgn == 0:
            #no datas
            datas.set('action', 'nihil')
        return decl

    def _set_Dim(self, item, prop, value):
        # Append one <Dim prop="..."> child holding a declaration value.
        dim = ET.SubElement(item, 'Dim')
        dim.set('prop', prop)
        dim.text = value

    def _is_situation_triangular(self, company, po_line=False, so_line=False):
        # Ignoring what is purchased and sold by us with a dropshipping route
        # outside of our country, or completely within it
        # https://www.nbb.be/doc/dq/f_pdf_ex/intra2017fr.pdf (§ 4.x)
        dropship_pick_type = self.env.ref(
            'stock_dropshipping.picking_type_dropship', raise_if_not_found=False)
        if not dropship_pick_type:
            return False
        stock_move_domain = [('picking_type_id', '=', dropship_pick_type.id)]
        if po_line:
            stock_move_domain.append(('purchase_line_id', '=', po_line.id))
        if so_line:
            stock_move_domain.append(('procurement_id.sale_line_id', '=', so_line.id))
        stock_move = self.env['stock.move'].search(stock_move_domain, limit=1)
        # Triangular when exactly both or neither partner country matches ours.
        return stock_move and (
            (stock_move.partner_id.country_id.code != company.country_id.code and
             stock_move.picking_partner_id.country_id.code != company.country_id.code) or
            (stock_move.partner_id.country_id.code == company.country_id.code and
             stock_move.picking_partner_id.country_id.code == company.country_id.code))
class IrUiMenu(models.Model):
    """Menu items of the web client, with per-user visibility filtering."""
    _name = 'ir.ui.menu'
    _order = "sequence,id"
    _parent_store = True

    def __init__(self, *args, **kwargs):
        super(IrUiMenu, self).__init__(*args, **kwargs)
        # Menu visibility depends on access rights: flush our caches whenever
        # ir.model.access clears its own.
        self.pool['ir.model.access'].register_cache_clearing_method(self._name, 'clear_caches')

    name = fields.Char(string='Menu', required=True, translate=True)
    active = fields.Boolean(default=True)
    sequence = fields.Integer(default=10)
    child_id = fields.One2many('ir.ui.menu', 'parent_id', string='Child IDs')
    parent_id = fields.Many2one('ir.ui.menu', string='Parent Menu', index=True,
                                ondelete="restrict")
    # _parent_store nested-set bounds, maintained by the ORM.
    parent_left = fields.Integer(index=True)
    parent_right = fields.Integer(index=True)
    groups_id = fields.Many2many('res.groups', 'ir_ui_menu_group_rel', 'menu_id', 'gid',
                                 string='Groups',
                                 help="If you have groups, the visibility of this menu will be based on these groups. "\
                                      "If this field is empty, GECOERP will compute visibility based on the related object's read access.")
    complete_name = fields.Char(compute='_compute_complete_name', string='Full Path')
    web_icon = fields.Char(string='Web Icon File')
    action = fields.Reference(selection=[('ir.actions.report', 'ir.actions.report'),
                                         ('ir.actions.act_window', 'ir.actions.act_window'),
                                         ('ir.actions.act_url', 'ir.actions.act_url'),
                                         ('ir.actions.server', 'ir.actions.server'),
                                         ('ir.actions.client', 'ir.actions.client')])
    web_icon_data = fields.Binary(string='Web Icon Image', attachment=True)

    @api.depends('name', 'parent_id.complete_name')
    def _compute_complete_name(self):
        for menu in self:
            menu.complete_name = menu._get_full_name()

    def _get_full_name(self, level=6):
        """ Return the full name of ``self`` (up to a certain level). """
        if level <= 0:
            # Depth limit reached: truncate the path.
            return '...'
        if self.parent_id:
            return self.parent_id._get_full_name(level - 1) + MENU_ITEM_SEPARATOR + (self.name or "")
        else:
            return self.name

    def read_image(self, path):
        """Return the base64-encoded content of a "module,relative/path"
        resource, or False if the path is empty or cannot be resolved."""
        if not path:
            return False
        path_info = path.split(',')
        icon_path = get_module_resource(path_info[0], path_info[1])
        icon_image = False
        if icon_path:
            with tools.file_open(icon_path, 'rb') as icon_file:
                icon_image = base64.encodestring(icon_file.read())
        return icon_image

    @api.constrains('parent_id')
    def _check_parent_id(self):
        if not self._check_recursion():
            raise ValidationError(_('Error! You cannot create recursive menus.'))

    @api.model
    @tools.ormcache('frozenset(self.env.user.groups_id.ids)', 'debug')
    def _visible_menu_ids(self, debug=False):
        """ Return the ids of the menu items visible to the user. """
        # retrieve all menus, and determine which ones are visible
        context = {'ir.ui.menu.full_list': True}
        menus = self.with_context(context).search([])

        groups = self.env.user.groups_id
        if not debug:
            # Outside debug mode, ignore the technical-features group.
            groups = groups - self.env.ref('base.group_no_one')
        # first discard all menus with groups the user does not have
        menus = menus.filtered(lambda menu: not menu.groups_id or menu.groups_id & groups)

        # take apart menus that have an action
        action_menus = menus.filtered(lambda m: m.action and m.action.exists())
        folder_menus = menus - action_menus
        visible = self.browse()

        # process action menus, check whether their action is allowed
        access = self.env['ir.model.access']
        MODEL_GETTER = {
            'ir.actions.act_window': lambda action: action.res_model,
            'ir.actions.report': lambda action: action.model,
            'ir.actions.server': lambda action: action.model_id.model,
        }
        for menu in action_menus:
            get_model = MODEL_GETTER.get(menu.action._name)
            if not get_model or not get_model(menu.action) or \
                    access.check(get_model(menu.action), 'read', False):
                # make menu visible, and its folder ancestors, too
                visible += menu
                menu = menu.parent_id
                while menu and menu in folder_menus and menu not in visible:
                    visible += menu
                    menu = menu.parent_id

        return set(visible.ids)

    @api.multi
    @api.returns('self')
    def _filter_visible_menus(self):
        """ Filter `self` to only keep the menu items that should be visible
            in the menu hierarchy of the current user.
            Uses a cache for speeding up the computation.
        """
        visible_ids = self._visible_menu_ids(request.debug if request else False)
        return self.filtered(lambda menu: menu.id in visible_ids)

    @api.model
    def search(self, args, offset=0, limit=None, order=None, count=False):
        # Fetch the full result set first (offset=0, no limit): visibility
        # filtering must happen before offset/limit are applied, otherwise
        # pages would come up short.
        menus = super(IrUiMenu, self).search(args, offset=0, limit=None, order=order, count=False)
        if menus:
            # menu filtering is done only on main menu tree, not other menu lists
            if not self._context.get('ir.ui.menu.full_list'):
                menus = menus._filter_visible_menus()
            if offset:
                menus = menus[offset:]
            if limit:
                menus = menus[:limit]
        return len(menus) if count else menus

    @api.multi
    def name_get(self):
        return [(menu.id, menu._get_full_name()) for menu in self]

    @api.model
    def create(self, values):
        # Any menu change may affect visibility caches.
        self.clear_caches()
        if 'web_icon' in values:
            values['web_icon_data'] = self._compute_web_icon_data(values.get('web_icon'))
        return super(IrUiMenu, self).create(values)

    @api.multi
    def write(self, values):
        self.clear_caches()
        if 'web_icon' in values:
            values['web_icon_data'] = self._compute_web_icon_data(values.get('web_icon'))
        return super(IrUiMenu, self).write(values)

    def _compute_web_icon_data(self, web_icon):
        """ Returns the image associated to `web_icon`.
            `web_icon` can either be:
              - an image icon [module, path]
              - a built icon [icon_class, icon_color, background_color]
            and it only has to call `read_image` if it's an image
            (i.e. exactly two comma-separated parts).
        """
        if web_icon and len(web_icon.split(',')) == 2:
            return self.read_image(web_icon)

    @api.multi
    def unlink(self):
        # Detach children and promote them to top-level, because it would be unwise to
        # cascade-delete submenus blindly. We also can't use ondelete=set null because
        # that is not supported when _parent_store is used (would silently corrupt it).
        # TODO: ideally we should move them under a generic "Orphans" menu somewhere?
        extra = {'ir.ui.menu.full_list': True}
        direct_children = self.with_context(**extra).search([('parent_id', 'in', self.ids)])
        direct_children.write({'parent_id': False})

        self.clear_caches()
        return super(IrUiMenu, self).unlink()

    @api.multi
    def copy(self, default=None):
        # Append or increment a "(N)" suffix so the copy's name is distinct.
        record = super(IrUiMenu, self).copy(default=default)
        match = NUMBER_PARENS.search(record.name)
        if match:
            next_num = int(match.group(1)) + 1
            record.name = NUMBER_PARENS.sub('(%d)' % next_num, record.name)
        else:
            record.name = record.name + '(1)'
        return record

    @api.model
    @api.returns('self')
    def get_user_roots(self):
        """ Return all root menu ids visible for the user.

        :return: the root menu ids
        :rtype: list(int)
        """
        return self.search([('parent_id', '=', False)])

    @api.model
    @tools.ormcache_context('self._uid', keys=('lang', ))
    def load_menus_root(self):
        """Load only the first level of the menu tree (the applications)."""
        fields = ['name', 'sequence', 'parent_id', 'action', 'web_icon_data']
        menu_roots = self.get_user_roots()
        menu_roots_data = menu_roots.read(fields) if menu_roots else []
        menu_root = {
            'id': False,
            'name': 'root',
            'parent_id': [-1, ''],
            'children': menu_roots_data,
            'all_menu_ids': menu_roots.ids,
        }
        menu_roots._set_menuitems_xmlids(menu_root)
        return menu_root

    @api.model
    @tools.ormcache_context('self._uid', 'debug', keys=('lang', ))
    def load_menus(self, debug):
        """ Loads all menu items (all applications and their sub-menus).

        :return: the menu root
        :rtype: dict('children': menu_nodes)
        """
        fields = ['name', 'sequence', 'parent_id', 'action', 'web_icon', 'web_icon_data']
        menu_roots = self.get_user_roots()
        menu_roots_data = menu_roots.read(fields) if menu_roots else []
        menu_root = {
            'id': False,
            'name': 'root',
            'parent_id': [-1, ''],
            'children': menu_roots_data,
            'all_menu_ids': menu_roots.ids,
        }
        if not menu_roots_data:
            return menu_root

        # menus are loaded fully unlike a regular tree view, cause there are a
        # limited number of items (752 when all 6.1 addons are installed)
        menus = self.search([('id', 'child_of', menu_roots.ids)])
        menu_items = menus.read(fields)

        # add roots at the end of the sequence, so that they will overwrite
        # equivalent menu items from full menu read when put into id:item
        # mapping, resulting in children being correctly set on the roots.
        menu_items.extend(menu_roots_data)
        menu_root['all_menu_ids'] = menus.ids  # includes menu_roots!

        # make a tree using parent_id
        menu_items_map = {menu_item["id"]: menu_item for menu_item in menu_items}
        for menu_item in menu_items:
            parent = menu_item['parent_id'] and menu_item['parent_id'][0]
            if parent in menu_items_map:
                menu_items_map[parent].setdefault('children', []).append(menu_item)

        # sort by sequence a tree using parent_id
        for menu_item in menu_items:
            menu_item.setdefault('children', []).sort(key=operator.itemgetter('sequence'))

        (menu_roots + menus)._set_menuitems_xmlids(menu_root)
        return menu_root

    def _set_menuitems_xmlids(self, menu_root):
        """Annotate every node of the `menu_root` tree with its XML id
        (empty string when the menu has no external id)."""
        menuitems = self.env['ir.model.data'].sudo().search([
            ('res_id', 'in', self.ids),
            ('model', '=', 'ir.ui.menu')
        ])
        xmlids = {menu.res_id: menu.complete_name for menu in menuitems}

        def _set_xmlids(tree, xmlids):
            tree['xmlid'] = xmlids.get(tree['id'], '')
            if 'children' in tree:
                for child in tree['children']:
                    _set_xmlids(child, xmlids)

        _set_xmlids(menu_root, xmlids)
class RatingMixin(models.AbstractModel):
    """Mixin adding customer-rating support to the inheriting model:
    rating records linked by (res_model, res_id), token-based rating
    collection by email, and repartition/statistics helpers."""
    _name = 'rating.mixin'
    _description = "Rating Mixin"

    rating_ids = fields.One2many(
        'rating.rating', 'res_id', string='Rating',
        domain=lambda self: [('res_model', '=', self._name)], auto_join=True)
    rating_last_value = fields.Float(
        'Rating Last Value', compute='_compute_rating_last_value',
        compute_sudo=True, store=True)
    rating_last_feedback = fields.Text(
        'Rating Last Feedback', related='rating_ids.feedback')
    rating_last_image = fields.Binary(
        'Rating Last Image', related='rating_ids.rating_image')
    rating_count = fields.Integer(
        'Rating count', compute="_compute_rating_count")

    @api.multi
    @api.depends('rating_ids.rating')
    def _compute_rating_last_value(self):
        # limit=1 on the model's default order yields the latest rating.
        for record in self:
            ratings = self.env['rating.rating'].search(
                [('res_model', '=', self._name), ('res_id', '=', record.id)],
                limit=1)
            if ratings:
                record.rating_last_value = ratings.rating

    @api.multi
    def _compute_rating_count(self):
        # Count consumed (filled) ratings per record in a single read_group.
        grouped = self.env['rating.rating'].read_group(
            [('res_model', '=', self._name),
             ('res_id', 'in', self.ids),
             ('consumed', '=', True)],
            ['res_id'], groupby=['res_id'])
        counts = dict.fromkeys(self.ids, 0)
        for group in grouped:
            counts[group['res_id']] += group['res_id_count']
        for record in self:
            record.rating_count = counts.get(record.id)

    def write(self, values):
        """ If the rated ressource name is modified, we should update the rating res_name too. """
        result = super(RatingMixin, self).write(values)
        if self._rec_name in values:
            self.rating_ids._compute_res_name()
        return result

    def unlink(self):
        """ When removing a record, its rating should be deleted too. """
        record_ids = self.ids
        result = super(RatingMixin, self).unlink()
        self.env['rating.rating'].sudo().search(
            [('res_model', '=', self._name),
             ('res_id', 'in', record_ids)]).unlink()
        return result

    def rating_get_parent_model_name(self, vals):
        """ Return the parent model name """
        return None

    def rating_get_parent_id(self):
        """ Return the parent record id """
        return None

    def rating_get_partner_id(self):
        # Customer giving the rating; empty recordset when no partner_id field.
        if hasattr(self, 'partner_id') and self.partner_id:
            return self.partner_id
        return self.env['res.partner']

    def rating_get_rated_partner_id(self):
        # Person being rated; empty recordset when no user_id/partner.
        if hasattr(self, 'user_id') and self.user_id.partner_id:
            return self.user_id.partner_id
        return self.env['res.partner']

    def rating_get_access_token(self, partner=None):
        """Return the access token of a pending (unconsumed) rating for
        ``partner``, creating the rating record if none exists yet."""
        if not partner:
            partner = self.rating_get_partner_id()
        rated_partner = self.rating_get_rated_partner_id()
        pending = self.rating_ids.filtered(
            lambda x: x.partner_id.id == partner.id and not x.consumed)
        if not pending:
            record_model_id = self.env['ir.model'].sudo().search(
                [('model', '=', self._name)], limit=1).id
            rating = self.env['rating.rating'].create({
                'partner_id': partner.id,
                'rated_partner_id': rated_partner.id,
                'res_model_id': record_model_id,
                'res_id': self.id,
            })
        else:
            rating = pending[0]
        return rating.access_token

    @api.multi
    def rating_send_request(self, template, lang=False, force_send=True):
        """ This method send rating request by email, using a template given in parameter. """
        lang = lang or 'en_US'
        for record in self:
            template.with_context(lang=lang).send_mail(record.id, force_send=force_send)

    @api.multi
    def rating_apply(self, rate, token=None, feedback=None, subtype=None):
        """ Apply a rating given a token. If the current model inherits from
        mail.thread mixing, a message is posted on its chatter.

        :param rate : the rating value to apply
        :type rate : float
        :param token : access token
        :param feedback : additional feedback
        :type feedback : string
        :param subtype : subtype for mail
        :type subtype : string
        :returns rating.rating record
        """
        Rating, rating = self.env['rating.rating'], None
        if token:
            rating = self.env['rating.rating'].search(
                [('access_token', '=', token)], limit=1)
        else:
            rating = Rating.search(
                [('res_model', '=', self._name), ('res_id', '=', self.ids[0])],
                limit=1)
        if rating:
            rating.write({'rating': rate, 'feedback': feedback, 'consumed': True})
            if hasattr(self, 'message_post'):
                feedback = tools.plaintext2html(feedback or '')
                self.message_post(
                    body="<img src='/rating/static/src/img/rating_%s.png' alt=':rating_%s' style='width:20px;height:20px;float:left;margin-right: 5px;'/>%s"
                         % (rate, rate, feedback),
                    subtype=subtype or "mail.mt_comment",
                    author_id=rating.partner_id and rating.partner_id.id or None  # None will set the default author in mail_thread.py
                )
            if hasattr(self, 'stage_id') and self.stage_id and hasattr(self.stage_id, 'auto_validation_kanban_state') and self.stage_id.auto_validation_kanban_state:
                # NOTE(review): a rating of exactly 5 changes nothing — this
                # mirrors the original thresholds; confirm before altering.
                if rating.rating > 5:
                    self.write({'kanban_state': 'done'})
                if rating.rating < 5:
                    self.write({'kanban_state': 'blocked'})
        return rating

    @api.multi
    def rating_get_repartition(self, add_stats=False, domain=None):
        """ get the repatition of rating grade for the given res_ids.

        :param add_stats : flag to add stat to the result
        :type add_stats : boolean
        :param domain : optional extra domain of the rating to include/exclude in repartition
        :return dictionnary
            if not add_stats, the dict is like
                - key is the rating value (integer)
                - value is the number of object (res_model, res_id) having the value
            otherwise, key is the value of the information (string) : either stat name
            (avg, total, ...) or 'repartition' containing the same dict if add_stats was False.
        """
        base_domain = [('res_model', '=', self._name),
                       ('res_id', 'in', self.ids),
                       ('rating', '>=', 1),
                       ('consumed', '=', True)]
        if domain:
            base_domain += domain
        data = self.env['rating.rating'].read_group(
            base_domain, ['rating'], ['rating', 'res_id'])
        # init dict with all posible rate value, except 0 (no value for the rating)
        values = dict.fromkeys(range(1, 11), 0)
        values.update((d['rating'], d['rating_count']) for d in data)
        # add other stats
        if add_stats:
            rating_number = sum(values.values())
            return {
                'repartition': values,
                'avg': sum(float(key * values[key]) for key in values) / rating_number if rating_number > 0 else 0,
                'total': sum(it['rating_count'] for it in data),
            }
        return values

    @api.multi
    def rating_get_grades(self, domain=None):
        """ get the repatition of rating grade for the given res_ids.

        :param domain : optional domain of the rating to include/exclude in grades computation
        :return dictionnary where the key is the grade (great, okay, bad), and the value, the number of
            object (res_model, res_id) having the grade
            the grade are compute as    0-30% : Bad
                                        31-69%: Okay
                                        70-100%: Great
        """
        data = self.rating_get_repartition(domain=domain)
        res = dict.fromkeys(['great', 'okay', 'bad'], 0)
        for key in data:
            if key >= RATING_LIMIT_SATISFIED:
                res['great'] += data[key]
            elif key > RATING_LIMIT_OK:
                res['okay'] += data[key]
            else:
                res['bad'] += data[key]
        return res

    @api.multi
    def rating_get_stats(self, domain=None):
        """ get the statistics of the rating repatition

        :param domain : optional domain of the rating to include/exclude in statistic computation
        :return dictionnary where
            - key is the the name of the information (stat name)
            - value is statistic value : 'percent' contains the repartition in percentage,
              'avg' is the average rate and 'total' is the number of rating
        """
        data = self.rating_get_repartition(domain=domain, add_stats=True)
        result = {
            'avg': data['avg'],
            'total': data['total'],
            'percent': dict.fromkeys(range(1, 11), 0),
        }
        for rate in data['repartition']:
            result['percent'][rate] = (data['repartition'][rate] * 100) / data['total'] if data['total'] > 0 else 0
        return result
class Rating(models.Model):
    """A single customer rating (0..10) attached to any document through
    (res_model_id, res_id), optionally linked to a parent document and to
    the mail.message posted when the review was submitted."""
    _name = "rating.rating"
    _description = "Rating"
    _order = 'write_date desc'
    _rec_name = 'res_name'
    _sql_constraints = [
        ('rating_range', 'check(rating >= 0 and rating <= 10)', 'Rating should be between 0 to 10'),
    ]

    @api.one
    @api.depends('res_model', 'res_id')
    def _compute_res_name(self):
        # Display name of the rated document; falls back to "model/id".
        name = self.env[self.res_model].sudo().browse(self.res_id).name_get()
        self.res_name = name and name[0][1] or ('%s/%s') % (self.res_model, self.res_id)

    @api.model
    def new_access_token(self):
        """Generate a fresh random token guarding write access to the rating."""
        return uuid.uuid4().hex

    res_name = fields.Char(
        string='Resource name', compute='_compute_res_name', store=True,
        help="The name of the rated resource.")
    res_model_id = fields.Many2one(
        'ir.model', 'Related Document Model', index=True, ondelete='cascade',
        help='Model of the followed resource')
    res_model = fields.Char(
        string='Document Model', related='res_model_id.model',
        store=True, index=True, readonly=True)
    res_id = fields.Integer(
        string='Document', required=True,
        help="Identifier of the rated object", index=True)
    parent_res_name = fields.Char(
        'Parent Document Name', compute='_compute_parent_res_name', store=True)
    parent_res_model_id = fields.Many2one(
        'ir.model', 'Parent Related Document Model', index=True)
    parent_res_model = fields.Char(
        'Parent Document Model', store=True,
        related='parent_res_model_id.model', index=True)
    parent_res_id = fields.Integer('Parent Document', index=True)
    rated_partner_id = fields.Many2one(
        'res.partner', string="Rated person", help="Owner of the rated resource")
    partner_id = fields.Many2one(
        'res.partner', string='Customer', help="Author of the rating")
    rating = fields.Float(
        string="Rating", group_operator="avg", default=0,
        help="Rating value: 0=Unhappy, 10=Happy")
    rating_image = fields.Binary('Image', compute='_compute_rating_image')
    rating_text = fields.Selection(
        [('satisfied', 'Satisfied'),
         ('not_satisfied', 'Not satisfied'),
         ('highly_dissatisfied', 'Highly dissatisfied'),
         ('no_rating', 'No Rating yet')],
        string='Rating', store=True, compute='_compute_rating_text', readonly=True)
    feedback = fields.Text('Comment', help="Reason of the rating")
    message_id = fields.Many2one(
        'mail.message', string="Linked message",
        help="Associated message when posting a review. Mainly used in website addons.",
        index=True)
    access_token = fields.Char(
        'Security Token', default=new_access_token,
        help="Access token to set the rating of the value")
    consumed = fields.Boolean(
        string="Filled Rating", help="Enabled if the rating has been filled.")

    @api.depends('parent_res_model', 'parent_res_id')
    def _compute_parent_res_name(self):
        for record in self:
            display = False
            if record.parent_res_model and record.parent_res_id:
                display = self.env[record.parent_res_model].sudo().browse(record.parent_res_id).name_get()
                display = display and display[0][1] or ('%s/%s') % (record.parent_res_model, record.parent_res_id)
            record.parent_res_name = display

    @api.multi
    @api.depends('rating')
    def _compute_rating_image(self):
        # Pick the smiley matching the integer part of the rating; missing
        # asset files simply leave the image empty.
        for record in self:
            try:
                image_path = get_resource_path(
                    'rating', 'static/src/img',
                    'rating_%s.png' % (int(record.rating),))
                record.rating_image = base64.b64encode(open(image_path, 'rb').read())
            except (IOError, OSError):
                record.rating_image = False

    @api.depends('rating')
    def _compute_rating_text(self):
        # Bucket the numeric rating into the selection labels.
        for record in self:
            if record.rating >= RATING_LIMIT_SATISFIED:
                record.rating_text = 'satisfied'
            elif record.rating > RATING_LIMIT_OK:
                record.rating_text = 'not_satisfied'
            elif record.rating >= RATING_LIMIT_MIN:
                record.rating_text = 'highly_dissatisfied'
            else:
                record.rating_text = 'no_rating'

    @api.model
    def create(self, values):
        if values.get('res_model_id') and values.get('res_id'):
            values.update(self._find_parent_data(values))
        return super(Rating, self).create(values)

    @api.multi
    def write(self, values):
        if values.get('res_model_id') and values.get('res_id'):
            values.update(self._find_parent_data(values))
        return super(Rating, self).write(values)

    def _find_parent_data(self, values):
        """ Determine the parent res_model/res_id, based on the values to create or write """
        current_model_name = self.env['ir.model'].sudo().browse(values['res_model_id']).model
        current_record = self.env[current_model_name].browse(values['res_id'])
        data = {
            'parent_res_model_id': False,
            'parent_res_id': False,
        }
        if hasattr(current_record, 'rating_get_parent_model_name'):
            parent_res_model = current_record.rating_get_parent_model_name(values)
            data['parent_res_model_id'] = self.env['ir.model']._get(parent_res_model).id
            data['parent_res_id'] = current_record.rating_get_parent_id()
        return data

    @api.multi
    def reset(self):
        """Blank the rating so it can be filled again with a new token."""
        for record in self:
            record.write({
                'rating': 0,
                'access_token': record.new_access_token(),
                'feedback': False,
                'consumed': False,
            })

    def action_open_rated_object(self):
        """Open the form view of the rated document."""
        self.ensure_one()
        return {
            'type': 'ir.actions.act_window',
            'res_model': self.res_model,
            'res_id': self.res_id,
            'views': [[False, 'form']]
        }
class ProductProduct(models.Model):
    """Product variant. Delegates shared data to product.template via
    _inherits and adds variant-specific pricing, images and attributes."""
    _name = "product.product"
    _description = "Product"
    _inherits = {'product.template': 'product_tmpl_id'}
    _inherit = ['mail.thread', 'mail.activity.mixin']
    _order = 'default_code, name, id'

    price = fields.Float(
        'Price', compute='_compute_product_price',
        digits=dp.get_precision('Product Price'),
        inverse='_set_product_price')
    price_extra = fields.Float(
        'Variant Price Extra', compute='_compute_product_price_extra',
        digits=dp.get_precision('Product Price'),
        help="This is the sum of the extra price of all attributes")
    lst_price = fields.Float(
        'Sale Price', compute='_compute_product_lst_price',
        digits=dp.get_precision('Product Price'),
        inverse='_set_product_lst_price',
        help="The sale price is managed from the product template. Click on the 'Variant Prices' button to set the extra attribute prices.")
    default_code = fields.Char('Internal Reference', index=True)
    code = fields.Char('Reference', compute='_compute_product_code')
    partner_ref = fields.Char('Customer Ref', compute='_compute_partner_ref')
    active = fields.Boolean(
        'Active', default=True,
        help="If unchecked, it will allow you to hide the product without removing it.")
    product_tmpl_id = fields.Many2one(
        'product.template', 'Product Template',
        auto_join=True, index=True, ondelete="cascade", required=True)
    barcode = fields.Char(
        'Barcode', copy=False, oldname='ean13',
        help="International Article Number used for product identification.")
    attribute_value_ids = fields.Many2many(
        'product.attribute.value', string='Attributes', ondelete='restrict')
    # image: all image fields are base64 encoded and PIL-supported
    image_variant = fields.Binary(
        "Variant Image", attachment=True,
        help="This field holds the image used as image for the product variant, limited to 1024x1024px.")
    image = fields.Binary(
        "Big-sized image", compute='_compute_images', inverse='_set_image',
        help="Image of the product variant (Big-sized image of product template if false). It is automatically "
             "resized as a 1024x1024px image, with aspect ratio preserved.")
    image_small = fields.Binary(
        "Small-sized image", compute='_compute_images', inverse='_set_image_small',
        help="Image of the product variant (Small-sized image of product template if false).")
    image_medium = fields.Binary(
        "Medium-sized image", compute='_compute_images', inverse='_set_image_medium',
        help="Image of the product variant (Medium-sized image of product template if false).")
    is_product_variant = fields.Boolean(compute='_compute_is_product_variant')
    standard_price = fields.Float(
        'Cost', company_dependent=True,
        digits=dp.get_precision('Product Price'),
        groups="base.group_user",
        help="Cost used for stock valuation in standard price and as a first price to set in average/fifo. "
             "Also used as a base price for pricelists. "
             "Expressed in the default unit of measure of the product.")
    volume = fields.Float('Volume', help="The volume in m3.")
    weight = fields.Float(
        'Weight', digits=dp.get_precision('Stock Weight'),
        help="The weight of the contents in Kg, not including any packaging, etc.")
    pricelist_item_ids = fields.Many2many(
        'product.pricelist.item', 'Pricelist Items',
        compute='_get_pricelist_items')
    packaging_ids = fields.One2many(
        'product.packaging', 'product_id', 'Product Packages',
        help="Gives the different ways to package the same product.")

    _sql_constraints = [
        ('barcode_uniq', 'unique(barcode)', "A barcode can only be assigned to one product !"),
    ]

    def _get_invoice_policy(self):
        # Hook overridden by sale/purchase-related modules.
        return False

    def _compute_is_product_variant(self):
        # On product.product this is always True (templates report False).
        for product in self:
            product.is_product_variant = True

    def _compute_product_price(self):
        """Compute the pricelist price from the 'pricelist' context key
        (accepted either as an id or as a display name)."""
        prices = {}
        pricelist_id_or_name = self._context.get('pricelist')
        if pricelist_id_or_name:
            pricelist = None
            partner = self._context.get('partner', False)
            quantity = self._context.get('quantity', 1.0)

            # Support context pricelists specified as display_name or ID for compatibility
            if isinstance(pricelist_id_or_name, pycompat.string_types):
                pricelist_name_search = self.env['product.pricelist'].name_search(
                    pricelist_id_or_name, operator='=', limit=1)
                if pricelist_name_search:
                    pricelist = self.env['product.pricelist'].browse(
                        [pricelist_name_search[0][0]])
            elif isinstance(pricelist_id_or_name, pycompat.integer_types):
                pricelist = self.env['product.pricelist'].browse(pricelist_id_or_name)

            if pricelist:
                quantities = [quantity] * len(self)
                partners = [partner] * len(self)
                prices = pricelist.get_products_price(self, quantities, partners)

        for product in self:
            product.price = prices.get(product.id, 0.0)

    def _set_product_price(self):
        # Inverse of price: store it back on the template's list_price,
        # converting from the context UoM and removing the variant extra.
        for product in self:
            if self._context.get('uom'):
                value = self.env['product.uom'].browse(
                    self._context['uom'])._compute_price(product.price, product.uom_id)
            else:
                value = product.price
            value -= product.price_extra
            product.write({'list_price': value})

    def _set_product_lst_price(self):
        # Inverse of lst_price: same template write-back as _set_product_price.
        for product in self:
            if self._context.get('uom'):
                value = self.env['product.uom'].browse(
                    self._context['uom'])._compute_price(product.lst_price, product.uom_id)
            else:
                value = product.lst_price
            value -= product.price_extra
            product.write({'list_price': value})

    @api.depends('attribute_value_ids.price_ids.price_extra',
                 'attribute_value_ids.price_ids.product_tmpl_id')
    def _compute_product_price_extra(self):
        # TDE FIXME: do a real multi and optimize a bit ?
        for product in self:
            price_extra = 0.0
            for attribute_price in product.mapped('attribute_value_ids.price_ids'):
                if attribute_price.product_tmpl_id == product.product_tmpl_id:
                    price_extra += attribute_price.price_extra
            product.price_extra = price_extra

    @api.depends('list_price', 'price_extra')
    def _compute_product_lst_price(self):
        # Template list price (optionally converted to the context UoM)
        # plus the variant's attribute extras.
        to_uom = None
        if 'uom' in self._context:
            to_uom = self.env['product.uom'].browse([self._context['uom']])
        for product in self:
            if to_uom:
                list_price = product.uom_id._compute_price(product.list_price, to_uom)
            else:
                list_price = product.list_price
            product.lst_price = list_price + product.price_extra

    @api.one
    def _compute_product_code(self):
        # Supplier-specific product code when 'partner_id' is in context,
        # otherwise the internal reference.
        for supplier_info in self.seller_ids:
            if supplier_info.name.id == self._context.get('partner_id'):
                self.code = supplier_info.product_code or self.default_code
                break
        else:
            self.code = self.default_code

    @api.one
    def _compute_partner_ref(self):
        for supplier_info in self.seller_ids:
            if supplier_info.name.id == self._context.get('partner_id'):
                product_name = supplier_info.product_name or self.default_code or self.name
                self.partner_ref = '%s%s' % (self.code and '[%s] ' % self.code or '', product_name)
                break
        else:
            self.partner_ref = self.name_get()[0][1]

    @api.one
    @api.depends('image_variant', 'product_tmpl_id.image')
    def _compute_images(self):
        # With bin_size in context the fields only carry sizes: no resizing.
        if self._context.get('bin_size'):
            self.image_medium = self.image_variant
            self.image_small = self.image_variant
            self.image = self.image_variant
        else:
            resized_images = tools.image_get_resized_images(
                self.image_variant, return_big=True, avoid_resize_medium=True)
            self.image_medium = resized_images['image_medium']
            self.image_small = resized_images['image_small']
            self.image = resized_images['image']
        # Fall back on the template images when the variant has none.
        if not self.image_medium:
            self.image_medium = self.product_tmpl_id.image_medium
        if not self.image_small:
            self.image_small = self.product_tmpl_id.image_small
        if not self.image:
            self.image = self.product_tmpl_id.image

    @api.one
    def _set_image(self):
        self._set_image_value(self.image)
@api.one def _set_image_medium(self): self._set_image_value(self.image_medium) @api.one def _set_image_small(self): self._set_image_value(self.image_small) @api.one def _set_image_value(self, value): if isinstance(value, pycompat.text_type): value = value.encode('ascii') image = tools.image_resize_image_big(value) if self.product_tmpl_id.image: self.image_variant = image else: self.product_tmpl_id.image = image @api.one def _get_pricelist_items(self): self.pricelist_item_ids = self.env['product.pricelist.item'].search([ '|', ('product_id', '=', self.id), ('product_tmpl_id', '=', self.product_tmpl_id.id) ]).ids @api.constrains('attribute_value_ids') def _check_attribute_value_ids(self): for product in self: attributes = self.env['product.attribute'] for value in product.attribute_value_ids: if value.attribute_id in attributes: raise ValidationError( _('Error! It is not allowed to choose more than one value for a given attribute.' )) if value.attribute_id.create_variant: attributes |= value.attribute_id return True @api.onchange('uom_id', 'uom_po_id') def _onchange_uom(self): if self.uom_id and self.uom_po_id and self.uom_id.category_id != self.uom_po_id.category_id: self.uom_po_id = self.uom_id @api.model def create(self, vals): product = super( ProductProduct, self.with_context(create_product_product=True)).create(vals) # When a unique variant is created from tmpl then the standard price is set by _set_standard_price if not (self.env.context.get('create_from_tmpl') and len(product.product_tmpl_id.product_variant_ids) == 1): product._set_standard_price(vals.get('standard_price') or 0.0) return product @api.multi def write(self, values): ''' Store the standard price change in order to be able to retrieve the cost of a product for a given date''' res = super(ProductProduct, self).write(values) if 'standard_price' in values: self._set_standard_price(values['standard_price']) return res @api.multi def unlink(self): unlink_products = self.env['product.product'] 
unlink_templates = self.env['product.template'] for product in self: # Check if product still exists, in case it has been unlinked by unlinking its template if not product.exists(): continue # Check if the product is last product of this template other_products = self.search([('product_tmpl_id', '=', product.product_tmpl_id.id), ('id', '!=', product.id)]) if not other_products: unlink_templates |= product.product_tmpl_id unlink_products |= product res = super(ProductProduct, unlink_products).unlink() # delete templates after calling super, as deleting template could lead to deleting # products due to ondelete='cascade' unlink_templates.unlink() return res @api.multi def copy(self, default=None): # TDE FIXME: clean context / variant brol if default is None: default = {} if self._context.get('variant'): # if we copy a variant or create one, we keep the same template default['product_tmpl_id'] = self.product_tmpl_id.id elif 'name' not in default: default['name'] = self.name return super(ProductProduct, self).copy(default=default) @api.model def search(self, args, offset=0, limit=None, order=None, count=False): # TDE FIXME: strange if self._context.get('search_default_categ_id'): args.append((('categ_id', 'child_of', self._context['search_default_categ_id']))) return super(ProductProduct, self).search(args, offset=offset, limit=limit, order=order, count=count) @api.multi def name_get(self): # TDE: this could be cleaned a bit I think def _name_get(d): name = d.get('name', '') code = self._context.get('display_default_code', True) and d.get( 'default_code', False) or False if code: name = '[%s] %s' % (code, name) return (d['id'], name) partner_id = self._context.get('partner_id') if partner_id: partner_ids = [ partner_id, self.env['res.partner'].browse( partner_id).commercial_partner_id.id ] else: partner_ids = [] # all user don't have access to seller and partner # check access and use superuser self.check_access_rights("read") self.check_access_rule("read") result = [] 
for product in self.sudo(): # display only the attributes with multiple possible values on the template variable_attributes = product.attribute_line_ids.filtered( lambda l: len(l.value_ids) > 1).mapped('attribute_id') variant = product.attribute_value_ids._variant_name( variable_attributes) name = variant and "%s (%s)" % (product.name, variant) or product.name sellers = [] if partner_ids: sellers = [ x for x in product.seller_ids if (x.name.id in partner_ids) and (x.product_id == product) ] if not sellers: sellers = [ x for x in product.seller_ids if (x.name.id in partner_ids) and not x.product_id ] if sellers: for s in sellers: seller_variant = s.product_name and ( variant and "%s (%s)" % (s.product_name, variant) or s.product_name) or False mydict = { 'id': product.id, 'name': seller_variant or name, 'default_code': s.product_code or product.default_code, } temp = _name_get(mydict) if temp not in result: result.append(temp) else: mydict = { 'id': product.id, 'name': name, 'default_code': product.default_code, } result.append(_name_get(mydict)) return result @api.model def name_search(self, name='', args=None, operator='ilike', limit=100): if not args: args = [] if name: positive_operators = ['=', 'ilike', '=ilike', 'like', '=like'] products = self.env['product.product'] if operator in positive_operators: products = self.search([('default_code', '=', name)] + args, limit=limit) if not products: products = self.search([('barcode', '=', name)] + args, limit=limit) if not products and operator not in expression.NEGATIVE_TERM_OPERATORS: # Do not merge the 2 next lines into one single search, SQL search performance would be abysmal # on a database with thousands of matching products, due to the huge merge+unique needed for the # OR operator (and given the fact that the 'name' lookup results come from the ir.translation table # Performing a quick memory merge of ids in Python will give much better performance products = self.search(args + [('default_code', operator, 
name)], limit=limit) if not limit or len(products) < limit: # we may underrun the limit because of dupes in the results, that's fine limit2 = (limit - len(products)) if limit else False products += self.search(args + [('name', operator, name), ('id', 'not in', products.ids)], limit=limit2) elif not products and operator in expression.NEGATIVE_TERM_OPERATORS: domain = expression.OR([ [ '&', ('default_code', operator, name), ('name', operator, name) ], [ '&', ('default_code', '=', False), ('name', operator, name) ], ]) domain = expression.AND([args, domain]) products = self.search(domain, limit=limit) if not products and operator in positive_operators: ptrn = re.compile('(\[(.*?)\])') res = ptrn.search(name) if res: products = self.search( [('default_code', '=', res.group(2))] + args, limit=limit) # still no results, partner in context: search on supplier info as last hope to find something if not products and self._context.get('partner_id'): suppliers = self.env['product.supplierinfo'].search([ ('name', '=', self._context.get('partner_id')), '|', ('product_code', operator, name), ('product_name', operator, name) ]) if suppliers: products = self.search( [('product_tmpl_id.seller_ids', 'in', suppliers.ids)], limit=limit) else: products = self.search(args, limit=limit) return products.name_get() @api.model def view_header_get(self, view_id, view_type): res = super(ProductProduct, self).view_header_get(view_id, view_type) if self._context.get('categ_id'): return _('Products: ') + self.env['product.category'].browse( self._context['categ_id']).name return res @api.multi def open_product_template(self): """ Utility method used to add an "Open Template" button in product views """ self.ensure_one() return { 'type': 'ir.actions.act_window', 'res_model': 'product.template', 'view_mode': 'form', 'res_id': self.product_tmpl_id.id, 'target': 'new' } @api.multi def _select_seller(self, partner_id=False, quantity=0.0, date=None, uom_id=False): self.ensure_one() if date is None: 
date = fields.Date.context_today(self) precision = self.env['decimal.precision'].precision_get( 'Product Unit of Measure') res = self.env['product.supplierinfo'] for seller in self.seller_ids: # Set quantity in UoM of seller quantity_uom_seller = quantity if quantity_uom_seller and uom_id and uom_id != seller.product_uom: quantity_uom_seller = uom_id._compute_quantity( quantity_uom_seller, seller.product_uom) if seller.date_start and seller.date_start > date: continue if seller.date_end and seller.date_end < date: continue if partner_id and seller.name not in [ partner_id, partner_id.parent_id ]: continue if float_compare(quantity_uom_seller, seller.min_qty, precision_digits=precision) == -1: continue if seller.product_id and seller.product_id != self: continue res |= seller break return res @api.multi def price_compute(self, price_type, uom=False, currency=False, company=False): # TDE FIXME: delegate to template or not ? fields are reencoded here ... # compatibility about context keys used a bit everywhere in the code if not uom and self._context.get('uom'): uom = self.env['product.uom'].browse(self._context['uom']) if not currency and self._context.get('currency'): currency = self.env['res.currency'].browse( self._context['currency']) products = self if price_type == 'standard_price': # standard_price field can only be seen by users in base.group_user # Thus, in order to compute the sale price from the cost for users not in this group # We fetch the standard price as the superuser products = self.with_context( force_company=company and company.id or self._context.get( 'force_company', self.env.user.company_id.id)).sudo() prices = dict.fromkeys(self.ids, 0.0) for product in products: prices[product.id] = product[price_type] or 0.0 if price_type == 'list_price': prices[product.id] += product.price_extra if uom: prices[product.id] = product.uom_id._compute_price( prices[product.id], uom) # Convert from current user company currency to asked one # This is right cause 
a field cannot be in more than one currency if currency: prices[product.id] = product.currency_id.compute( prices[product.id], currency) return prices # compatibility to remove after v10 - DEPRECATED @api.multi def price_get(self, ptype='list_price'): return self.price_compute(ptype) @api.multi def _set_standard_price(self, value): ''' Store the standard price change in order to be able to retrieve the cost of a product for a given date''' PriceHistory = self.env['product.price.history'] for product in self: PriceHistory.create({ 'product_id': product.id, 'cost': value, 'company_id': self._context.get('force_company', self.env.user.company_id.id), }) @api.multi def get_history_price(self, company_id, date=None): history = self.env['product.price.history'].search( [('company_id', '=', company_id), ('product_id', 'in', self.ids), ('datetime', '<=', date or fields.Datetime.now())], order='datetime desc,id desc', limit=1) return history.cost or 0.0
class Employee(models.Model):
    """HR employee record.

    Backed by a ``resource.resource`` record (via ``resource.mixin``): name,
    user and active flag are related fields stored on the resource. Private
    data fields are restricted to the ``hr.group_hr_user`` group.
    """
    _name = "hr.employee"
    _description = "Employee"
    _order = 'name'
    _inherit = ['mail.thread', 'resource.mixin']
    _mail_post_access = 'read'

    @api.model
    def _default_image(self):
        """Return the default employee photo as base64, resized to 'big'."""
        image_path = get_module_resource('hr', 'static/src/img',
                                         'default_image.png')
        # Fix: use a context manager so the file handle is closed
        # deterministically (the previous code leaked it until GC).
        with open(image_path, 'rb') as image_file:
            return tools.image_resize_image_big(
                base64.b64encode(image_file.read()))

    # resource and user
    # required on the resource, make sure required="True" set in the view
    name = fields.Char(related='resource_id.name',
                       store=True,
                       oldname='name_related')
    user_id = fields.Many2one('res.users',
                              'User',
                              related='resource_id.user_id')
    active = fields.Boolean('Active',
                            related='resource_id.active',
                            default=True,
                            store=True)
    # private
    address_home_id = fields.Many2one(
        'res.partner',
        'Private Address',
        help=
        'Enter here the private address of the employee, not the one linked to your company.',
        groups="hr.group_hr_user")
    is_address_home_a_company = fields.Boolean(
        'The employee adress has a company linked',
        compute='_compute_is_address_home_a_company',
    )
    country_id = fields.Many2one('res.country',
                                 'Nationality (Country)',
                                 groups="hr.group_hr_user")
    gender = fields.Selection([('male', 'Male'), ('female', 'Female'),
                               ('other', 'Other')],
                              groups="hr.group_hr_user",
                              default="male")
    marital = fields.Selection([('single', 'Single'), ('married', 'Married'),
                                ('cohabitant', 'Legal Cohabitant'),
                                ('widower', 'Widower'),
                                ('divorced', 'Divorced')],
                               string='Marital Status',
                               groups="hr.group_hr_user",
                               default='single')
    birthday = fields.Date('Date of Birth', groups="hr.group_hr_user")
    ssnid = fields.Char('SSN No',
                        help='Social Security Number',
                        groups="hr.group_hr_user")
    sinid = fields.Char('SIN No',
                        help='Social Insurance Number',
                        groups="hr.group_hr_user")
    identification_id = fields.Char(string='Identification No',
                                    groups="hr.group_hr_user")
    passport_id = fields.Char('Passport No', groups="hr.group_hr_user")
    bank_account_id = fields.Many2one(
        'res.partner.bank',
        'Bank Account Number',
        domain="[('partner_id', '=', address_home_id)]",
        groups="hr.group_hr_user",
        help='Employee bank salary account')
    permit_no = fields.Char('Work Permit No', groups="hr.group_hr_user")
    visa_no = fields.Char('Visa No', groups="hr.group_hr_user")
    visa_expire = fields.Date('Visa Expire Date', groups="hr.group_hr_user")
    # image: all image fields are base64 encoded and PIL-supported
    image = fields.Binary(
        "Photo",
        default=_default_image,
        attachment=True,
        help=
        "This field holds the image used as photo for the employee, limited to 1024x1024px."
    )
    image_medium = fields.Binary(
        "Medium-sized photo",
        attachment=True,
        help="Medium-sized photo of the employee. It is automatically "
        "resized as a 128x128px image, with aspect ratio preserved. "
        "Use this field in form views or some kanban views.")
    image_small = fields.Binary(
        "Small-sized photo",
        attachment=True,
        help="Small-sized photo of the employee. It is automatically "
        "resized as a 64x64px image, with aspect ratio preserved. "
        "Use this field anywhere a small image is required.")
    # work
    address_id = fields.Many2one('res.partner', 'Work Address')
    work_phone = fields.Char('Work Phone')
    mobile_phone = fields.Char('Work Mobile')
    work_email = fields.Char('Work Email')
    work_location = fields.Char('Work Location')
    # employee in company
    job_id = fields.Many2one('hr.job', 'Job Position')
    department_id = fields.Many2one('hr.department', 'Department')
    parent_id = fields.Many2one('hr.employee', 'Manager')
    child_ids = fields.One2many('hr.employee',
                                'parent_id',
                                string='Subordinates')
    coach_id = fields.Many2one('hr.employee', 'Coach')
    category_ids = fields.Many2many('hr.employee.category',
                                    'employee_category_rel',
                                    'emp_id',
                                    'category_id',
                                    string='Tags')
    # misc
    notes = fields.Text('Notes')
    color = fields.Integer('Color Index', default=0)

    @api.constrains('parent_id')
    def _check_parent_id(self):
        """Forbid cycles in the manager hierarchy."""
        for employee in self:
            if not employee._check_recursion():
                raise ValidationError(
                    _('Error! You cannot create recursive hierarchy of Employee(s).'
                      ))

    @api.onchange('address_id')
    def _onchange_address(self):
        # Copy phone numbers from the selected work address.
        self.work_phone = self.address_id.phone
        self.mobile_phone = self.address_id.mobile

    @api.onchange('company_id')
    def _onchange_company(self):
        # Default the work address to the company partner's default address.
        address = self.company_id.partner_id.address_get(['default'])
        self.address_id = address['default'] if address else False

    @api.onchange('department_id')
    def _onchange_department(self):
        # The department manager becomes the default manager of the employee.
        self.parent_id = self.department_id.manager_id

    @api.onchange('user_id')
    def _onchange_user(self):
        if self.user_id:
            self.update(self._sync_user(self.user_id))

    def _sync_user(self, user):
        """Values copied from the linked res.users record onto the employee."""
        return dict(
            name=user.name,
            image=user.image,
            work_email=user.email,
        )

    @api.model
    def create(self, vals):
        """Create an employee, syncing name/image/email from the linked user
        (if any) and resizing the image fields."""
        if vals.get('user_id'):
            vals.update(
                self._sync_user(self.env['res.users'].browse(vals['user_id'])))
        tools.image_resize_images(vals)
        return super(Employee, self).create(vals)

    @api.multi
    def write(self, vals):
        """Write employee values; keep the salary bank account attached to the
        (new) private address when it changes."""
        if 'address_home_id' in vals:
            # NOTE(review): `self.bank_account_id` assumes `self` is a single
            # record when 'bank_account_id' is not in vals — a multi-record
            # write changing address_home_id would raise; confirm callers.
            account_id = vals.get('bank_account_id') or self.bank_account_id.id
            if account_id:
                self.env['res.partner.bank'].browse(
                    account_id).partner_id = vals['address_home_id']
        tools.image_resize_images(vals)
        return super(Employee, self).write(vals)

    @api.multi
    def unlink(self):
        """Delete the employees and their underlying resources."""
        resources = self.mapped('resource_id')
        super(Employee, self).unlink()
        return resources.unlink()

    @api.multi
    def action_follow(self):
        """ Wrapper because message_subscribe_users take a user_ids=None
            that receive the context without the wrapper. """
        return self.message_subscribe_users()

    @api.multi
    def action_unfollow(self):
        """ Wrapper because message_unsubscribe_users take a user_ids=None
            that receive the context without the wrapper. """
        return self.message_unsubscribe_users()

    @api.model
    def _message_get_auto_subscribe_fields(self,
                                           updated_fields,
                                           auto_follow_fields=None):
        """ Overwrite of the original method to always follow user_id field,
            even when not track_visibility so that a user will follow it's employee
        """
        if auto_follow_fields is None:
            auto_follow_fields = ['user_id']
        user_field_lst = []
        for name, field in self._fields.items():
            if name in auto_follow_fields and name in updated_fields and field.comodel_name == 'res.users':
                user_field_lst.append(name)
        return user_field_lst

    @api.multi
    def _message_auto_subscribe_notify(self, partner_ids):
        # Do not notify user it has been marked as follower of its employee.
        return

    @api.depends('address_home_id.parent_id')
    def _compute_is_address_home_a_company(self):
        """Checks that choosen address (res.partner) is not linked to a company.
        """
        for employee in self:
            try:
                employee.is_address_home_a_company = employee.address_home_id.parent_id.id is not False
            except AccessError:
                # Restricted users may not read the partner; default to False.
                employee.is_address_home_a_company = False
class Website(models.Model):
    """Website record: languages, CDN settings, public user and page/menu
    management helpers used by the website controllers."""
    _name = "website"  # Avoid website.website convention for conciseness (for new api). Got a special authorization from xmo and rco
    _description = "Website"

    def _active_languages(self):
        # All installed languages are active on a website by default.
        return self.env['res.lang'].search([]).ids

    def _default_language(self):
        # Default to the partner default lang, falling back on any active one.
        lang_code = self.env['ir.default'].get('res.partner', 'lang')
        def_lang = self.env['res.lang'].search([('code', '=', lang_code)],
                                               limit=1)
        return def_lang.id if def_lang else self._active_languages()[0]

    name = fields.Char('Website Name')
    domain = fields.Char('Website Domain')
    company_id = fields.Many2one(
        'res.company',
        string="Company",
        default=lambda self: self.env.ref('base.main_company').id)
    language_ids = fields.Many2many('res.lang',
                                    'website_lang_rel',
                                    'website_id',
                                    'lang_id',
                                    'Languages',
                                    default=_active_languages)
    default_lang_id = fields.Many2one('res.lang',
                                      string="Default Language",
                                      default=_default_language,
                                      required=True)
    default_lang_code = fields.Char(related='default_lang_id.code',
                                    string="Default language code",
                                    store=True)
    auto_redirect_lang = fields.Boolean(
        'Autoredirect Language',
        default=True,
        help="Should users be redirected to their browser's language")

    social_twitter = fields.Char(related="company_id.social_twitter")
    social_facebook = fields.Char(related="company_id.social_facebook")
    social_github = fields.Char(related="company_id.social_github")
    social_linkedin = fields.Char(related="company_id.social_linkedin")
    social_youtube = fields.Char(related="company_id.social_youtube")
    social_googleplus = fields.Char(related="company_id.social_googleplus")

    google_analytics_key = fields.Char('Google Analytics Key')
    google_management_client_id = fields.Char('Google Client ID')
    google_management_client_secret = fields.Char('Google Client Secret')

    user_id = fields.Many2one(
        'res.users',
        string='Public User',
        required=True,
        default=lambda self: self.env.ref('base.public_user').id)
    cdn_activated = fields.Boolean('Activate CDN for assets')
    cdn_url = fields.Char('CDN Base URL', default='')
    cdn_filters = fields.Text(
        'CDN Filters',
        default=lambda s: '\n'.join(DEFAULT_CDN_FILTERS),
        help=
        "URL matching those filters will be rewritten using the CDN Base URL")
    partner_id = fields.Many2one(related='user_id.partner_id',
                                 relation='res.partner',
                                 string='Public Partner')
    menu_id = fields.Many2one('website.menu',
                              compute='_compute_menu',
                              string='Main Menu')
    homepage_id = fields.Many2one('website.page', string='Homepage')
    favicon = fields.Binary(
        string="Website Favicon",
        help=
        "This field holds the image used to display a favicon on the website.")

    @api.onchange('language_ids')
    def _onchange_language_ids(self):
        # Keep the default language among the selected ones.
        if self.language_ids and self.default_lang_id not in self.language_ids:
            self.default_lang_id = self.language_ids[0]

    @api.multi
    def _compute_menu(self):
        """Main menu = the oldest root menu attached to this website."""
        Menu = self.env['website.menu']
        for website in self:
            website.menu_id = Menu.search([('parent_id', '=', False),
                                           ('website_id', '=', website.id)],
                                          order='id',
                                          limit=1).id

    # cf. Wizard hack in website_views.xml
    def noop(self, *args, **kwargs):
        pass

    @api.multi
    def write(self, values):
        """Write website values, clearing the language cache and, when CDN
        settings change, the qweb compile caches."""
        self._get_languages.clear_cache(self)
        result = super(Website, self).write(values)
        if 'cdn_activated' in values or 'cdn_url' in values or 'cdn_filters' in values:
            # invalidate the caches from static node at compile time
            self.env['ir.qweb'].clear_caches()
        return result

    #----------------------------------------------------------
    # Page Management
    #----------------------------------------------------------
    @api.model
    def new_page(self,
                 name=False,
                 add_menu=False,
                 template='website.default_page',
                 ispage=True,
                 namespace=None):
        """ Create a new website page, and assign it a xmlid based on the given one
            :param name : the name of the page
            :param template : potential xml_id of the page to create
            :param namespace : module part of the xml_id if none, the template module name is used
        """
        if namespace:
            template_module = namespace
        else:
            # Renamed the throwaway variable: `_` shadowed the translation helper.
            template_module, dummy = template.split('.')
        page_url = '/' + slugify(name, max_length=1024, path=True)
        page_url = self.get_unique_path(page_url)
        page_key = slugify(name)
        result = dict({'url': page_url, 'view_id': False})
        if not name:
            name = 'Home'
            page_key = 'home'
        template_record = self.env.ref(template)
        website_id = self._context.get('website_id')
        key = self.get_unique_key(page_key, template_module)
        # Duplicate the template view for this website under the unique key.
        view = template_record.copy({'website_id': website_id, 'key': key})
        view.with_context(lang=None).write({
            'arch':
            template_record.arch.replace(template, key),
            'name':
            name,
        })
        if view.arch_fs:
            view.arch_fs = False
        if ispage:
            page = self.env['website.page'].create({
                'url':
                page_url,
                'website_ids': [(6, None, [self.get_current_website().id])],
                'view_id':
                view.id
            })
            result['view_id'] = view.id
        if add_menu:
            # NOTE(review): `page` is only bound when ispage is True; calling
            # with add_menu=True and ispage=False would raise NameError here —
            # confirm whether that combination is ever used.
            self.env['website.menu'].create({
                'name': name,
                'url': page_url,
                'parent_id': self.get_current_website().menu_id.id,
                'page_id': page.id,
                'website_id': self.get_current_website().id,
            })
        return result

    @api.model
    def guess_mimetype(self):
        return _guess_mimetype()

    def get_unique_path(self, page_url):
        """ Given an url, return that url suffixed by counter if it already exists
            :param page_url : the url to be checked for uniqueness
        """
        website_id = self.get_current_website().id
        inc = 0
        # Pages shared across websites (no website_ids) also count as conflicts.
        domain_static = [
            '|', ('website_ids', '=', False), ('website_ids', 'in', website_id)
        ]
        page_temp = page_url
        while self.env['website.page'].with_context(
                active_test=False).sudo().search([('url', '=', page_temp)] +
                                                 domain_static):
            inc += 1
            page_temp = page_url + (inc and "-%s" % inc or "")
        return page_temp

    def get_unique_key(self, string, template_module=False):
        """ Given a string, return an unique key including module prefix.
            It will be suffixed by a counter if it already exists to garantee uniqueness.
            :param string : the key to be checked for uniqueness, you can pass it with 'website.' or not
            :param template_module : the module to be prefixed on the key, if not set, we will use website
        """
        website_id = self.get_current_website().id
        if template_module:
            string = template_module + '.' + string
        else:
            if not string.startswith('website.'):
                string = 'website.' + string
        #Look for unique key
        key_copy = string
        inc = 0
        domain_static = [
            '|', ('website_ids', '=', False), ('website_ids', 'in', website_id)
        ]
        while self.env['website.page'].with_context(
                active_test=False).sudo().search([('key', '=', key_copy)] +
                                                 domain_static):
            inc += 1
            key_copy = string + (inc and "-%s" % inc or "")
        return key_copy

    def key_to_view_id(self, view_id):
        """Return the qweb view with the given id visible from the current
        website (website-specific or shared)."""
        return self.env['ir.ui.view'].search([
            ('id', '=', view_id), '|',
            ('website_id', '=', self._context.get('website_id')),
            ('website_id', '=', False), ('type', '=', 'qweb')
        ])

    @api.model
    def page_search_dependencies(self, page_id=False):
        """ Search dependencies just for information. It will not catch 100%
            of dependencies and False positive is more than possible
            Each module could add dependences in this dict
            :returns a dictionnary where key is the 'categorie' of object related to the given
                view, and the value is the list of text and link to the resource using given page
        """
        dependencies = {}
        if not page_id:
            return dependencies
        page = self.env['website.page'].browse(int(page_id))
        website_id = self._context.get('website_id')
        url = page.url

        # search for website_page with link
        website_page_search_dom = [
            '|', ('website_ids', 'in', website_id),
            ('website_ids', '=', False), ('view_id.arch_db', 'ilike', url)
        ]
        pages = self.env['website.page'].search(website_page_search_dom)
        page_key = _('Page')
        if len(pages) > 1:
            page_key = _('Pages')
        page_view_ids = []
        for page in pages:
            dependencies.setdefault(page_key, [])
            dependencies[page_key].append({
                'text':
                _('Page <b>%s</b> contains a link to this page') % page.url,
                'item':
                page.name,
                'link':
                page.url,
            })
            page_view_ids.append(page.view_id.id)

        # search for ir_ui_view (not from a website_page) with link
        page_search_dom = [
            '|', ('website_id', '=', website_id), ('website_id', '=', False),
            ('arch_db', 'ilike', url), ('id', 'not in', page_view_ids)
        ]
        views = self.env['ir.ui.view'].search(page_search_dom)
        view_key = _('Template')
        if len(views) > 1:
            view_key = _('Templates')
        for view in views:
            dependencies.setdefault(view_key, [])
            dependencies[view_key].append({
                'text':
                _('Template <b>%s (id:%s)</b> contains a link to this page') %
                (view.key or view.name, view.id),
                'link':
                '/web#id=%s&view_type=form&model=ir.ui.view' % view.id,
                'item':
                _('%s (id:%s)') % (view.key or view.name, view.id),
            })
        # search for menu with link
        menu_search_dom = [
            '|', ('website_id', '=', website_id), ('website_id', '=', False),
            ('url', 'ilike', '%s' % url)
        ]
        menus = self.env['website.menu'].search(menu_search_dom)
        menu_key = _('Menu')
        if len(menus) > 1:
            menu_key = _('Menus')
        for menu in menus:
            dependencies.setdefault(menu_key, []).append({
                'text':
                _('This page is in the menu <b>%s</b>') % menu.name,
                'link':
                '/web#id=%s&view_type=form&model=website.menu' % menu.id,
                'item':
                menu.name,
            })
        return dependencies

    @api.model
    def page_search_key_dependencies(self, page_id=False):
        """ Search dependencies just for information. It will not catch 100%
            of dependencies and False positive is more than possible
            Each module could add dependences in this dict
            :returns a dictionnary where key is the 'categorie' of object related to the given
                view, and the value is the list of text and link to the resource using given page
        """
        dependencies = {}
        if not page_id:
            return dependencies
        page = self.env['website.page'].browse(int(page_id))
        website_id = self._context.get('website_id')
        key = page.key

        # search for website_page with link
        website_page_search_dom = [
            '|',
            ('website_ids', 'in', website_id),
            ('website_ids', '=', False),
            ('view_id.arch_db', 'ilike', key),
            ('id', '!=', page.id),
        ]
        pages = self.env['website.page'].search(website_page_search_dom)
        page_key = _('Page')
        if len(pages) > 1:
            page_key = _('Pages')
        page_view_ids = []
        for p in pages:
            dependencies.setdefault(page_key, [])
            dependencies[page_key].append({
                'text':
                _('Page <b>%s</b> is calling this file') % p.url,
                'item':
                p.name,
                'link':
                p.url,
            })
            page_view_ids.append(p.view_id.id)

        # search for ir_ui_view (not from a website_page) with link
        page_search_dom = [
            '|',
            ('website_id', '=', website_id),
            ('website_id', '=', False),
            ('arch_db', 'ilike', key),
            ('id', 'not in', page_view_ids),
            ('id', '!=', page.view_id.id),
        ]
        views = self.env['ir.ui.view'].search(page_search_dom)
        view_key = _('Template')
        if len(views) > 1:
            view_key = _('Templates')
        for view in views:
            dependencies.setdefault(view_key, [])
            dependencies[view_key].append({
                'text':
                _('Template <b>%s (id:%s)</b> is calling this file') %
                (view.key or view.name, view.id),
                'item':
                _('%s (id:%s)') % (view.key or view.name, view.id),
                'link':
                '/web#id=%s&view_type=form&model=ir.ui.view' % view.id,
            })
        return dependencies

    @api.model
    def page_exists(self, name, module='website'):
        """Return the page record referenced by `module.name`, or False."""
        try:
            name = (name or "").replace("/website.", "").replace("/", "")
            if not name:
                return False
            # Bug fix: the original did `'%s.%s' % module` (TypeError, always
            # swallowed by the except below, so this method always returned
            # False) and passed `name` as a spurious second arg to env.ref().
            return self.env.ref('%s.%s' % (module, name))
        except Exception:
            return False

    #----------------------------------------------------------
    # Languages
    #----------------------------------------------------------
    @api.multi
    def get_languages(self):
        """Return [(code, name)] of this website's languages (cached)."""
        self.ensure_one()
        return self._get_languages()

    @tools.cache('self.id')
    def _get_languages(self):
        return [(lg.code, lg.name) for lg in self.language_ids]

    @api.multi
    def get_alternate_languages(self, req=None):
        """Build the hreflang alternates of the current request URL, one per
        website language."""
        langs = []
        if req is None:
            req = request.httprequest
        default = self.get_current_website().default_lang_code
        shorts = []

        def get_url_localized(router, lang):
            # Rebuild the current endpoint URL with record args in `lang`.
            arguments = dict(request.endpoint_arguments)
            for key, val in list(arguments.items()):
                if isinstance(val, models.BaseModel):
                    arguments[key] = val.with_context(lang=lang)
            return router.build(request.endpoint, arguments)

        router = request.httprequest.app.get_db_router(request.db).bind('')
        for code, dummy in self.get_languages():
            lg_path = ('/' + code) if code != default else ''
            lg_codes = code.split('_')
            shorts.append(lg_codes[0])
            uri = get_url_localized(
                router,
                code) if request.endpoint else request.httprequest.path
            if req.query_string:
                uri += u'?' + req.query_string.decode('utf-8')
            lang = {
                'hreflang': ('-'.join(lg_codes)).lower(),
                'short': lg_codes[0],
                'href': req.url_root[0:-1] + lg_path + uri,
            }
            langs.append(lang)
        # Languages whose short code is unique can advertise just the short code.
        for lang in langs:
            if shorts.count(lang['short']) == 1:
                lang['hreflang'] = lang['short']
        return langs

    #----------------------------------------------------------
    # Utilities
    #----------------------------------------------------------
    @api.model
    def get_current_website(self):
        """Return the website matching the request host (or the first one)."""
        domain_name = request and request.httprequest.environ.get(
            'HTTP_HOST', '').split(':')[0] or None
        website_id = self._get_current_website_id(domain_name)
        if request:
            request.context = dict(request.context, website_id=website_id)
        return self.browse(website_id)

    @tools.cache('domain_name')
    def _get_current_website_id(self, domain_name):
        """ Reminder : cached method should be return record, since they will use a closed cursor. """
        website = self.search([('domain', '=', domain_name)], limit=1)
        if not website:
            website = self.search([], limit=1)
        return website.id

    @api.model
    def is_publisher(self):
        return self.env['ir.model.access'].check('ir.ui.view', 'write', False)

    @api.model
    def is_user(self):
        return self.env['ir.model.access'].check('ir.ui.menu', 'read', False)

    @api.model
    def is_public_user(self):
        return request.env.user.id == request.website.user_id.id

    @api.model
    def get_template(self, template):
        """Return the ir.ui.view for `template` (id or xml_id, 'website.'
        prefixed by default); raise NotFound when it does not exist."""
        View = self.env['ir.ui.view']
        if isinstance(template, pycompat.integer_types):
            view_id = template
        else:
            if '.' not in template:
                template = 'website.%s' % template
            view_id = View.get_view_id(template)
        if not view_id:
            raise NotFound
        return View.browse(view_id)

    @api.model
    def pager(self, url, total, page=1, step=30, scope=5, url_args=None):
        return pager(url,
                     total,
                     page=page,
                     step=step,
                     scope=scope,
                     url_args=url_args)

    def rule_is_enumerable(self, rule):
        """ Checks that it is possible to generate sensible GET queries for
            a given rule (if the endpoint matches its own requirements)
            :type rule: werkzeug.routing.Rule
            :rtype: bool
        """
        endpoint = rule.endpoint
        methods = endpoint.routing.get('methods') or ['GET']

        converters = list(rule._converters.values())
        if not ('GET' in methods and endpoint.routing['type'] == 'http'
                and endpoint.routing['auth'] in ('none', 'public')
                and endpoint.routing.get('website', False) and all(
                    hasattr(converter, 'generate')
                    for converter in converters)
                and endpoint.routing.get('website')):
            return False

        # dont't list routes without argument having no default value or converter
        spec = inspect.getargspec(endpoint.method.original_func)

        # remove self and arguments having a default value
        defaults_count = len(spec.defaults or [])
        args = spec.args[1:(-defaults_count or None)]

        # check that all args have a converter
        return all((arg in rule._converters) for arg in args)

    @api.multi
    def enumerate_pages(self, query_string=None, force=False):
        """ Available pages in the website/CMS. This is mostly used for links
            generation and can be overridden by modules setting up new HTML
            controllers for dynamic pages (e.g. blog).
            By default, returns template views marked as pages.
            :param str query_string: a (user-provided) string, fetches pages
                                     matching the string
            :returns: a list of mappings with two keys: ``name`` is the displayable
                      name of the resource (page), ``url`` is the absolute URL
                      of the same.
            :rtype: list({name: str, url: str})
        """
        router = request.httprequest.app.get_db_router(request.db)
        # Force enumeration to be performed as public user
        url_set = set()

        sitemap_endpoint_done = set()

        for rule in router.iter_rules():
            if 'sitemap' in rule.endpoint.routing:
                if rule.endpoint in sitemap_endpoint_done:
                    continue
                sitemap_endpoint_done.add(rule.endpoint)

                func = rule.endpoint.routing['sitemap']
                if func is False:
                    continue
                for loc in func(self.env, rule, query_string):
                    yield loc
                continue

            if not self.rule_is_enumerable(rule):
                continue

            converters = rule._converters or {}
            if query_string and not converters and (query_string not in rule.build(
                [{}], append_unknown=False)[1]):
                continue
            values = [{}]
            # converters with a domain are processed after the other ones
            convitems = sorted(
                converters.items(),
                key=lambda x: (hasattr(x[1], 'domain') and
                               (x[1].domain != '[]'),
                               rule._trace.index((True, x[0]))))

            for (i, (name, converter)) in enumerate(convitems):
                newval = []
                for val in values:
                    # Only push the free-text query into the last converter.
                    query = i == len(convitems) - 1 and query_string
                    if query:
                        r = "".join([
                            x[1] for x in rule._trace[1:] if not x[0]
                        ])  # remove model converter from route
                        query = sitemap_qs2dom(
                            query, r, self.env[converter.model]._rec_name)
                        if query == FALSE_DOMAIN:
                            continue
                    for value_dict in converter.generate(uid=self.env.uid,
                                                         dom=query,
                                                         args=val):
                        newval.append(val.copy())
                        value_dict[name] = value_dict['loc']
                        del value_dict['loc']
                        newval[-1].update(value_dict)
                values = newval

            for value in values:
                domain_part, url = rule.build(value, append_unknown=False)
                if not query_string or query_string.lower() in url.lower():
                    page = {'loc': url}
                    for key, val in value.items():
                        if key.startswith('__'):
                            page[key[2:]] = val
                    if url in ('/sitemap.xml', ):
                        continue
                    if url in url_set:
                        continue
                    url_set.add(url)

                    yield page

        # '/' already has a http.route & is in the routing_map so it will already have an entry in the xml
        domain = [('url', '!=', '/')]
        if not force:
            domain += [('website_indexed', '=', True)]
            #is_visible
            domain += [('website_published', '=', True), '|',
                       ('date_publish', '=', False),
                       ('date_publish', '<=', fields.Datetime.now())]
        if query_string:
            domain += [('url', 'like', query_string)]

        pages = self.get_website_pages(domain)

        for page in pages:
            record = {
                'loc': page['url'],
                'id': page['id'],
                'name': page['name']
            }
            if page.view_id and page.view_id.priority != 16:
                record['__priority'] = min(
                    round(page.view_id.priority / 32.0, 1), 1)
            if page['write_date']:
                record['__lastmod'] = page['write_date'][:10]
            yield record

    @api.multi
    def get_website_pages(self, domain=None, order='name', limit=None):
        """Return website.page records matching `domain`, restricted to the
        current website (or shared pages).

        Bug fix: the original signature used a mutable default (``domain=[]``)
        and then extended it in place with ``+=``, so the website clauses
        accumulated in the shared default list across calls (and mutated any
        caller-provided list). A fresh copy is taken instead.
        """
        domain = list(domain) if domain else []
        domain += [
            '|', ('website_ids', 'in', self.get_current_website().id),
            ('website_ids', '=', False)
        ]
        pages = request.env['website.page'].search(domain,
                                                   order='name',
                                                   limit=limit)
        return pages

    @api.multi
    def search_pages(self, needle=None, limit=None):
        """Return up to `limit` enumerated pages whose URL matches `needle`."""
        name = slugify(needle, max_length=50, path=True)
        res = []
        for page in self.enumerate_pages(query_string=name, force=True):
            res.append(page)
            if len(res) == limit:
                break
        return res

    @api.model
    def image_url(self, record, field, size=None):
        """ Returns a local url that points to the image field of a given browse record. """
        sudo_record = record.sudo()
        # Cache-busting token derived from the record's last modification date.
        sha = hashlib.sha1(
            getattr(sudo_record,
                    '__last_update').encode('utf-8')).hexdigest()[0:7]
        size = '' if size is None else '/%s' % size
        return '/web/image/%s/%s/%s%s?unique=%s' % (record._name, record.id,
                                                    field, size, sha)

    def get_cdn_url(self, uri):
        """Rewrite `uri` onto the CDN base URL when it matches a CDN filter."""
        self.ensure_one()
        if not uri:
            return ''
        cdn_url = self.cdn_url
        cdn_filters = (self.cdn_filters or '').splitlines()
        for flt in cdn_filters:
            if flt and re.match(flt, uri):
                return urls.url_join(cdn_url, uri)
        return uri

    @api.model
    def action_dashboard_redirect(self):
        """Send designers/admins to the website dashboard, others to the site."""
        if self.env.user.has_group(
                'base.group_system') or self.env.user.has_group(
                    'website.group_website_designer'):
            return self.env.ref('website.backend_dashboard').read()[0]
        return self.env.ref('website.action_website').read()[0]
class Company(models.Model):
    """Company record; address/contact fields are delegated to the related
    partner via compute/inverse pairs."""
    _name = "res.company"
    _description = 'Companies'
    _order = 'sequence, name'

    @api.multi
    def copy(self, default=None):
        # Duplication is explicitly forbidden for companies.
        raise UserError(
            _('Duplicating a company is not allowed. Please create a new company instead.'
              ))

    def _get_logo(self):
        """Return the default company logo as base64."""
        # Fix: context manager closes the file handle deterministically
        # (the previous code leaked it until garbage collection).
        logo_path = os.path.join(tools.config['root_path'], 'addons', 'base',
                                 'res', 'res_company_logo.png')
        with open(logo_path, 'rb') as logo_file:
            return base64.b64encode(logo_file.read())

    @api.model
    def _get_euro(self):
        # The currency with rate 1 is the reference currency (EUR by default).
        return self.env['res.currency.rate'].search([('rate', '=', 1)],
                                                    limit=1).currency_id

    @api.model
    def _get_user_currency(self):
        currency_id = self.env['res.users'].browse(
            self._uid).company_id.currency_id
        return currency_id or self._get_euro()

    name = fields.Char(related='partner_id.name',
                       string='Company Name',
                       required=True,
                       store=True)
    sequence = fields.Integer(
        help='Used to order Companies in the company switcher', default=10)
    parent_id = fields.Many2one('res.company',
                                string='Parent Company',
                                index=True)
    child_ids = fields.One2many('res.company',
                                'parent_id',
                                string='Child Companies')
    partner_id = fields.Many2one('res.partner',
                                 string='Partner',
                                 required=True)
    report_header = fields.Text(
        string='Company Tagline',
        help=
        "Appears by default on the top right corner of your printed documents (report header)."
    )
    report_footer = fields.Text(
        string='Report Footer',
        translate=True,
        help="Footer text displayed at the bottom of all reports.")
    logo = fields.Binary(related='partner_id.image',
                         default=_get_logo,
                         string="Company Logo")
    # logo_web: do not store in attachments, since the image is retrieved in SQL for
    # performance reasons (see addons/web/controllers/main.py, Binary.company_logo)
    logo_web = fields.Binary(compute='_compute_logo_web', store=True)
    currency_id = fields.Many2one(
        'res.currency',
        string='Currency',
        required=True,
        default=lambda self: self._get_user_currency())
    user_ids = fields.Many2many('res.users',
                                'res_company_users_rel',
                                'cid',
                                'user_id',
                                string='Accepted Users')
    account_no = fields.Char(string='Account No.')
    street = fields.Char(compute='_compute_address',
                         inverse='_inverse_street')
    street2 = fields.Char(compute='_compute_address',
                          inverse='_inverse_street2')
    zip = fields.Char(compute='_compute_address', inverse='_inverse_zip')
    city = fields.Char(compute='_compute_address', inverse='_inverse_city')
    state_id = fields.Many2one('res.country.state',
                               compute='_compute_address',
                               inverse='_inverse_state',
                               string="Fed. State")
    bank_ids = fields.One2many('res.partner.bank',
                               'company_id',
                               string='Bank Accounts',
                               help='Bank accounts related to this company')
    country_id = fields.Many2one('res.country',
                                 compute='_compute_address',
                                 inverse='_inverse_country',
                                 string="Country")
    email = fields.Char(related='partner_id.email', store=True)
    phone = fields.Char(related='partner_id.phone', store=True)
    website = fields.Char(related='partner_id.website')
    vat = fields.Char(related='partner_id.vat', string="TIN")
    company_registry = fields.Char()
    paperformat_id = fields.Many2one(
        'report.paperformat',
        'Paper format',
        default=lambda self: self.env.ref('base.paperformat_euro',
                                          raise_if_not_found=False))
    external_report_layout = fields.Selection([
        ('background', 'Background'),
        ('boxed', 'Boxed'),
        ('clean', 'Clean'),
        ('standard', 'Standard'),
    ],
                                              string='Document Template')

    _sql_constraints = [('name_uniq', 'unique (name)',
                         'The company name must be unique !')]

    @api.model_cr
    def init(self):
        """Backfill the paper format on companies that miss one."""
        for company in self.search([('paperformat_id', '=', False)]):
            paperformat_euro = self.env.ref('base.paperformat_euro', False)
            if paperformat_euro:
                company.write({'paperformat_id': paperformat_euro.id})
        sup = super(Company, self)
        if hasattr(sup, 'init'):
            sup.init()

    def _get_company_address_fields(self, partner):
        """Map partner address values onto company address fields."""
        return {
            'street': partner.street,
            'street2': partner.street2,
            'city': partner.city,
            'zip': partner.zip,
            'state_id': partner.state_id,
            'country_id': partner.country_id,
        }

    # TODO @api.depends(): currently now way to formulate the dependency on the
    # partner's contact address
    def _compute_address(self):
        for company in self.filtered(lambda company: company.partner_id):
            address_data = company.partner_id.sudo().address_get(
                adr_pref=['contact'])
            if address_data['contact']:
                partner = company.partner_id.browse(
                    address_data['contact']).sudo()
                company.update(company._get_company_address_fields(partner))

    def _inverse_street(self):
        for company in self:
            company.partner_id.street = company.street

    def _inverse_street2(self):
        for company in self:
            company.partner_id.street2 = company.street2

    def _inverse_zip(self):
        for company in self:
            company.partner_id.zip = company.zip

    def _inverse_city(self):
        for company in self:
            company.partner_id.city = company.city

    def _inverse_state(self):
        for company in self:
            company.partner_id.state_id = company.state_id

    def _inverse_country(self):
        for company in self:
            company.partner_id.country_id = company.country_id

    @api.depends('partner_id', 'partner_id.image')
    def _compute_logo_web(self):
        # Web logo is a 180px-wide resize of the partner image.
        for company in self:
            company.logo_web = tools.image_resize_image(
                company.partner_id.image, (180, None))

    @api.onchange('state_id')
    def _onchange_state(self):
        self.country_id = self.state_id.country_id

    @api.multi
    def on_change_country(self, country_id):
        # This function is called from account/models/chart_template.py, hence decorated with `multi`.
        self.ensure_one()
        currency_id = self._get_user_currency()
        if country_id:
            currency_id = self.env['res.country'].browse(
                country_id).currency_id
        return {'value': {'currency_id': currency_id.id}}

    @api.onchange('country_id')
    def _onchange_country_id_wrapper(self):
        res = {'domain': {'state_id': []}}
        if self.country_id:
            res['domain']['state_id'] = [('country_id', '=',
                                          self.country_id.id)]
        values = self.on_change_country(self.country_id.id)['value']
        for fname, value in values.items():
            setattr(self, fname, value)
        return res

    @api.model
    def name_search(self, name='', args=None, operator='ilike', limit=100):
        """Name search; in 'user_preference' mode, restrict to (and allow all
        of) the current user's companies, searched as superuser."""
        context = dict(self.env.context)
        newself = self
        if context.pop('user_preference', None):
            # We browse as superuser. Otherwise, the user would be able to
            # select only the currently visible companies (according to rules,
            # which are probably to allow to see the child companies) even if
            # she belongs to some other companies.
            companies = self.env.user.company_id + self.env.user.company_ids
            args = (args or []) + [('id', 'in', companies.ids)]
            newself = newself.sudo()
        return super(Company, newself.with_context(context)).name_search(
            name=name, args=args, operator=operator, limit=limit)

    @api.model
    @api.returns('self', lambda value: value.id)
    def _company_default_get(self, object=False, field=False):
        """ Returns the default company (usually the user's company).
        The 'object' and 'field' arguments are ignored but left here for
        backward compatibility and potential override.
        """
        return self.env['res.users']._get_company()

    @api.model
    @tools.ormcache('self.env.uid', 'company')
    def _get_company_children(self, company=None):
        if not company:
            return []
        return self.search([('parent_id', 'child_of', [company])]).ids

    @api.multi
    def _get_partner_hierarchy(self):
        """Return the partner ids of the whole company tree, starting from
        the topmost ancestor."""
        self.ensure_one()
        parent = self.parent_id
        if parent:
            return parent._get_partner_hierarchy()
        else:
            return self._get_partner_descendance([])

    @api.multi
    def _get_partner_descendance(self, descendance):
        """Accumulate the partner ids of this company and its descendants."""
        self.ensure_one()
        descendance.append(self.partner_id.id)
        for child_id in self._get_company_children(self.id):
            if child_id != self.id:
                descendance = self.browse(child_id)._get_partner_descendance(
                    descendance)
        return descendance

    # deprecated, use clear_caches() instead
    def cache_restart(self):
        self.clear_caches()

    @api.model
    def create(self, vals):
        """Create a company; when no partner is supplied (and a name is),
        create the corresponding partner first and link both ways."""
        if not vals.get('name') or vals.get('partner_id'):
            self.clear_caches()
            return super(Company, self).create(vals)
        partner = self.env['res.partner'].create({
            'name': vals['name'],
            'is_company': True,
            'image': vals.get('logo'),
            'customer': False,
            'email': vals.get('email'),
            'phone': vals.get('phone'),
            'website': vals.get('website'),
            'vat': vals.get('vat'),
        })
        vals['partner_id'] = partner.id
        self.clear_caches()
        company = super(Company, self).create(vals)
        partner.write({'company_id': company.id})
        return company

    @api.multi
    def write(self, values):
        # Company hierarchy is cached (ormcache); invalidate on any write.
        self.clear_caches()
        return super(Company, self).write(values)

    @api.constrains('parent_id')
    def _check_parent_id(self):
        """Forbid cycles in the company hierarchy."""
        if not self._check_recursion():
            raise ValidationError(
                _('Error ! You cannot create recursive companies.'))

    @api.multi
    def open_company_edit_report(self):
        self.ensure_one()
        return self.env['res.config.settings'].open_company()

    @api.multi
    def write_company_and_print_report(self, values):
        """Write `values`, then trigger the report action named in them
        (if any); otherwise return the write result."""
        res = self.write(values)
        report_name = values.get('default_report_name')
        active_ids = values.get('active_ids')
        active_model = values.get('active_model')
        if report_name and active_ids and active_model:
            docids = self.env[active_model].browse(active_ids)
            return (self.env['ir.actions.report'].search(
                [('report_name', '=', report_name)],
                limit=1).with_context(values).report_action(docids))
        else:
            return res
class ProductTemplate(models.Model):
    """Product template: the generic product record from which variants
    (product.product) are generated via attribute value combinations.
    Most *_compute/_set pairs proxy a field to/from the single variant when
    the template has exactly one."""
    _name = "product.template"
    _inherit = ['mail.thread', 'mail.activity.mixin']
    _description = "Product Template"
    _order = "name"

    def _get_default_category_id(self):
        # Category from context first, then the 'All' category, then any
        # category at all; redirect the user to create one if none exists.
        if self._context.get('categ_id') or self._context.get(
                'default_categ_id'):
            return self._context.get('categ_id') or self._context.get(
                'default_categ_id')
        category = self.env.ref('product.product_category_all',
                                raise_if_not_found=False)
        if not category:
            category = self.env['product.category'].search([], limit=1)
        if category:
            return category.id
        else:
            err_msg = _(
                'You must define at least one product category in order to be able to create products.'
            )
            redir_msg = _('Go to Internal Categories')
            raise RedirectWarning(
                err_msg,
                self.env.ref('product.product_category_action_form').id,
                redir_msg)

    def _get_default_uom_id(self):
        # First UoM by id — presumably the reference unit; verify data setup.
        return self.env["product.uom"].search([], limit=1, order='id').id

    name = fields.Char('Name', index=True, required=True, translate=True)
    sequence = fields.Integer(
        'Sequence',
        default=1,
        help='Gives the sequence order when displaying a product list')
    description = fields.Text(
        'Description',
        translate=True,
        help=
        "A precise description of the Product, used only for internal information purposes."
    )
    description_purchase = fields.Text(
        'Purchase Description',
        translate=True,
        help=
        "A description of the Product that you want to communicate to your vendors. "
        "This description will be copied to every Purchase Order, Receipt and Vendor Bill/Credit Note."
    )
    description_sale = fields.Text(
        'Sale Description',
        translate=True,
        help=
        "A description of the Product that you want to communicate to your customers. "
        "This description will be copied to every Sales Order, Delivery Order and Customer Invoice/Credit Note"
    )
    type = fields.Selection(
        [('consu', 'Consumable'), ('service', 'Service')],
        string='Product Type',
        default='consu',
        required=True,
        help=
        'A stockable product is a product for which you manage stock. The "Inventory" app has to be installed.\n'
        'A consumable product, on the other hand, is a product for which stock is not managed.\n'
        'A service is a non-material product you provide.\n'
        'A digital content is a non-material product you sell online. The files attached to the products are the one that are sold on '
        'the e-commerce such as e-books, music, pictures,... The "Digital Product" module has to be installed.'
    )
    rental = fields.Boolean('Can be Rent')
    categ_id = fields.Many2one('product.category',
                               'Internal Category',
                               change_default=True,
                               default=_get_default_category_id,
                               required=True,
                               help="Select category for the current product")
    currency_id = fields.Many2one('res.currency',
                                  'Currency',
                                  compute='_compute_currency_id')

    # price fields
    price = fields.Float('Price',
                         compute='_compute_template_price',
                         inverse='_set_template_price',
                         digits=dp.get_precision('Product Price'))
    list_price = fields.Float(
        'Sales Price',
        default=1.0,
        digits=dp.get_precision('Product Price'),
        help=
        "Base price to compute the customer price. Sometimes called the catalog price."
    )
    lst_price = fields.Float('Public Price',
                             related='list_price',
                             digits=dp.get_precision('Product Price'))
    standard_price = fields.Float(
        'Cost',
        compute='_compute_standard_price',
        inverse='_set_standard_price',
        search='_search_standard_price',
        digits=dp.get_precision('Product Price'),
        groups="base.group_user",
        help=
        "Cost used for stock valuation in standard price and as a first price to set in average/fifo. "
        "Also used as a base price for pricelists. "
        "Expressed in the default unit of measure of the product. ")
    volume = fields.Float('Volume',
                          compute='_compute_volume',
                          inverse='_set_volume',
                          help="The volume in m3.",
                          store=True)
    weight = fields.Float(
        'Weight',
        compute='_compute_weight',
        digits=dp.get_precision('Stock Weight'),
        inverse='_set_weight',
        store=True,
        help=
        "The weight of the contents in Kg, not including any packaging, etc.")
    sale_ok = fields.Boolean(
        'Can be Sold',
        default=True,
        help="Specify if the product can be selected in a sales order line.")
    purchase_ok = fields.Boolean('Can be Purchased', default=True)
    pricelist_id = fields.Many2one(
        'product.pricelist',
        'Pricelist',
        store=False,
        help=
        'Technical field. Used for searching on pricelists, not stored in database.'
    )
    uom_id = fields.Many2one(
        'product.uom',
        'Unit of Measure',
        default=_get_default_uom_id,
        required=True,
        help="Default Unit of Measure used for all stock operation.")
    uom_po_id = fields.Many2one(
        'product.uom',
        'Purchase Unit of Measure',
        default=_get_default_uom_id,
        required=True,
        help=
        "Default Unit of Measure used for purchase orders. It must be in the same category than the default unit of measure."
    )
    company_id = fields.Many2one('res.company',
                                 'Company',
                                 default=lambda self: self.env['res.company'].
                                 _company_default_get('product.template'),
                                 index=1)
    packaging_ids = fields.One2many(
        'product.packaging',
        string="Product Packages",
        compute="_compute_packaging_ids",
        inverse="_set_packaging_ids",
        help="Gives the different ways to package the same product.")
    seller_ids = fields.One2many('product.supplierinfo', 'product_tmpl_id',
                                 'Vendors')
    variant_seller_ids = fields.One2many('product.supplierinfo',
                                         'product_tmpl_id')
    active = fields.Boolean(
        'Active',
        default=True,
        help=
        "If unchecked, it will allow you to hide the product without removing it."
    )
    color = fields.Integer('Color Index')
    is_product_variant = fields.Boolean(string='Is a product variant',
                                        compute='_compute_is_product_variant')
    attribute_line_ids = fields.One2many('product.attribute.line',
                                         'product_tmpl_id',
                                         'Product Attributes')
    product_variant_ids = fields.One2many('product.product',
                                          'product_tmpl_id',
                                          'Products',
                                          required=True)
    # performance: product_variant_id provides prefetching on the first product variant only
    product_variant_id = fields.Many2one('product.product',
                                         'Product',
                                         compute='_compute_product_variant_id')
    product_variant_count = fields.Integer(
        '# Product Variants', compute='_compute_product_variant_count')
    # related to display product product information if is_product_variant
    barcode = fields.Char('Barcode',
                          oldname='ean13',
                          related='product_variant_ids.barcode')
    default_code = fields.Char('Internal Reference',
                               compute='_compute_default_code',
                               inverse='_set_default_code',
                               store=True)
    item_ids = fields.One2many('product.pricelist.item', 'product_tmpl_id',
                               'Pricelist Items')
    # image: all image fields are base64 encoded and PIL-supported
    image = fields.Binary(
        "Image",
        attachment=True,
        help=
        "This field holds the image used as image for the product, limited to 1024x1024px."
    )
    image_medium = fields.Binary(
        "Medium-sized image",
        attachment=True,
        help="Medium-sized image of the product. It is automatically "
        "resized as a 128x128px image, with aspect ratio preserved, "
        "only when the image exceeds one of those sizes. Use this field in form views or some kanban views."
    )
    image_small = fields.Binary(
        "Small-sized image",
        attachment=True,
        help="Small-sized image of the product. It is automatically "
        "resized as a 64x64px image, with aspect ratio preserved. "
        "Use this field anywhere a small image is required.")

    @api.depends('product_variant_ids')
    def _compute_product_variant_id(self):
        # Expose the first variant only (cheap prefetch, see field comment).
        for p in self:
            p.product_variant_id = p.product_variant_ids[:1].id

    @api.multi
    def _compute_currency_id(self):
        """Currency of the template's company, falling back to the main
        company's (or first company's) currency."""
        try:
            main_company = self.sudo().env.ref('base.main_company')
        except ValueError:
            main_company = self.env['res.company'].sudo().search([],
                                                                 limit=1,
                                                                 order="id")
        for template in self:
            template.currency_id = template.company_id.sudo(
            ).currency_id.id or main_company.currency_id.id

    @api.multi
    def _compute_template_price(self):
        """Price according to the pricelist given in context (by id or by
        display name); 0.0 when no pricelist applies."""
        prices = {}
        pricelist_id_or_name = self._context.get('pricelist')
        if pricelist_id_or_name:
            pricelist = None
            partner = self._context.get('partner')
            quantity = self._context.get('quantity', 1.0)
            # Support context pricelists specified as display_name or ID for compatibility
            if isinstance(pricelist_id_or_name, pycompat.string_types):
                pricelist_data = self.env['product.pricelist'].name_search(
                    pricelist_id_or_name, operator='=', limit=1)
                if pricelist_data:
                    pricelist = self.env['product.pricelist'].browse(
                        pricelist_data[0][0])
            elif isinstance(pricelist_id_or_name, pycompat.integer_types):
                pricelist = self.env['product.pricelist'].browse(
                    pricelist_id_or_name)
            if pricelist:
                quantities = [quantity] * len(self)
                partners = [partner] * len(self)
                prices = pricelist.get_products_price(self, quantities,
                                                      partners)
        for template in self:
            template.price = prices.get(template.id, 0.0)

    @api.multi
    def _set_template_price(self):
        # Inverse of `price`: write it back to list_price, converting from
        # the context UoM when one is given.
        if self._context.get('uom'):
            for template in self:
                value = self.env['product.uom'].browse(
                    self._context['uom'])._compute_price(
                        template.price, template.uom_id)
                template.write({'list_price': value})
        else:
            self.write({'list_price': self.price})

    @api.depends('product_variant_ids', 'product_variant_ids.standard_price')
    def _compute_standard_price(self):
        # Mirror the single variant's cost; multi-variant templates get 0.0.
        unique_variants = self.filtered(
            lambda template: len(template.product_variant_ids) == 1)
        for template in unique_variants:
            template.standard_price = template.product_variant_ids.standard_price
        for template in (self - unique_variants):
            template.standard_price = 0.0

    @api.one
    def _set_standard_price(self):
        # Write-through to the single variant only.
        if len(self.product_variant_ids) == 1:
            self.product_variant_ids.standard_price = self.standard_price

    def _search_standard_price(self, operator, value):
        # Search proxy: match templates via their variants' standard_price.
        products = self.env['product.product'].search(
            [('standard_price', operator, value)], limit=None)
        return [('id', 'in', products.mapped('product_tmpl_id').ids)]

    @api.depends('product_variant_ids', 'product_variant_ids.volume')
    def _compute_volume(self):
        # Mirror the single variant's volume; multi-variant templates get 0.0.
        unique_variants = self.filtered(
            lambda template: len(template.product_variant_ids) == 1)
        for template in unique_variants:
            template.volume = template.product_variant_ids.volume
        for template in (self - unique_variants):
            template.volume = 0.0

    @api.one
    def _set_volume(self):
        # Write-through to the single variant only.
        if len(self.product_variant_ids) == 1:
            self.product_variant_ids.volume = self.volume

    @api.depends('product_variant_ids', 'product_variant_ids.weight')
    def _compute_weight(self):
        # Mirror the single variant's weight; multi-variant templates get 0.0.
        unique_variants = self.filtered(
            lambda template: len(template.product_variant_ids) == 1)
        for template in unique_variants:
            template.weight = template.product_variant_ids.weight
        for template in (self - unique_variants):
            template.weight = 0.0

    def _compute_is_product_variant(self):
        # Templates are never variants; product.product overrides this.
        for template in self:
            template.is_product_variant = False

    @api.one
    def _set_weight(self):
        # Write-through to the single variant only.
        if len(self.product_variant_ids) == 1:
            self.product_variant_ids.weight = self.weight

    @api.one
    @api.depends('product_variant_ids.product_tmpl_id')
    def _compute_product_variant_count(self):
        # do not pollute variants to be prefetched when counting variants
        self.product_variant_count = len(
            self.with_prefetch().product_variant_ids)

    @api.depends('product_variant_ids', 'product_variant_ids.default_code')
    def _compute_default_code(self):
        # Mirror the single variant's reference; multi-variant templates get ''.
        unique_variants = self.filtered(
            lambda template: len(template.product_variant_ids) == 1)
        for template in unique_variants:
            template.default_code = template.product_variant_ids.default_code
        for template in (self - unique_variants):
            template.default_code = ''

    @api.one
    def _set_default_code(self):
        # Write-through to the single variant only.
        if len(self.product_variant_ids) == 1:
            self.product_variant_ids.default_code = self.default_code

    @api.depends('product_variant_ids', 'product_variant_ids.packaging_ids')
    def _compute_packaging_ids(self):
        # Mirror the single variant's packagings (left unset otherwise).
        for p in self:
            if len(p.product_variant_ids) == 1:
                p.packaging_ids = p.product_variant_ids.packaging_ids

    def _set_packaging_ids(self):
        # Write-through to the single variant only.
        for p in self:
            if len(p.product_variant_ids) == 1:
                p.product_variant_ids.packaging_ids = p.packaging_ids

    @api.constrains('uom_id', 'uom_po_id')
    def _check_uom(self):
        # Sale and purchase UoM must share a category so conversion is defined.
        if any(template.uom_id and template.uom_po_id
               and template.uom_id.category_id !=
               template.uom_po_id.category_id for template in self):
            raise ValidationError(
                _('Error: The default Unit of Measure and the purchase Unit of Measure must be in the same category.'
                  ))
        return True

    @api.onchange('uom_id')
    def _onchange_uom_id(self):
        # Default the purchase UoM to the stock UoM.
        if self.uom_id:
            self.uom_po_id = self.uom_id.id

    @api.model
    def create(self, vals):
        ''' Store the initial standard price in order to be able to retrieve the cost of a product template for a given date'''
        # TDE FIXME: context brol
        tools.image_resize_images(vals)
        template = super(ProductTemplate, self).create(vals)
        if "create_product_product" not in self._context:
            template.with_context(create_from_tmpl=True).create_variant_ids()

        # This is needed to set given values to first variant after creation
        related_vals = {}
        if vals.get('barcode'):
            related_vals['barcode'] = vals['barcode']
        if vals.get('default_code'):
            related_vals['default_code'] = vals['default_code']
        if vals.get('standard_price'):
            related_vals['standard_price'] = vals['standard_price']
        if vals.get('volume'):
            related_vals['volume'] = vals['volume']
        if vals.get('weight'):
            related_vals['weight'] = vals['weight']
        if related_vals:
            template.write(related_vals)

        return template

    @api.multi
    def write(self, vals):
        """Write; additionally (re)generate variants when attribute lines
        change or the template is re-activated, and archive all variants
        when the template is archived."""
        tools.image_resize_images(vals)
        res = super(ProductTemplate, self).write(vals)
        if 'attribute_line_ids' in vals or vals.get('active'):
            self.create_variant_ids()
        if 'active' in vals and not vals.get('active'):
            # Archiving the template cascades to every variant.
            self.with_context(
                active_test=False).mapped('product_variant_ids').write(
                    {'active': vals.get('active')})
        return res

    @api.multi
    def copy(self, default=None):
        # TDE FIXME: should probably be copy_data
        self.ensure_one()
        if default is None:
            default = {}
        if 'name' not in default:
            default['name'] = _("%s (copy)") % self.name
        return super(ProductTemplate, self).copy(default=default)

    @api.multi
    def name_get(self):
        # "[CODE] Name" when an internal reference exists, plain name otherwise.
        return [(template.id, '%s%s' %
                 (template.default_code and '[%s] ' % template.default_code
                  or '', template.name)) for template in self]

    @api.model
    def name_search(self, name='', args=None, operator='ilike', limit=100):
        """Search templates through their variants so barcode/default_code
        matches on variants are found, then re-order via the standard
        template name_search."""
        # Only use the product.product heuristics if there is a search term and the domain
        # does not specify a match on `product.template` IDs.
        if not name or any(term[0] == 'id' for term in (args or [])):
            return super(ProductTemplate, self).name_search(name=name,
                                                            args=args,
                                                            operator=operator,
                                                            limit=limit)
        Product = self.env['product.product']
        templates = self.browse([])
        while True:
            # Exclude templates already found to make progress each round.
            domain = templates and [
                ('product_tmpl_id', 'not in', templates.ids)
            ] or []
            args = args if args is not None else []
            products_ns = Product.name_search(name,
                                              args + domain,
                                              operator=operator)
            products = Product.browse([x[0] for x in products_ns])
            templates |= products.mapped('product_tmpl_id')
            if (not products) or (limit and (len(templates) > limit)):
                break

        # re-apply product.template order + name_get
        return super(ProductTemplate,
                     self).name_search('',
                                       args=[('id', 'in',
                                              list(set(templates.ids)))],
                                       operator='ilike',
                                       limit=limit)

    @api.multi
    def price_compute(self,
                      price_type,
                      uom=False,
                      currency=False,
                      company=False):
        """Return {template_id: price} for the field named by `price_type`
        (e.g. 'list_price', 'standard_price'), optionally converted to the
        given UoM and currency."""
        # TDE FIXME: delegate to template or not ? fields are reencoded here ...
        # compatibility about context keys used a bit everywhere in the code
        if not uom and self._context.get('uom'):
            uom = self.env['product.uom'].browse(self._context['uom'])
        if not currency and self._context.get('currency'):
            currency = self.env['res.currency'].browse(
                self._context['currency'])

        templates = self
        if price_type == 'standard_price':
            # standard_price field can only be seen by users in base.group_user
            # Thus, in order to compute the sale price from the cost for users not in this group
            # We fetch the standard price as the superuser
            templates = self.with_context(
                force_company=company and company.id or self._context.get(
                    'force_company', self.env.user.company_id.id)).sudo()

        prices = dict.fromkeys(self.ids, 0.0)
        for template in templates:
            prices[template.id] = template[price_type] or 0.0
            if uom:
                prices[template.id] = template.uom_id._compute_price(
                    prices[template.id], uom)
            # Convert from current user company currency to asked one
            # This is right cause a field cannot be in more than one currency
            if currency:
                prices[template.id] = template.currency_id.compute(
                    prices[template.id], currency)
        return prices

    # compatibility to remove after v10 - DEPRECATED
    @api.model
    def _price_get(self, products, ptype='list_price'):
        return products.price_compute(ptype)

    @api.multi
    def create_variant_ids(self):
        """(Re)generate product.product variants from the cartesian product
        of the template's attribute values: create the missing combinations,
        reactivate matching archived variants, and unlink (or archive, when
        referenced) the obsolete ones."""
        Product = self.env["product.product"]
        AttributeValues = self.env['product.attribute.value']

        for tmpl_id in self.with_context(active_test=False):
            # adding an attribute with only one value should not recreate product
            # write this attribute on every product to make sure we don't lose them
            variant_alone = tmpl_id.attribute_line_ids.filtered(
                lambda line: line.attribute_id.create_variant and len(
                    line.value_ids) == 1).mapped('value_ids')
            for value_id in variant_alone:
                updated_products = tmpl_id.product_variant_ids.filtered(
                    lambda product: value_id.attribute_id not in product.
                    mapped('attribute_value_ids.attribute_id'))
                updated_products.write(
                    {'attribute_value_ids': [(4, value_id.id)]})

            # iterator of n-uple of product.attribute.value *ids*
            variant_matrix = [
                AttributeValues.browse(value_ids)
                for value_ids in itertools.product(
                    *(line.value_ids.ids
                      for line in tmpl_id.attribute_line_ids
                      if line.value_ids[:1].attribute_id.create_variant))
            ]

            # get the value (id) sets of existing variants
            existing_variants = {
                frozenset(
                    variant.attribute_value_ids.filtered(
                        lambda r: r.attribute_id.create_variant).ids)
                for variant in tmpl_id.product_variant_ids
            }
            # -> for each value set, create a recordset of values to create a
            # variant for if the value set isn't already a variant
            to_create_variants = [
                value_ids for value_ids in variant_matrix
                if set(value_ids.ids) not in existing_variants
            ]

            # check product
            variants_to_activate = self.env['product.product']
            variants_to_unlink = self.env['product.product']
            for product_id in tmpl_id.product_variant_ids:
                if not product_id.active and product_id.attribute_value_ids.filtered(
                        lambda r: r.attribute_id.create_variant
                ) in variant_matrix:
                    variants_to_activate |= product_id
                elif product_id.attribute_value_ids.filtered(
                        lambda r: r.attribute_id.create_variant
                ) not in variant_matrix:
                    variants_to_unlink |= product_id
            if variants_to_activate:
                variants_to_activate.write({'active': True})

            # create new product
            for variant_ids in to_create_variants:
                new_variant = Product.create({
                    'product_tmpl_id':
                    tmpl_id.id,
                    'attribute_value_ids': [(6, 0, variant_ids.ids)]
                })

            # unlink or inactive product
            for variant in variants_to_unlink:
                try:
                    # Savepoint keeps the whole transaction alive if the
                    # unlink fails (e.g. variant referenced elsewhere).
                    with self._cr.savepoint(), tools.mute_logger(
                            'gecoerp.sql_db'):
                        variant.unlink()
                # We catch all kind of exception to be sure that the operation doesn't fail.
                except (psycopg2.Error, except_orm):
                    # Fall back to archiving when deletion is not possible.
                    variant.write({'active': False})
                    pass
        return True
class AccountBankStatementImport(models.TransientModel):
    """Wizard importing an electronic bank statement file, creating the
    bank statement(s) and dispatching to the reconciliation widget."""
    _name = 'account.bank.statement.import'
    _description = 'Import Bank Statement'

    data_file = fields.Binary(
        string='Bank Statement File',
        required=True,
        help=
        'Get you bank statements in electronic format from your bank and select them here.'
    )
    filename = fields.Char()

    @api.multi
    def import_file(self):
        """ Process the file chosen in the wizard, create bank statement(s) and go to reconciliation. """
        self.ensure_one()
        # Let the appropriate implementation module parse the file and return the required data
        # The active_id is passed in context in case an implementation module requires information about the wizard state (see QIF)
        currency_code, account_number, stmts_vals = self.with_context(
            active_id=self.ids[0])._parse_file(base64.b64decode(
                self.data_file))
        # Check raw data
        self._check_parsed_data(stmts_vals)
        # Try to find the currency and journal in gecoerp
        currency, journal = self._find_additional_data(currency_code,
                                                       account_number)
        # If no journal found, ask the user about creating one
        if not journal:
            # The active_id is passed in context so the wizard can call import_file again once the journal is created
            return self.with_context(
                active_id=self.ids[0])._journal_creation_wizard(
                    currency, account_number)
        if not journal.default_debit_account_id or not journal.default_credit_account_id:
            raise UserError(
                _('You have to set a Default Debit Account and a Default Credit Account for the journal: %s'
                  ) % (journal.name, ))
        # Prepare statement data to be used for bank statements creation
        stmts_vals = self._complete_stmts_vals(stmts_vals, journal,
                                               account_number)
        # Create the bank statements
        statement_ids, notifications = self._create_bank_statements(stmts_vals)
        # Now that the import worked out, set it as the bank_statements_source of the journal
        if journal.bank_statements_source != 'file_import':
            # Use sudo() because only 'account.group_account_manager'
            # has write access on 'account.journal', but 'account.group_account_user'
            # must be able to import bank statement files
            journal.sudo().bank_statements_source = 'file_import'
        # Finally dispatch to reconciliation interface
        action = self.env.ref('account.action_bank_reconcile_bank_statements')
        return {
            'name': action.name,
            'tag': action.tag,
            'context': {
                'statement_ids': statement_ids,
                'notifications': notifications
            },
            'type': 'ir.actions.client',
        }

    def _journal_creation_wizard(self, currency, account_number):
        """ Calls a wizard that allows the user to carry on with journal creation """
        return {
            'name': _('Journal Creation'),
            'type': 'ir.actions.act_window',
            'res_model': 'account.bank.statement.import.journal.creation',
            'view_type': 'form',
            'view_mode': 'form',
            'target': 'new',
            'context': {
                'statement_import_transient_id':
                self.env.context['active_id'],
                'default_bank_acc_number': account_number,
                'default_name': _('Bank') + ' ' + account_number,
                'default_currency_id': currency and currency.id or False,
                'default_type': 'bank',
            }
        }

    def _parse_file(self, data_file):
        """ Each module adding a file support must extends this method. It processes the file if it can, returns super otherwise, resulting in a chain of responsibility.
            This method parses the given file and returns the data required by the bank statement import process, as specified below.
            rtype: triplet (if a value can't be retrieved, use None)
                - currency code: string (e.g: 'EUR')
                    The ISO 4217 currency code, case insensitive
                - account number: string (e.g: 'BE1234567890')
                    The number of the bank account which the statement belongs to
                - bank statements data: list of dict containing (optional items marked by o) :
                    - 'name': string (e.g: '000000123')
                    - 'date': date (e.g: 2013-06-26)
                    -o 'balance_start': float (e.g: 8368.56)
                    -o 'balance_end_real': float (e.g: 8888.88)
                    - 'transactions': list of dict containing :
                        - 'name': string (e.g: 'KBC-INVESTERINGSKREDIET 787-5562831-01')
                        - 'date': date
                        - 'amount': float
                        - 'unique_import_id': string
                        -o 'account_number': string
                            Will be used to find/create the res.partner.bank in gecoerp
                        -o 'note': string
                        -o 'partner_name': string
                        -o 'ref': string
        """
        raise UserError(
            _('Could not make sense of the given file.\nDid you install the module to support this type of file ?'
              ))

    def _check_parsed_data(self, stmts_vals):
        """ Basic and structural verifications """
        if len(stmts_vals) == 0:
            raise UserError(_('This file doesn\'t contain any statement.'))

        no_st_line = True
        for vals in stmts_vals:
            if vals['transactions'] and len(vals['transactions']) > 0:
                no_st_line = False
                break
        if no_st_line:
            raise UserError(_('This file doesn\'t contain any transaction.'))

    def _check_journal_bank_account(self, journal, account_number):
        # `account_number` is expected already sanitized by the caller.
        return journal.bank_account_id.sanitized_acc_number == account_number

    def _find_additional_data(self, currency_code, account_number):
        """ Look for a res.currency and account.journal using values extracted from the statement and make sure it's consistent. """
        company_currency = self.env.user.company_id.currency_id
        journal_obj = self.env['account.journal']
        currency = None
        sanitized_account_number = sanitize_account_number(account_number)

        if currency_code:
            currency = self.env['res.currency'].search(
                [('name', '=ilike', currency_code)], limit=1)
            if not currency:
                raise UserError(
                    _("No currency found matching '%s'.") % currency_code)
            if currency == company_currency:
                # Company currency is the implicit default: drop it.
                currency = False

        journal = journal_obj.browse(self.env.context.get('journal_id', []))
        if account_number:
            # No bank account on the journal : create one from the account number of the statement
            if journal and not journal.bank_account_id:
                journal.set_bank_account(account_number)
            # No journal passed to the wizard : try to find one using the account number of the statement
            elif not journal:
                journal = journal_obj.search([
                    ('bank_account_id.sanitized_acc_number', '=',
                     sanitized_account_number)
                ])
            # Already a bank account on the journal : check it's the same as on the statement
            else:
                if not self._check_journal_bank_account(
                        journal, sanitized_account_number):
                    raise UserError(
                        _('The account of this statement (%s) is not the same as the journal (%s).'
                          ) %
                        (account_number, journal.bank_account_id.acc_number))

        # If importing into an existing journal, its currency must be the same as the bank statement
        if journal:
            journal_currency = journal.currency_id
            if currency is None:
                currency = journal_currency
            if currency and currency != journal_currency:
                statement_cur_code = not currency and company_currency.name or currency.name
                journal_cur_code = not journal_currency and company_currency.name or journal_currency.name
                raise UserError(
                    _('The currency of the bank statement (%s) is not the same as the currency of the journal (%s) !'
                      ) % (statement_cur_code, journal_cur_code))

        # If we couldn't find / can't create a journal, everything is lost
        if not journal and not account_number:
            raise UserError(
                _('Cannot find in which journal import this statement. Please manually select a journal.'
                  ))

        return currency, journal

    def _complete_stmts_vals(self, stmts_vals, journal, account_number):
        """Enrich parsed statement values: journal, reference, sequence-based
        name, per-line unique_import_id and partner/bank-account resolution."""
        for st_vals in stmts_vals:
            st_vals['journal_id'] = journal.id
            if not st_vals.get('reference'):
                st_vals['reference'] = self.filename
            if st_vals.get('number'):
                # build the full name like BNK/2016/00135 by just giving the number '135'
                st_vals['name'] = journal.sequence_id.with_context(
                    ir_sequence_date=st_vals.get('date')).get_next_char(
                        st_vals['number'])
                del (st_vals['number'])

            for line_vals in st_vals['transactions']:
                unique_import_id = line_vals.get('unique_import_id')
                if unique_import_id:
                    # Prefix with account number and journal id to keep the
                    # import id unique across journals/accounts.
                    sanitized_account_number = sanitize_account_number(
                        account_number)
                    line_vals['unique_import_id'] = (
                        sanitized_account_number
                        and sanitized_account_number + '-'
                        or '') + str(journal.id) + '-' + unique_import_id

                if not line_vals.get('bank_account_id'):
                    # Find the partner and his bank account or create the bank account. The partner selected during the
                    # reconciliation process will be linked to the bank when the statement is closed.
                    partner_id = False
                    bank_account_id = False
                    identifying_string = line_vals.get('account_number')
                    if identifying_string:
                        partner_bank = self.env['res.partner.bank'].search(
                            [('acc_number', '=', identifying_string)],
                            limit=1)
                        if partner_bank:
                            bank_account_id = partner_bank.id
                            partner_id = partner_bank.partner_id.id
                        else:
                            bank_account_id = self.env[
                                'res.partner.bank'].create({
                                    'acc_number':
                                    line_vals['account_number'],
                                    'partner_id':
                                    False,
                                }).id
                    line_vals['partner_id'] = partner_id
                    line_vals['bank_account_id'] = bank_account_id

        return stmts_vals

    def _create_bank_statements(self, stmts_vals):
        """ Create new bank statements from imported values, filtering out already imported transactions, and returns data used by the reconciliation widget """
        BankStatement = self.env['account.bank.statement']
        BankStatementLine = self.env['account.bank.statement.line']

        # Filter out already imported transactions and create statements
        statement_ids = []
        ignored_statement_lines_import_ids = []
        for st_vals in stmts_vals:
            filtered_st_lines = []
            for line_vals in st_vals['transactions']:
                if 'unique_import_id' not in line_vals \
                   or not line_vals['unique_import_id'] \
                   or not bool(BankStatementLine.sudo().search([('unique_import_id', '=', line_vals['unique_import_id'])], limit=1)):
                    filtered_st_lines.append(line_vals)
                else:
                    ignored_statement_lines_import_ids.append(
                        line_vals['unique_import_id'])
                    # Skipped lines still count in the opening balance so the
                    # statement stays consistent.
                    if 'balance_start' in st_vals:
                        st_vals['balance_start'] += float(line_vals['amount'])
            if len(filtered_st_lines) > 0:
                # Remove values that won't be used to create records
                st_vals.pop('transactions', None)
                for line_vals in filtered_st_lines:
                    line_vals.pop('account_number', None)
                # Create the statement
                st_vals['line_ids'] = [[0, False, line]
                                       for line in filtered_st_lines]
                statement_ids.append(BankStatement.create(st_vals).id)
        if len(statement_ids) == 0:
            raise UserError(_('You have already imported that file.'))

        # Prepare import feedback
        notifications = []
        num_ignored = len(ignored_statement_lines_import_ids)
        if num_ignored > 0:
            notifications += [{
                'type': 'warning',
                'message':
                _("%d transactions had already been imported and were ignored."
                  ) % num_ignored if num_ignored > 1 else
                _("1 transaction had already been imported and was ignored."),
                'details': {
                    'name': _('Already imported items'),
                    'model': 'account.bank.statement.line',
                    'ids': BankStatementLine.search([
                        ('unique_import_id', 'in',
                         ignored_statement_lines_import_ids)
                    ]).ids
                }
            }]
        return statement_ids, notifications
class Channel(models.Model):
    """ A mail.channel is a discussion group that may behave like a listener
    on documents. """
    _description = 'Discussion channel'
    _name = 'mail.channel'
    _mail_flat_thread = False
    _mail_post_access = 'read'
    _inherit = ['mail.thread', 'mail.alias.mixin']

    # number of bounces above which a bouncing partner is unsubscribed
    MAX_BOUNCE_LIMIT = 10

    def _get_default_image(self):
        """ Return the default channel avatar (module image, base64,
        resized to the 'big' size). """
        image_path = modules.get_module_resource('mail', 'static/src/img',
                                                 'groupdefault.png')
        return tools.image_resize_image_big(
            base64.b64encode(open(image_path, 'rb').read()))

    @api.model
    def default_get(self, fields):
        """ Default 'alias_contact' from the channel privacy: everyone may
        post on a public channel, followers only otherwise. """
        res = super(Channel, self).default_get(fields)
        if not res.get('alias_contact') and (not fields
                                             or 'alias_contact' in fields):
            res['alias_contact'] = 'everyone' if res.get(
                'public', 'private') == 'public' else 'followers'
        return res

    name = fields.Char('Name', required=True, translate=True)
    channel_type = fields.Selection([('chat', 'Chat Discussion'),
                                     ('channel', 'Channel')],
                                    'Channel Type',
                                    default='channel')
    description = fields.Text('Description')
    # uuid is used as an anonymous bus channel name for public channels
    uuid = fields.Char('UUID',
                       size=50,
                       index=True,
                       default=lambda self: str(uuid4()),
                       copy=False)
    email_send = fields.Boolean('Send messages by email', default=False)
    # multi users channel
    channel_last_seen_partner_ids = fields.One2many('mail.channel.partner',
                                                    'channel_id',
                                                    string='Last Seen')
    channel_partner_ids = fields.Many2many('res.partner',
                                           'mail_channel_partner',
                                           'channel_id',
                                           'partner_id',
                                           string='Listeners')
    channel_message_ids = fields.Many2many('mail.message',
                                           'mail_message_mail_channel_rel')
    is_member = fields.Boolean('Is a member', compute='_compute_is_member')
    # access
    public = fields.Selection(
        [('public', 'Everyone'), ('private', 'Invited people only'),
         ('groups', 'Selected group of users')],
        'Privacy',
        required=True,
        default='groups',
        help=
        'This group is visible by non members. Invisible groups can add members through the invite button.'
    )
    group_public_id = fields.Many2one(
        'res.groups',
        string='Authorized Group',
        default=lambda self: self.env.ref('base.group_user'))
    group_ids = fields.Many2many(
        'res.groups',
        string='Auto Subscription',
        help="Members of those groups will automatically added as followers. "
        "Note that they will be able to manage their subscription manually "
        "if necessary.")
    # image: all image fields are base64 encoded and PIL-supported
    image = fields.Binary(
        "Photo",
        default=_get_default_image,
        attachment=True,
        help=
        "This field holds the image used as photo for the group, limited to 1024x1024px."
    )
    image_medium = fields.Binary(
        'Medium-sized photo',
        attachment=True,
        help="Medium-sized photo of the group. It is automatically "
        "resized as a 128x128px image, with aspect ratio preserved. "
        "Use this field in form views or some kanban views.")
    image_small = fields.Binary(
        'Small-sized photo',
        attachment=True,
        help="Small-sized photo of the group. It is automatically "
        "resized as a 64x64px image, with aspect ratio preserved. "
        "Use this field anywhere a small image is required.")
    is_subscribed = fields.Boolean('Is Subscribed',
                                   compute='_compute_is_subscribed')

    @api.one
    @api.depends('channel_partner_ids')
    def _compute_is_subscribed(self):
        # True when the current user's partner is a channel listener
        self.is_subscribed = self.env.user.partner_id in self.channel_partner_ids

    @api.multi
    def _compute_is_member(self):
        """ Batch-compute membership of the current user's partner: one
        sudo'ed search over mail.channel.partner for all records. """
        memberships = self.env['mail.channel.partner'].sudo().search([
            ('channel_id', 'in', self.ids),
            ('partner_id', '=', self.env.user.partner_id.id),
        ])
        membership_ids = memberships.mapped('channel_id')
        for record in self:
            record.is_member = record in membership_ids

    @api.onchange('public')
    def _onchange_public(self):
        # keep alias_contact consistent with the selected privacy
        if self.public == 'public':
            self.alias_contact = 'everyone'
        else:
            self.alias_contact = 'followers'

    @api.model
    def create(self, vals):
        """ Create a channel: resize images, create the mail alias, subscribe
        the partners of the auto-subscription groups and make the channel
        follow itself so posting notifies its members. """
        tools.image_resize_images(vals)
        # Create channel and alias
        channel = super(
            Channel,
            self.with_context(alias_model_name=self._name,
                              alias_parent_model_name=self._name,
                              mail_create_nolog=True,
                              mail_create_nosubscribe=True)).create(vals)
        channel.alias_id.write({
            "alias_force_thread_id": channel.id,
            'alias_parent_thread_id': channel.id
        })

        if vals.get('group_ids'):
            channel._subscribe_users()

        # make channel listen itself: posting on a channel notifies the channel
        if not self._context.get('mail_channel_noautofollow'):
            channel.message_subscribe(channel_ids=[channel.id])

        return channel

    @api.multi
    def unlink(self):
        """ Delete channels and their mail aliases; the Whole Company channel
        is protected because other modules rely on it. """
        aliases = self.mapped('alias_id')

        # Delete mail.channel
        try:
            all_emp_group = self.env.ref('mail.channel_all_employees')
        except ValueError:
            all_emp_group = None
        if all_emp_group and all_emp_group in self:
            raise UserError(
                _('You cannot delete those groups, as the Whole Company group is required by other modules.'
                  ))
        res = super(Channel, self).unlink()
        # Cascade-delete mail aliases as well, as they should not exist without the mail.channel.
        aliases.sudo().unlink()
        return res

    @api.multi
    def write(self, vals):
        """ Resize images on write and re-run group auto-subscription when
        the subscription groups change. """
        tools.image_resize_images(vals)
        result = super(Channel, self).write(vals)
        if vals.get('group_ids'):
            self._subscribe_users()
        return result

    def get_alias_model_name(self, vals):
        # mail.alias.mixin hook: model the alias creates records on
        return vals.get('alias_model', 'mail.channel')

    def _subscribe_users(self):
        """ Add the partners of all users in group_ids as channel listeners. """
        for mail_channel in self:
            mail_channel.write({
                'channel_partner_ids':
                [(4, pid)
                 for pid in mail_channel.mapped('group_ids').mapped(
                     'users').mapped('partner_id').ids]
            })

    @api.multi
    def action_follow(self):
        """ Make the current user's partner a channel member (no-op when
        already a member). """
        self.ensure_one()
        channel_partner = self.mapped(
            'channel_last_seen_partner_ids').filtered(
                lambda cp: cp.partner_id == self.env.user.partner_id)
        if not channel_partner:
            return self.write({
                'channel_last_seen_partner_ids': [(0, 0, {
                    'partner_id': self.env.user.partner_id.id
                })]
            })

    @api.multi
    def action_unfollow(self):
        """ Remove the current user's partner from the channel. """
        return self._action_unfollow(self.env.user.partner_id)

    @api.multi
    def _action_unfollow(self, partner):
        """ Unsubscribe the given partner, notify him on the bus and post a
        'left the channel' note (unless it is a mailing-list channel). """
        channel_info = self.channel_info('unsubscribe')[
            0]  # must be computed before leaving the channel (access rights)
        result = self.write({'channel_partner_ids': [(3, partner.id)]})
        self.env['bus.bus'].sendone(
            (self._cr.dbname, 'res.partner', partner.id), channel_info)
        if not self.email_send:
            notification = _(
                '<div class="o_mail_notification">left <a href="#" class="o_channel_redirect" data-oe-id="%s">#%s</a></div>'
            ) % (
                self.id,
                self.name,
            )
            # post 'channel left' message as root since the partner just unsubscribed from the channel
            self.sudo().message_post(body=notification,
                                     message_type="notification",
                                     subtype="mail.mt_comment",
                                     author_id=partner.id)
        return result

    @api.multi
    def _notification_recipients(self, message, groups):
        """ All recipients of a message on a channel are considered as partners.
        This means they will receive a minimal email, without a link to access
        in the backend. Mailing lists should indeed send minimal emails to avoid
        the noise. """
        groups = super(Channel, self)._notification_recipients(message, groups)
        for (index, (group_name, group_func, group_data)) in enumerate(groups):
            if group_name != 'customer':
                groups[index] = (group_name, lambda partner: False, group_data)
        return groups

    @api.multi
    def message_get_email_values(self, notif_mail=None):
        """ Add mailing-list email headers (Precedence, List-Id, List-Post,
        X-Forge-To) to outgoing channel notifications. """
        self.ensure_one()
        res = super(Channel,
                    self).message_get_email_values(notif_mail=notif_mail)
        headers = {}
        if res.get('headers'):
            # headers are stored as a repr'd dict; ignore malformed values
            try:
                headers.update(safe_eval(res['headers']))
            except Exception:
                pass
        headers['Precedence'] = 'list'
        # avoid out-of-office replies from MS Exchange
        # http://blogs.technet.com/b/exchange/archive/2006/10/06/3395024.aspx
        headers['X-Auto-Response-Suppress'] = 'OOF'
        if self.alias_domain and self.alias_name:
            headers['List-Id'] = '<%s.%s>' % (self.alias_name,
                                              self.alias_domain)
            headers['List-Post'] = '<mailto:%s@%s>' % (self.alias_name,
                                                       self.alias_domain)
            # Avoid users thinking it was a personal message
            # X-Forge-To: will replace To: after SMTP envelope is determined by ir.mail.server
            list_to = '"%s" <%s@%s>' % (self.name, self.alias_name,
                                        self.alias_domain)
            headers['X-Forge-To'] = list_to
        res['headers'] = repr(headers)
        return res

    @api.multi
    def message_receive_bounce(self, email, partner, mail_id=None):
        """ Override bounce management to unsubscribe bouncing addresses """
        for p in partner:
            if p.message_bounce >= self.MAX_BOUNCE_LIMIT:
                self._action_unfollow(p)
        return super(Channel, self).message_receive_bounce(email,
                                                           partner,
                                                           mail_id=mail_id)

    @api.multi
    def message_get_recipient_values(self,
                                     notif_message=None,
                                     recipient_ids=None):
        # real mailing list: multiple recipients (hidden by X-Forge-To)
        if self.alias_domain and self.alias_name:
            return {
                'email_to':
                ','.join(
                    formataddr((partner.name, partner.email))
                    for partner in self.env['res.partner'].sudo().browse(
                        recipient_ids)),
                'recipient_ids': [],
            }
        return super(Channel, self).message_get_recipient_values(
            notif_message=notif_message, recipient_ids=recipient_ids)

    @api.multi
    @api.returns('self', lambda value: value.id)
    def message_post(self,
                     body='',
                     subject=None,
                     message_type='notification',
                     subtype=None,
                     parent_id=False,
                     attachments=None,
                     content_subtype='html',
                     **kwargs):
        """ Post on the channel without auto-subscribing the author; posting
        on a chat re-pins it for all its members. """
        # auto pin 'direct_message' channel partner
        self.filtered(lambda channel: channel.channel_type == 'chat').mapped(
            'channel_last_seen_partner_ids').write({'is_pinned': True})
        message = super(
            Channel,
            self.with_context(mail_create_nosubscribe=True)).message_post(
                body=body,
                subject=subject,
                message_type=message_type,
                subtype=subtype,
                parent_id=parent_id,
                attachments=attachments,
                content_subtype=content_subtype,
                **kwargs)
        return message

    def _alias_check_contact(self, message, message_dict, alias):
        """ For 'followers' aliases, only channel members may post by email. """
        if alias.alias_contact == 'followers' and self.ids:
            author = self.env['res.partner'].browse(
                message_dict.get('author_id', False))
            if not author or author not in self.channel_partner_ids:
                return {
                    'error_message': _('restricted to channel members'),
                }
            return True
        return super(Channel, self)._alias_check_contact(
            message, message_dict, alias)

    @api.model_cr
    def init(self):
        # composite index speeding up seen-message lookups; created manually
        # because the ORM only builds single-column indexes
        self._cr.execute(
            'SELECT indexname FROM pg_indexes WHERE indexname = %s',
            ('mail_channel_partner_seen_message_id_idx', ))
        if not self._cr.fetchone():
            self._cr.execute(
                'CREATE INDEX mail_channel_partner_seen_message_id_idx ON mail_channel_partner (channel_id,partner_id,seen_message_id)'
            )

    #------------------------------------------------------
    # Instant Messaging API
    #------------------------------------------------------
    # A channel header should be broadcasted:
    #   - when adding user to channel (only to the new added partners)
    #   - when folding/minimizing a channel (only to the user making the action)
    # A message should be broadcasted:
    #   - when a message is posted on a channel (to the channel, using _notify() method)

    # Anonymous method
    @api.multi
    def _broadcast(self, partner_ids):
        """ Broadcast the current channel header to the given partner ids
            :param partner_ids : the partner to notify
        """
        notifications = self._channel_channel_notifications(partner_ids)
        self.env['bus.bus'].sendmany(notifications)

    @api.multi
    def _channel_channel_notifications(self, partner_ids):
        """ Generate the bus notifications of current channel for the given partner ids
            :param partner_ids : the partner to send the current channel header
            :returns list of bus notifications (tuple (bus_channel, message_content))
        """
        notifications = []
        for partner in self.env['res.partner'].browse(partner_ids):
            user_id = partner.user_ids and partner.user_ids[0] or False
            if user_id:
                # channel_info must be computed as the target user so that
                # access rights and per-user state are correct
                for channel_info in self.sudo(user_id).channel_info():
                    notifications.append([(self._cr.dbname, 'res.partner',
                                           partner.id), channel_info])
        return notifications

    @api.multi
    def _notify(self, message):
        """ Broadcast the given message on the current channels.
            Send the message on the Bus Channel (uuid for public mail.channel,
            and partner private bus channel (the tuple)). A partner will
            receive only one message on its bus channel, even if this message
            belongs to multiple mail channel. Then 'channel_ids' field of the
            received message indicates on which mail channel the message
            should be displayed.
            :param : mail.message to broadcast
        """
        if not self:
            return
        message.ensure_one()
        notifications = self._channel_message_notifications(message)
        self.env['bus.bus'].sendmany(notifications)

    @api.multi
    def _channel_message_notifications(self, message):
        """ Generate the bus notifications for the given message
            :param message : the mail.message to send
            :returns list of bus notifications (tuple (bus_channel, message_content))
        """
        message_values = message.message_format()[0]
        notifications = []
        for channel in self:
            notifications.append([(self._cr.dbname, 'mail.channel',
                                   channel.id),
                                  dict(message_values)])
            # add uuid to allow anonymous to listen
            if channel.public == 'public':
                notifications.append([channel.uuid, dict(message_values)])
        return notifications

    @api.multi
    def channel_info(self, extra_info=False):
        """ Get the informations header for the current channels
            :returns a list of channels values
            :rtype : list(dict)
        """
        channel_infos = []
        partner_channels = self.env['mail.channel.partner']
        # find the channel partner state, if logged user
        if self.env.user and self.env.user.partner_id:
            partner_channels = self.env['mail.channel.partner'].search([
                ('partner_id', '=', self.env.user.partner_id.id),
                ('channel_id', 'in', self.ids)
            ])
        # for each channel, build the information header and include the logged partner information
        for channel in self:
            info = {
                'id': channel.id,
                'name': channel.name,
                'uuid': channel.uuid,
                'state': 'open',
                'is_minimized': False,
                'channel_type': channel.channel_type,
                'public': channel.public,
                'mass_mailing': channel.email_send,
                'group_based_subscription': bool(channel.group_ids),
            }
            if extra_info:
                info['info'] = extra_info
            # add the partner for 'direct mesage' channel
            if channel.channel_type == 'chat':
                # active_test=False: a chat with an archived partner must
                # still expose its correspondent
                info['direct_partner'] = (channel.sudo().with_context(
                    active_test=False).channel_partner_ids.filtered(
                        lambda p: p.id != self.env.user.partner_id.id).read(
                            ['id', 'name', 'im_status']))
            # add last message preview (only used in mobile)
            if self._context.get('isMobile', False):
                last_message = channel.channel_fetch_preview()
                if last_message:
                    info['last_message'] = last_message[0].get('last_message')
            # add user session state, if available and if user is logged
            if partner_channels.ids:
                partner_channel = partner_channels.filtered(
                    lambda c: channel.id == c.channel_id.id)
                if len(partner_channel) >= 1:
                    partner_channel = partner_channel[0]
                    info['state'] = partner_channel.fold_state or 'open'
                    info['is_minimized'] = partner_channel.is_minimized
                    info[
                        'seen_message_id'] = partner_channel.seen_message_id.id
                # add needaction and unread counter, since the user is logged
                info[
                    'message_needaction_counter'] = channel.message_needaction_counter
                info['message_unread_counter'] = channel.message_unread_counter
            channel_infos.append(info)
        return channel_infos

    @api.multi
    def channel_fetch_message(self, last_id=False, limit=20):
        """ Return message values of the current channel.
            :param last_id : last message id to start the research
            :param limit : maximum number of messages to fetch
            :returns list of messages values
            :rtype : list(dict)
        """
        self.ensure_one()
        domain = [("channel_ids", "in", self.ids)]
        if last_id:
            domain.append(("id", "<", last_id))
        return self.env['mail.message'].message_fetch(domain=domain,
                                                      limit=limit)

    # User methods

    @api.model
    def channel_get(self, partners_to, pin=True):
        """ Get the canonical private channel between some partners, create it if needed.
            To reuse an old channel (conversation), this one must be private, and contains
            only the given partners.
            :param partners_to : list of res.partner ids to add to the conversation
            :param pin : True if getting the channel should pin it for the current user
            :returns a channel header, or False if the users_to was False
            :rtype : dict
        """
        if partners_to:
            partners_to.append(self.env.user.partner_id.id)
            # determine type according to the number of partner in the channel
            # (exact member-set match via sorted array_agg comparison)
            self.env.cr.execute("""
                SELECT P.channel_id as channel_id
                FROM mail_channel C, mail_channel_partner P
                WHERE P.channel_id = C.id
                    AND C.public LIKE 'private'
                    AND P.partner_id IN %s
                    AND channel_type LIKE 'chat'
                GROUP BY P.channel_id
                HAVING array_agg(P.partner_id ORDER BY P.partner_id) = %s
            """, (
                tuple(partners_to),
                sorted(list(partners_to)),
            ))
            result = self.env.cr.dictfetchall()
            if result:
                # get the existing channel between the given partners
                channel = self.browse(result[0].get('channel_id'))
                # pin up the channel for the current partner
                if pin:
                    self.env['mail.channel.partner'].search([
                        ('partner_id', '=', self.env.user.partner_id.id),
                        ('channel_id', '=', channel.id)
                    ]).write({'is_pinned': True})
            else:
                # create a new one
                channel = self.create({
                    'channel_partner_ids':
                    [(4, partner_id) for partner_id in partners_to],
                    'public':
                    'private',
                    'channel_type':
                    'chat',
                    'email_send':
                    False,
                    'name':
                    ', '.join(self.env['res.partner'].sudo().browse(
                        partners_to).mapped('name')),
                })
                # broadcast the channel header to the other partner (not me)
                channel._broadcast(partners_to)
            return channel.channel_info()[0]
        return False

    @api.model
    def channel_get_and_minimize(self, partners_to):
        """ Get (or create) the chat with the given partners and open it
        minimized for the current user. """
        channel = self.channel_get(partners_to)
        if channel:
            self.channel_minimize(channel['uuid'])
        return channel

    @api.model
    def channel_fold(self, uuid, state=None):
        """ Update the fold_state of the given session. In order to
            synchronize web browser tabs, the change will be broadcast to
            himself (the current user channel). Note: the user need to be
            logged
            :param state : the new status of the session for the current user.
        """
        domain = [('partner_id', '=', self.env.user.partner_id.id),
                  ('channel_id.uuid', '=', uuid)]
        for session_state in self.env['mail.channel.partner'].search(domain):
            # no explicit state: toggle between 'open' and 'folded'
            if not state:
                state = session_state.fold_state
                if session_state.fold_state == 'open':
                    state = 'folded'
                else:
                    state = 'open'
            session_state.write({
                'fold_state': state,
                'is_minimized': bool(state != 'closed'),
            })
            self.env['bus.bus'].sendone(
                (self._cr.dbname, 'res.partner', self.env.user.partner_id.id),
                session_state.channel_id.channel_info()[0])

    @api.model
    def channel_minimize(self, uuid, minimized=True):
        """ Minimize (or un-minimize) the session identified by its channel
        uuid for the current user, and broadcast the new header to him. """
        values = {
            'fold_state': minimized and 'open' or 'closed',
            'is_minimized': minimized
        }
        domain = [('partner_id', '=', self.env.user.partner_id.id),
                  ('channel_id.uuid', '=', uuid)]
        channel_partners = self.env['mail.channel.partner'].search(domain,
                                                                   limit=1)
        channel_partners.write(values)
        self.env['bus.bus'].sendone(
            (self._cr.dbname, 'res.partner', self.env.user.partner_id.id),
            channel_partners.channel_id.channel_info()[0])

    @api.model
    def channel_pin(self, uuid, pinned=False):
        """ Pin or unpin the channel identified by uuid for the current user;
        unpinning also notifies him so the client removes the channel. """
        # add the person in the channel, and pin it (or unpin it)
        channel = self.search([('uuid', '=', uuid)])
        channel_partners = self.env['mail.channel.partner'].search([
            ('partner_id', '=', self.env.user.partner_id.id),
            ('channel_id', '=', channel.id)
        ])
        if not pinned:
            self.env['bus.bus'].sendone(
                (self._cr.dbname, 'res.partner', self.env.user.partner_id.id),
                channel.channel_info('unsubscribe')[0])
        if channel_partners:
            channel_partners.write({'is_pinned': pinned})

    @api.multi
    def channel_seen(self):
        """ Mark the channel as fully read by the current user: store the
        last message id as seen and broadcast a 'channel_seen' event. """
        self.ensure_one()
        if self.channel_message_ids.ids:
            last_message_id = self.channel_message_ids.ids[
                0]  # zero is the index of the last message
            self.env['mail.channel.partner'].search([
                ('channel_id', 'in', self.ids),
                ('partner_id', '=', self.env.user.partner_id.id)
            ]).write({'seen_message_id': last_message_id})
            self.env['bus.bus'].sendone(
                (self._cr.dbname, 'res.partner', self.env.user.partner_id.id),
                {
                    'info': 'channel_seen',
                    'id': self.id,
                    'last_message_id': last_message_id
                })
            return last_message_id

    @api.multi
    def channel_invite(self, partner_ids):
        """ Add the given partner_ids to the current channels and broadcast the channel header to them.
            :param partner_ids : list of partner id to add
        """
        partners = self.env['res.partner'].browse(partner_ids)

        # add the partner
        for channel in self:
            partners_to_add = partners - channel.channel_partner_ids
            channel.write({
                'channel_last_seen_partner_ids': [(0, 0, {
                    'partner_id': partner_id
                }) for partner_id in partners_to_add.ids]
            })
            for partner in partners_to_add:
                if partner.id != self.env.user.partner_id.id:
                    notification = _(
                        '<div class="o_mail_notification">%(author)s invited %(new_partner)s to <a href="#" class="o_channel_redirect" data-oe-id="%(channel_id)s">#%(channel_name)s</a></div>'
                    ) % {
                        'author': self.env.user.display_name,
                        'new_partner': partner.display_name,
                        'channel_id': channel.id,
                        'channel_name': channel.name,
                    }
                else:
                    notification = _(
                        '<div class="o_mail_notification">joined <a href="#" class="o_channel_redirect" data-oe-id="%s">#%s</a></div>'
                    ) % (
                        channel.id,
                        channel.name,
                    )
                self.message_post(body=notification,
                                  message_type="notification",
                                  subtype="mail.mt_comment",
                                  author_id=partner.id)

        # broadcast the channel header to the added partner
        self._broadcast(partner_ids)

    #------------------------------------------------------
    # Instant Messaging View Specific (Slack Client Action)
    #------------------------------------------------------

    @api.model
    def channel_fetch_slot(self):
        """ Return the channels of the user grouped by 'slot' (channel,
            direct_message or private_group), and the mapping between
            partner_id/channel_id for direct_message channels.
            :returns dict : the grouped channels and the mapping
        """
        values = {}
        my_partner_id = self.env.user.partner_id.id
        pinned_channels = self.env['mail.channel.partner'].search([
            ('partner_id', '=', my_partner_id), ('is_pinned', '=', True)
        ]).mapped('channel_id')

        # get the group/public channels
        values['channel_channel'] = self.search([
            ('channel_type', '=', 'channel'),
            ('public', 'in', ['public', 'groups']),
            ('channel_partner_ids', 'in', [my_partner_id])
        ]).channel_info()

        # get the pinned 'direct message' channel
        direct_message_channels = self.search([('channel_type', '=', 'chat'),
                                               ('id', 'in',
                                                pinned_channels.ids)])
        values[
            'channel_direct_message'] = direct_message_channels.channel_info()

        # get the private group
        values['channel_private_group'] = self.search([
            ('channel_type', '=', 'channel'), ('public', '=', 'private'),
            ('channel_partner_ids', 'in', [my_partner_id])
        ]).channel_info()
        return values

    @api.model
    def channel_search_to_join(self, name=None, domain=None):
        """ Return the channel info of the channel the current partner can join
            :param name : the name of the researched channels
            :param domain : the base domain of the research
            :returns dict : channel dict
        """
        if not domain:
            domain = []
        domain = expression.AND([[('channel_type', '=', 'channel')],
                                 [('channel_partner_ids', 'not in',
                                   [self.env.user.partner_id.id])],
                                 [('public', '!=', 'private')], domain])
        if name:
            domain = expression.AND(
                [domain, [('name', 'ilike', '%' + name + '%')]])
        return self.search(domain).read(
            ['name', 'public', 'uuid', 'channel_type'])

    @api.multi
    def channel_join_and_get_info(self):
        """ Subscribe the current user to the channel, post a 'joined' note on
        real channels, and push the header on the user's bus channel. """
        self.ensure_one()
        if self.channel_type == 'channel' and not self.email_send:
            notification = _(
                '<div class="o_mail_notification">joined <a href="#" class="o_channel_redirect" data-oe-id="%s">#%s</a></div>'
            ) % (
                self.id,
                self.name,
            )
            self.message_post(body=notification,
                              message_type="notification",
                              subtype="mail.mt_comment")
        self.action_follow()

        channel_info = self.channel_info()[0]
        self.env['bus.bus'].sendone(
            (self._cr.dbname, 'res.partner', self.env.user.partner_id.id),
            channel_info)
        return channel_info

    @api.model
    def channel_create(self, name, privacy='public'):
        """ Create a channel and add the current partner, broadcast it (to make the user directly
            listen to it when polling)
            :param name : the name of the channel to create
            :param privacy : privacy of the channel. Should be 'public' or 'private'.
            :return dict : channel header
        """
        # create the channel
        new_channel = self.create({
            'name':
            name,
            'public':
            privacy,
            'email_send':
            False,
            'channel_partner_ids': [(4, self.env.user.partner_id.id)]
        })
        notification = _(
            '<div class="o_mail_notification">created <a href="#" class="o_channel_redirect" data-oe-id="%s">#%s</a></div>'
        ) % (
            new_channel.id,
            new_channel.name,
        )
        new_channel.message_post(body=notification,
                                 message_type="notification",
                                 subtype="mail.mt_comment")
        channel_info = new_channel.channel_info('creation')[0]
        self.env['bus.bus'].sendone(
            (self._cr.dbname, 'res.partner', self.env.user.partner_id.id),
            channel_info)
        return channel_info

    @api.model
    def get_mention_suggestions(self, search, limit=8):
        """ Return 'limit'-first channels' id, name and public fields such that the name matches a
            'search' string. Exclude channels of type chat (DM), and private channels the current
            user isn't registered to. """
        domain = expression.AND([[('name', 'ilike', search)],
                                 [('channel_type', '=', 'channel')],
                                 expression.OR(
                                     [[('public', '!=', 'private')],
                                      [('channel_partner_ids', 'in',
                                        [self.env.user.partner_id.id])]])])
        return self.search_read(domain, ['id', 'name', 'public'], limit=limit)

    @api.model
    def channel_fetch_listeners(self, uuid):
        """ Return the id, name and email of partners listening to the given channel """
        self._cr.execute("""
            SELECT P.id, P.name, P.email
            FROM mail_channel_partner CP
                INNER JOIN res_partner P ON CP.partner_id = P.id
                INNER JOIN mail_channel C ON CP.channel_id = C.id
            WHERE C.uuid = %s""", (uuid, ))
        return self._cr.dictfetchall()

    @api.multi
    def channel_fetch_preview(self):
        """ Return the last message of the given channels """
        # one SQL pass: highest message id per channel of self
        self._cr.execute("""
            SELECT mail_channel_id AS id, MAX(mail_message_id) AS message_id
            FROM mail_message_mail_channel_rel
            WHERE mail_channel_id IN %s
            GROUP BY mail_channel_id
            """, (tuple(self.ids), ))
        channels_preview = dict(
            (r['message_id'], r) for r in self._cr.dictfetchall())
        last_messages = self.env['mail.message'].browse(
            channels_preview).message_format()
        for message in last_messages:
            channel = channels_preview[message['id']]
            del (channel['message_id'])
            channel['last_message'] = message
        return list(channels_preview.values())

    #------------------------------------------------------
    # Commands
    #------------------------------------------------------
    @api.model
    @ormcache()
    def get_mention_commands(self):
        """ Returns the allowed commands in channels """
        # commands are discovered by introspection of _define_command_* methods
        commands = []
        for n in dir(self):
            match = re.search('^_define_command_(.+?)$', n)
            if match:
                command = getattr(self, n)()
                command['name'] = match.group(1)
                commands.append(command)
        return commands

    @api.multi
    def execute_command(self, command='', **kwargs):
        """ Executes a given command """
        self.ensure_one()
        command_callback = getattr(self, '_execute_command_' + command, False)
        if command_callback:
            command_callback(**kwargs)

    def _send_transient_message(self, partner_to, content):
        """ Notifies partner_to that a message (not stored in DB) has been
            written in this channel """
        self.env['bus.bus'].sendone(
            (self._cr.dbname, 'res.partner', partner_to.id), {
                'body':
                "<span class='o_mail_notification'>" + content + "</span>",
                'channel_ids': [self.id],
                'info': 'transient_message',
            })

    def _define_command_help(self):
        return {'help': _("Show an helper message")}

    def _execute_command_help(self, **kwargs):
        """ /help command: describe the current channel and the available
        mention/command syntax in a transient message. """
        partner = self.env.user.partner_id
        if self.channel_type == 'channel':
            msg = _("You are in channel <b>#%s</b>.") % self.name
            if self.public == 'private':
                msg += _(
                    " This channel is private. People must be invited to join it."
                )
        else:
            channel_partners = self.env['mail.channel.partner'].search([
                ('partner_id', '!=', partner.id), ('channel_id', '=', self.id)
            ])
            msg = _("You are in a private conversation with <b>@%s</b>.") % (
                channel_partners[0].partner_id.name
                if channel_partners else _('Anonymous'))
        msg += _("""<br><br>
            You can mention someone by typing <b>@username</b>, this will grab its attention.<br>
            You can mention a channel by typing <b>#channel</b>.<br>
            You can execute a command by typing <b>/command</b>.<br>
            You can insert canned responses in your message by typing <b>:shortcut</b>.<br>"""
                 )
        self._send_transient_message(partner, msg)

    def _define_command_leave(self):
        return {'help': _("Leave this channel")}

    def _execute_command_leave(self, **kwargs):
        # /leave: unfollow a channel, unpin a direct-message chat
        if self.channel_type == 'channel':
            self.action_unfollow()
        else:
            self.channel_pin(self.uuid, False)

    def _define_command_who(self):
        return {
            'channel_types': ['channel', 'chat'],
            'help': _("List users in the current channel")
        }

    def _execute_command_who(self, **kwargs):
        """ /who command: list (at most 30) other channel members in a
        transient message. """
        partner = self.env.user.partner_id
        members = [
            '<a href="#" data-oe-id=' + str(p.id) +
            ' data-oe-model="res.partner">@' + p.name + '</a>'
            for p in self.channel_partner_ids[:30] if p != partner
        ]
        if len(members) == 0:
            msg = _("You are alone in this channel.")
        else:
            # ellipsis when the 30-member preview is truncated
            dots = "..." if len(members) != len(
                self.channel_partner_ids) - 1 else ""
            msg = _("Users in this channel: %s %s and you.") % (
                ", ".join(members), dots)
        self._send_transient_message(partner, msg)
class ProductPublicCategory(models.Model):
    """eCommerce product category (hierarchical), shown on the website shop."""
    _name = "product.public.category"
    _inherit = ["website.seo.metadata"]
    _description = "Website Product Category"
    _order = "sequence, name"

    name = fields.Char(required=True, translate=True)
    parent_id = fields.Many2one('product.public.category',
                                string='Parent Category',
                                index=True)
    child_id = fields.One2many('product.public.category',
                               'parent_id',
                               string='Children Categories')
    sequence = fields.Integer(
        help=
        "Gives the sequence order when displaying a list of product categories."
    )
    # NOTE: there is no 'default image', because by default we don't show
    # thumbnails for categories. However if we have a thumbnail for at least one
    # category, then we display a default image on the other, so that the
    # buttons have consistent styling.
    # In this case, the default image is set by the js code.
    image = fields.Binary(
        attachment=True,
        help=
        "This field holds the image used as image for the category, limited to 1024x1024px."
    )
    image_medium = fields.Binary(
        string='Medium-sized image',
        attachment=True,
        help="Medium-sized image of the category. It is automatically "
        "resized as a 128x128px image, with aspect ratio preserved. "
        "Use this field in form views or some kanban views.")
    image_small = fields.Binary(
        string='Small-sized image',
        attachment=True,
        help="Small-sized image of the category. It is automatically "
        "resized as a 64x64px image, with aspect ratio preserved. "
        "Use this field anywhere a small image is required.")

    @api.model
    def create(self, vals):
        """Resize any uploaded image to the standard sizes, then create."""
        tools.image_resize_images(vals)
        return super(ProductPublicCategory, self).create(vals)

    @api.multi
    def write(self, vals):
        """Resize any uploaded image to the standard sizes, then write."""
        tools.image_resize_images(vals)
        return super(ProductPublicCategory, self).write(vals)

    @api.constrains('parent_id')
    def check_parent_id(self):
        """Forbid cycles in the category tree."""
        if self._check_recursion():
            return
        raise ValueError(_('Error ! You cannot create recursive categories.'))

    @api.multi
    def name_get(self):
        """Display the full ancestry path, e.g. 'Parent / Child / Leaf'."""
        result = []
        for record in self:
            path = [record.name]
            ancestor = record.parent_id
            while ancestor:
                path.insert(0, ancestor.name)
                ancestor = ancestor.parent_id
            result.append((record.id, ' / '.join(path)))
        return result
class Track(models.Model):
    """An event session ('track') published on the website.

    Inherits mail.thread/mail.activity.mixin for chatter & stage tracking,
    and the website mixins for SEO metadata and the published flag.
    """
    _name = "event.track"
    _description = 'Event Track'
    _order = 'priority, date'
    _inherit = ['mail.thread', 'mail.activity.mixin', 'website.seo.metadata', 'website.published.mixin']

    @api.model
    def _get_default_stage_id(self):
        # Default stage = first event.track.stage in its default ordering.
        return self.env['event.track.stage'].search([], limit=1).id

    name = fields.Char('Title', required=True, translate=True)
    active = fields.Boolean(default=True)
    user_id = fields.Many2one('res.users', 'Responsible', track_visibility='onchange',
                              default=lambda self: self.env.user)
    # Speaker: optional link to a partner; plain char/html fields below allow
    # entering speaker info without creating a partner record.
    partner_id = fields.Many2one('res.partner', 'Speaker')
    partner_name = fields.Char('Speaker Name')
    partner_email = fields.Char('Speaker Email')
    partner_phone = fields.Char('Speaker Phone')
    partner_biography = fields.Html('Speaker Biography')
    tag_ids = fields.Many2many('event.track.tag', string='Tags')
    stage_id = fields.Many2one(
        'event.track.stage', string='Stage', index=True, copy=False,
        default=_get_default_stage_id, group_expand='_read_group_stage_ids',
        required=True, track_visibility='onchange')
    kanban_state = fields.Selection([
        ('normal', 'Grey'),
        ('done', 'Green'),
        ('blocked', 'Red')], string='Kanban State',
        copy=False, default='normal', required=True, track_visibility='onchange',
        help="A track's kanban state indicates special situations affecting it:\n"
             " * Grey is the default situation\n"
             " * Red indicates something is preventing the progress of this track\n"
             " * Green indicates the track is ready to be pulled to the next stage")
    description = fields.Html('Track Description', translate=html_translate,
                              sanitize_attributes=False)
    date = fields.Datetime('Track Date')
    duration = fields.Float('Duration', default=1.5)
    location_id = fields.Many2one('event.track.location', 'Room')
    event_id = fields.Many2one('event.event', 'Event', required=True)
    color = fields.Integer('Color Index')
    priority = fields.Selection([
        ('0', 'Low'), ('1', 'Medium'), ('2', 'High'), ('3', 'Highest')],
        'Priority', required=True, default='1')
    # Stored copy of the speaker's medium image (related to partner_id).
    image = fields.Binary('Image', related='partner_id.image_medium', store=True, attachment=True)

    @api.multi
    @api.depends('name')
    def _compute_website_url(self):
        # Extend the website.published.mixin URL; only records with a real
        # database id get a track-specific URL (NewId = not yet saved).
        super(Track, self)._compute_website_url()
        for track in self:
            if not isinstance(track.id, models.NewId):
                track.website_url = '/event/%s/track/%s' % (slug(track.event_id), slug(track))

    @api.onchange('partner_id')
    def _onchange_partner_id(self):
        # Prefill the free-text speaker fields from the selected partner.
        if self.partner_id:
            self.partner_name = self.partner_id.name
            self.partner_email = self.partner_id.email
            self.partner_phone = self.partner_id.phone
            self.partner_biography = self.partner_id.website_description

    @api.model
    def create(self, vals):
        # After creation, announce the new track in the chatter of its event.
        track = super(Track, self).create(vals)
        track.event_id.message_post_with_view(
            'website_event_track.event_track_template_new',
            values={'track': track},
            subject=track.name,
            subtype_id=self.env.ref('website_event_track.mt_event_track').id,
        )
        return track

    @api.multi
    def write(self, vals):
        # Moving to another stage resets the kanban state unless the caller
        # explicitly sets one in the same write.
        if 'stage_id' in vals and 'kanban_state' not in vals:
            vals['kanban_state'] = 'normal'
        res = super(Track, self).write(vals)
        # Newly assigned speaker automatically follows the track.
        if vals.get('partner_id'):
            self.message_subscribe([vals['partner_id']])
        return res

    @api.model
    def _read_group_stage_ids(self, stages, domain, order):
        """ Always display all stages """
        return stages.search([], order=order)

    @api.multi
    def _track_template(self, tracking):
        # When the stage changes and the new stage has a mail template,
        # use it for the tracking notification (mass-mail mode).
        res = super(Track, self)._track_template(tracking)
        track = self[0]
        changes, tracking_value_ids = tracking[track.id]
        if 'stage_id' in changes and track.stage_id.mail_template_id:
            res['stage_id'] = (track.stage_id.mail_template_id,
                               {'composition_mode': 'mass_mail'})
        return res

    @api.multi
    def _track_subtype(self, init_values):
        # Pick the chatter subtype matching the kanban-state transition.
        self.ensure_one()
        if 'kanban_state' in init_values and self.kanban_state == 'blocked':
            return 'website_event_track.mt_track_blocked'
        elif 'kanban_state' in init_values and self.kanban_state == 'done':
            return 'website_event_track.mt_track_ready'
        return super(Track, self)._track_subtype(init_values)

    @api.multi
    def message_get_suggested_recipients(self):
        # Suggest the free-text speaker email when it differs from the
        # linked partner's email (covers the no-partner case too).
        recipients = super(Track, self).message_get_suggested_recipients()
        for track in self:
            if track.partner_email != track.partner_id.email:
                track._message_add_suggested_recipient(
                    recipients, email=track.partner_email, reason=_('Speaker Email'))
        return recipients

    def _message_post_after_hook(self, message):
        if self.partner_email and not self.partner_id:
            # we consider that posting a message with a specified recipient (not a follower, a specific one)
            # on a document without customer means that it was created through the chatter using
            # suggested recipients. This heuristic allows to avoid ugly hacks in JS.
            new_partner = message.partner_ids.filtered(
                lambda partner: partner.email == self.partner_email)
            if new_partner:
                # Backfill the speaker on every open track sharing this email.
                self.search([
                    ('partner_id', '=', False),
                    ('partner_email', '=', new_partner.email),
                    ('stage_id.is_cancel', '=', False),
                ]).write({'partner_id': new_partner.id})
        return super(Track, self)._message_post_after_hook(message)

    @api.multi
    def open_track_speakers_list(self):
        # Action opening the partners that speak in the selected tracks.
        return {
            'name': _('Speakers'),
            'domain': [('id', 'in', self.mapped('partner_id').ids)],
            'view_type': 'form',
            'view_mode': 'kanban,form',
            'res_model': 'res.partner',
            'view_id': False,
            'type': 'ir.actions.act_window',
        }
class FleetVehicle(models.Model):
    """A vehicle of the company fleet, with its logs (fuel, services,
    contracts), odometer readings and contract-renewal reminders."""
    _inherit = 'mail.thread'
    _name = 'fleet.vehicle'
    _description = 'Information on a vehicle'
    _order = 'license_plate asc'

    def _get_default_state(self):
        # Default state is the 'Active' record if it still exists;
        # raise_if_not_found=False tolerates its deletion.
        state = self.env.ref('fleet.vehicle_state_active', raise_if_not_found=False)
        return state and state.id or False

    # Display name computed from brand / model / plate, stored for search.
    name = fields.Char(compute="_compute_vehicle_name", store=True)
    active = fields.Boolean('Active', default=True, track_visibility="onchange")
    company_id = fields.Many2one('res.company', 'Company')
    license_plate = fields.Char(required=True, track_visibility="onchange",
        help='License plate number of the vehicle (i = plate number for a car)')
    vin_sn = fields.Char('Chassis Number', help='Unique number written on the vehicle motor (VIN/SN number)', copy=False)
    driver_id = fields.Many2one('res.partner', 'Driver', track_visibility="onchange", help='Driver of the vehicle', copy=False)
    model_id = fields.Many2one('fleet.vehicle.model', 'Model', required=True, help='Model of the vehicle')
    log_fuel = fields.One2many('fleet.vehicle.log.fuel', 'vehicle_id', 'Fuel Logs')
    log_services = fields.One2many('fleet.vehicle.log.services', 'vehicle_id', 'Services Logs')
    log_contracts = fields.One2many('fleet.vehicle.log.contract', 'vehicle_id', 'Contracts')
    # Statistic counters shown on the form view buttons.
    cost_count = fields.Integer(compute="_compute_count_all", string="Costs")
    contract_count = fields.Integer(compute="_compute_count_all", string='Contracts')
    service_count = fields.Integer(compute="_compute_count_all", string='Services')
    fuel_logs_count = fields.Integer(compute="_compute_count_all", string='Fuel Logs')
    odometer_count = fields.Integer(compute="_compute_count_all", string='Odometer')
    acquisition_date = fields.Date('Immatriculation Date', required=False,
        help='Date when the vehicle has been immatriculated')
    color = fields.Char(help='Color of the vehicle')
    state_id = fields.Many2one('fleet.vehicle.state', 'State', default=_get_default_state,
        help='Current state of the vehicle', ondelete="set null")
    location = fields.Char(help='Location of the vehicle (garage, ...)')
    seats = fields.Integer('Seats Number', help='Number of seats of the vehicle')
    model_year = fields.Char('Model Year', help='Year of the model')
    doors = fields.Integer('Doors Number', help='Number of doors of the vehicle', default=5)
    tag_ids = fields.Many2many('fleet.vehicle.tag', 'fleet_vehicle_vehicle_tag_rel', 'vehicle_tag_id', 'tag_id', 'Tags', copy=False)
    # Pseudo-field: reads the newest odometer record, writes a new one.
    odometer = fields.Float(compute='_get_odometer', inverse='_set_odometer', string='Last Odometer',
        help='Odometer measure of the vehicle at the moment of this log')
    odometer_unit = fields.Selection([
        ('kilometers', 'Kilometers'),
        ('miles', 'Miles')
        ], 'Odometer Unit', default='kilometers', help='Unit of the odometer ', required=True)
    transmission = fields.Selection([('manual', 'Manual'), ('automatic', 'Automatic')], 'Transmission',
        help='Transmission Used by the vehicle')
    fuel_type = fields.Selection([
        ('gasoline', 'Gasoline'),
        ('diesel', 'Diesel'),
        ('electric', 'Electric'),
        ('hybrid', 'Hybrid')
        ], 'Fuel Type', help='Fuel Used by the vehicle')
    horsepower = fields.Integer()
    horsepower_tax = fields.Float('Horsepower Taxation')
    power = fields.Integer('Power', help='Power in kW of the vehicle')
    co2 = fields.Float('CO2 Emissions', help='CO2 emissions of the vehicle')
    # Logos are proxied from the model (brand images).
    image = fields.Binary(related='model_id.image', string="Logo")
    image_medium = fields.Binary(related='model_id.image_medium', string="Logo (medium)")
    image_small = fields.Binary(related='model_id.image_small', string="Logo (small)")
    # Contract-renewal reminder fields, all computed together.
    contract_renewal_due_soon = fields.Boolean(compute='_compute_contract_reminder',
        search='_search_contract_renewal_due_soon', string='Has Contracts to renew', multi='contract_info')
    contract_renewal_overdue = fields.Boolean(compute='_compute_contract_reminder',
        search='_search_get_overdue_contract_reminder', string='Has Contracts Overdue', multi='contract_info')
    contract_renewal_name = fields.Text(compute='_compute_contract_reminder',
        string='Name of contract to renew soon', multi='contract_info')
    contract_renewal_total = fields.Text(compute='_compute_contract_reminder',
        string='Total of contracts due or overdue minus one', multi='contract_info')
    car_value = fields.Float(string="Catalog Value (VAT Incl.)", help='Value of the bought vehicle')
    residual_value = fields.Float()

    _sql_constraints = [
        ('driver_id_unique', 'UNIQUE(driver_id)', 'Only one car can be assigned to the same employee!')
    ]

    @api.depends('model_id.brand_id.name', 'model_id.name', 'license_plate')
    def _compute_vehicle_name(self):
        # NOTE(review): this concatenation raises TypeError if brand name is
        # not set (False + str) — confirm brand name is always required.
        for record in self:
            record.name = record.model_id.brand_id.name + '/' + record.model_id.name + '/' + (record.license_plate or _('No Plate'))

    def _get_odometer(self):
        # Latest odometer value = highest 'value' record for the vehicle.
        FleetVehicalOdometer = self.env['fleet.vehicle.odometer']
        for record in self:
            vehicle_odometer = FleetVehicalOdometer.search([('vehicle_id', '=', record.id)], limit=1, order='value desc')
            if vehicle_odometer:
                record.odometer = vehicle_odometer.value
            else:
                record.odometer = 0

    def _set_odometer(self):
        # Writing a (truthy) odometer value appends a new dated reading;
        # writing 0/False is silently ignored.
        for record in self:
            if record.odometer:
                date = fields.Date.context_today(record)
                data = {'value': record.odometer, 'date': date, 'vehicle_id': record.id}
                self.env['fleet.vehicle.odometer'].create(data)

    def _compute_count_all(self):
        # One search_count per statistic; contract count excludes closed
        # contracts and cost count only keeps root costs (no parent).
        Odometer = self.env['fleet.vehicle.odometer']
        LogFuel = self.env['fleet.vehicle.log.fuel']
        LogService = self.env['fleet.vehicle.log.services']
        LogContract = self.env['fleet.vehicle.log.contract']
        Cost = self.env['fleet.vehicle.cost']
        for record in self:
            record.odometer_count = Odometer.search_count([('vehicle_id', '=', record.id)])
            record.fuel_logs_count = LogFuel.search_count([('vehicle_id', '=', record.id)])
            record.service_count = LogService.search_count([('vehicle_id', '=', record.id)])
            record.contract_count = LogContract.search_count([('vehicle_id', '=', record.id), ('state', '!=', 'closed')])
            record.cost_count = Cost.search_count([('vehicle_id', '=', record.id), ('parent_id', '=', False)])

    @api.depends('log_contracts')
    def _compute_contract_reminder(self):
        # Flags contracts expired (overdue) or expiring within 15 days
        # (due soon); 'total' counts both, displayed minus one.
        for record in self:
            overdue = False
            due_soon = False
            total = 0
            name = ''
            for element in record.log_contracts:
                if element.state in ('open', 'expired') and element.expiration_date:
                    current_date_str = fields.Date.context_today(record)
                    due_time_str = element.expiration_date
                    current_date = fields.Date.from_string(current_date_str)
                    due_time = fields.Date.from_string(due_time_str)
                    diff_time = (due_time - current_date).days
                    if diff_time < 0:
                        overdue = True
                        total += 1
                    if diff_time < 15 and diff_time >= 0:
                        due_soon = True
                        total += 1
            if overdue or due_soon:
                log_contract = self.env['fleet.vehicle.log.contract'].search([
                    ('vehicle_id', '=', record.id),
                    ('state', 'in', ('open', 'expired'))
                    ], limit=1, order='expiration_date asc')
                if log_contract:
                    # we display only the name of the oldest overdue/due soon contract
                    name = log_contract.cost_subtype_id.name
            record.contract_renewal_overdue = overdue
            record.contract_renewal_due_soon = due_soon
            record.contract_renewal_total = total - 1  # we remove 1 from the real total for display purposes
            record.contract_renewal_name = name

    def _search_contract_renewal_due_soon(self, operator, value):
        # Search method for the non-stored 'due soon' flag: finds vehicles
        # with an open/expired contract expiring within the next 15 days.
        # Only (in)equality against a boolean is supported.
        res = []
        assert operator in ('=', '!=', '<>') and value in (True, False), 'Operation not supported'
        if (operator == '=' and value is True) or (operator in ('<>', '!=') and value is False):
            search_operator = 'in'
        else:
            search_operator = 'not in'
        today = fields.Date.context_today(self)
        datetime_today = fields.Datetime.from_string(today)
        limit_date = fields.Datetime.to_string(datetime_today + relativedelta(days=+15))
        # Parameterized query (dates passed as a tuple, not interpolated).
        self.env.cr.execute("""SELECT cost.vehicle_id,
                        count(contract.id) AS contract_number
                        FROM fleet_vehicle_cost cost
                        LEFT JOIN fleet_vehicle_log_contract contract ON contract.cost_id = cost.id
                        WHERE contract.expiration_date IS NOT NULL
                          AND contract.expiration_date > %s
                          AND contract.expiration_date < %s
                          AND contract.state IN ('open', 'expired')
                        GROUP BY cost.vehicle_id""", (today, limit_date))
        res_ids = [x[0] for x in self.env.cr.fetchall()]
        res.append(('id', search_operator, res_ids))
        return res

    def _search_get_overdue_contract_reminder(self, operator, value):
        # Search method for the non-stored 'overdue' flag: vehicles with an
        # open/expired contract whose expiration date is already past.
        res = []
        assert operator in ('=', '!=', '<>') and value in (True, False), 'Operation not supported'
        if (operator == '=' and value is True) or (operator in ('<>', '!=') and value is False):
            search_operator = 'in'
        else:
            search_operator = 'not in'
        today = fields.Date.context_today(self)
        self.env.cr.execute('''SELECT cost.vehicle_id,
                        count(contract.id) AS contract_number
                        FROM fleet_vehicle_cost cost
                        LEFT JOIN fleet_vehicle_log_contract contract ON contract.cost_id = cost.id
                        WHERE contract.expiration_date IS NOT NULL
                          AND contract.expiration_date < %s
                          AND contract.state IN ('open', 'expired')
                        GROUP BY cost.vehicle_id ''', (today,))
        res_ids = [x[0] for x in self.env.cr.fetchall()]
        res.append(('id', search_operator, res_ids))
        return res

    @api.onchange('model_id')
    def _onchange_model(self):
        # NOTE(review): assigns the model's full-size 'image' into
        # 'image_medium' — confirm this is intentional and not a typo for
        # self.model_id.image_medium.
        if self.model_id:
            self.image_medium = self.model_id.image
        else:
            self.image_medium = False

    @api.model
    def create(self, data):
        # mail_create_nolog suppresses the default 'created' log so only
        # the custom "added to the fleet" message is posted.
        vehicle = super(FleetVehicle, self.with_context(mail_create_nolog=True)).create(data)
        vehicle.message_post(body=_('%s %s has been added to the fleet!') % (vehicle.model_id.name, vehicle.license_plate))
        return vehicle

    @api.multi
    def write(self, vals):
        """
        This function write an entry in the openchatter whenever we change important information
        on the vehicle like the model, the drive, the state of the vehicle or its license plate
        """
        for vehicle in self:
            changes = []
            # Each tracked field contributes a "from 'old' to 'new'" line.
            if 'model_id' in vals and vehicle.model_id.id != vals['model_id']:
                value = self.env['fleet.vehicle.model'].browse(vals['model_id']).name
                oldmodel = vehicle.model_id.name or _('None')
                changes.append(_("Model: from '%s' to '%s'") % (oldmodel, value))
            if 'driver_id' in vals and vehicle.driver_id.id != vals['driver_id']:
                value = self.env['res.partner'].browse(vals['driver_id']).name
                olddriver = (vehicle.driver_id.name) or _('None')
                changes.append(_("Driver: from '%s' to '%s'") % (olddriver, value))
            if 'state_id' in vals and vehicle.state_id.id != vals['state_id']:
                value = self.env['fleet.vehicle.state'].browse(vals['state_id']).name
                oldstate = vehicle.state_id.name or _('None')
                changes.append(_("State: from '%s' to '%s'") % (oldstate, value))
            if 'license_plate' in vals and vehicle.license_plate != vals['license_plate']:
                old_license_plate = vehicle.license_plate or _('None')
                changes.append(_("License Plate: from '%s' to '%s'") % (old_license_plate, vals['license_plate']))
            if len(changes) > 0:
                self.message_post(body=", ".join(changes))
        return super(FleetVehicle, self).write(vals)

    @api.multi
    def return_action_to_open(self):
        """ This opens the xml view specified in xml_id for the current vehicle """
        self.ensure_one()
        xml_id = self.env.context.get('xml_id')
        if xml_id:
            res = self.env['ir.actions.act_window'].for_xml_id('fleet', xml_id)
            res.update(
                context=dict(self.env.context, default_vehicle_id=self.id, group_by=False),
                domain=[('vehicle_id', '=', self.id)]
            )
            return res
        return False

    @api.multi
    def act_show_log_cost(self):
        """ This opens log view to view and add new log for this vehicle, groupby default to only show effective costs
            @return: the costs log view
        """
        self.ensure_one()
        copy_context = dict(self.env.context)
        copy_context.pop('group_by', None)
        res = self.env['ir.actions.act_window'].for_xml_id('fleet', 'fleet_vehicle_costs_action')
        res.update(
            context=dict(copy_context, default_vehicle_id=self.id, search_default_parent_false=True),
            domain=[('vehicle_id', '=', self.id)]
        )
        return res
class Import(models.TransientModel): _name = 'base_import.import' # allow imports to survive for 12h in case user is slow _transient_max_hours = 12.0 res_model = fields.Char('Model') file = fields.Binary( 'File', help="File to check and/or import, raw binary (not base64)") file_name = fields.Char('File Name') file_type = fields.Char('File Type') @api.model def get_fields(self, model, depth=FIELDS_RECURSION_LIMIT): """ Recursively get fields for the provided model (through fields_get) and filter them according to importability The output format is a list of ``Field``, with ``Field`` defined as: .. class:: Field .. attribute:: id (str) A non-unique identifier for the field, used to compute the span of the ``required`` attribute: if multiple ``required`` fields have the same id, only one of them is necessary. .. attribute:: name (str) The field's logical (GECOERP) name within the scope of its parent. .. attribute:: string (str) The field's human-readable name (``@string``) .. attribute:: required (bool) Whether the field is marked as required in the model. Clients must provide non-empty import values for all required fields or the import will error out. .. attribute:: fields (list(Field)) The current field's subfields. The database and external identifiers for m2o and m2m fields; a filtered and transformed fields_get for o2m fields (to a variable depth defined by ``depth``). Fields with no sub-fields will have an empty list of sub-fields. 
:param str model: name of the model to get fields form :param int landing: depth of recursion into o2m fields """ Model = self.env[model] importable_fields = [{ 'id': 'id', 'name': 'id', 'string': _("External ID"), 'required': False, 'fields': [], 'type': 'id', }] model_fields = Model.fields_get() blacklist = models.MAGIC_COLUMNS + [Model.CONCURRENCY_CHECK_FIELD] for name, field in model_fields.items(): if name in blacklist: continue # an empty string means the field is deprecated, @deprecated must # be absent or False to mean not-deprecated if field.get('deprecated', False) is not False: continue if field.get('readonly'): states = field.get('states') if not states: continue # states = {state: [(attr, value), (attr2, value2)], state2:...} if not any(attr == 'readonly' and value is False for attr, value in itertools.chain.from_iterable( states.values())): continue field_value = { 'id': name, 'name': name, 'string': field['string'], # Y U NO ALWAYS HAS REQUIRED 'required': bool(field.get('required')), 'fields': [], 'type': field['type'], } if field['type'] in ('many2many', 'many2one'): field_value['fields'] = [ dict(field_value, name='id', string=_("External ID"), type='id'), dict(field_value, name='.id', string=_("Database ID"), type='id'), ] elif field['type'] == 'one2many' and depth: field_value['fields'] = self.get_fields(field['relation'], depth=depth - 1) if self.user_has_groups('base.group_no_one'): field_value['fields'].append({ 'id': '.id', 'name': '.id', 'string': _("Database ID"), 'required': False, 'fields': [], 'type': 'id' }) importable_fields.append(field_value) # TODO: cache on model? return importable_fields @api.multi def _read_file(self, options): """ Dispatch to specific method to read file content, according to its mimetype or file type :param options : dict of reading options (quoting, separator, ...) 
""" self.ensure_one() # guess mimetype from file content mimetype = guess_mimetype(self.file) (file_extension, handler, req) = FILE_TYPE_DICT.get(mimetype, (None, None, None)) if handler: try: return getattr(self, '_read_' + file_extension)(options) except Exception: _logger.warn( "Failed to read file '%s' (transient id %d) using guessed mimetype %s", self.file_name or '<unknown>', self.id, mimetype) # try reading with user-provided mimetype (file_extension, handler, req) = FILE_TYPE_DICT.get(self.file_type, (None, None, None)) if handler: try: return getattr(self, '_read_' + file_extension)(options) except Exception: _logger.warn( "Failed to read file '%s' (transient id %d) using user-provided mimetype %s", self.file_name or '<unknown>', self.id, self.file_type) # fallback on file extensions as mime types can be unreliable (e.g. # software setting incorrect mime types, or non-installed software # leading to browser not sending mime types) if self.file_name: p, ext = os.path.splitext(self.file_name) if ext in EXTENSIONS: try: return getattr(self, '_read_' + ext[1:])(options) except Exception: _logger.warn( "Failed to read file '%s' (transient id %s) using file extension", self.file_name, self.id) if req: raise ImportError( _("Unable to load \"{extension}\" file: requires Python module \"{modname}\"" ).format(extension=file_extension, modname=req)) raise ValueError( _("Unsupported file format \"{}\", import only supports CSV, ODS, XLS and XLSX" ).format(self.file_type)) @api.multi def _read_xls(self, options): """ Read file content, using xlrd lib """ book = xlrd.open_workbook(file_contents=self.file) return self._read_xls_book(book) def _read_xls_book(self, book): sheet = book.sheet_by_index(0) # emulate Sheet.get_rows for pre-0.9.4 for row in pycompat.imap(sheet.row, range(sheet.nrows)): values = [] for cell in row: if cell.ctype is xlrd.XL_CELL_NUMBER: is_float = cell.value % 1 != 0.0 values.append( pycompat.text_type(cell.value) if is_float else 
pycompat.text_type(int(cell.value))) elif cell.ctype is xlrd.XL_CELL_DATE: is_datetime = cell.value % 1 != 0.0 # emulate xldate_as_datetime for pre-0.9.3 dt = datetime.datetime(*xlrd.xldate.xldate_as_tuple( cell.value, book.datemode)) values.append( dt.strftime(DEFAULT_SERVER_DATETIME_FORMAT ) if is_datetime else dt. strftime(DEFAULT_SERVER_DATE_FORMAT)) elif cell.ctype is xlrd.XL_CELL_BOOLEAN: values.append(u'True' if cell.value else u'False') elif cell.ctype is xlrd.XL_CELL_ERROR: raise ValueError( _("Error cell found while reading XLS/XLSX file: %s") % xlrd.error_text_from_code.get( cell.value, "unknown error code %s" % cell.value)) else: values.append(cell.value) if any(x for x in values if x.strip()): yield values # use the same method for xlsx and xls files _read_xlsx = _read_xls @api.multi def _read_ods(self, options): """ Read file content using ODSReader custom lib """ doc = odf_ods_reader.ODSReader(file=io.BytesIO(self.file)) return (row for row in doc.getFirstSheet() if any(x for x in row if x.strip())) @api.multi def _read_csv(self, options): """ Returns a CSV-parsed iterator of all empty lines in the file :throws csv.Error: if an error is detected during CSV parsing :throws UnicodeDecodeError: if ``options.encoding`` is incorrect """ csv_data = self.file # TODO: guess encoding with chardet? 
Or https://github.com/aadsm/jschardet encoding = options.get('encoding', 'utf-8') if encoding != 'utf-8': # csv module expect utf-8, see http://docs.python.org/2/library/csv.html csv_data = csv_data.decode(encoding).encode('utf-8') csv_iterator = pycompat.csv_reader(io.BytesIO(csv_data), quotechar=str(options['quoting']), delimiter=str(options['separator'])) return (row for row in csv_iterator if any(x for x in row if x.strip())) @api.model def _try_match_column(self, preview_values, options): """ Returns the potential field types, based on the preview values, using heuristics :param preview_values : list of value for the column to determine :param options : parsing options """ # If all values are empty in preview than can be any field if all([v == '' for v in preview_values]): return ['all'] # If all values starts with __export__ this is probably an id if all(v.startswith('__export__') for v in preview_values): return ['id', 'many2many', 'many2one', 'one2many'] # If all values can be cast to int type is either id, float or monetary # Exception: if we only have 1 and 0, it can also be a boolean try: field_type = [ 'id', 'integer', 'char', 'float', 'monetary', 'many2one', 'many2many', 'one2many' ] res = set(int(v) for v in preview_values if v) if {0, 1}.issuperset(res): field_type.append('boolean') return field_type except ValueError: pass # If all values are either True or False, type is boolean if all(val.lower() in ('true', 'false', 't', 'f', '') for val in preview_values): return ['boolean'] # If all values can be cast to float, type is either float or monetary # Or a date/datetime if it matches the pattern results = [] try: thousand_separator = decimal_separator = False for val in preview_values: val = val.strip() if not val: continue # value might have the currency symbol left or right from the value val = self._remove_currency_symbol(val) if val: if options.get('float_thousand_separator') and options.get( 'float_decimal_separator'): val = val.replace( 
options['float_thousand_separator'], '').replace(options['float_decimal_separator'], '.') # We are now sure that this is a float, but we still need to find the # thousand and decimal separator else: if val.count('.') > 1: options['float_thousand_separator'] = '.' options['float_decimal_separator'] = ',' elif val.count(',') > 1: options['float_thousand_separator'] = ',' options['float_decimal_separator'] = '.' elif val.find('.') > val.find(','): thousand_separator = ',' decimal_separator = '.' elif val.find(',') > val.find('.'): thousand_separator = '.' decimal_separator = ',' else: # This is not a float so exit this try float('a') if thousand_separator and not options.get( 'float_decimal_separator'): options['float_thousand_separator'] = thousand_separator options['float_decimal_separator'] = decimal_separator results = ['float', 'monetary'] except ValueError: pass # Try to see if all values are a date or datetime dt = datetime.datetime separator = [' ', '/', '-'] date_format = ['%mr%dr%Y', '%dr%mr%Y', '%Yr%mr%d', '%Yr%dr%m'] date_patterns = [options['date_format'] ] if options.get('date_format') else [] if not date_patterns: date_patterns = [ pattern.replace('r', sep) for sep in separator for pattern in date_format ] date_patterns.extend([p.replace('Y', 'y') for p in date_patterns]) datetime_patterns = [options['datetime_format'] ] if options.get('datetime_format') else [] if not datetime_patterns: datetime_patterns = [ pattern + ' %H:%M:%S' for pattern in date_patterns ] current_date_pattern = False current_datetime_pattern = False def check_patterns(patterns, preview_values): for pattern in patterns: match = True for val in preview_values: if not val: continue try: dt.strptime(val, pattern) except ValueError: match = False break if match: return pattern return False current_date_pattern = check_patterns(date_patterns, preview_values) if current_date_pattern: options['date_format'] = current_date_pattern results += ['date'] current_datetime_pattern = 
check_patterns(datetime_patterns, preview_values) if current_datetime_pattern: options['datetime_format'] = current_datetime_pattern results += ['datetime'] if results: return results return [ 'id', 'text', 'char', 'datetime', 'selection', 'many2one', 'one2many', 'many2many', 'html' ] @api.model def _find_type_from_preview(self, options, preview): type_fields = [] if preview: for column in range(0, len(preview[0])): preview_values = [value[column].strip() for value in preview] type_field = self._try_match_column(preview_values, options) type_fields.append(type_field) return type_fields def _match_header(self, header, fields, options): """ Attempts to match a given header to a field of the imported model. :param str header: header name from the CSV file :param fields: :param dict options: :returns: an empty list if the header couldn't be matched, or all the fields to traverse :rtype: list(Field) """ string_match = None for field in fields: # FIXME: should match all translations & original # TODO: use string distance (levenshtein? hamming?) 
if header.lower() == field['name'].lower(): return [field] if header.lower() == field['string'].lower(): # matching string are not reliable way because # strings have no unique constraint string_match = field if string_match: # this behavior is only applied if there is no matching field['name'] return [string_match] if '/' not in header: return [] # relational field path traversal = [] subfields = fields # Iteratively dive into fields tree for section in header.split('/'): # Strip section in case spaces are added around '/' for # readability of paths match = self._match_header(section.strip(), subfields, options) # Any match failure, exit if not match: return [] # prep subfields for next iteration within match[0] field = match[0] subfields = field['fields'] traversal.append(field) return traversal def _match_headers(self, rows, fields, options): """ Attempts to match the imported model's fields to the titles of the parsed CSV file, if the file is supposed to have headers. Will consume the first line of the ``rows`` iterator. Returns a pair of (None, None) if headers were not requested or the list of headers and a dict mapping cell indices to key paths in the ``fields`` tree :param Iterator rows: :param dict fields: :param dict options: :rtype: (None, None) | (list(str), dict(int: list(str))) """ if not options.get('headers'): return [], {} headers = next(rows) return headers, { index: [ field['name'] for field in self._match_header(header, fields, options) ] or None for index, header in enumerate(headers) } @api.multi def parse_preview(self, options, count=10): """ Generates a preview of the uploaded files, and performs fields-matching between the import's file data and the model's columns. If the headers are not requested (not options.headers), ``matches`` and ``headers`` are both ``False``. :param int count: number of preview lines to generate :param options: format-specific options. 
CSV: {encoding, quoting, separator, headers} :type options: {str, str, str, bool} :returns: {fields, matches, headers, preview} | {error, preview} :rtype: {dict(str: dict(...)), dict(int, list(str)), list(str), list(list(str))} | {str, str} """ self.ensure_one() fields = self.get_fields(self.res_model) try: rows = self._read_file(options) headers, matches = self._match_headers(rows, fields, options) # Match should have consumed the first row (iif headers), get # the ``count`` next rows for preview preview = list(itertools.islice(rows, count)) assert preview, "CSV file seems to have no content" header_types = self._find_type_from_preview(options, preview) if options.get('keep_matches', False) and len( options.get('fields', [])): matches = {} for index, match in enumerate(options.get('fields')): if match: matches[index] = match.split('/') return { 'fields': fields, 'matches': matches or False, 'headers': headers or False, 'headers_type': header_types or False, 'preview': preview, 'options': options, 'debug': self.user_has_groups('base.group_no_one'), } except Exception as error: # Due to lazy generators, UnicodeDecodeError (for # instance) may only be raised when serializing the # preview to a list in the return. _logger.debug("Error during parsing preview", exc_info=True) preview = None if self.file_type == 'text/csv': preview = self.file[:ERROR_PREVIEW_BYTES].decode('iso-8859-1') return { 'error': str(error), # iso-8859-1 ensures decoding will always succeed, # even if it yields non-printable characters. 
This is # in case of UnicodeDecodeError (or csv.Error # compounded with UnicodeDecodeError) 'preview': preview, } @api.model def _convert_import_data(self, fields, options): """ Extracts the input BaseModel and fields list (with ``False``-y placeholders for fields to *not* import) into a format Model.import_data can use: a fields list without holes and the precisely matching data matrix :param list(str|bool): fields :returns: (data, fields) :rtype: (list(list(str)), list(str)) :raises ValueError: in case the import data could not be converted """ # Get indices for non-empty fields indices = [index for index, field in enumerate(fields) if field] if not indices: raise ValueError( _("You must configure at least one field to import")) # If only one index, itemgetter will return an atom rather # than a 1-tuple if len(indices) == 1: mapper = lambda row: [row[indices[0]]] else: mapper = operator.itemgetter(*indices) # Get only list of actually imported fields import_fields = [f for f in fields if f] rows_to_import = self._read_file(options) if options.get('headers'): rows_to_import = itertools.islice(rows_to_import, 1, None) data = [ list(row) for row in pycompat.imap(mapper, rows_to_import) # don't try inserting completely empty rows (e.g. 
from # filtering out o2m fields) if any(row) ] return data, import_fields @api.model def _remove_currency_symbol(self, value): value = value.strip() negative = False # Careful that some countries use () for negative so replace it by - sign if value.startswith('(') and value.endswith(')'): value = value[1:-1] negative = True float_regex = re.compile(r'([-]?[0-9.,]+)') split_value = [g for g in float_regex.split(value) if g] if len(split_value) > 2: # This is probably not a float return False if len(split_value) == 1: if float_regex.search(split_value[0]) is not None: return split_value[0] if not negative else '-' + split_value[0] return False else: # String has been split in 2, locate which index contains the float and which does not currency_index = 0 if float_regex.search(split_value[0]) is not None: currency_index = 1 # Check that currency exists currency = self.env['res.currency'].search([ ('symbol', '=', split_value[currency_index].strip()) ]) if len(currency): return split_value[(currency_index + 1) % 2] if not negative else '-' + split_value[ (currency_index + 1) % 2] # Otherwise it is not a float with a currency symbol return False @api.model def _parse_float_from_data(self, data, index, name, options): thousand_separator = options.get('float_thousand_separator', ' ') decimal_separator = options.get('float_decimal_separator', '.') for line in data: line[index] = line[index].strip() if not line[index]: continue line[index] = line[index].replace(thousand_separator, '').replace( decimal_separator, '.') old_value = line[index] line[index] = self._remove_currency_symbol(line[index]) if line[index] is False: raise ValueError( _("Column %s contains incorrect values (value: %s)" % (name, old_value))) @api.multi def _parse_import_data(self, data, import_fields, options): """ Lauch first call to _parse_import_data_recursive with an empty prefix. _parse_import_data_recursive will be run recursively for each relational field. 
""" return self._parse_import_data_recursive(self.res_model, '', data, import_fields, options) @api.multi def _parse_import_data_recursive(self, model, prefix, data, import_fields, options): # Get fields of type date/datetime all_fields = self.env[model].fields_get() for name, field in all_fields.items(): name = prefix + name if field['type'] in ('date', 'datetime') and name in import_fields: # Parse date index = import_fields.index(name) dt = datetime.datetime server_format = DEFAULT_SERVER_DATE_FORMAT if field[ 'type'] == 'date' else DEFAULT_SERVER_DATETIME_FORMAT if options.get('%s_format' % field['type'], server_format) != server_format: # datetime.str[fp]time takes *native strings* in both # versions, for both data and pattern user_format = pycompat.to_native( options.get('%s_format' % field['type'])) for num, line in enumerate(data): if line[index]: line[index] = line[index].strip() if line[index]: try: line[index] = dt.strftime( dt.strptime( pycompat.to_native(line[index]), user_format), server_format) except ValueError as e: raise ValueError( _("Column %s contains incorrect values. Error in line %d: %s" ) % (name, num + 1, e)) except Exception as e: raise ValueError( _("Error Parsing Date [%s:L%d]: %s") % (name, num + 1, e)) # Check if the field is in import_field and is a relational (followed by /) # Also verify that the field name exactly match the import_field at the correct level. 
elif any(name + '/' in import_field and name == import_field.split('/')[prefix.count('/')] for import_field in import_fields): # Recursive call with the relational as new model and add the field name to the prefix self._parse_import_data_recursive(field['relation'], name + '/', data, import_fields, options) elif field['type'] in ('float', 'monetary') and name in import_fields: # Parse float, sometimes float values from file have currency symbol or () to denote a negative value # We should be able to manage both case index = import_fields.index(name) self._parse_float_from_data(data, index, name, options) return data @api.multi def do(self, fields, options, dryrun=False): """ Actual execution of the import :param fields: import mapping: maps each column to a field, ``False`` for the columns to ignore :type fields: list(str|bool) :param dict options: :param bool dryrun: performs all import operations (and validations) but rollbacks writes, allows getting as much errors as possible without the risk of clobbering the database. :returns: A list of errors. If the list is empty the import executed fully and correctly. 
If the list is non-empty it contains dicts with 3 keys ``type`` the type of error (``error|warning``); ``message`` the error message associated with the error (a string) and ``record`` the data which failed to import (or ``false`` if that data isn't available or provided) :rtype: list({type, message, record}) """ self.ensure_one() self._cr.execute('SAVEPOINT import') try: data, import_fields = self._convert_import_data(fields, options) # Parse date and float field data = self._parse_import_data(data, import_fields, options) except ValueError as error: return [{ 'type': 'error', 'message': pycompat.text_type(error), 'record': False, }] _logger.info('importing %d rows...', len(data)) model = self.env[self.res_model].with_context(import_file=True) defer_parent_store = self.env.context.get( 'defer_parent_store_computation', True) if defer_parent_store and model._parent_store: model = model.with_context(defer_parent_store_computation=True) import_result = model.load(import_fields, data) _logger.info('done') # If transaction aborted, RELEASE SAVEPOINT is going to raise # an InternalError (ROLLBACK should work, maybe). Ignore that. # TODO: to handle multiple errors, create savepoint around # write and release it in case of write error (after # adding error to errors array) => can keep on trying to # import stuff, and rollback at the end if there is any # error in the results. try: if dryrun: self._cr.execute('ROLLBACK TO SAVEPOINT import') else: self._cr.execute('RELEASE SAVEPOINT import') except psycopg2.InternalError: pass return import_result['messages']
class Message(models.Model): """ Messages model: system notification (replacing res.log notifications), comments (OpenChatter discussion) and incoming emails. """ _name = 'mail.message' _description = 'Message' _order = 'id desc' _rec_name = 'record_name' _message_read_limit = 30 @api.model def _get_default_from(self): if self.env.user.email: return formataddr((self.env.user.name, self.env.user.email)) raise UserError(_("Unable to send email, please configure the sender's email address.")) @api.model def _get_default_author(self): return self.env.user.partner_id # content subject = fields.Char('Subject') date = fields.Datetime('Date', default=fields.Datetime.now) body = fields.Html('Contents', default='', sanitize_style=True, strip_classes=True) attachment_ids = fields.Many2many( 'ir.attachment', 'message_attachment_rel', 'message_id', 'attachment_id', string='Attachments', help='Attachments are linked to a document through model / res_id and to the message ' 'through this field.') parent_id = fields.Many2one( 'mail.message', 'Parent Message', index=True, ondelete='set null', help="Initial thread message.") child_ids = fields.One2many('mail.message', 'parent_id', 'Child Messages') # related document model = fields.Char('Related Document Model', index=True) res_id = fields.Integer('Related Document ID', index=True) record_name = fields.Char('Message Record Name', help="Name get of the related document.") # characteristics message_type = fields.Selection([ ('email', 'Email'), ('comment', 'Comment'), ('notification', 'System notification')], 'Type', required=True, default='email', help="Message type: email for email message, notification for system " "message, comment for other messages such as user replies", oldname='type') subtype_id = fields.Many2one('mail.message.subtype', 'Subtype', ondelete='set null', index=True) mail_activity_type_id = fields.Many2one( 'mail.activity.type', 'Mail Activity Type', index=True, ondelete='set null') # origin email_from = 
fields.Char( 'From', default=_get_default_from, help="Email address of the sender. This field is set when no matching partner is found and replaces the author_id field in the chatter.") author_id = fields.Many2one( 'res.partner', 'Author', index=True, ondelete='set null', default=_get_default_author, help="Author of the message. If not set, email_from may hold an email address that did not match any partner.") author_avatar = fields.Binary("Author's avatar", related='author_id.image_small') # recipients partner_ids = fields.Many2many('res.partner', string='Recipients') needaction_partner_ids = fields.Many2many( 'res.partner', 'mail_message_res_partner_needaction_rel', string='Partners with Need Action') needaction = fields.Boolean( 'Need Action', compute='_get_needaction', search='_search_needaction', help='Need Action') channel_ids = fields.Many2many( 'mail.channel', 'mail_message_mail_channel_rel', string='Channels') # notifications notification_ids = fields.One2many( 'mail.notification', 'mail_message_id', 'Notifications', auto_join=True, copy=False) # user interface starred_partner_ids = fields.Many2many( 'res.partner', 'mail_message_res_partner_starred_rel', string='Favorited By') starred = fields.Boolean( 'Starred', compute='_get_starred', search='_search_starred', help='Current user has a starred notification linked to this message') # tracking tracking_value_ids = fields.One2many( 'mail.tracking.value', 'mail_message_id', string='Tracking values', groups="base.group_no_one", help='Tracked values are stored in a separate model. This field allow to reconstruct ' 'the tracking and to generate statistics on the model.') # mail gateway no_auto_thread = fields.Boolean( 'No threading for answers', help='Answers do not go in the original document discussion thread. 
This has an impact on the generated message-id.') message_id = fields.Char('Message-Id', help='Message unique identifier', index=True, readonly=1, copy=False) reply_to = fields.Char('Reply-To', help='Reply email address. Setting the reply_to bypasses the automatic thread creation.') mail_server_id = fields.Many2one('ir.mail_server', 'Outgoing mail server') @api.multi def _get_needaction(self): """ Need action on a mail.message = notified on my channel """ my_messages = self.env['mail.notification'].sudo().search([ ('mail_message_id', 'in', self.ids), ('res_partner_id', '=', self.env.user.partner_id.id), ('is_read', '=', False)]).mapped('mail_message_id') for message in self: message.needaction = message in my_messages @api.model def _search_needaction(self, operator, operand): if operator == '=' and operand: return ['&', ('notification_ids.res_partner_id', '=', self.env.user.partner_id.id), ('notification_ids.is_read', '=', False)] return ['&', ('notification_ids.res_partner_id', '=', self.env.user.partner_id.id), ('notification_ids.is_read', '=', True)] @api.depends('starred_partner_ids') def _get_starred(self): """ Compute if the message is starred by the current user. """ # TDE FIXME: use SQL starred = self.sudo().filtered(lambda msg: self.env.user.partner_id in msg.starred_partner_ids) for message in self: message.starred = message in starred @api.model def _search_starred(self, operator, operand): if operator == '=' and operand: return [('starred_partner_ids', 'in', [self.env.user.partner_id.id])] return [('starred_partner_ids', 'not in', [self.env.user.partner_id.id])] #------------------------------------------------------ # Notification API #------------------------------------------------------ @api.model def mark_all_as_read(self, channel_ids=None, domain=None): """ Remove all needactions of the current partner. If channel_ids is given, restrict to messages written in one of those channels. 
""" partner_id = self.env.user.partner_id.id delete_mode = not self.env.user.share # delete employee notifs, keep customer ones if not domain and delete_mode: query = "DELETE FROM mail_message_res_partner_needaction_rel WHERE res_partner_id IN %s" args = [(partner_id,)] if channel_ids: query += """ AND mail_message_id in (SELECT mail_message_id FROM mail_message_mail_channel_rel WHERE mail_channel_id in %s)""" args += [tuple(channel_ids)] query += " RETURNING mail_message_id as id" self._cr.execute(query, args) self.invalidate_cache() ids = [m['id'] for m in self._cr.dictfetchall()] else: # not really efficient method: it does one db request for the # search, and one for each message in the result set to remove the # current user from the relation. msg_domain = [('needaction_partner_ids', 'in', partner_id)] if channel_ids: msg_domain += [('channel_ids', 'in', channel_ids)] unread_messages = self.search(expression.AND([msg_domain, domain])) notifications = self.env['mail.notification'].sudo().search([ ('mail_message_id', 'in', unread_messages.ids), ('res_partner_id', '=', self.env.user.partner_id.id), ('is_read', '=', False)]) if delete_mode: notifications.unlink() else: notifications.write({'is_read': True}) ids = unread_messages.mapped('id') notification = {'type': 'mark_as_read', 'message_ids': ids, 'channel_ids': channel_ids} self.env['bus.bus'].sendone((self._cr.dbname, 'res.partner', self.env.user.partner_id.id), notification) return ids @api.multi def mark_as_unread(self, channel_ids=None): """ Add needactions to messages for the current partner. 
""" partner_id = self.env.user.partner_id.id for message in self: message.write({'needaction_partner_ids': [(4, partner_id)]}) ids = [m.id for m in self] notification = {'type': 'mark_as_unread', 'message_ids': ids, 'channel_ids': channel_ids} self.env['bus.bus'].sendone((self._cr.dbname, 'res.partner', self.env.user.partner_id.id), notification) @api.multi def set_message_done(self): """ Remove the needaction from messages for the current partner. """ partner_id = self.env.user.partner_id delete_mode = not self.env.user.share # delete employee notifs, keep customer ones notifications = self.env['mail.notification'].sudo().search([ ('mail_message_id', 'in', self.ids), ('res_partner_id', '=', partner_id.id), ('is_read', '=', False)]) if not notifications: return # notifies changes in messages through the bus. To minimize the number of # notifications, we need to group the messages depending on their channel_ids groups = [] messages = notifications.mapped('mail_message_id') current_channel_ids = messages[0].channel_ids current_group = [] for record in messages: if record.channel_ids == current_channel_ids: current_group.append(record.id) else: groups.append((current_group, current_channel_ids)) current_group = [record.id] current_channel_ids = record.channel_ids groups.append((current_group, current_channel_ids)) current_group = [record.id] current_channel_ids = record.channel_ids if delete_mode: notifications.unlink() else: notifications.write({'is_read': True}) for (msg_ids, channel_ids) in groups: notification = {'type': 'mark_as_read', 'message_ids': msg_ids, 'channel_ids': [c.id for c in channel_ids]} self.env['bus.bus'].sendone((self._cr.dbname, 'res.partner', partner_id.id), notification) @api.model def unstar_all(self): """ Unstar messages for the current partner. 
""" partner_id = self.env.user.partner_id.id starred_messages = self.search([('starred_partner_ids', 'in', partner_id)]) starred_messages.write({'starred_partner_ids': [(3, partner_id)]}) ids = [m.id for m in starred_messages] notification = {'type': 'toggle_star', 'message_ids': ids, 'starred': False} self.env['bus.bus'].sendone((self._cr.dbname, 'res.partner', self.env.user.partner_id.id), notification) @api.multi def toggle_message_starred(self): """ Toggle messages as (un)starred. Technically, the notifications related to uid are set to (un)starred. """ # a user should always be able to star a message he can read self.check_access_rule('read') starred = not self.starred if starred: self.sudo().write({'starred_partner_ids': [(4, self.env.user.partner_id.id)]}) else: self.sudo().write({'starred_partner_ids': [(3, self.env.user.partner_id.id)]}) notification = {'type': 'toggle_star', 'message_ids': [self.id], 'starred': starred} self.env['bus.bus'].sendone((self._cr.dbname, 'res.partner', self.env.user.partner_id.id), notification) #------------------------------------------------------ # Message loading for web interface #------------------------------------------------------ @api.model def _message_read_dict_postprocess(self, messages, message_tree): """ Post-processing on values given by message_read. This method will handle partners in batch to avoid doing numerous queries. :param list messages: list of message, as get_dict result :param dict message_tree: {[msg.id]: msg browse record as super user} """ # 1. 
Aggregate partners (author_id and partner_ids), attachments and tracking values partners = self.env['res.partner'].sudo() attachments = self.env['ir.attachment'] message_ids = list(message_tree.keys()) for message in message_tree.values(): if message.author_id: partners |= message.author_id if message.subtype_id and message.partner_ids: # take notified people of message with a subtype partners |= message.partner_ids elif not message.subtype_id and message.partner_ids: # take specified people of message without a subtype (log) partners |= message.partner_ids if message.needaction_partner_ids: # notified partners |= message.needaction_partner_ids if message.attachment_ids: attachments |= message.attachment_ids # Read partners as SUPERUSER -> message being browsed as SUPERUSER it is already the case partners_names = partners.name_get() partner_tree = dict((partner[0], partner) for partner in partners_names) # 2. Attachments as SUPERUSER, because could receive msg and attachments for doc uid cannot see attachments_data = attachments.sudo().read(['id', 'datas_fname', 'name', 'mimetype']) safari = request and request.httprequest.user_agent.browser == 'safari' attachments_tree = dict((attachment['id'], { 'id': attachment['id'], 'filename': attachment['datas_fname'], 'name': attachment['name'], 'mimetype': 'application/octet-stream' if safari and attachment['mimetype'] and 'video' in attachment['mimetype'] else attachment['mimetype'], }) for attachment in attachments_data) # 3. 
Tracking values tracking_values = self.env['mail.tracking.value'].sudo().search([('mail_message_id', 'in', message_ids)]) message_to_tracking = dict() tracking_tree = dict.fromkeys(tracking_values.ids, False) for tracking in tracking_values: message_to_tracking.setdefault(tracking.mail_message_id.id, list()).append(tracking.id) tracking_tree[tracking.id] = { 'id': tracking.id, 'changed_field': tracking.field_desc, 'old_value': tracking.get_old_display_value()[0], 'new_value': tracking.get_new_display_value()[0], 'field_type': tracking.field_type, } # 4. Update message dictionaries for message_dict in messages: message_id = message_dict.get('id') message = message_tree[message_id] if message.author_id: author = partner_tree[message.author_id.id] else: author = (0, message.email_from) partner_ids = [] if message.subtype_id: partner_ids = [partner_tree[partner.id] for partner in message.partner_ids if partner.id in partner_tree] else: partner_ids = [partner_tree[partner.id] for partner in message.partner_ids if partner.id in partner_tree] customer_email_data = [] for notification in message.notification_ids.filtered(lambda notif: notif.res_partner_id.partner_share and notif.res_partner_id.active): customer_email_data.append((partner_tree[notification.res_partner_id.id][0], partner_tree[notification.res_partner_id.id][1], notification.email_status)) attachment_ids = [] for attachment in message.attachment_ids: if attachment.id in attachments_tree: attachment_ids.append(attachments_tree[attachment.id]) tracking_value_ids = [] for tracking_value_id in message_to_tracking.get(message_id, list()): if tracking_value_id in tracking_tree: tracking_value_ids.append(tracking_tree[tracking_value_id]) message_dict.update({ 'author_id': author, 'partner_ids': partner_ids, 'customer_email_status': (all(d[2] == 'sent' for d in customer_email_data) and 'sent') or (any(d[2] == 'exception' for d in customer_email_data) and 'exception') or (any(d[2] == 'bounce' for d in 
customer_email_data) and 'bounce') or 'ready', 'customer_email_data': customer_email_data, 'attachment_ids': attachment_ids, 'tracking_value_ids': tracking_value_ids, }) return True @api.model def message_fetch(self, domain, limit=20): return self.search(domain, limit=limit).message_format() @api.multi def message_format(self): """ Get the message values in the format for web client. Since message values can be broadcasted, computed fields MUST NOT BE READ and broadcasted. :returns list(dict). Example : { 'body': HTML content of the message 'model': u'res.partner', 'record_name': u'Agrolait', 'attachment_ids': [ { 'file_type_icon': u'webimage', 'id': 45, 'name': u'sample.png', 'filename': u'sample.png' } ], 'needaction_partner_ids': [], # list of partner ids 'res_id': 7, 'tracking_value_ids': [ { 'old_value': "", 'changed_field': "Customer", 'id': 2965, 'new_value': "Axelor" } ], 'author_id': (3, u'Administrator'), 'email_from': '*****@*****.**' # email address or False 'subtype_id': (1, u'Discussions'), 'channel_ids': [], # list of channel ids 'date': '2015-06-30 08:22:33', 'partner_ids': [[7, "Sacha Du Bourg-Palette"]], # list of partner name_get 'message_type': u'comment', 'id': 59, 'subject': False 'is_note': True # only if the subtype is internal } """ message_values = self.read([ 'id', 'body', 'date', 'author_id', 'email_from', # base message fields 'message_type', 'subtype_id', 'subject', # message specific 'model', 'res_id', 'record_name', # document related 'channel_ids', 'partner_ids', # recipients 'starred_partner_ids', # list of partner ids for whom the message is starred ]) message_tree = dict((m.id, m) for m in self.sudo()) self._message_read_dict_postprocess(message_values, message_tree) # add subtype data (is_note flag, subtype_description). 
Do it as sudo # because portal / public may have to look for internal subtypes subtype_ids = [msg['subtype_id'][0] for msg in message_values if msg['subtype_id']] subtypes = self.env['mail.message.subtype'].sudo().browse(subtype_ids).read(['internal', 'description']) subtypes_dict = dict((subtype['id'], subtype) for subtype in subtypes) # fetch notification status notif_dict = {} notifs = self.env['mail.notification'].sudo().search([('mail_message_id', 'in', list(mid for mid in message_tree)), ('is_read', '=', False)]) for notif in notifs: mid = notif.mail_message_id.id if not notif_dict.get(mid): notif_dict[mid] = {'partner_id': list()} notif_dict[mid]['partner_id'].append(notif.res_partner_id.id) for message in message_values: message['needaction_partner_ids'] = notif_dict.get(message['id'], dict()).get('partner_id', []) message['is_note'] = message['subtype_id'] and subtypes_dict[message['subtype_id'][0]]['internal'] message['subtype_description'] = message['subtype_id'] and subtypes_dict[message['subtype_id'][0]]['description'] if message['model'] and self.env[message['model']]._original_module: message['module_icon'] = modules.module.get_module_icon(self.env[message['model']]._original_module) return message_values #------------------------------------------------------ # mail_message internals #------------------------------------------------------ @api.model_cr def init(self): self._cr.execute("""SELECT indexname FROM pg_indexes WHERE indexname = 'mail_message_model_res_id_idx'""") if not self._cr.fetchone(): self._cr.execute("""CREATE INDEX mail_message_model_res_id_idx ON mail_message (model, res_id)""") @api.model def _find_allowed_model_wise(self, doc_model, doc_dict): doc_ids = list(doc_dict) allowed_doc_ids = self.env[doc_model].with_context(active_test=False).search([('id', 'in', doc_ids)]).ids return set([message_id for allowed_doc_id in allowed_doc_ids for message_id in doc_dict[allowed_doc_id]]) @api.model def _find_allowed_doc_ids(self, 
model_ids): IrModelAccess = self.env['ir.model.access'] allowed_ids = set() for doc_model, doc_dict in model_ids.items(): if not IrModelAccess.check(doc_model, 'read', False): continue allowed_ids |= self._find_allowed_model_wise(doc_model, doc_dict) return allowed_ids @api.model def _search(self, args, offset=0, limit=None, order=None, count=False, access_rights_uid=None): """ Override that adds specific access rights of mail.message, to remove ids uid could not see according to our custom rules. Please refer to check_access_rule for more details about those rules. Non employees users see only message with subtype (aka do not see internal logs). After having received ids of a classic search, keep only: - if author_id == pid, uid is the author, OR - uid belongs to a notified channel, OR - uid is in the specified recipients, OR - uid has a notification on the message, OR - uid have read access to the related document is model, res_id - otherwise: remove the id """ # Rules do not apply to administrator if self._uid == SUPERUSER_ID: return super(Message, self)._search( args, offset=offset, limit=limit, order=order, count=count, access_rights_uid=access_rights_uid) # Non-employee see only messages with a subtype (aka, no internal logs) if not self.env['res.users'].has_group('base.group_user'): args = ['&', '&', ('subtype_id', '!=', False), ('subtype_id.internal', '=', False)] + list(args) # Perform a super with count as False, to have the ids, not a counter ids = super(Message, self)._search( args, offset=offset, limit=limit, order=order, count=False, access_rights_uid=access_rights_uid) if not ids and count: return 0 elif not ids: return ids pid = self.env.user.partner_id.id author_ids, partner_ids, channel_ids, allowed_ids = set([]), set([]), set([]), set([]) model_ids = {} # check read access rights before checking the actual rules on the given ids super(Message, self.sudo(access_rights_uid or self._uid)).check_access_rights('read') self._cr.execute(""" SELECT 
DISTINCT m.id, m.model, m.res_id, m.author_id, COALESCE(partner_rel.res_partner_id, needaction_rel.res_partner_id), channel_partner.channel_id as channel_id FROM "%s" m LEFT JOIN "mail_message_res_partner_rel" partner_rel ON partner_rel.mail_message_id = m.id AND partner_rel.res_partner_id = %%(pid)s LEFT JOIN "mail_message_res_partner_needaction_rel" needaction_rel ON needaction_rel.mail_message_id = m.id AND needaction_rel.res_partner_id = %%(pid)s LEFT JOIN "mail_message_mail_channel_rel" channel_rel ON channel_rel.mail_message_id = m.id LEFT JOIN "mail_channel" channel ON channel.id = channel_rel.mail_channel_id LEFT JOIN "mail_channel_partner" channel_partner ON channel_partner.channel_id = channel.id AND channel_partner.partner_id = %%(pid)s WHERE m.id = ANY (%%(ids)s)""" % self._table, dict(pid=pid, ids=ids)) for id, rmod, rid, author_id, partner_id, channel_id in self._cr.fetchall(): if author_id == pid: author_ids.add(id) elif partner_id == pid: partner_ids.add(id) elif channel_id: channel_ids.add(id) elif rmod and rid: model_ids.setdefault(rmod, {}).setdefault(rid, set()).add(id) allowed_ids = self._find_allowed_doc_ids(model_ids) final_ids = author_ids | partner_ids | channel_ids | allowed_ids if count: return len(final_ids) else: # re-construct a list based on ids, because set did not keep the original order id_list = [id for id in ids if id in final_ids] return id_list @api.multi def check_access_rule(self, operation): """ Access rules of mail.message: - read: if - author_id == pid, uid is the author OR - uid is in the recipients (partner_ids) OR - uid has been notified (needaction) OR - uid is member of a listern channel (channel_ids.partner_ids) OR - uid have read access to the related document if model, res_id - otherwise: raise - create: if - no model, no res_id (private message) OR - pid in message_follower_ids if model, res_id OR - uid can read the parent OR - uid have write or create access on the related document if model, res_id, OR - 
otherwise: raise - write: if - author_id == pid, uid is the author, OR - uid is in the recipients (partner_ids) OR - uid has write or create access on the related document if model, res_id - otherwise: raise - unlink: if - uid has write or create access on the related document if model, res_id - otherwise: raise Specific case: non employee users see only messages with subtype (aka do not see internal logs). """ def _generate_model_record_ids(msg_val, msg_ids): """ :param model_record_ids: {'model': {'res_id': (msg_id, msg_id)}, ... } :param message_values: {'msg_id': {'model': .., 'res_id': .., 'author_id': ..}} """ model_record_ids = {} for id in msg_ids: vals = msg_val.get(id, {}) if vals.get('model') and vals.get('res_id'): model_record_ids.setdefault(vals['model'], set()).add(vals['res_id']) return model_record_ids if self._uid == SUPERUSER_ID: return # Non employees see only messages with a subtype (aka, not internal logs) if not self.env['res.users'].has_group('base.group_user'): self._cr.execute('''SELECT DISTINCT message.id, message.subtype_id, subtype.internal FROM "%s" AS message LEFT JOIN "mail_message_subtype" as subtype ON message.subtype_id = subtype.id WHERE message.message_type = %%s AND (message.subtype_id IS NULL OR subtype.internal IS TRUE) AND message.id = ANY (%%s)''' % (self._table), ('comment', self.ids,)) if self._cr.fetchall(): raise AccessError( _('The requested operation cannot be completed due to security restrictions. 
Please contact your system administrator.\n\n(Document type: %s, Operation: %s)') % (self._description, operation))

        # Phase 1: bulk-read the messages' model/res_id/author/parent via raw SQL
        # (avoids ORM access checks, which is the point of this method).
        message_values = dict((res_id, {}) for res_id in self.ids)
        if operation in ['read', 'write']:
            # For read/write also detect whether the current partner was notified,
            # either directly, through a needaction, or through a channel membership.
            self._cr.execute("""
                SELECT DISTINCT m.id, m.model, m.res_id, m.author_id, m.parent_id,
                                COALESCE(partner_rel.res_partner_id, needaction_rel.res_partner_id),
                                channel_partner.channel_id as channel_id
                FROM "%s" m
                LEFT JOIN "mail_message_res_partner_rel" partner_rel
                ON partner_rel.mail_message_id = m.id AND partner_rel.res_partner_id = %%(pid)s
                LEFT JOIN "mail_message_res_partner_needaction_rel" needaction_rel
                ON needaction_rel.mail_message_id = m.id AND needaction_rel.res_partner_id = %%(pid)s
                LEFT JOIN "mail_message_mail_channel_rel" channel_rel
                ON channel_rel.mail_message_id = m.id
                LEFT JOIN "mail_channel" channel
                ON channel.id = channel_rel.mail_channel_id
                LEFT JOIN "mail_channel_partner" channel_partner
                ON channel_partner.channel_id = channel.id AND channel_partner.partner_id = %%(pid)s
                WHERE m.id = ANY (%%(ids)s)""" % self._table, dict(pid=self.env.user.partner_id.id, ids=self.ids))
            for mid, rmod, rid, author_id, parent_id, partner_id, channel_id in self._cr.fetchall():
                # NOTE: DISTINCT may yield several rows per message (one per matching
                # channel); 'notified' ORs the flag across rows via the any(...) below.
                message_values[mid] = {
                    'model': rmod,
                    'res_id': rid,
                    'author_id': author_id,
                    'parent_id': parent_id,
                    'notified': any((message_values[mid].get('notified'), partner_id, channel_id))
                }
        else:
            self._cr.execute("""SELECT DISTINCT id, model, res_id, author_id, parent_id FROM "%s" WHERE id = ANY (%%s)""" % self._table, (self.ids,))
            for mid, rmod, rid, author_id, parent_id in self._cr.fetchall():
                message_values[mid] = {'model': rmod, 'res_id': rid, 'author_id': author_id, 'parent_id': parent_id}

        # Phase 2: Author condition (READ, WRITE, CREATE (private))
        # Authors may always read/write their own messages.
        author_ids = []
        if operation == 'read' or operation == 'write':
            author_ids = [mid for mid, message in message_values.items()
                          if message.get('author_id') and message.get('author_id') == self.env.user.partner_id.id]
        elif operation == 'create':
            # "private" messages: no document attached at all.
            author_ids = [mid for mid, message in message_values.items()
                          if not message.get('model') and not message.get('res_id')]

        # Phase 3: Parent condition, for create (check for received notifications
        # for the created message parent): replying to a message one was notified
        # of is allowed even without document access.
        notified_ids = []
        if operation == 'create':
            # TDE: probably clean me
            parent_ids = [message.get('parent_id') for message in message_values.values()
                          if message.get('parent_id')]
            self._cr.execute("""SELECT DISTINCT m.id, partner_rel.res_partner_id, channel_partner.partner_id
                FROM "%s" m
                LEFT JOIN "mail_message_res_partner_rel" partner_rel
                ON partner_rel.mail_message_id = m.id AND partner_rel.res_partner_id = (%%s)
                LEFT JOIN "mail_message_mail_channel_rel" channel_rel
                ON channel_rel.mail_message_id = m.id
                LEFT JOIN "mail_channel" channel
                ON channel.id = channel_rel.mail_channel_id
                LEFT JOIN "mail_channel_partner" channel_partner
                ON channel_partner.channel_id = channel.id AND channel_partner.partner_id = (%%s)
                WHERE m.id = ANY (%%s)""" % self._table, (self.env.user.partner_id.id, self.env.user.partner_id.id, parent_ids,))
            not_parent_ids = [mid[0] for mid in self._cr.fetchall() if any([mid[1], mid[2]])]
            notified_ids += [mid for mid, message in message_values.items()
                             if message.get('parent_id') in not_parent_ids]

        # Phase 4: Recipients condition, for read and write (partner_ids) and
        # create (message_follower_ids).
        other_ids = set(self.ids).difference(set(author_ids), set(notified_ids))
        model_record_ids = _generate_model_record_ids(message_values, other_ids)
        if operation in ['read', 'write']:
            notified_ids = [mid for mid, message in message_values.items() if message.get('notified')]
        elif operation == 'create':
            # Followers of the target document may post on it.
            for doc_model, doc_ids in model_record_ids.items():
                followers = self.env['mail.followers'].sudo().search([
                    ('res_model', '=', doc_model),
                    ('res_id', 'in', list(doc_ids)),
                    ('partner_id', '=', self.env.user.partner_id.id),
                    ])
                fol_mids = [follower.res_id for follower in followers]
                notified_ids += [mid for mid, message in message_values.items()
                                 if message.get('model') == doc_model and message.get('res_id') in fol_mids]

        # Phase 5: CRUD: Access rights related to the document — delegate the
        # remaining ids to the related document's own access check.
        other_ids = other_ids.difference(set(notified_ids))
        model_record_ids = _generate_model_record_ids(message_values, other_ids)
        document_related_ids = []
        for model, doc_ids in model_record_ids.items():
            DocumentModel = self.env[model]
            mids = DocumentModel.browse(doc_ids).exists()
            if hasattr(DocumentModel, 'check_mail_message_access'):
                DocumentModel.check_mail_message_access(mids.ids, operation)  # ?? mids ?
            else:
                self.env['mail.thread'].check_mail_message_access(mids.ids, operation, model_name=model)
            document_related_ids += [mid for mid, message in message_values.items()
                                     if message.get('model') == model and message.get('res_id') in mids.ids]

        # Calculate remaining ids: if not void, raise an error
        other_ids = other_ids.difference(set(document_related_ids))
        if not (other_ids and self.browse(other_ids).exists()):
            return
        raise AccessError(
            _('The requested operation cannot be completed due to security restrictions. Please contact your system administrator.\n\n(Document type: %s, Operation: %s)') % (self._description, operation))

    @api.model
    def _get_record_name(self, values):
        """ Return the related document name, using name_get. It is done using
            SUPERUSER_ID, to be sure to have the record name correctly stored. """
        model = values.get('model', self.env.context.get('default_model'))
        res_id = values.get('res_id', self.env.context.get('default_res_id'))
        if not model or not res_id or model not in self.env:
            return False
        return self.env[model].sudo().browse(res_id).name_get()[0][1]

    @api.model
    def _get_reply_to(self, values):
        """ Return a specific reply_to: alias of the document through
        message_get_reply_to or take the email_from """
        # Fall back on the context defaults when model/res_id are not in values.
        model, res_id, email_from = values.get('model', self._context.get('default_model')), values.get('res_id', self._context.get('default_res_id')), values.get('email_from')  # ctx values / default_get res ?
        if model and hasattr(self.env[model], 'message_get_reply_to'):
            # return self.env[model].browse(res_id).message_get_reply_to([res_id], default=email_from)[res_id]
            return self.env[model].message_get_reply_to([res_id], default=email_from)[res_id]
        else:
            # return self.env['mail.thread'].message_get_reply_to(default=email_from)[None]
            return self.env['mail.thread'].message_get_reply_to([None], default=email_from)[None]

    @api.model
    def _get_message_id(self, values):
        """ Generate the Message-Id header value for a message being created,
        depending on whether it belongs to a document thread or is private. """
        if values.get('no_auto_thread', False) is True:
            message_id = tools.generate_tracking_message_id('reply_to')
        elif values.get('res_id') and values.get('model'):
            message_id = tools.generate_tracking_message_id('%(res_id)s-%(model)s' % values)
        else:
            message_id = tools.generate_tracking_message_id('private')
        return message_id

    @api.multi
    def _invalidate_documents(self):
        """ Invalidate the cache of the documents followed by ``self``. """
        for record in self:
            if record.model and record.res_id:
                self.env[record.model].invalidate_cache(ids=[record.res_id])

    @api.model
    def create(self, values):
        """ Override to fill in computed defaults (email_from, message_id,
        reply_to, record_name), extract inline base64 images to attachments,
        and trigger notification after the record is created. """
        # coming from mail.js that does not have pid in its values
        if self.env.context.get('default_starred'):
            self = self.with_context({'default_starred_partner_ids': [(4, self.env.user.partner_id.id)]})

        if 'email_from' not in values:  # needed to compute reply_to
            values['email_from'] = self._get_default_from()
        if not values.get('message_id'):
            values['message_id'] = self._get_message_id(values)
        if 'reply_to' not in values:
            values['reply_to'] = self._get_reply_to(values)
        if 'record_name' not in values and 'default_record_name' not in self.env.context:
            values['record_name'] = self._get_record_name(values)

        if 'attachment_ids' not in values:
            values.setdefault('attachment_ids', [])

        # extract base64 images: each inline data URL in the body is turned into
        # an ir.attachment and replaced by a tokenized /web/image URL.
        if 'body' in values:
            Attachments = self.env['ir.attachment']
            data_to_url = {}

            def base64_to_boundary(match):
                # key is the base64 payload; reuse the attachment if the same
                # image appears several times in the body.
                key = match.group(2)
                if not data_to_url.get(key):
                    name = 'image%s' % len(data_to_url)
                    attachment = Attachments.create({
                        'name': name,
                        'datas': match.group(2),
                        'datas_fname': name,
                        'res_model': 'mail.message',
                    })
                    attachment.generate_access_token()
                    values['attachment_ids'].append((4, attachment.id))
                    data_to_url[key] = ['/web/image/%s?access_token=%s' % (attachment.id, attachment.access_token), name]
                return '%s%s alt="%s"' % (data_to_url[key][0], match.group(3), data_to_url[key][1])
            values['body'] = _image_dataurl.sub(base64_to_boundary, tools.ustr(values['body']))

        # delegate creation of tracking after the create as sudo to avoid access rights issues
        tracking_values_cmd = values.pop('tracking_value_ids', False)
        message = super(Message, self).create(values)
        if tracking_values_cmd:
            message.sudo().write({'tracking_value_ids': tracking_values_cmd})

        message._invalidate_documents()

        if not self.env.context.get('message_create_from_mail_mail'):
            message._notify(force_send=self.env.context.get('mail_notify_force_send', True),
                            user_signature=self.env.context.get('mail_notify_user_signature', True))
        return message

    @api.multi
    def read(self, fields=None, load='_classic_read'):
        """ Override to explicitely call check_access_rule, that is not called
            by the ORM. It instead directly fetches ir.rules and apply them. """
        self.check_access_rule('read')
        return super(Message, self).read(fields=fields, load=load)

    @api.multi
    def write(self, vals):
        """ Override to invalidate the related documents' cache both for the
        old and the new model/res_id when the message is moved. """
        if 'model' in vals or 'res_id' in vals:
            self._invalidate_documents()
        res = super(Message, self).write(vals)
        self._invalidate_documents()
        return res

    @api.multi
    def unlink(self):
        # cascade-delete attachments that are directly attached to the message (should only happen
        # for mail.messages that act as parent for a standalone mail.mail record).
        self.check_access_rule('unlink')
        self.mapped('attachment_ids').filtered(
            lambda attach: attach.res_model == self._name and (attach.res_id in self.ids or attach.res_id == 0)
        ).unlink()
        self._invalidate_documents()
        return super(Message, self).unlink()

    #------------------------------------------------------
    # Messaging API
    #------------------------------------------------------

    @api.multi
    def _notify(self, force_send=False, send_after_commit=True, user_signature=True):
        """ Compute recipients to notify based on specified recipients and document
        followers. Delegate notification to partners to send emails and bus notifications
        and to channels to broadcast messages on channels """
        group_user = self.env.ref('base.group_user')
        # have a sudoed copy to manipulate partners (public can go here with website modules like forum / blog / ... )
        self_sudo = self.sudo()
        self.ensure_one()
        partners_sudo = self_sudo.partner_ids
        channels_sudo = self_sudo.channel_ids

        # all followers of the mail.message document have to be added as partners and notified
        # and filter to employees only if the subtype is internal
        if self_sudo.subtype_id and self.model and self.res_id:
            followers = self_sudo.env['mail.followers'].search([
                ('res_model', '=', self.model),
                ('res_id', '=', self.res_id),
                ('subtype_ids', 'in', self_sudo.subtype_id.id),
            ])
            if self_sudo.subtype_id.internal:
                # internal subtype: only channels and partners whose first user
                # belongs to the Employees group are kept.
                followers = followers.filtered(lambda fol: fol.channel_id or (fol.partner_id.user_ids and group_user in fol.partner_id.user_ids[0].mapped('groups_id')))
            channels_sudo |= followers.mapped('channel_id')
            partners_sudo |= followers.mapped('partner_id')

        # remove author from notified partners
        if not self._context.get('mail_notify_author', False) and self_sudo.author_id:
            partners_sudo = partners_sudo - self_sudo.author_id

        # update message, with maybe custom values
        message_values = {}
        if channels_sudo:
            message_values['channel_ids'] = [(6, 0, channels_sudo.ids)]
        if partners_sudo:
            message_values['needaction_partner_ids'] = [(6, 0, partners_sudo.ids)]
        if self.model and self.res_id and hasattr(self.env[self.model], 'message_get_message_notify_values'):
            message_values.update(self.env[self.model].browse(self.res_id).message_get_message_notify_values(self, message_values))
        if message_values:
            self.write(message_values)

        # notify partners and channels
        # those methods are called as SUPERUSER because portal users posting messages
        # have no access to partner model. Maybe propagating a real uid could be necessary.
        email_channels = channels_sudo.filtered(lambda channel: channel.email_send)
        notif_partners = partners_sudo.filtered(lambda partner: 'inbox' in partner.mapped('user_ids.notification_type'))
        if email_channels or partners_sudo - notif_partners:
            # email only partners with an email different from the sender's.
            partners_sudo.search([
                '|',
                ('id', 'in', (partners_sudo - notif_partners).ids),
                ('channel_ids', 'in', email_channels.ids),
                ('email', '!=', self_sudo.author_id.email or self_sudo.email_from),
            ])._notify(self, force_send=force_send, send_after_commit=send_after_commit, user_signature=user_signature)
        notif_partners._notify_by_chat(self)

        channels_sudo._notify(self)

        # Discard cache, because child / parent allow reading and therefore
        # change access rights.
        if self.parent_id:
            self.parent_id.invalidate_cache()

        return True
class MrpWorkorder(models.Model):
    """ A single operation of a manufacturing order, executed on a work center.
    Tracks planned/effective dates, produced quantities, time tracking lines
    and the stock move lines consumed/produced at this step. """
    _name = 'mrp.workorder'
    _description = 'Work Order'
    _inherit = ['mail.thread']

    name = fields.Char(
        'Work Order', required=True,
        states={'done': [('readonly', True)], 'cancel': [('readonly', True)]})
    workcenter_id = fields.Many2one(
        'mrp.workcenter', 'Work Center', required=True,
        states={'done': [('readonly', True)], 'cancel': [('readonly', True)]})
    working_state = fields.Selection(
        'Workcenter Status', related='workcenter_id.working_state',
        help='Technical: used in views only')
    production_id = fields.Many2one(
        'mrp.production', 'Manufacturing Order',
        index=True, ondelete='cascade', required=True, track_visibility='onchange',
        states={'done': [('readonly', True)], 'cancel': [('readonly', True)]})
    product_id = fields.Many2one(
        'product.product', 'Product',
        related='production_id.product_id', readonly=True,
        help='Technical: used in views only.', store=True)
    product_uom_id = fields.Many2one(
        'product.uom', 'Unit of Measure',
        related='production_id.product_uom_id', readonly=True,
        help='Technical: used in views only.')
    production_availability = fields.Selection(
        'Stock Availability', readonly=True,
        related='production_id.availability', store=True,
        help='Technical: used in views and domains only.')
    production_state = fields.Selection(
        'Production State', readonly=True,
        related='production_id.state',
        help='Technical: used in views only.')
    product_tracking = fields.Selection(
        'Product Tracking', related='production_id.product_id.tracking',
        help='Technical: used in views only.')
    qty_production = fields.Float('Original Production Quantity', readonly=True, related='production_id.product_qty')
    qty_remaining = fields.Float('Quantity To Be Produced', compute='_compute_qty_remaining', digits=dp.get_precision('Product Unit of Measure'))
    qty_produced = fields.Float(
        'Quantity', default=0.0,
        readonly=True,
        digits=dp.get_precision('Product Unit of Measure'),
        help="The number of products already handled by this work order")
    qty_producing = fields.Float(
        'Currently Produced Quantity', default=1.0,
        digits=dp.get_precision('Product Unit of Measure'),
        states={'done': [('readonly', True)], 'cancel': [('readonly', True)]})
    is_produced = fields.Boolean(string="Has Been Produced", compute='_compute_is_produced')
    state = fields.Selection([
        ('pending', 'Pending'),
        ('ready', 'Ready'),
        ('progress', 'In Progress'),
        ('done', 'Finished'),
        ('cancel', 'Cancelled')], string='Status',
        default='pending')
    date_planned_start = fields.Datetime(
        'Scheduled Date Start',
        states={'done': [('readonly', True)], 'cancel': [('readonly', True)]})
    date_planned_finished = fields.Datetime(
        'Scheduled Date Finished',
        states={'done': [('readonly', True)], 'cancel': [('readonly', True)]})
    date_start = fields.Datetime(
        'Effective Start Date',
        states={'done': [('readonly', True)], 'cancel': [('readonly', True)]})
    date_finished = fields.Datetime(
        'Effective End Date',
        states={'done': [('readonly', True)], 'cancel': [('readonly', True)]})
    duration_expected = fields.Float(
        'Expected Duration', digits=(16, 2),
        states={'done': [('readonly', True)], 'cancel': [('readonly', True)]},
        help="Expected duration (in minutes)")
    duration = fields.Float(
        'Real Duration', compute='_compute_duration',
        readonly=True, store=True)
    duration_unit = fields.Float(
        'Duration Per Unit', compute='_compute_duration',
        readonly=True, store=True)
    duration_percent = fields.Integer(
        'Duration Deviation (%)', compute='_compute_duration',
        group_operator="avg", readonly=True, store=True)
    operation_id = fields.Many2one(
        'mrp.routing.workcenter', 'Operation')  # Should be used differently as BoM can change in the meantime
    worksheet = fields.Binary(
        'Worksheet', related='operation_id.worksheet', readonly=True)
    move_raw_ids = fields.One2many(
        'stock.move', 'workorder_id', 'Moves')
    move_line_ids = fields.One2many(
        'stock.move.line', 'workorder_id', 'Moves to Track',
        domain=[('done_wo', '=', True)],
        help="Inventory moves for which you must scan a lot number at this work order")
    # 'temporary' move lines, not yet validated for this workorder (done_wo=False)
    active_move_line_ids = fields.One2many(
        'stock.move.line', 'workorder_id', domain=[('done_wo', '=', False)])
    final_lot_id = fields.Many2one(
        'stock.production.lot', 'Lot/Serial Number',
        domain="[('product_id', '=', product_id)]",
        states={'done': [('readonly', True)], 'cancel': [('readonly', True)]})
    tracking = fields.Selection(related='production_id.product_id.tracking')
    time_ids = fields.One2many(
        'mrp.workcenter.productivity', 'workorder_id')
    is_user_working = fields.Boolean(
        'Is the Current User Working', compute='_compute_is_user_working',
        help="Technical field indicating whether the current user is working. ")
    production_messages = fields.Html('Workorder Message', compute='_compute_production_messages')
    next_work_order_id = fields.Many2one('mrp.workorder', "Next Work Order")
    scrap_ids = fields.One2many('stock.scrap', 'workorder_id')
    scrap_count = fields.Integer(compute='_compute_scrap_move_count', string='Scrap Move')
    production_date = fields.Datetime('Production Date', related='production_id.date_planned_start', store=True)
    color = fields.Integer('Color', compute='_compute_color')
    capacity = fields.Float(
        'Capacity', default=1.0,
        help="Number of pieces that can be produced in parallel.")

    @api.multi
    def name_get(self):
        """ Display as "<MO name> - <product name> - <workorder name>". """
        return [(wo.id, "%s - %s - %s" % (wo.production_id.name, wo.product_id.name, wo.name)) for wo in self]

    @api.one
    @api.depends('production_id.product_qty', 'qty_produced')
    def _compute_is_produced(self):
        """ True once the produced quantity reaches the MO quantity (UoM-rounded). """
        rounding = self.production_id.product_uom_id.rounding
        self.is_produced = float_compare(self.qty_produced, self.production_id.product_qty, precision_rounding=rounding) >= 0

    @api.one
    @api.depends('time_ids.duration', 'qty_produced')
    def _compute_duration(self):
        """ Aggregate the time-tracking lines into total duration, duration per
        unit and deviation (%) against the expected duration. """
        self.duration = sum(self.time_ids.mapped('duration'))
        self.duration_unit = round(self.duration / max(self.qty_produced, 1), 2)  # rounding 2 because it is a time
        if self.duration_expected:
            self.duration_percent = 100 * (self.duration_expected - self.duration) / self.duration_expected
        else:
            self.duration_percent = 0

    def _compute_is_user_working(self):
        """ Checks whether the current user is working """
        for order in self:
            # an open (no date_end) productive/performance time line of the
            # current user means they are currently working on this order
            if order.time_ids.filtered(lambda x: (x.user_id.id == self.env.user.id) and (not x.date_end) and (x.loss_type in ('productive', 'performance'))):
                order.is_user_working = True
            else:
                order.is_user_working = False

    @api.depends('production_id', 'workcenter_id', 'production_id.bom_id')
    def _compute_production_messages(self):
        """ Collect the still-valid mrp.message records matching this workorder's
        workcenter, product, BoM or routing and join them for display. """
        ProductionMessage = self.env['mrp.message']
        for workorder in self:
            domain = [
                ('valid_until', '>=', fields.Date.today()),
                '|', ('workcenter_id', '=', False), ('workcenter_id', '=', workorder.workcenter_id.id),
                '|', '|', '|',
                ('product_id', '=', workorder.product_id.id),
                '&', ('product_id', '=', False), ('product_tmpl_id', '=', workorder.product_id.product_tmpl_id.id),
                ('bom_id', '=', workorder.production_id.bom_id.id),
                ('routing_id', '=', workorder.operation_id.routing_id.id)]
            messages = ProductionMessage.search(domain).mapped('message')
            workorder.production_messages = "<br/>".join(messages) or False

    @api.multi
    def _compute_scrap_move_count(self):
        """ Count scrap orders per workorder with a single read_group. """
        data = self.env['stock.scrap'].read_group([('workorder_id', 'in', self.ids)], ['workorder_id'], ['workorder_id'])
        count_data = dict((item['workorder_id'][0], item['workorder_id_count']) for item in data)
        for workorder in self:
            workorder.scrap_count = count_data.get(workorder.id, 0)

    @api.multi
    @api.depends('date_planned_finished', 'production_id.date_planned_finished')
    def _compute_color(self):
        """ Kanban color: 4 (red) when planned to finish after the MO deadline,
        2 otherwise. """
        late_orders = self.filtered(lambda x: x.production_id.date_planned_finished and x.date_planned_finished > x.production_id.date_planned_finished)
        for order in late_orders:
            order.color = 4
        for order in (self - late_orders):
            order.color = 2

    @api.onchange('qty_producing')
    def _onchange_qty_producing(self):
        """ Update stock.move.lot records, according to the new qty currently
        produced. """
        # only tracked raw components that are not the finished product itself
        moves = self.move_raw_ids.filtered(lambda move: move.state not in ('done', 'cancel') and move.product_id.tracking != 'none' and move.product_id.id != self.production_id.product_id.id)
        for move in moves:
            move_lots = self.active_move_line_ids.filtered(lambda move_lot: move_lot.move_id == move)
            if not move_lots:
                continue
            rounding = move.product_uom.rounding
            new_qty = float_round(move.unit_factor * self.qty_producing, precision_rounding=rounding)
            if move.product_id.tracking == 'lot':
                move_lots[0].product_qty = new_qty
                move_lots[0].qty_done = new_qty
            elif move.product_id.tracking == 'serial':
                # Create extra pseudo record
                qty_todo = float_round(new_qty - sum(move_lots.mapped('qty_done')), precision_rounding=rounding)
                if float_compare(qty_todo, 0.0, precision_rounding=rounding) > 0:
                    # qty increased: add one in-memory line per missing serial unit
                    while float_compare(qty_todo, 0.0, precision_rounding=rounding) > 0:
                        self.active_move_line_ids += self.env['stock.move.line'].new({
                            'move_id': move.id,
                            'product_id': move.product_id.id,
                            'lot_id': False,
                            'product_uom_qty': 0.0,
                            'product_uom_id': move.product_uom.id,
                            'qty_done': min(1.0, qty_todo),
                            'workorder_id': self.id,
                            'done_wo': False,
                            'location_id': move.location_id.id,
                            'location_dest_id': move.location_dest_id.id,
                            'date': move.date,
                        })
                        qty_todo -= 1
                elif float_compare(qty_todo, 0.0, precision_rounding=rounding) < 0:
                    # qty decreased: drop/shrink lines, preferring those without a lot
                    qty_todo = abs(qty_todo)
                    for move_lot in move_lots:
                        if float_compare(qty_todo, 0, precision_rounding=rounding) <= 0:
                            break
                        if not move_lot.lot_id and float_compare(qty_todo, move_lot.qty_done, precision_rounding=rounding) >= 0:
                            qty_todo = float_round(qty_todo - move_lot.qty_done, precision_rounding=rounding)
                            self.active_move_line_ids -= move_lot  # Difference operator
                        else:
                            # move_lot.product_qty = move_lot.product_qty - qty_todo
                            if float_compare(move_lot.qty_done - qty_todo, 0, precision_rounding=rounding) == 1:
                                move_lot.qty_done = move_lot.qty_done - qty_todo
                            else:
                                move_lot.qty_done = 0
                            qty_todo = 0

    @api.multi
    def write(self, values):
        """ Forbid rescheduling a work order that is already done. """
        if ('date_planned_start' in values or 'date_planned_finished' in values) and any(workorder.state == 'done' for workorder in self):
            raise UserError(_('You can not change the finished work order.'))
        return super(MrpWorkorder, self).write(values)

    def _generate_lot_ids(self):
        """ Generate stock move lines """
        self.ensure_one()
        MoveLine = self.env['stock.move.line']
        tracked_moves = self.move_raw_ids.filtered(
            lambda move: move.state not in ('done', 'cancel') and move.product_id.tracking != 'none' and move.product_id != self.production_id.product_id and move.bom_line_id)
        for move in tracked_moves:
            qty = move.unit_factor * self.qty_producing
            if move.product_id.tracking == 'serial':
                # one move line per serial-tracked unit
                while float_compare(qty, 0.0, precision_rounding=move.product_uom.rounding) > 0:
                    MoveLine.create({
                        'move_id': move.id,
                        'product_uom_qty': 0,
                        'product_uom_id': move.product_uom.id,
                        'qty_done': min(1, qty),
                        'production_id': self.production_id.id,
                        'workorder_id': self.id,
                        'product_id': move.product_id.id,
                        'done_wo': False,
                        'location_id': move.location_id.id,
                        'location_dest_id': move.location_dest_id.id,
                    })
                    qty -= 1
            else:
                MoveLine.create({
                    'move_id': move.id,
                    'product_uom_qty': 0,
                    'product_uom_id': move.product_uom.id,
                    'qty_done': qty,
                    'product_id': move.product_id.id,
                    'production_id': self.production_id.id,
                    'workorder_id': self.id,
                    'done_wo': False,
                    'location_id': move.location_id.id,
                    'location_dest_id': move.location_dest_id.id,
                })

    def _assign_default_final_lot_id(self):
        """ Pick the oldest lot reserved for this workorder as the default
        finished-product lot. """
        self.final_lot_id = self.env['stock.production.lot'].search([('use_next_on_work_order_id', '=', self.id)],
                                                                    order='create_date, id', limit=1)

    def _get_byproduct_move_line(self, by_product_move, quantity):
        """ Values dict for a stock.move.line producing ``quantity`` of a by-product. """
        return {
            'move_id': by_product_move.id,
            'product_id': by_product_move.product_id.id,
            'product_uom_qty': quantity,
            'product_uom_id': by_product_move.product_uom.id,
            'qty_done': quantity,
            'workorder_id': self.id,
            'location_id': by_product_move.location_id.id,
            'location_dest_id': by_product_move.location_dest_id.id,
        }

    @api.multi
    def record_production(self):
        """ Register the quantity currently produced: consume raw materials,
        finalize the temporary move lines, post finished/by-product quantities
        on the last work order, then reset qty_producing for the next batch. """
        self.ensure_one()
        if self.qty_producing <= 0:
            raise UserError(_('Please set the quantity you are currently producing. It should be different from zero.'))

        if (self.production_id.product_id.tracking != 'none') and not self.final_lot_id and self.move_raw_ids:
            raise UserError(_('You should provide a lot/serial number for the final product'))

        # Update quantities done on each raw material line
        # For each untracked component without any 'temporary' move lines,
        # (the new workorder tablet view allows registering consumed quantities for untracked components)
        # we assume that only the theoretical quantity was used
        for move in self.move_raw_ids:
            if move.has_tracking == 'none' and (move.state not in ('done', 'cancel')) and move.bom_line_id\
                    and move.unit_factor and not move.move_line_ids.filtered(lambda ml: not ml.done_wo):
                rounding = move.product_uom.rounding
                if self.product_id.tracking != 'none':
                    qty_to_add = float_round(self.qty_producing * move.unit_factor, precision_rounding=rounding)
                    move._generate_consumed_move_line(qty_to_add, self.final_lot_id)
                elif len(move._get_move_lines()) < 2:
                    move.quantity_done += float_round(self.qty_producing * move.unit_factor, precision_rounding=rounding)
                else:
                    move._set_quantity_done(move.quantity_done + float_round(self.qty_producing * move.unit_factor, precision_rounding=rounding))

        # Transfer quantities from temporary to final move lots or make them final
        for move_line in self.active_move_line_ids:
            # Check if move_line already exists
            if move_line.qty_done <= 0:  # rounding...
                move_line.sudo().unlink()
                continue
            if move_line.product_id.tracking != 'none' and not move_line.lot_id:
                raise UserError(_('You should provide a lot/serial number for a component'))
            # Search other move_line where it could be added:
            lots = self.move_line_ids.filtered(lambda x: (x.lot_id.id == move_line.lot_id.id) and (not x.lot_produced_id) and (not x.done_move) and (x.product_id == move_line.product_id))
            if lots:
                lots[0].qty_done += move_line.qty_done
                lots[0].lot_produced_id = self.final_lot_id.id
                move_line.sudo().unlink()
            else:
                move_line.lot_produced_id = self.final_lot_id.id
                move_line.done_wo = True

        # Once a piece is produced, you can launch the next work order
        if self.next_work_order_id.state == 'pending':
            self.next_work_order_id.state = 'ready'

        self.move_line_ids.filtered(
            lambda move_line: not move_line.done_move and not move_line.lot_produced_id and move_line.qty_done > 0
        ).write({
            'lot_produced_id': self.final_lot_id.id,
            'lot_produced_qty': self.qty_producing
        })

        # If last work order, then post lots used
        # TODO: should be same as checking if for every workorder something has been done?
        if not self.next_work_order_id:
            production_move = self.production_id.move_finished_ids.filtered(
                lambda x: (x.product_id.id == self.production_id.product_id.id) and (x.state not in ('done', 'cancel')))
            if production_move.product_id.tracking != 'none':
                move_line = production_move.move_line_ids.filtered(lambda x: x.lot_id.id == self.final_lot_id.id)
                if move_line:
                    move_line.product_uom_qty += self.qty_producing
                    move_line.qty_done += self.qty_producing
                else:
                    move_line.create({'move_id': production_move.id,
                                      'product_id': production_move.product_id.id,
                                      'lot_id': self.final_lot_id.id,
                                      'product_uom_qty': self.qty_producing,
                                      'product_uom_id': production_move.product_uom.id,
                                      'qty_done': self.qty_producing,
                                      'workorder_id': self.id,
                                      'location_id': production_move.location_id.id,
                                      'location_dest_id': production_move.location_dest_id.id,
                    })
            else:
                production_move.quantity_done += self.qty_producing

        if not self.next_work_order_id:
            # post by-products (any finished move that is not the main product)
            for by_product_move in self.production_id.move_finished_ids.filtered(lambda x: (x.product_id.id != self.production_id.product_id.id) and (x.state not in ('done', 'cancel'))):
                if by_product_move.has_tracking != 'serial':
                    values = self._get_byproduct_move_line(by_product_move, self.qty_producing * by_product_move.unit_factor)
                    self.env['stock.move.line'].create(values)
                elif by_product_move.has_tracking == 'serial':
                    qty_todo = by_product_move.product_uom._compute_quantity(self.qty_producing * by_product_move.unit_factor, by_product_move.product_id.uom_id)
                    for i in range(0, int(float_round(qty_todo, precision_digits=0))):
                        values = self._get_byproduct_move_line(by_product_move, 1)
                        self.env['stock.move.line'].create(values)

        # Update workorder quantity produced
        self.qty_produced += self.qty_producing

        if self.final_lot_id:
            # hand the lot over to the next work order, then clear it here
            self.final_lot_id.use_next_on_work_order_id = self.next_work_order_id
            self.final_lot_id = False

        # Set a qty producing
        rounding = self.production_id.product_uom_id.rounding
        if float_compare(self.qty_produced, self.production_id.product_qty, precision_rounding=rounding) >= 0:
            self.qty_producing = 0
        elif self.production_id.product_id.tracking == 'serial':
            self._assign_default_final_lot_id()
            self.qty_producing = 1.0
            self._generate_lot_ids()
        else:
            self.qty_producing = float_round(self.production_id.product_qty - self.qty_produced, precision_rounding=rounding)
            self._generate_lot_ids()

        if self.next_work_order_id and self.production_id.product_id.tracking != 'none':
            self.next_work_order_id._assign_default_final_lot_id()

        if float_compare(self.qty_produced, self.production_id.product_qty, precision_rounding=rounding) >= 0:
            self.button_finish()
        return True

    @api.multi
    def button_start(self):
        """ Start (or resume) the work order: open a time-tracking line with a
        productive or performance loss depending on whether the expected
        duration is already exceeded, and move MO/WO to 'progress'. """
        self.ensure_one()
        # As button_start is automatically called in the new view
        if self.state in ('done', 'cancel'):
            return True
        # Need a loss in case of the real time exceeding the expected
        timeline = self.env['mrp.workcenter.productivity']
        if self.duration < self.duration_expected:
            loss_id = self.env['mrp.workcenter.productivity.loss'].search([('loss_type', '=', 'productive')], limit=1)
            if not len(loss_id):
                raise UserError(_("You need to define at least one productivity loss in the category 'Productivity'. Create one from the Manufacturing app, menu: Configuration / Productivity Losses."))
        else:
            loss_id = self.env['mrp.workcenter.productivity.loss'].search([('loss_type', '=', 'performance')], limit=1)
            if not len(loss_id):
                raise UserError(_("You need to define at least one productivity loss in the category 'Performance'. Create one from the Manufacturing app, menu: Configuration / Productivity Losses."))
        for workorder in self:
            if workorder.production_id.state != 'progress':
                workorder.production_id.write({
                    'state': 'progress',
                    'date_start': datetime.now(),
                })
            timeline.create({
                'workorder_id': workorder.id,
                'workcenter_id': workorder.workcenter_id.id,
                'description': _('Time Tracking: ') + self.env.user.name,
                'loss_id': loss_id[0].id,
                'date_start': datetime.now(),
                'user_id': self.env.user.id
            })
        return self.write({'state': 'progress',
                           'date_start': datetime.now(),
        })

    @api.multi
    def button_finish(self):
        """ Close the work order: end all time lines and mark it done. """
        self.ensure_one()
        self.end_all()
        return self.write({'state': 'done', 'date_finished': fields.Datetime.now()})

    @api.multi
    def end_previous(self, doall=False):
        """
        @param: doall: This will close all open time lines on the open work orders when doall = True, otherwise
        only the one of the current user
        """
        # TDE CLEANME
        timeline_obj = self.env['mrp.workcenter.productivity']
        domain = [('workorder_id', 'in', self.ids), ('date_end', '=', False)]
        if not doall:
            domain.append(('user_id', '=', self.env.user.id))
        not_productive_timelines = timeline_obj.browse()
        for timeline in timeline_obj.search(domain, limit=None if doall else 1):
            wo = timeline.workorder_id
            if wo.duration_expected <= wo.duration:
                # expected time already consumed: the whole open line is a loss
                if timeline.loss_type == 'productive':
                    not_productive_timelines += timeline
                timeline.write({'date_end': fields.Datetime.now()})
            else:
                # split the line at the moment the expected duration runs out
                maxdate = fields.Datetime.from_string(timeline.date_start) + relativedelta(minutes=wo.duration_expected - wo.duration)
                enddate = datetime.now()
                if maxdate > enddate:
                    timeline.write({'date_end': enddate})
                else:
                    timeline.write({'date_end': maxdate})
                    not_productive_timelines += timeline.copy({'date_start': maxdate, 'date_end': enddate})
        if not_productive_timelines:
            loss_id = self.env['mrp.workcenter.productivity.loss'].search([('loss_type', '=', 'performance')], limit=1)
            if not len(loss_id):
                raise UserError(_("You need to define at least one unactive productivity loss in the category 'Performance'. Create one from the Manufacturing app, menu: Configuration / Productivity Losses."))
            not_productive_timelines.write({'loss_id': loss_id.id})
        return True

    @api.multi
    def end_all(self):
        """ Close the open time lines of all users on these work orders. """
        return self.end_previous(doall=True)

    @api.multi
    def button_pending(self):
        """ Pause: close the current user's open time line. """
        self.end_previous()
        return True

    @api.multi
    def button_unblock(self):
        """ Unblock the work centers of these work orders. """
        for order in self:
            order.workcenter_id.unblock()
        return True

    @api.multi
    def action_cancel(self):
        return self.write({'state': 'cancel'})

    @api.multi
    def button_done(self):
        """ Mark the work orders done after closing all open time lines. """
        if any([x.state in ('done', 'cancel') for x in self]):
            raise UserError(_('A Manufacturing Order is already done or cancelled!'))
        self.end_all()
        return self.write({'state': 'done',
                           'date_finished': datetime.now()})

    @api.multi
    def button_scrap(self):
        """ Open the scrap wizard pre-filled for this work order. """
        self.ensure_one()
        return {
            'name': _('Scrap'),
            'view_type': 'form',
            'view_mode': 'form',
            'res_model': 'stock.scrap',
            'view_id': self.env.ref('stock.stock_scrap_form_view2').id,
            'type': 'ir.actions.act_window',
            'context': {'default_workorder_id': self.id, 'default_production_id': self.production_id.id, 'product_ids': (self.production_id.move_raw_ids.filtered(lambda x: x.state not in ('done', 'cancel')) | self.production_id.move_finished_ids.filtered(lambda x: x.state == 'done')).mapped('product_id').ids},
            # 'context': {'product_ids': self.move_raw_ids.filtered(lambda x: x.state not in ('done', 'cancel')).mapped('product_id').ids + [self.production_id.product_id.id]},
            'target': 'new',
        }

    @api.multi
    def action_see_move_scrap(self):
        """ Open the list of scrap orders linked to this work order. """
        self.ensure_one()
        action = self.env.ref('stock.action_stock_scrap').read()[0]
        action['domain'] = [('workorder_id', '=', self.id)]
        return action

    @api.multi
    @api.depends('qty_production', 'qty_produced')
    def _compute_qty_remaining(self):
        """ Remaining quantity = MO quantity - already produced (UoM-rounded). """
        for wo in self:
            wo.qty_remaining = float_round(wo.qty_production - wo.qty_produced, precision_rounding=wo.production_id.product_uom_id.rounding)
class IrModelFieldsAnonymizeWizard(models.TransientModel):
    """Wizard driving field (de)anonymization.

    Anonymizing exports the original values of all configured fields
    (``ir.model.fields.anonymization``) to a JSON file, then overwrites them
    with dummy values; reversing reads that file back and restores the values.
    """
    _name = 'ir.model.fields.anonymize.wizard'

    name = fields.Char('File Name')
    summary = fields.Text(compute='_compute_summary')
    file_export = fields.Binary('Export')
    file_import = fields.Binary(
        'Import',
        help="This is the file created by the anonymization process. It should have the extension '.json' or '.pickle'.")
    state = fields.Selection(compute='_compute_state', string='Status', selection=WIZARD_ANONYMIZATION_STATES)
    msg = fields.Text('Message')

    @api.multi
    def _compute_summary(self):
        for anonymize_wizard in self:
            anonymize_wizard.summary = anonymize_wizard._get_summary_value()

    @api.multi
    def _compute_state(self):
        for anonymize_wizard in self:
            anonymize_wizard.state = anonymize_wizard._get_state_value()

    @api.model
    def _get_state_value(self):
        """Return the global anonymization state ('clear'/'anonymized'/'unstable')."""
        return self.env['ir.model.fields.anonymization']._get_global_state()

    @api.model
    def _get_summary_value(self):
        """Return a one-line-per-field textual summary of the anonymization
        configuration, skipping entries whose field no longer exists."""
        summary = u''
        anon_fields = self.env['ir.model.fields.anonymization'].search([('state', '!=', 'not_existing')])
        for anon_field in anon_fields:
            field = anon_field.field_id
            if field:
                values = {
                    'model_name': field.model_id.name,
                    'model_code': field.model_id.model,
                    'field_code': field.name,
                    'field_name': field.field_description,
                    'state': anon_field.state,
                }
                summary += u" * %(model_name)s (%(model_code)s) -> %(field_name)s (%(field_code)s): state: (%(state)s)\n" % values
            else:
                summary += u"* Missing local model (%s) and field (%s): state: (%s) \n" % (
                    anon_field.model_name, anon_field.field_name, anon_field.state)
        return summary

    @api.model
    def default_get(self, fields_list):
        # NOTE(review): super().default_get() is deliberately not called here
        # (matches the historical behavior of this wizard) -- confirm before
        # changing.
        res = {}
        res['name'] = '.json'
        res['summary'] = self._get_summary_value()
        res['state'] = self._get_state_value()
        res['msg'] = _("Before executing the anonymization process, you should make a backup of your database.")
        return res

    @api.model
    def fields_view_get(self, view_id=None, view_type='form', toolbar=False, submenu=False):
        """Rework the wizard form arch depending on the anonymization state and
        on the ``step`` key of the context."""
        state = self.env['ir.model.fields.anonymization']._get_global_state()
        step = self.env.context.get('step', 'new_window')
        res = super(IrModelFieldsAnonymizeWizard, self).fields_view_get(view_id=view_id, view_type=view_type, toolbar=toolbar, submenu=submenu)
        eview = etree.fromstring(res['arch'])
        placeholder = eview.xpath("group[@name='placeholder1']")
        if placeholder:
            placeholder = placeholder[0]
            # addnext() inserts right after the placeholder, so elements are
            # added in reverse display order.
            if step == 'new_window' and state == 'clear':
                # Clicked in the menu and the fields are not anonymized: warn
                # the admin that backing up the db is very important.
                placeholder.addnext(etree.Element('field', {'name': 'msg', 'colspan': '4', 'nolabel': '1'}))
                placeholder.addnext(etree.Element('newline'))
                placeholder.addnext(etree.Element('label', {'string': 'Warning'}))
                eview.remove(placeholder)
            elif step == 'new_window' and state == 'anonymized':
                # Clicked in the menu and the fields are already anonymized:
                # ask for the export file to reverse the process.
                placeholder.addnext(etree.Element('newline'))
                placeholder.addnext(etree.Element('field', {'name': 'file_import', 'required': "1"}))
                placeholder.addnext(etree.Element('label', {'string': 'Anonymization file'}))
                eview.remove(placeholder)
            elif step == 'just_anonymized':
                # We just ran the anonymization process; expose the export file.
                placeholder.addnext(etree.Element('newline'))
                placeholder.addnext(etree.Element('field', {'name': 'file_export'}))
                # Remove the action buttons...
                for button in eview.xpath("button"):
                    eview.remove(button)
                # ...and add a result message.
                placeholder.addnext(etree.Element('field', {'name': 'msg', 'colspan': '4', 'nolabel': '1'}))
                placeholder.addnext(etree.Element('newline'))
                placeholder.addnext(etree.Element('label', {'string': 'Result'}))
                eview.remove(placeholder)
            elif step == 'just_desanonymized':
                # We just reversed the anonymization; no field needed, only a
                # result message, and no buttons.
                for button in eview.xpath("button"):
                    eview.remove(button)
                placeholder.addnext(etree.Element('field', {'name': 'msg', 'colspan': '4', 'nolabel': '1'}))
                placeholder.addnext(etree.Element('newline'))
                placeholder.addnext(etree.Element('label', {'string': 'Result'}))
                eview.remove(placeholder)
            else:
                raise UserError(
                    _("The database anonymization is currently in an unstable state. Some fields are anonymized,"
                      " while some fields are not anonymized. You should try to solve this problem before trying to do anything else."))
            res['arch'] = etree.tostring(eview, encoding='unicode')
        return res

    @api.multi
    def anonymize_database(self):
        """Sets the 'anonymized' state to defined fields"""
        # pylint: disable=W0101
        raise UserError(
            """The GECOERP Migration Platform no longer accepts anonymized databases.\n If you wish for your data to remain private during migration, please contact us at [email protected]"""
        )
        # Everything below is intentionally unreachable (see raise above); it
        # is kept as reference for the original anonymization procedure.
        self.ensure_one()

        # Create a new history record.
        history = self.env['ir.model.fields.anonymization.history'].create({
            'date': fields.Datetime.now(),
            'state': 'started',
            'direction': 'clear -> anonymized'
        })

        # Check that all the defined fields are in the 'clear' state.
        state = self.env['ir.model.fields.anonymization']._get_global_state()
        error_type = _('Error !')
        if state == 'anonymized':
            raise UserError('%s: %s' % (
                error_type,
                _("The database is currently anonymized, you cannot anonymize it again.")))
        elif state == 'unstable':
            raise UserError('%s: %s' % (
                error_type,
                _("The database anonymization is currently in an unstable state. Some fields are anonymized,"
                  " while some fields are not anonymized. You should try to solve this problem before trying to do anything.")))

        # Do the anonymization.
        dirpath = os.environ.get('HOME') or os.getcwd()
        rel_filepath = 'field_anonymization_%s_%s.json' % (self.env.cr.dbname, history.id)
        abs_filepath = os.path.abspath(os.path.join(dirpath, rel_filepath))

        ano_fields = self.env['ir.model.fields.anonymization'].search([('state', '!=', 'not_existing')])
        if not ano_fields:
            raise UserError('%s: %s' % (error_type, _("No fields are going to be anonymized.")))

        data = []
        for field in ano_fields:
            model_name = field.model_id.model
            field_name = field.field_id.name
            field_type = field.field_id.ttype
            table_name = self.env[model_name]._table

            # Backup the current values (identifiers come from trusted model
            # metadata, not user input).
            self.env.cr.execute('select id, "%s" from "%s"' % (field_name, table_name))
            for record in self.env.cr.dictfetchall():
                data.append({
                    "model_id": model_name,
                    "field_id": field_name,
                    "id": record['id'],
                    "value": record[field_name]
                })

                # Compute a dummy replacement value per field type.
                anonymized_value = None
                sid = str(record['id'])
                if field_type in ('char', 'selection', 'text', 'html'):
                    anonymized_value = 'xxx' + sid
                elif field_type == 'boolean':
                    anonymized_value = random.choice([True, False])
                elif field_type == 'date':
                    anonymized_value = '2011-11-11'
                elif field_type == 'datetime':
                    anonymized_value = '2011-11-11 11:11:11'
                elif field_type in ('float', 'monetary'):
                    anonymized_value = 0.0
                elif field_type == 'integer':
                    anonymized_value = 0
                elif field_type in ['binary', 'many2many', 'many2one', 'one2many', 'reference']:
                    # Cannot anonymize these kinds of fields.
                    raise UserError('%s: %s' % (
                        error_type,
                        _("Cannot anonymize fields of these types: binary, many2many, many2one, one2many, reference.")))

                if anonymized_value is None:
                    raise UserError('%s: %s' % (error_type, _("Anonymized value can not be empty.")))

                sql = 'update "%(table)s" set "%(field)s" = %%(anonymized_value)s where id = %%(id)s' % {
                    'table': table_name,
                    'field': field_name,
                }
                self.env.cr.execute(sql, {
                    'anonymized_value': anonymized_value,
                    'id': record['id']
                })

        # Save the backup as a JSON file.
        with open(abs_filepath, 'w') as fn:
            json.dump(data, fn)

        # Update the anonymization fields.
        ano_fields.write({'state': 'anonymized'})

        # Add a result message in the wizard.
        msgs = [
            "Anonymization successful.",
            "",
            # typo fix: was "Donot forget"
            "Do not forget to save the resulting file to a safe place because you will not be able to revert the anonymization without this file.",
            "",
            "This file is also stored in the %s directory. The absolute file path is: %s.",
        ]
        msg = '\n'.join(msgs) % (dirpath, abs_filepath)

        with open(abs_filepath, 'rb') as fn:
            self.write({
                'msg': msg,
                # encodebytes replaces base64.encodestring, deprecated and
                # removed in Python 3.9
                'file_export': base64.encodebytes(fn.read()),
            })

        # Update the history record.
        history.write({
            'field_ids': [[6, 0, ano_fields.ids]],
            'msg': msg,
            'filepath': abs_filepath,
            'state': 'done',
        })

        return {
            'res_id': self.id,
            'view_id': self.env.ref('anonymization.view_ir_model_fields_anonymize_wizard_form').ids,
            'view_type': 'form',
            "view_mode": 'form',
            'res_model': 'ir.model.fields.anonymize.wizard',
            'type': 'ir.actions.act_window',
            'context': {'step': 'just_anonymized'},
            'target': 'new'
        }

    @api.multi
    def reverse_anonymize_database(self):
        """Set the 'clear' state to defined fields"""
        self.ensure_one()
        IrModelFieldsAnonymization = self.env['ir.model.fields.anonymization']

        # Check that all the defined fields are in the 'anonymized' state.
        state = IrModelFieldsAnonymization._get_global_state()
        if state == 'clear':
            raise UserError(
                _("The database is not currently anonymized, you cannot reverse the anonymization."))
        elif state == 'unstable':
            raise UserError(
                _("The database anonymization is currently in an unstable state. Some fields are anonymized,"
                  " while some fields are not anonymized. You should try to solve this problem before trying to do anything."))

        if not self.file_import:
            raise UserError('%s: %s' % (
                _('Error !'),
                _("It is not possible to reverse the anonymization process without supplying the anonymization export file.")))

        # Reverse the anonymization:
        # load the json/pickle file content into a data structure.
        # decodebytes replaces base64.decodestring, deprecated and removed in
        # Python 3.9
        content = base64.decodebytes(self.file_import)
        try:
            data = json.loads(content.decode('utf8'))
        except Exception:
            # Backward-compatible mode for old '.pickle' exports.
            # SECURITY: unpickling an uploaded file can execute arbitrary
            # code; this path is only reachable by the admin running this
            # wizard, and is kept for backward compatibility only.
            data = pickle.loads(content, encoding='utf8')

        # Per-field custom migration queries registered for this version.
        fixes = self.env['ir.model.fields.anonymization.migration.fix'].search_read([
            ('target_version', '=', '.'.join(str(v) for v in version_info[:2]))
        ], ['model_name', 'field_name', 'query', 'query_type', 'sequence'])
        fixes = group(fixes, ('model_name', 'field_name'))

        for line in data:
            queries = []
            table_name = self.env[line['model_id']]._table if line['model_id'] in self.env else None

            # Check if a custom restore query exists for this field.
            key = (line['model_id'], line['field_id'])
            custom_updates = fixes.get(key)
            if custom_updates:
                custom_updates.sort(key=itemgetter('sequence'))
                queries = [(record['query'], record['query_type']) for record in custom_updates if record['query_type']]
            elif table_name:
                queries = [(
                    'update "%(table)s" set "%(field)s" = %%(value)s where id = %%(id)s' % {
                        'table': table_name,
                        'field': line['field_id'],
                    }, 'sql')]

            for query in queries:
                if query[1] == 'sql':
                    self.env.cr.execute(query[0], {
                        'value': line['value'],
                        'id': line['id']
                    })
                elif query[1] == 'python':
                    safe_eval(query[0] % line)
                else:
                    # BUGFIX: `query` is a (sql, type) tuple, so the original
                    # `query['query_type']` indexing raised TypeError instead
                    # of this message.
                    raise Exception(
                        "Unknown query type '%s'. Valid types are: sql, python." % (query[1],))

        # Update the anonymization fields.
        ano_fields = IrModelFieldsAnonymization.search([('state', '!=', 'not_existing')])
        ano_fields.write({'state': 'clear'})

        # Add a result message in the wizard.
        self.msg = '\n'.join(["Successfully reversed the anonymization.", ""])

        # Create a new history record.
        history = self.env['ir.model.fields.anonymization.history'].create({
            'date': fields.Datetime.now(),
            'field_ids': [[6, 0, ano_fields.ids]],
            'msg': self.msg,
            'filepath': False,
            'direction': 'anonymized -> clear',
            'state': 'done'
        })

        return {
            'res_id': self.id,
            'view_id': self.env.ref('anonymization.view_ir_model_fields_anonymize_wizard_form').ids,
            'view_type': 'form',
            "view_mode": 'form',
            'res_model': 'ir.model.fields.anonymize.wizard',
            'type': 'ir.actions.act_window',
            'context': {'step': 'just_desanonymized'},
            'target': 'new'
        }