class MixedModel(models.Model):
    """Test model mixing several field types (float, date/datetime, html,
    selection, reference, monetary) used by the new-API test suite."""

    _name = 'test_new_api.mixed'

    number = fields.Float(digits=(10, 2), default=3.14)
    date = fields.Date()
    now = fields.Datetime(compute='_compute_now')
    lang = fields.Selection(string='Language', selection='_get_lang')
    reference = fields.Reference(string='Related Document',
                                 selection='_reference_models')
    # Four HTML fields covering the sanitization option matrix.
    comment1 = fields.Html(sanitize=False)
    comment2 = fields.Html(sanitize_attributes=True, strip_classes=False)
    comment3 = fields.Html(sanitize_attributes=True, strip_classes=True)
    comment4 = fields.Html(sanitize_attributes=True, strip_style=True)
    currency_id = fields.Many2one(
        'res.currency', default=lambda self: self.env.ref('base.EUR'))
    amount = fields.Monetary()

    @api.one
    def _compute_now(self):
        # this is a non-stored computed field without dependencies
        self.now = fields.Datetime.now()

    @api.model
    def _get_lang(self):
        """Selection provider for ``lang``: every installed language."""
        return self.env['res.lang'].get_installed()

    @api.model
    def _reference_models(self):
        """Selection provider for ``reference``: all non-manual models,
        excluding the ``ir.`` technical namespace.

        The local variable is named ``model_records`` rather than ``models``
        so it does not shadow the imported ``models`` module.
        """
        model_records = self.env['ir.model'].sudo().search(
            [('state', '!=', 'manual')])
        return [(model.model, model.name)
                for model in model_records
                if not model.model.startswith('ir.')]
class MailMember(models.Model):
    """A recipient of a mail-marketing campaign, pointing at an arbitrary
    record through a Reference field."""

    _name = 'mail.member'
    _rec_name = 'member'

    @api.model
    def get_records_selection(self):
        """Selection provider for ``member``: every model known to ir.model."""
        models = self.env['ir.model'].sudo().search([])
        return [(model.model, model.name) for model in models]

    def _compute_record_details(self):
        """Resolve ``member`` to the record of ``model_id`` whose id equals
        ``member_id`` (empty recordset when it no longer exists)."""
        for obj in self:
            obj.member = self.env[obj.model_id.model].search(
                [('id', '=', obj.member_id)], limit=1)

    # Activity lines logged for this member.
    member_activity_line = fields.One2many('member.activity',
                                           'mail_member_id',
                                           string='Member Activity')
    # Raw database id of the targeted record (paired with model_id).
    member_id = fields.Integer(string='Record')
    # Computed reference "model,id" built from model_id + member_id.
    member = fields.Reference(compute='_compute_record_details',
                              selection='get_records_selection',
                              string='Reference')
    schedule_date = fields.Datetime('Schedule Date')
    mail_marketing_id = fields.Many2one('mail.marketing',
                                        string='Mail marketing',
                                        ondelete="cascade")
    marketing_config_id = fields.Many2one(
        'marketing.config',
        related="mail_marketing_id.marketing_config_id",
        store=True, string='Marketing')
    model_id = fields.Many2one('ir.model', string='Model',
                               related='mail_marketing_id.model_id')
class RecurringHistory(models.Model):
    """One line per document generated by a recurring job; links the
    recurring record to the copy it produced."""

    _name = "recurring.history"
    _description = "Recurring history"
    _rec_name = 'date'

    date = fields.Datetime(string='Date')
    recurring_id = fields.Many2one('recurring', string='Recurring',
                                   ondelete='cascade')
    # _get_document_types is a module-level selection provider defined
    # elsewhere in this file.
    document_id = fields.Reference(selection=_get_document_types,
                                   string='Source Document')
class Lock(models.Model):
    """An exclusive lock held on a DMS directory or file."""

    _name = 'muk_dms.lock'
    _description = "Directory or File Lock"

    name = fields.Char(compute='_compute_name', string="Name")
    locked_by = fields.Char(string="Locked by", required=True)
    locked_by_ref = fields.Reference([('res.users', 'User')],
                                     string="User Reference")
    # Selection is empty here; concrete targets are registered elsewhere.
    lock_ref = fields.Reference([], string="Object Reference", required=True)
    token = fields.Char(string="Token")
    operation = fields.Char(string="Operation")

    @api.one
    @api.depends('lock_ref')
    def _compute_name(self):
        """Build a display name from the locked record.

        Guards against an unset ``lock_ref``: the compute may run on new
        records before the ``required=True`` constraint is enforced, and
        the original unguarded ``self.lock_ref.name`` raised
        ``AttributeError`` on ``False``.
        """
        if self.lock_ref:
            self.name = "Lock for " + str(self.lock_ref.name)
        else:
            self.name = False
class SMSTemplatePreview(models.TransientModel):
    """Wizard rendering an SMS template against a sample record so the user
    can preview the final message body per language."""

    _name = "sms.template.preview"
    _description = "SMS Template Preview"

    @api.model
    def _selection_target_model(self):
        """Selection provider for ``resource_ref``: every known model."""
        models = self.env['ir.model'].search([])
        return [(model.model, model.name) for model in models]

    @api.model
    def _selection_languages(self):
        """Selection provider for ``lang``: installed languages."""
        return self.env['res.lang'].get_installed()

    @api.model
    def default_get(self, fields):
        """Pre-fill ``resource_ref`` with the first record of the template's
        model, when a template is passed via context."""
        result = super(SMSTemplatePreview, self).default_get(fields)
        sms_template_id = self.env.context.get('default_sms_template_id')
        if not sms_template_id or 'resource_ref' not in fields:
            return result
        sms_template = self.env['sms.template'].browse(sms_template_id)
        res = self.env[sms_template.model_id.model].search([], limit=1)
        if res:
            result['resource_ref'] = '%s,%s' % (sms_template.model_id.model,
                                                res.id)
        return result

    sms_template_id = fields.Many2one('sms.template', required=True,
                                      ondelete='cascade')
    lang = fields.Selection(_selection_languages,
                            string='Template Preview Language')
    model_id = fields.Many2one('ir.model',
                               related="sms_template_id.model_id")
    body = fields.Char('Body', compute='_compute_sms_template_fields')
    resource_ref = fields.Reference(string='Record reference',
                                    selection='_selection_target_model')
    no_record = fields.Boolean('No Record', compute='_compute_no_record')

    @api.depends('model_id')
    def _compute_no_record(self):
        """True when the template's model has no record to preview with."""
        for preview in self:
            preview.no_record = (self.env[preview.model_id.model].search_count(
                []) == 0) if preview.model_id else True

    @api.depends('lang', 'resource_ref')
    def _compute_sms_template_fields(self):
        """Render the template body against the chosen record/language, or
        fall back to the raw template body when no record is selected."""
        for wizard in self:
            if wizard.sms_template_id and wizard.resource_ref:
                wizard.body = wizard.sms_template_id._render_field(
                    'body', [wizard.resource_ref.id],
                    set_lang=wizard.lang)[wizard.resource_ref.id]
            else:
                wizard.body = wizard.sms_template_id.body
class QcTest(models.Model):
    """ A test is a group of questions along with the values that make them
    valid. A 'generic' test stands alone; a 'related' test is attached to a
    reference object. """

    _name = 'qc.test'
    _description = 'Quality control test'

    @api.multi
    def _links_get(self):
        """Selection provider for ``object_id``: all res.request.link
        entries (model, label) pairs."""
        link_obj = self.env['res.request.link']
        return [(r.object, r.name) for r in link_obj.search([])]

    @api.onchange('type')
    def onchange_type(self):
        # A generic test cannot keep a reference object.
        if self.type == 'generic':
            self.object_id = False

    active = fields.Boolean('Active', default=True)
    name = fields.Char(string='Name', required=True, translate=True)
    test_lines = fields.One2many(comodel_name='qc.test.question',
                                 inverse_name='test', string='Questions',
                                 copy=True)
    object_id = fields.Reference(
        string='Reference object',
        selection=_links_get,
    )
    fill_correct_values = fields.Boolean(
        string='Pre-fill with correct values')
    type = fields.Selection([('generic', 'Generic'),
                             ('related', 'Related')],
                            string='Type', required=True, default='generic')
    category = fields.Many2one(comodel_name='qc.test.category',
                               string='Category')
    company_id = fields.Many2one(comodel_name='res.company',
                                 string='Company',
                                 default=lambda self: self.env['res.company'].
                                 _company_default_get('qc.test'))
class IrUiMenu(models.Model):
    """Menu items of the web client, organised as a tree via ``parent_id``.

    Visibility is restricted per user through ``groups_id`` and through the
    read access rights of each menu's target ``action`` model; visibility is
    cached per group-set (see :meth:`_visible_menu_ids`).
    """

    _name = 'ir.ui.menu'
    _order = "sequence,id"
    _parent_store = True

    def __init__(self, *args, **kwargs):
        super(IrUiMenu, self).__init__(*args, **kwargs)
        # Menu visibility caches must be invalidated whenever access rights
        # change, so register our cache-clearing hook with ir.model.access.
        self.pool['ir.model.access'].register_cache_clearing_method(
            self._name, 'clear_caches')

    name = fields.Char(string='Menu', required=True, translate=True)
    active = fields.Boolean(default=True)
    sequence = fields.Integer(default=10)
    child_id = fields.One2many('ir.ui.menu', 'parent_id', string='Child IDs')
    parent_id = fields.Many2one('ir.ui.menu', string='Parent Menu',
                                index=True, ondelete="restrict")
    parent_left = fields.Integer(index=True)
    parent_right = fields.Integer(index=True)
    groups_id = fields.Many2many(
        'res.groups', 'ir_ui_menu_group_rel', 'menu_id', 'gid',
        string='Groups',
        help="If you have groups, the visibility of this menu will be based on these groups. "
             "If this field is empty, Flectra will compute visibility based on the related object's read access.")
    complete_name = fields.Char(compute='_compute_complete_name',
                                string='Full Path')
    web_icon = fields.Char(string='Web Icon File')
    action = fields.Reference(selection=[
        ('ir.actions.report', 'ir.actions.report'),
        ('ir.actions.act_window', 'ir.actions.act_window'),
        ('ir.actions.act_url', 'ir.actions.act_url'),
        ('ir.actions.server', 'ir.actions.server'),
        ('ir.actions.client', 'ir.actions.client')])
    web_icon_data = fields.Binary(string='Web Icon Image', attachment=True)

    @api.depends('name', 'parent_id.complete_name')
    def _compute_complete_name(self):
        for menu in self:
            menu.complete_name = menu._get_full_name()

    def _get_full_name(self, level=6):
        """ Return the full name of ``self`` (up to a certain level).

        :param level: recursion budget; deeper ancestors collapse to '...'
        """
        if level <= 0:
            return '...'
        if self.parent_id:
            return self.parent_id._get_full_name(
                level - 1) + MENU_ITEM_SEPARATOR + (self.name or "")
        else:
            return self.name

    def read_image(self, path):
        """Return the base64-encoded icon at ``path`` ("module,relative/path"),
        or False when the path is empty or cannot be resolved."""
        if not path:
            return False
        path_info = path.split(',')
        icon_path = get_module_resource(path_info[0], path_info[1])
        icon_image = False
        if icon_path:
            with tools.file_open(icon_path, 'rb') as icon_file:
                # base64.encodestring() was deprecated since Python 3.1 and
                # removed in 3.9; encodebytes() is the drop-in replacement.
                icon_image = base64.encodebytes(icon_file.read())
        return icon_image

    @api.constrains('parent_id')
    def _check_parent_id(self):
        if not self._check_recursion():
            raise ValidationError(
                _('Error! You cannot create recursive menus.'))

    @api.model
    @tools.ormcache('frozenset(self.env.user.groups_id.ids)', 'debug')
    def _visible_menu_ids(self, debug=False):
        """ Return the ids of the menu items visible to the user. """
        # retrieve all menus, and determine which ones are visible
        context = {'ir.ui.menu.full_list': True}
        menus = self.with_context(context).search([])

        groups = self.env.user.groups_id
        if not debug:
            groups = groups - self.env.ref('base.group_no_one')
        # first discard all menus with groups the user does not have
        menus = menus.filtered(
            lambda menu: not menu.groups_id or menu.groups_id & groups)

        # take apart menus that have an action
        action_menus = menus.filtered(lambda m: m.action and m.action.exists())
        folder_menus = menus - action_menus
        visible = self.browse()

        # process action menus, check whether their action is allowed
        access = self.env['ir.model.access']
        MODEL_GETTER = {
            'ir.actions.act_window': lambda action: action.res_model,
            'ir.actions.report': lambda action: action.model,
            'ir.actions.server': lambda action: action.model_id.model,
        }
        for menu in action_menus:
            get_model = MODEL_GETTER.get(menu.action._name)
            if not get_model or not get_model(menu.action) or \
                    access.check(get_model(menu.action), 'read', False):
                # make menu visible, and its folder ancestors, too
                visible += menu
                menu = menu.parent_id
                while menu and menu in folder_menus and menu not in visible:
                    visible += menu
                    menu = menu.parent_id

        return set(visible.ids)

    @api.multi
    @api.returns('self')
    def _filter_visible_menus(self):
        """ Filter `self` to only keep the menu items that should be visible
        in the menu hierarchy of the current user.
        Uses a cache for speeding up the computation.
        """
        visible_ids = self._visible_menu_ids(
            request.debug if request else False)
        return self.filtered(lambda menu: menu.id in visible_ids)

    @api.model
    def search(self, args, offset=0, limit=None, order=None, count=False):
        # Fetch everything first (offset=0, limit=None) so that visibility
        # filtering happens before offset/limit are applied in Python.
        menus = super(IrUiMenu, self).search(args, offset=0, limit=None,
                                             order=order, count=False)
        if menus:
            # menu filtering is done only on main menu tree,
            # not other menu lists
            if not self._context.get('ir.ui.menu.full_list'):
                menus = menus._filter_visible_menus()
            if offset:
                menus = menus[offset:]
            if limit:
                menus = menus[:limit]
        return len(menus) if count else menus

    @api.multi
    def name_get(self):
        return [(menu.id, menu._get_full_name()) for menu in self]

    @api.model
    def create(self, values):
        self.clear_caches()
        if 'web_icon' in values:
            values['web_icon_data'] = self._compute_web_icon_data(
                values.get('web_icon'))
        return super(IrUiMenu, self).create(values)

    @api.multi
    def write(self, values):
        self.clear_caches()
        if 'web_icon' in values:
            values['web_icon_data'] = self._compute_web_icon_data(
                values.get('web_icon'))
        return super(IrUiMenu, self).write(values)

    def _compute_web_icon_data(self, web_icon):
        """ Returns the image associated to `web_icon`.
        `web_icon` can either be:
          - an image icon [module, path]
          - a built icon [icon_class, icon_color, background_color]
        and it only has to call `read_image` if it's an image.
        """
        if web_icon and len(web_icon.split(',')) == 2:
            return self.read_image(web_icon)

    @api.multi
    def unlink(self):
        # Detach children and promote them to top-level, because it would be
        # unwise to cascade-delete submenus blindly. We also can't use
        # ondelete=set null because that is not supported when _parent_store
        # is used (would silently corrupt it).
        # TODO: ideally we should move them under a generic "Orphans" menu?
        extra = {'ir.ui.menu.full_list': True}
        direct_children = self.with_context(**extra).search(
            [('parent_id', 'in', self.ids)])
        direct_children.write({'parent_id': False})

        self.clear_caches()
        return super(IrUiMenu, self).unlink()

    @api.multi
    def copy(self, default=None):
        # Append "(1)" to the name, or bump an existing "(n)" suffix.
        record = super(IrUiMenu, self).copy(default=default)
        match = NUMBER_PARENS.search(record.name)
        if match:
            next_num = int(match.group(1)) + 1
            record.name = NUMBER_PARENS.sub('(%d)' % next_num, record.name)
        else:
            record.name = record.name + '(1)'
        return record

    @api.model
    @api.returns('self')
    def get_user_roots(self):
        """ Return all root menu ids visible for the user.

        :return: the root menu ids
        :rtype: list(int)
        """
        return self.search([('parent_id', '=', False)])

    @api.model
    @tools.ormcache_context('self._uid', keys=('lang',))
    def load_menus_root(self):
        """Return the root menu node with only the top-level menus as
        children (no recursion into submenus)."""
        fields = ['name', 'sequence', 'parent_id', 'action', 'web_icon_data']
        menu_roots = self.get_user_roots()
        menu_roots_data = menu_roots.read(fields) if menu_roots else []
        menu_root = {
            'id': False,
            'name': 'root',
            'parent_id': [-1, ''],
            'children': menu_roots_data,
            'all_menu_ids': menu_roots.ids,
        }

        menu_roots._set_menuitems_xmlids(menu_root)

        return menu_root

    @api.model
    @tools.ormcache_context('self._uid', 'debug', keys=('lang',))
    def load_menus(self, debug):
        """ Loads all menu items (all applications and their sub-menus).

        :return: the menu root
        :rtype: dict('children': menu_nodes)
        """
        fields = ['name', 'sequence', 'parent_id', 'action', 'web_icon',
                  'web_icon_data']
        menu_roots = self.get_user_roots()
        menu_roots_data = menu_roots.read(fields) if menu_roots else []
        menu_root = {
            'id': False,
            'name': 'root',
            'parent_id': [-1, ''],
            'children': menu_roots_data,
            'all_menu_ids': menu_roots.ids,
        }
        if not menu_roots_data:
            return menu_root

        # menus are loaded fully unlike a regular tree view, cause there are a
        # limited number of items (752 when all 6.1 addons are installed)
        menus = self.search([('id', 'child_of', menu_roots.ids)])
        menu_items = menus.read(fields)

        # add roots at the end of the sequence, so that they will overwrite
        # equivalent menu items from full menu read when put into id:item
        # mapping, resulting in children being correctly set on the roots.
        menu_items.extend(menu_roots_data)
        menu_root['all_menu_ids'] = menus.ids  # includes menu_roots!

        # make a tree using parent_id
        menu_items_map = {
            menu_item["id"]: menu_item for menu_item in menu_items
        }
        for menu_item in menu_items:
            parent = menu_item['parent_id'] and menu_item['parent_id'][0]
            if parent in menu_items_map:
                menu_items_map[parent].setdefault(
                    'children', []).append(menu_item)

        # sort by sequence a tree using parent_id
        for menu_item in menu_items:
            menu_item.setdefault('children', []).sort(
                key=operator.itemgetter('sequence'))

        (menu_roots + menus)._set_menuitems_xmlids(menu_root)

        return menu_root

    def _set_menuitems_xmlids(self, menu_root):
        """Annotate every node of the ``menu_root`` tree with its xmlid
        (empty string when the menu has no external id)."""
        menuitems = self.env['ir.model.data'].sudo().search([
            ('res_id', 'in', self.ids),
            ('model', '=', 'ir.ui.menu')
        ])

        xmlids = {menu.res_id: menu.complete_name for menu in menuitems}

        def _set_xmlids(tree, xmlids):
            tree['xmlid'] = xmlids.get(tree['id'], '')
            if 'children' in tree:
                for child in tree['children']:
                    _set_xmlids(child, xmlids)

        _set_xmlids(menu_root, xmlids)
class File(dms_base.DMSModel):
    """A DMS file. The binary payload is not stored on this model: it lives
    behind the ``reference`` field (e.g. muk_dms.data_database) and is read
    and written through the content compute/inverse pair."""

    _name = 'muk_dms.file'
    _description = "File"
    _inherit = 'muk_dms.access'

    #----------------------------------------------------------
    # Database
    #----------------------------------------------------------

    name = fields.Char(string="Filename", required=True)
    # Settings are inherited from the parent directory (see _compute_settings).
    settings = fields.Many2one('muk_dms.settings', string="Settings",
                               store=True, auto_join=True,
                               ondelete='restrict',
                               compute='_compute_settings')
    # Base64 payload, proxied through the reference record.
    content = fields.Binary(string='Content', required=True,
                            compute='_compute_content',
                            inverse='_inverse_content')
    reference = fields.Reference(selection=[('muk_dms.data', _('Data'))],
                                 string="Data Reference", readonly=True)
    directory = fields.Many2one('muk_dms.directory', string="Directory",
                                ondelete='restrict', auto_join=True,
                                required=True)
    extension = fields.Char(string='Extension',
                            compute='_compute_extension',
                            readonly=True, store=True)
    mimetype = fields.Char(string='Type', compute='_compute_mimetype',
                           readonly=True, store=True)
    # Size in bytes of the decoded content (set by _inverse_content).
    size = fields.Integer(string='Size', readonly=True)
    custom_thumbnail = fields.Binary(string="Custom Thumbnail")
    thumbnail = fields.Binary(compute='_compute_thumbnail',
                              string="Thumbnail")
    path = fields.Char(string="Path", store=True, readonly=True,
                       compute='_compute_path')
    # NOTE(review): label 'Path' duplicates the field above — likely meant
    # something like 'Relational Path'; confirm against the views.
    relational_path = fields.Text(string="Path", store=True, readonly=True,
                                  compute='_compute_relational_path')
    index_content = fields.Text(string='Indexed Content',
                                compute='_compute_index', readonly=True,
                                store=True, prefetch=False)
    locked_by = fields.Reference(string='Locked by',
                                 related='locked.locked_by_ref')

    #----------------------------------------------------------
    # Functions
    #----------------------------------------------------------

    def notify_change(self, values, refresh=False, operation=None):
        """React to settings changes pushed down from directory/settings:
        re-index on 'index_files', migrate storage on 'save_type'."""
        super(File, self).notify_change(values, refresh, operation)
        if "index_files" in values:
            self._compute_index()
        if "save_type" in values:
            self._update_reference_type()

    def trigger_computation_up(self, fields):
        # Propagate recomputation to the parent directory.
        self.directory.trigger_computation(fields)

    def trigger_computation(self, fields, refresh=True, operation=None):
        """Recompute the requested stored fields in one write().

        Each _compute_* helper is called with write=False so it returns a
        values dict instead of writing; all dicts are merged and written once.
        """
        super(File, self).trigger_computation(fields, refresh, operation)
        values = {}
        if "settings" in fields:
            values.update(
                self.with_context(operation=operation)._compute_settings(
                    write=False))
        if "path" in fields:
            values.update(
                self.with_context(operation=operation)._compute_path(
                    write=False))
            values.update(
                self.with_context(
                    operation=operation)._compute_relational_path(
                        write=False))
        if "extension" in fields:
            values.update(
                self.with_context(operation=operation)._compute_extension(
                    write=False))
        if "mimetype" in fields:
            values.update(
                self.with_context(operation=operation)._compute_mimetype(
                    write=False))
        if "index_content" in fields:
            values.update(
                self.with_context(operation=operation)._compute_index(
                    write=False))
        if values:
            self.write(values)
        if "settings" in fields:
            # Storage backend may have changed; let notify_change migrate it.
            self.notify_change({'save_type': self.settings.save_type})

    @api.model
    def max_upload_size(self):
        """Return the configured upload limit in MB (default 25)."""
        config_parameter = self.env['ir.config_parameter'].sudo()
        return config_parameter.get_param('muk_dms.max_upload_size',
                                          default=25)

    #----------------------------------------------------------
    # Read, View
    #----------------------------------------------------------

    def _compute_settings(self, write=True):
        # write=True: assign on records; write=False: return a values dict
        # for a single record (pattern shared by all _compute_* below).
        if write:
            for record in self:
                record.settings = record.directory.settings
        else:
            self.ensure_one()
            return {'settings': self.directory.settings.id}

    def _compute_extension(self, write=True):
        """Extension taken from the filename (includes the leading dot)."""
        if write:
            for record in self:
                record.extension = os.path.splitext(record.name)[1]
        else:
            self.ensure_one()
            return {'extension': os.path.splitext(self.name)[1]}

    def _compute_mimetype(self, write=True):
        """Guess the mimetype from the filename, falling back to sniffing
        the decoded content; defaults to application/octet-stream."""
        def get_mimetype(record):
            mimetype = mimetypes.guess_type(record.name)[0]
            if (not mimetype or mimetype ==
                    'application/octet-stream') and record.content:
                mimetype = guess_mimetype(base64.b64decode(record.content))
            return mimetype or 'application/octet-stream'
        if write:
            for record in self:
                record.mimetype = get_mimetype(record)
        else:
            self.ensure_one()
            return {'mimetype': get_mimetype(self)}

    def _compute_path(self, write=True):
        """Full path = directory path + filename."""
        if write:
            for record in self:
                record.path = "%s%s" % (record.directory.path, record.name)
        else:
            self.ensure_one()
            return {'path': "%s%s" % (self.directory.path, self.name)}

    def _compute_relational_path(self, write=True):
        """JSON breadcrumb: the directory's relational path plus this file."""
        def get_relational_path(record):
            path = json.loads(record.directory.relational_path)
            path.append({
                'model': record._name,
                'id': record.id,
                'name': record.name
            })
            return json.dumps(path)
        if write:
            for record in self:
                record.relational_path = get_relational_path(record)
        else:
            self.ensure_one()
            return {'relational_path': get_relational_path(self)}

    def _compute_index(self, write=True):
        """Extract printable ASCII runs (>= 4 chars) from text files for
        full-text search; None for non-text or non-indexed files."""
        def get_index(record):
            # NOTE: 'type' shadows the builtin; kept as-is here.
            type = record.mimetype.split(
                '/')[0] if record.mimetype else record._compute_mimetype(
                    write=False)['mimetype']
            index_files = record.settings.index_files if record.settings \
                else record.directory.settings.index_files
            if type and type.split(
                    '/')[0] == 'text' and record.content and index_files:
                words = re.findall(
                    b"[\x20-\x7E]{4,}",
                    base64.b64decode(record.content)
                    if record.content else b'')
                return b"\n".join(words).decode('ascii')
            else:
                return None
        if write:
            for record in self:
                record.index_content = get_index(record)
        else:
            self.ensure_one()
            return {'index_content': get_index(self)}

    def _compute_content(self):
        # Content is loaded lazily from the storage reference.
        for record in self:
            record.content = record._get_content()

    @api.depends('custom_thumbnail')
    def _compute_thumbnail(self):
        """Custom thumbnail if set, otherwise a per-extension stock icon
        (file_<ext>.png), finally the generic unknown-file icon."""
        for record in self:
            if record.custom_thumbnail:
                record.thumbnail = record.with_context({}).custom_thumbnail
            else:
                extension = record.extension and record.extension.strip(
                    ".") or ""
                path = os.path.join(_img_path, "file_%s.png" % extension)
                if not os.path.isfile(path):
                    # NOTE: 'file_unkown.png' is the actual asset filename.
                    path = os.path.join(_img_path, "file_unkown.png")
                with open(path, "rb") as image_file:
                    record.thumbnail = base64.b64encode(image_file.read())

    @api.one
    def _compute_perm_create(self):
        """Create permission additionally requires create access on the
        target directory; any AccessError means no permission."""
        try:
            result = super(File, self)._compute_perm_create()
            if self.directory:
                self.perm_create = result and self.directory.check_access(
                    'create')
            else:
                self.perm_create = result
        except AccessError:
            self.perm_create = False

    #----------------------------------------------------------
    # Create, Update, Delete
    #----------------------------------------------------------

    @api.constrains('name')
    def _check_name(self):
        """Reject invalid filenames and duplicates within the directory."""
        if not self.check_name(self.name):
            raise ValidationError("The file name is invalid.")
        childs = self.sudo().directory.files.mapped(
            lambda rec: [rec.id, rec.name])
        duplicates = [
            rec for rec in childs
            if rec[1] == self.name and rec[0] != self.id
        ]
        if duplicates:
            raise ValidationError(
                _("A file with the same name already exists."))

    @api.constrains('name')
    def _check_extension(self):
        """Reject extensions listed in muk_dms.forbidden_extensions."""
        config_parameter = self.env['ir.config_parameter'].sudo()
        forbidden_extensions = config_parameter.get_param(
            'muk_dms.forbidden_extensions', default="")
        forbidden_extensions = [
            x.strip() for x in forbidden_extensions.split(',')
        ]
        file_extension = self._compute_extension(write=False)['extension']
        if file_extension and file_extension in forbidden_extensions:
            raise ValidationError(
                _("The file has a forbidden file extension."))

    @api.constrains('content')
    def _check_size(self):
        """Enforce the configured maximum upload size (MB)."""
        config_parameter = self.env['ir.config_parameter'].sudo()
        max_upload_size = config_parameter.get_param(
            'muk_dms.max_upload_size', default=25)
        try:
            max_upload_size = int(max_upload_size)
        except ValueError:
            max_upload_size = 25
        if max_upload_size * 1024 * 1024 < len(
                base64.b64decode(self.content)):
            raise ValidationError(
                _("The maximum upload size is %s MB).") % max_upload_size)

    def _after_create(self, vals):
        record = super(File, self)._after_create(vals)
        record._check_recomputation(vals)
        return record

    def _after_write_record(self, vals, operation):
        vals = super(File, self)._after_write_record(vals, operation)
        self._check_recomputation(vals, operation)
        return vals

    def _check_recomputation(self, values, operation=None):
        """Map written fields to the dependent computed fields and trigger
        their recomputation (and reference/size upkeep)."""
        fields = []
        if 'name' in values:
            fields.extend(['extension', 'mimetype', 'path'])
        if 'directory' in values:
            fields.extend(['settings', 'path'])
        if 'content' in values:
            fields.extend(['index_content'])
        if fields:
            self.trigger_computation(fields)
        self._check_reference_values(values)
        if 'size' in values:
            # Directory sizes aggregate file sizes; propagate upwards.
            self.trigger_computation_up(['size'])

    def _inverse_content(self):
        """Persist the binary payload in the storage reference, creating,
        updating or removing that reference as needed; keep ``size`` in
        sync with the decoded length."""
        for record in self:
            if record.content:
                content = record.content
                directory = record.directory
                settings = record.settings if record.settings \
                    else directory.settings
                reference = record.reference
                if reference:
                    record._update_reference_content(content)
                else:
                    reference = record._create_reference(
                        settings, directory.path, record.name, content)
                    record.reference = "%s,%s" % (reference._name,
                                                  reference.id)
                record.size = len(base64.b64decode(content))
            else:
                record._unlink_reference()
                record.reference = None

    @api.returns('self', lambda value: value.id)
    def copy(self, default=None):
        """Copy the file with a uniquified name in the target directory;
        the storage reference is never copied, the raw content is."""
        self.ensure_one()
        default = dict(default or [])
        names = []
        if 'directory' in default:
            directory = self.env['muk_dms.directory'].sudo().browse(
                default['directory'])
            names = directory.files.mapped('name')
        else:
            names = self.sudo().directory.files.mapped('name')
        default.update(
            {'name': self.unique_name(self.name, names, self.extension)})
        vals = self.copy_data(default)[0]
        if 'reference' in vals:
            del vals['reference']
        if not 'content' in vals:
            vals.update({'content': self.content})
        new = self.with_context(lang=None).create(vals)
        self.copy_translations(new)
        return new

    def _before_unlink(self, operation):
        # Remember the storage references so they can be purged afterwards.
        info = super(File, self)._before_unlink(operation)
        references = set(record.reference for record in self
                         if record.reference)
        info['references'] = references
        return info

    def _after_unlink(self, result, info, infos, operation):
        # Purge the storage references collected in _before_unlink.
        super(File, self)._after_unlink(result, info, infos, operation)
        if 'references' in info:
            for reference in info['references']:
                reference.sudo().delete()
                reference.sudo().unlink()

    #----------------------------------------------------------
    # Reference
    #----------------------------------------------------------

    def _create_reference(self, settings, path, filename, content):
        """Create the backend record holding the payload; only the
        'database' save type is handled here."""
        self.ensure_one()
        self.check_access('create', raise_exception=True)
        if settings.save_type == 'database':
            return self.env['muk_dms.data_database'].sudo().create(
                {'data': content})
        return None

    def _update_reference_content(self, content):
        self.ensure_one()
        self.check_access('write', raise_exception=True)
        self.reference.sudo().update({'content': content})

    def _update_reference_type(self):
        """Migrate the payload to a new backend when the configured save
        type no longer matches the current reference type."""
        self.ensure_one()
        self.check_access('write', raise_exception=True)
        if self.reference and self.settings.save_type != \
                self.reference.type():
            reference = self._create_reference(self.settings,
                                               self.directory.path,
                                               self.name, self.content)
            self._unlink_reference()
            self.reference = "%s,%s" % (reference._name, reference.id)

    def _check_reference_values(self, values):
        self.ensure_one()
        self.check_access('write', raise_exception=True)
        if 'content' in values:
            self._update_reference_content(values['content'])
        if 'settings' in values:
            self._update_reference_type()

    def _get_content(self):
        # Payload lives on the reference record; None when no reference.
        self.ensure_one()
        self.check_access('read', raise_exception=True)
        return self.reference.sudo().content() if self.reference else None

    def _unlink_reference(self):
        self.ensure_one()
        self.check_access('unlink', raise_exception=True)
        if self.reference:
            self.reference.sudo().delete()
            self.reference.sudo().unlink()
class Recurring(models.Model):
    """Periodically copies a source document via a dedicated ir.cron job.

    Each recurring record owns one cron job that calls
    :meth:`_cron_model_copy`; every run duplicates ``doc_source`` and logs
    the copy in recurring.history.
    """

    _name = "recurring"
    _description = "Recurring"

    @api.model
    def default_get(self, fields):
        """Prefill partner or name from the active record in context."""
        res = super(Recurring, self).default_get(fields)
        active_model = self._context.get('active_model')
        active_id = self._context.get('active_id')
        if active_model and active_id:
            record = self.env[active_model].browse(active_id)
            if 'partner_id' in self.env[active_model]._fields:
                res['partner_id'] = record.partner_id.id
            else:
                res['name'] = record.name
                if not res['name']:
                    res['name'] = record.number
        return res

    @api.onchange('partner_id')
    def _onchange_partner_id(self):
        """Derive a default name "<source>-<partner>" from the active
        record's name (or number) and the chosen partner."""
        active_model = self._context.get('active_model')
        active_id = self._context.get('active_id')
        if self.partner_id and active_model and active_id:
            record = self.env[active_model].browse(active_id)
            name = record.name
            if not name:
                name = record.number
            if name:
                self.name = name + '-' + self.partner_id.name
            else:
                self.name = self.partner_id.name

    @api.constrains('partner_id', 'doc_source')
    def _check_partner_id_doc_source(self):
        """The partner on the recurring must match the partner on the
        source document (when that document has a partner_id field)."""
        for record in self:
            if record.partner_id and record.doc_source and 'partner_id' in \
                    self.env[record.doc_source._name]._fields and \
                    record.doc_source.partner_id != record.partner_id:
                # Interpolate AFTER translation: interpolating inside _()
                # would look up an ever-changing literal and never match
                # any translation.
                raise ValidationError(
                    _('Error! Source Document should be related to '
                      'partner %s') % record.doc_source.partner_id.name)

    name = fields.Char(string='Name')
    active = fields.Boolean(
        help="If the active field is set to False, it will allow you to hide "
             "the recurring without removing it.",
        default=True)
    partner_id = fields.Many2one('res.partner', string='Partner')
    notes = fields.Text(string='Internal Notes')
    user_id = fields.Many2one('res.users', string='User',
                              default=lambda self: self.env.user)
    interval_number = fields.Integer(string='Internal Qty', default=1)
    interval_type = fields.Selection([('minutes', 'Minutes'),
                                      ('hours', 'Hours'),
                                      ('days', 'Days'),
                                      ('weeks', 'Weeks'),
                                      ('months', 'Months')],
                                     string='Interval Unit',
                                     default='months')
    exec_init = fields.Integer(string='Number of Documents')
    date_init = fields.Datetime(string='First Date',
                                default=fields.Datetime.now)
    state = fields.Selection([('draft', 'Draft'), ('running', 'Running'),
                              ('done', 'Done')],
                             string='Status', copy=False, default='draft')
    doc_source = fields.Reference(
        selection=_get_document_types,
        string='Source Document',
        help="User can choose the source document on which he wants to "
             "create documents")
    doc_lines = fields.One2many('recurring.history', 'recurring_id',
                                string='Documents created')
    cron_id = fields.Many2one('ir.cron', string='Cron Job',
                              help="Scheduler which runs on recurring",
                              states={
                                  'running': [('readonly', True)],
                                  'done': [('readonly', True)]
                              })
    note = fields.Text(string='Notes',
                       help="Description or Summary of Recurring")

    @api.model
    def _auto_end(self):
        super(Recurring, self)._auto_end()
        # drop the FK from recurring to ir.cron, as it would cause deadlocks
        # during cron job execution. When model_copy() tries to write() on
        # the recurring, it has to wait for an ExclusiveLock on the cron job
        # record, but the latter is locked by the cron system for the
        # duration of the job!
        # FIXME: the recurring module should be reviewed to simplify the
        # scheduling process and to use a unique cron job for all recurrings,
        # so that it never needs to be updated during its execution.
        self.env.cr.execute(
            "ALTER TABLE %s DROP CONSTRAINT %s" %
            (self._table, '%s_cron_id_fkey' % self._table))

    @api.multi
    def create_recurring_type(self):
        """Ensure a recurring.document entry exists for the active model and
        return it."""
        rec_doc_obj = self.env['recurring.document']
        ir_model_id = self.env['ir.model'].search([
            ('model', '=', self._context.get('active_model', False))
        ])
        rec_doc_id = rec_doc_obj.search([('model', '=', ir_model_id.id)])
        if not rec_doc_id:
            rec_doc_id = rec_doc_obj.create({
                'name': ir_model_id.name,
                'model': ir_model_id.id,
            })
        return rec_doc_id

    @api.multi
    def btn_recurring(self):
        """Attach this recurring to the active record and optionally start
        the scheduling process (context key 'process' == 'start')."""
        self.ensure_one()
        rec_doc_id = self.create_recurring_type()
        if rec_doc_id:
            active_model = self._context.get('active_model')
            active_id = self._context.get('active_id')
            if active_id and active_model:
                record = self.env[active_model].browse(active_id)
                self.doc_source = record._name + "," + str(record.id)
                record.recurring_id = self.id
                record.rec_source_id = record.id
        if self._context.get('process') == 'start':
            self.set_process()

    @api.multi
    def set_process(self):
        """Create the ir.cron job driving this recurring and move it to the
        'running' state."""
        for recurring in self:
            model = 'recurring'
            cron_data = {
                'name': recurring.name,
                'interval_number': recurring.interval_number,
                'interval_type': recurring.interval_type,
                'numbercall': recurring.exec_init,
                'nextcall': recurring.date_init,
                'model_id':
                    self.env['ir.model'].search([('model', '=', model)]).id,
                'priority': 6,
                'user_id': recurring.user_id.id,
                'state': 'code',
                'code': 'model._cron_model_copy(' +
                        repr([recurring.id]) + ')',
            }
            cron = self.env['ir.cron'].sudo().create(cron_data)
            recurring.write({'cron_id': cron.id, 'state': 'running'})

    @api.multi
    def set_recurring_id(self):
        """Back-link the source document to this recurring.

        Both 'recurring_id' AND 'rec_source_id' must exist on the source
        model. The original condition
        ``'recurring_id' and 'rec_source_id' in fields`` only tested
        'rec_source_id' ('recurring_id' is a truthy literal due to operator
        precedence); both memberships are now checked explicitly.
        """
        source_fields = self.env[self.doc_source._name]._fields \
            if self.doc_source else {}
        if self.doc_source and 'recurring_id' in source_fields and \
                'rec_source_id' in source_fields:
            rec_id = self.env[self.doc_source._name].browse(
                self.doc_source.id)
            if not rec_id.recurring_id and not rec_id.rec_source_id:
                rec_id.recurring_id = self.id
                rec_id.rec_source_id = self.doc_source.id
            else:
                raise ValidationError(_('Document is already recurring'))

    @api.model
    def create(self, vals):
        # A source document may only carry one recurring at a time.
        if vals.get('doc_source', False) and self.search(
                [('doc_source', '=', vals['doc_source'])]):
            raise ValidationError(
                _('Recurring of the selected Source Document already exist'))
        res = super(Recurring, self).create(vals)
        res.set_recurring_id()
        return res

    @api.multi
    def write(self, values):
        """On doc_source change, detach the old source document and attach
        the new one."""
        doc_source_id = False
        if values.get('doc_source', False):
            doc_source_id = self.doc_source
        res = super(Recurring, self).write(values)
        if doc_source_id:
            rec_id = self.env[doc_source_id._name].browse(doc_source_id.id)
            rec_id.recurring_id = False
            self.set_recurring_id()
        return res

    @api.multi
    def get_recurring(self, model, active_id):
        """Action opening the recurrings attached to the given record."""
        result = self.env.ref('recurring.action_recurring_form').read()[0]
        record = self.env[model].browse(active_id)
        rec_ids = self.env['recurring'].search([
            ('doc_source', '=', record._name + "," + str(record.id))
        ])
        result['domain'] = [('id', 'in', rec_ids.ids)]
        return result

    @api.multi
    def get_recurring_documents(self, model, action, recurring_id):
        """Action opening the documents generated by ``recurring_id``."""
        result = self.env.ref(action).read()[0]
        res_ids = self.env[model].search([('recurring_id', '=',
                                           recurring_id.id)])
        result['domain'] = [('id', 'in', res_ids.ids)]
        return result

    @api.model
    def _cron_model_copy(self, ids):
        # Entry point called by the generated ir.cron job.
        self.browse(ids).model_copy()

    @api.multi
    def model_copy(self):
        """Duplicate the source document once and log the copy in the
        history; mark the recurring done when the cron has a single call
        left."""
        for recurring in self.filtered(lambda sub: sub.cron_id):
            if not recurring.doc_source.exists():
                raise UserError(
                    _('Please provide another source '
                      'document.\nThis one does not exist!'))
            default = {}
            documents = self.env['recurring.document'].search(
                [('model.model', '=', recurring.doc_source._name)], limit=1)
            # Per-field overrides: 'date' fields get today's date, anything
            # else is blanked on the copy.
            fieldnames = dict(
                (f.field.name,
                 f.value == 'date' and fields.Date.today() or False)
                for f in documents.field_ids)
            default.update(fieldnames)
            # if there was only one remaining document to generate
            # the recurring is over and we mark it as being done
            if recurring.cron_id.numbercall == 1:
                recurring.write({'state': 'done'})
            else:
                recurring.write({'state': 'running'})
            copied_doc = recurring.doc_source.copy(default)
            self.env['recurring.history'].create({
                'recurring_id': recurring.id,
                'date': fields.Datetime.now(),
                'document_id': '%s,%s' % (recurring.doc_source._name,
                                          copied_doc.id)
            })

    @api.multi
    def unlink(self):
        if any(self.filtered(lambda s: s.state == "running")):
            raise UserError(_('You cannot delete an active recurring!'))
        return super(Recurring, self).unlink()

    @api.multi
    def set_done(self):
        # Deactivate the cron so no further copies are produced.
        self.mapped('cron_id').write({'active': False})
        self.write({'state': 'done'})

    @api.multi
    def set_draft(self):
        self.write({'state': 'draft'})
class ThemeView(models.Model): _name = 'theme.ir.ui.view' _description = 'Theme UI View' def compute_arch_fs(self): if 'install_filename' not in self._context: return '' path_info = get_resource_from_path(self._context['install_filename']) if path_info: return '/'.join(path_info[0:2]) name = fields.Char(required=True) key = fields.Char() type = fields.Char() priority = fields.Integer(default=16, required=True) mode = fields.Selection([('primary', "Base view"), ('extension', "Extension View")]) active = fields.Boolean(default=True) arch = fields.Text(translate=xml_translate) arch_fs = fields.Char(default=compute_arch_fs) inherit_id = fields.Reference( selection=[('ir.ui.view', 'ir.ui.view'), ('theme.ir.ui.view', 'theme.ir.ui.view')]) copy_ids = fields.One2many('ir.ui.view', 'theme_template_id', 'Views using a copy of me', copy=False, readonly=True) customize_show = fields.Boolean() def _convert_to_base_model(self, website, **kwargs): self.ensure_one() inherit = self.inherit_id if self.inherit_id and self.inherit_id._name == 'theme.ir.ui.view': inherit = self.inherit_id.with_context( active_test=False).copy_ids.filtered( lambda x: x.website_id == website) if not inherit: # inherit_id not yet created, add to the queue return False if inherit and inherit.website_id != website: website_specific_inherit = self.env['ir.ui.view'].with_context( active_test=False).search([('key', '=', inherit.key), ('website_id', '=', website.id)], limit=1) if website_specific_inherit: inherit = website_specific_inherit new_view = { 'type': self.type or 'qweb', 'name': self.name, 'arch': self.arch, 'key': self.key, 'inherit_id': inherit and inherit.id, 'arch_fs': self.arch_fs, 'priority': self.priority, 'active': self.active, 'theme_template_id': self.id, 'website_id': website.id, 'customize_show': self.customize_show, } if self.mode: # if not provided, it will be computed automatically (if inherit_id or not) new_view['mode'] = self.mode return new_view
class IrServerObjectLines(models.Model):
    """One field-to-value mapping line of a server action."""

    _name = 'ir.server.object.lines'
    _description = 'Server Action value mapping'
    _sequence = 'ir_actions_id_seq'

    server_id = fields.Many2one('ir.actions.server',
                                string='Related Server Action',
                                ondelete='cascade')
    col1 = fields.Many2one('ir.model.fields', string='Field',
                           required=True, ondelete='cascade')
    value = fields.Text(
        required=True,
        help="Expression containing a value specification. \n"
             "When Formula type is selected, this field may be a Python expression "
             " that can use the same values as for the code field on the server action.\n"
             "If Value type is selected, the value will be used directly without evaluation.")
    evaluation_type = fields.Selection([
        ('value', 'Value'),
        ('reference', 'Reference'),
        ('equation', 'Python expression')
    ], 'Evaluation Type', default='value', required=True, change_default=True)
    resource_ref = fields.Reference(
        string='Record', selection='_selection_target_model',
        compute='_compute_resource_ref', inverse='_set_resource_ref')

    @api.model
    def _selection_target_model(self):
        """All known models, as (model, name) pairs for the Reference widget."""
        return [(m.model, m.name) for m in self.env['ir.model'].search([])]

    @api.depends('col1.relation', 'value', 'evaluation_type')
    def _compute_resource_ref(self):
        """Turn the stored textual id into a Reference on the target model.

        Falls back to the first existing record (or id 0) when the stored
        value is not a valid id of an existing record.
        """
        def first_record_id(model_name):
            # First record of the model, or 0 when the table is empty.
            found = list(self.env[model_name]._search([], limit=1))
            return found[0] if found else 0

        for line in self:
            relation = line.col1.relation if line.col1 else False
            if line.evaluation_type not in ('reference', 'value') or not relation:
                line.resource_ref = False
                continue
            raw = line.value or ''
            try:
                rec_id = int(raw)
                if not self.env[relation].browse(rec_id).exists():
                    rec_id = first_record_id(relation)
            except ValueError:
                rec_id = first_record_id(relation)
            line.resource_ref = '%s,%s' % (relation, rec_id)

    @api.onchange('resource_ref')
    def _set_resource_ref(self):
        """Inverse: store the picked record's id back into ``value``."""
        for line in self.filtered(
                lambda line: line.evaluation_type == 'reference'):
            if line.resource_ref:
                line.value = str(line.resource_ref.id)

    def eval_value(self, eval_context=None):
        """Evaluate each line's value; return a dict {line_id: result}.

        :param eval_context: names made available to 'equation' lines.
        """
        values = {}
        for line in self:
            result = line.value
            if line.evaluation_type == 'equation':
                result = safe_eval(line.value, eval_context)
            elif line.col1.ttype in ('many2one', 'integer'):
                try:
                    result = int(line.value)
                except Exception:
                    # Best effort: keep the raw text when it is not an int.
                    pass
            values[line.id] = result
        return values
class MailTemplatePreview(models.TransientModel):
    """Wizard rendering a mail.template against a sample record and language."""

    _name = 'mail.template.preview'
    _description = 'Email Template Preview'

    # Template fields mirrored on this wizard and recomputed for the preview.
    _MAIL_TEMPLATE_FIELDS = [
        'subject', 'body_html', 'email_from', 'email_to', 'email_cc',
        'reply_to', 'scheduled_date', 'attachment_ids'
    ]

    @api.model
    def _selection_target_model(self):
        # All models, so resource_ref can point at any record.
        return [(model.model, model.name)
                for model in self.env['ir.model'].search([])]

    @api.model
    def _selection_languages(self):
        return self.env['res.lang'].get_installed()

    @api.model
    def default_get(self, fields):
        """Preselect the first record of the template's model as resource_ref."""
        result = super(MailTemplatePreview, self).default_get(fields)
        if not result.get('mail_template_id') or 'resource_ref' not in fields:
            return result
        mail_template = self.env['mail.template'].browse(
            result['mail_template_id'])
        res = self.env[mail_template.model_id.model].search([], limit=1)
        if res:
            result['resource_ref'] = '%s,%s' % (mail_template.model_id.model,
                                                res.id)
        return result

    mail_template_id = fields.Many2one('mail.template',
                                       string='Related Mail Template',
                                       required=True)
    model_id = fields.Many2one('ir.model', string='Targeted model',
                               related="mail_template_id.model_id")
    resource_ref = fields.Reference(string='Record',
                                    selection='_selection_target_model')
    lang = fields.Selection(_selection_languages,
                            string='Template Preview Language')
    no_record = fields.Boolean('No Record', compute='_compute_no_record')
    error_msg = fields.Char('Error Message', readonly=True)
    # Fields same than the mail.template model, computed with resource_ref and lang
    subject = fields.Char('Subject', compute='_compute_mail_template_fields')
    email_from = fields.Char('From', compute='_compute_mail_template_fields',
                             help="Sender address")
    email_to = fields.Char('To', compute='_compute_mail_template_fields',
                           help="Comma-separated recipient addresses")
    email_cc = fields.Char('Cc', compute='_compute_mail_template_fields',
                           help="Carbon copy recipients")
    reply_to = fields.Char('Reply-To',
                           compute='_compute_mail_template_fields',
                           help="Preferred response address")
    scheduled_date = fields.Char(
        'Scheduled Date', compute='_compute_mail_template_fields',
        help="The queue manager will send the email after the date")
    body_html = fields.Html('Body', compute='_compute_mail_template_fields',
                            sanitize=False)
    # NOTE(review): second positional arg of Many2many is the relation table
    # name; passing 'Attachments' here looks like a misuse — confirm upstream.
    attachment_ids = fields.Many2many('ir.attachment', 'Attachments',
                                      compute='_compute_mail_template_fields')
    # Extra fields info generated by generate_email
    partner_ids = fields.Many2many('res.partner', string='Recipients',
                                   compute='_compute_mail_template_fields')

    @api.depends('model_id')
    def _compute_no_record(self):
        # True when the targeted model has no record at all to preview with.
        for preview in self:
            preview.no_record = (self.env[preview.model_id.model].search_count(
                []) == 0) if preview.model_id else True

    @api.depends('lang', 'resource_ref')
    def _compute_mail_template_fields(self):
        """ Preview the mail template (body, subject, ...) depending of the
        language and the record reference, more precisely the record id for
        the defined model of the mail template.

        If no record id is selectable/set, the jinja placeholders won't be
        replace in the display information. """
        copy_depends_values = {'lang': self.lang}
        mail_template = self.mail_template_id.with_context(lang=self.lang)
        try:
            if not self.resource_ref:
                # No record selected: show the raw template values.
                self._set_mail_attributes()
            else:
                copy_depends_values['resource_ref'] = '%s,%s' % (
                    self.resource_ref._name, self.resource_ref.id)
                mail_values = mail_template.with_context(
                    template_preview_lang=self.lang).generate_email(
                        self.resource_ref.id, self._MAIL_TEMPLATE_FIELDS)
                self._set_mail_attributes(values=mail_values)
            self.error_msg = False
        except UserError as user_error:
            # Rendering failed: fall back to raw values and surface the error.
            self._set_mail_attributes()
            self.error_msg = user_error.args[0]
        finally:
            # Restore the depends fields: generate_email may trigger an
            # invalidate_cache (e.g. Quotation / Order report) that would
            # otherwise wipe them.
            for key, value in copy_depends_values.items():
                self[key] = value

    def _set_mail_attributes(self, values=None):
        """Copy template fields onto the wizard, from *values* or the raw template."""
        for field in self._MAIL_TEMPLATE_FIELDS:
            field_value = values.get(
                field, False) if values else self.mail_template_id[field]
            self[field] = field_value
        self.partner_ids = values.get('partner_ids', False) if values else False
class Rating(models.Model):
    """A 0-5 customer rating attached to any record (and optionally a parent)."""

    _name = "rating.rating"
    _description = "Rating"
    _order = 'write_date desc'
    _rec_name = 'res_name'

    _sql_constraints = [
        ('rating_range', 'check(rating >= 0 and rating <= 5)',
         'Rating should be between 0 and 5'),
    ]

    @api.depends('res_model', 'res_id')
    def _compute_res_name(self):
        """Display name of the rated record, or "model/id" as a fallback."""
        for rating in self:
            name = self.env[rating.res_model].sudo().browse(
                rating.res_id).name_get()
            rating.res_name = name and name[0][1] or ('%s/%s') % (
                rating.res_model, rating.res_id)

    @api.model
    def _default_access_token(self):
        return uuid.uuid4().hex

    @api.model
    def _selection_target_model(self):
        # Any model can be rated, so the Reference selection lists them all.
        return [(model.model, model.name)
                for model in self.env['ir.model'].search([])]

    create_date = fields.Datetime(string="Submitted on")
    res_name = fields.Char(string='Resource name',
                           compute='_compute_res_name', store=True,
                           help="The name of the rated resource.")
    res_model_id = fields.Many2one('ir.model', 'Related Document Model',
                                   index=True, ondelete='cascade',
                                   help='Model of the followed resource')
    res_model = fields.Char(string='Document Model',
                            related='res_model_id.model', store=True,
                            index=True, readonly=True)
    res_id = fields.Integer(string='Document', required=True,
                            help="Identifier of the rated object", index=True)
    resource_ref = fields.Reference(string='Resource Ref',
                                    selection='_selection_target_model',
                                    compute='_compute_resource_ref',
                                    readonly=True)
    parent_res_name = fields.Char('Parent Document Name',
                                  compute='_compute_parent_res_name',
                                  store=True)
    parent_res_model_id = fields.Many2one('ir.model',
                                          'Parent Related Document Model',
                                          index=True, ondelete='cascade')
    parent_res_model = fields.Char('Parent Document Model', store=True,
                                   related='parent_res_model_id.model',
                                   index=True, readonly=False)
    parent_res_id = fields.Integer('Parent Document', index=True)
    parent_ref = fields.Reference(string='Parent Ref',
                                  selection='_selection_target_model',
                                  compute='_compute_parent_ref', readonly=True)
    rated_partner_id = fields.Many2one('res.partner', string="Rated Operator",
                                       help="Owner of the rated resource")
    partner_id = fields.Many2one('res.partner', string='Customer',
                                 help="Author of the rating")
    rating = fields.Float(string="Rating Value", group_operator="avg",
                          default=0,
                          help="Rating value: 0=Unhappy, 5=Happy")
    rating_image = fields.Binary('Image', compute='_compute_rating_image')
    rating_text = fields.Selection(
        [('satisfied', 'Satisfied'), ('not_satisfied', 'Not satisfied'),
         ('highly_dissatisfied', 'Highly dissatisfied'),
         ('no_rating', 'No Rating yet')],
        string='Rating', store=True, compute='_compute_rating_text',
        readonly=True)
    feedback = fields.Text('Comment', help="Reason of the rating")
    message_id = fields.Many2one(
        'mail.message', string="Message", index=True, ondelete='cascade',
        help=
        "Associated message when posting a review. Mainly used in website addons."
    )
    is_internal = fields.Boolean('Visible Internally Only', readonly=False,
                                 related='message_id.is_internal', store=True)
    access_token = fields.Char(
        'Security Token', default=_default_access_token,
        help="Access token to set the rating of the value")
    consumed = fields.Boolean(string="Filled Rating",
                              help="Enabled if the rating has been filled.")

    @api.depends('res_model', 'res_id')
    def _compute_resource_ref(self):
        for rating in self:
            if rating.res_model and rating.res_model in self.env:
                rating.resource_ref = '%s,%s' % (rating.res_model,
                                                 rating.res_id or 0)
            else:
                rating.resource_ref = None

    @api.depends('parent_res_model', 'parent_res_id')
    def _compute_parent_ref(self):
        for rating in self:
            if rating.parent_res_model and rating.parent_res_model in self.env:
                rating.parent_ref = '%s,%s' % (rating.parent_res_model,
                                               rating.parent_res_id or 0)
            else:
                rating.parent_ref = None

    @api.depends('parent_res_model', 'parent_res_id')
    def _compute_parent_res_name(self):
        for rating in self:
            name = False
            if rating.parent_res_model and rating.parent_res_id:
                name = self.env[rating.parent_res_model].sudo().browse(
                    rating.parent_res_id).name_get()
                name = name and name[0][1] or ('%s/%s') % (
                    rating.parent_res_model, rating.parent_res_id)
            rating.parent_res_name = name

    def _get_rating_image_filename(self):
        """Map the float rating to one of the 4 bundled smiley images."""
        self.ensure_one()
        if self.rating >= RATING_LIMIT_SATISFIED:
            rating_int = 5
        elif self.rating >= RATING_LIMIT_OK:
            rating_int = 3
        elif self.rating >= RATING_LIMIT_MIN:
            rating_int = 1
        else:
            rating_int = 0
        return 'rating_%s.png' % rating_int

    @api.depends('rating')
    def _compute_rating_image(self):
        """Load the smiley image matching the rating, base64-encoded."""
        for rating in self:
            try:
                image_path = get_resource_path(
                    'rating', 'static/src/img',
                    rating._get_rating_image_filename())
                if image_path:
                    # Use a context manager so the file handle is always
                    # closed (the previous open(...).read() leaked it).
                    with open(image_path, 'rb') as image_file:
                        rating.rating_image = base64.b64encode(
                            image_file.read())
                else:
                    rating.rating_image = False
            except (IOError, OSError):
                rating.rating_image = False

    @api.depends('rating')
    def _compute_rating_text(self):
        for rating in self:
            if rating.rating >= RATING_LIMIT_SATISFIED:
                rating.rating_text = 'satisfied'
            elif rating.rating >= RATING_LIMIT_OK:
                rating.rating_text = 'not_satisfied'
            elif rating.rating >= RATING_LIMIT_MIN:
                rating.rating_text = 'highly_dissatisfied'
            else:
                rating.rating_text = 'no_rating'

    @api.model
    def create(self, values):
        if values.get('res_model_id') and values.get('res_id'):
            values.update(self._find_parent_data(values))
        return super(Rating, self).create(values)

    def write(self, values):
        if values.get('res_model_id') and values.get('res_id'):
            values.update(self._find_parent_data(values))
        return super(Rating, self).write(values)

    def unlink(self):
        # OPW-2181568: Delete the chatter message too
        self.env['mail.message'].search([('rating_ids', 'in', self.ids)
                                         ]).unlink()
        return super(Rating, self).unlink()

    def _find_parent_data(self, values):
        """ Determine the parent res_model/res_id, based on the values to
        create or write """
        current_model_name = self.env['ir.model'].sudo().browse(
            values['res_model_id']).model
        current_record = self.env[current_model_name].browse(values['res_id'])
        data = {
            'parent_res_model_id': False,
            'parent_res_id': False,
        }
        if hasattr(current_record, '_rating_get_parent_field_name'):
            current_record_parent = current_record._rating_get_parent_field_name(
            )
            if current_record_parent:
                parent_res_model = getattr(current_record,
                                           current_record_parent)
                data['parent_res_model_id'] = self.env['ir.model']._get(
                    parent_res_model._name).id
                data['parent_res_id'] = parent_res_model.id
        return data

    def reset(self):
        """Blank the rating so the same token can be used again."""
        for record in self:
            record.write({
                'rating': 0,
                'access_token': record._default_access_token(),
                'feedback': False,
                'consumed': False,
            })

    def action_open_rated_object(self):
        """Open the form view of the rated record."""
        self.ensure_one()
        return {
            'type': 'ir.actions.act_window',
            'res_model': self.res_model,
            'res_id': self.res_id,
            'views': [[False, 'form']]
        }
class QcInspection(models.Model):
    """A quality-control inspection: a test applied to a referenced object."""

    _name = 'qc.inspection'
    _description = 'Quality control inspection'
    _inherit = ['mail.thread', 'mail.activity.mixin']

    @api.depends('inspection_lines', 'inspection_lines.success')
    def _compute_success(self):
        # The inspection succeeds only when every line succeeded
        # (vacuously True when there are no lines).
        for i in self:
            i.success = all([x.success for x in i.inspection_lines])

    @api.multi
    def _links_get(self):
        # Selection for the object_id Reference field, driven by
        # res.request.link records.
        link_obj = self.env['res.request.link']
        return [(r.object, r.name) for r in link_obj.search([])]

    @api.depends('object_id')
    def _compute_product_id(self):
        # Mirror object_id into product_id when the reference is a product.
        for i in self:
            if i.object_id and i.object_id._name == 'product.product':
                i.product_id = i.object_id
            else:
                i.product_id = False

    name = fields.Char(
        string='Inspection number', required=True, default='/',
        readonly=True, states={'draft': [('readonly', False)]}, copy=False)
    date = fields.Datetime(
        string='Date', required=True, readonly=True, copy=False,
        default=fields.Datetime.now,
        states={'draft': [('readonly', False)]})
    object_id = fields.Reference(
        string='Reference', selection=_links_get, readonly=True,
        states={'draft': [('readonly', False)]}, ondelete="set null")
    product_id = fields.Many2one(
        comodel_name="product.product", compute="_compute_product_id",
        store=True, help="Product associated with the inspection",
        oldname='product')
    qty = fields.Float(string="Quantity", default=1.0)
    test = fields.Many2one(
        comodel_name='qc.test', string='Test', readonly=True)
    inspection_lines = fields.One2many(
        comodel_name='qc.inspection.line', inverse_name='inspection_id',
        string='Inspection lines', readonly=True,
        states={'ready': [('readonly', False)]})
    internal_notes = fields.Text(string='Internal notes')
    external_notes = fields.Text(
        string='External notes',
        states={'success': [('readonly', True)],
                'failed': [('readonly', True)]})
    # Workflow: draft -> ready -> (waiting ->) success/failed, or canceled.
    state = fields.Selection(
        [('draft', 'Draft'),
         ('ready', 'Ready'),
         ('waiting', 'Waiting supervisor approval'),
         ('success', 'Quality success'),
         ('failed', 'Quality failed'),
         ('canceled', 'Canceled')],
        string='State', readonly=True, default='draft',
        track_visibility='onchange')
    success = fields.Boolean(
        compute="_compute_success", string='Success',
        help='This field will be marked if all tests have succeeded.',
        store=True)
    auto_generated = fields.Boolean(
        string='Auto-generated', readonly=True, copy=False,
        help='If an inspection is auto-generated, it can be canceled but not '
             'removed.')
    company_id = fields.Many2one(
        comodel_name='res.company', string='Company', readonly=True,
        states={'draft': [('readonly', False)]},
        default=lambda self: self.env['res.company']._company_default_get(
            'qc.inspection'))
    user = fields.Many2one(
        comodel_name='res.users', string='Responsible',
        track_visibility='always', default=lambda self: self.env.user)

    @api.model
    def create(self, vals):
        # Assign the next sequence number when the default '/' is kept.
        if vals.get('name', '/') == '/':
            vals['name'] = self.env['ir.sequence'] \
                .next_by_code('qc.inspection')
        return super(QcInspection, self).create(vals)

    @api.multi
    def unlink(self):
        """Only manual, draft inspections may be deleted."""
        for inspection in self:
            if inspection.auto_generated:
                raise exceptions.UserError(
                    _("You cannot remove an auto-generated inspection."))
            if inspection.state != 'draft':
                raise exceptions.UserError(
                    _("You cannot remove an inspection that is not in draft "
                      "state."))
        return super(QcInspection, self).unlink()

    @api.multi
    def action_draft(self):
        self.write({'state': 'draft'})

    @api.multi
    def action_todo(self):
        for inspection in self:
            if not inspection.test:
                raise exceptions.UserError(
                    _("You must first set the test to perform."))
        self.write({'state': 'ready'})

    @api.multi
    def action_confirm(self):
        """Validate answers, then move to success or to supervisor approval."""
        for inspection in self:
            for line in inspection.inspection_lines:
                if line.question_type == 'qualitative':
                    if not line.qualitative_value:
                        raise exceptions.UserError(
                            _("You should provide an answer for all "
                              "qualitative questions."))
                else:
                    if not line.uom_id:
                        raise exceptions.UserError(
                            _("You should provide a unit of measure for "
                              "quantitative questions."))
            if inspection.success:
                inspection.state = 'success'
            else:
                # Failed inspections need a supervisor decision.
                inspection.state = 'waiting'

    @api.multi
    def action_approve(self):
        # Supervisor decision: final state follows the computed success flag.
        for inspection in self:
            if inspection.success:
                inspection.state = 'success'
            else:
                inspection.state = 'failed'

    @api.multi
    def action_cancel(self):
        self.write({'state': 'canceled'})

    @api.multi
    def set_test(self, trigger_line, force_fill=False):
        """(Re)load the inspection lines from the trigger line's test."""
        for inspection in self:
            header = self._prepare_inspection_header(
                inspection.object_id, trigger_line)
            del header['state']  # don't change current status
            del header['auto_generated']  # don't change auto_generated flag
            del header['user']  # don't change current user
            inspection.write(header)
            inspection.inspection_lines.unlink()
            inspection.inspection_lines = inspection._prepare_inspection_lines(
                trigger_line.test, force_fill=force_fill)

    @api.multi
    def _make_inspection(self, object_ref, trigger_line):
        """Overridable hook method for creating inspection from test.

        :param object_ref: Object instance
        :param trigger_line: Trigger line instance
        :return: Inspection object
        """
        inspection = self.create(self._prepare_inspection_header(
            object_ref, trigger_line))
        inspection.set_test(trigger_line)
        return inspection

    @api.multi
    def _prepare_inspection_header(self, object_ref, trigger_line):
        """Overridable hook method for preparing inspection header.

        :param object_ref: Object instance
        :param trigger_line: Trigger line instance
        :return: Dict of values for creating the inspection
        """
        return {
            'object_id': object_ref and '%s,%s' % (
                object_ref._name, object_ref.id) or False,
            'state': 'ready',
            'test': trigger_line.test.id,
            'user': trigger_line.user.id,
            'auto_generated': True,
        }

    @api.multi
    def _prepare_inspection_lines(self, test, force_fill=False):
        """Build One2many create commands for every line of *test*."""
        new_data = []
        for line in test.test_lines:
            data = self._prepare_inspection_line(
                test, line, fill=test.fill_correct_values or force_fill)
            new_data.append((0, 0, data))
        return new_data

    @api.multi
    def _prepare_inspection_line(self, test, line, fill=None):
        """Values for one inspection line; optionally pre-fill a passing answer."""
        data = {
            'name': line.name,
            'test_line': line.id,
            'notes': line.notes,
            'min_value': line.min_value,
            'max_value': line.max_value,
            'test_uom_id': line.uom_id.id,
            'uom_id': line.uom_id.id,
            'question_type': line.type,
            'possible_ql_values': [x.id for x in line.ql_values]
        }
        if fill:
            if line.type == 'qualitative':
                # Fill with the first correct value found
                for value in line.ql_values:
                    if value.ok:
                        data['qualitative_value'] = value.id
                        break
            else:
                # Fill with a value inside the interval
                data['quantitative_value'] = (line.min_value +
                                              line.max_value) * 0.5
        return data
class File(models.Model):
    """A DMS file: content stored in the database, on disk or as attachment."""

    _name = "dms.file"
    _description = "File"
    _inherit = [
        "portal.mixin",
        "dms.security.mixin",
        "dms.mixins.thumbnail",
        "mail.thread",
        "mail.activity.mixin",
    ]
    _order = "name asc"

    # ----------------------------------------------------------
    # Database
    # ----------------------------------------------------------

    name = fields.Char(string="Filename", required=True, index=True)
    active = fields.Boolean(
        string="Archived",
        default=True,
        help=
        "If a file is set to archived, it is not displayed, but still exists.",
    )
    directory_id = fields.Many2one(
        comodel_name="dms.directory",
        string="Directory",
        domain="[('permission_create', '=', True)]",
        context="{'dms_directory_show_path': True}",
        ondelete="restrict",
        auto_join=True,
        required=True,
        index=True,
    )
    storage_id = fields.Many2one(
        related="directory_id.storage_id",
        comodel_name="dms.storage",
        string="Storage",
        auto_join=True,
        readonly=True,
        store=True,
    )
    is_hidden = fields.Boolean(string="Storage is Hidden",
                               related="storage_id.is_hidden",
                               readonly=True)
    company_id = fields.Many2one(
        related="storage_id.company_id",
        comodel_name="res.company",
        string="Company",
        readonly=True,
        store=True,
        index=True,
    )
    path_names = fields.Char(compute="_compute_path",
                             string="Path Names",
                             readonly=True,
                             store=False)
    path_json = fields.Text(compute="_compute_path",
                            string="Path Json",
                            readonly=True,
                            store=False)
    color = fields.Integer(string="Color", default=0)
    category_id = fields.Many2one(
        comodel_name="dms.category",
        context="{'dms_category_show_path': True}",
        string="Category",
    )
    tag_ids = fields.Many2many(
        comodel_name="dms.tag",
        relation="dms_file_tag_rel",
        column1="fid",
        column2="tid",
        string="Tags",
    )
    # Facade over the actual storage backend (binary column, file or
    # attachment) — see _compute_content / _inverse_content.
    content = fields.Binary(
        compute="_compute_content",
        inverse="_inverse_content",
        string="Content",
        attachment=False,
        prefetch=False,
        required=True,
        store=False,
    )
    extension = fields.Char(compute="_compute_extension",
                            string="Extension",
                            readonly=True,
                            store=True)
    res_mimetype = fields.Char(compute="_compute_mimetype",
                               string="Type",
                               readonly=True,
                               store=True)
    size = fields.Integer(string="Size", readonly=True)
    checksum = fields.Char(string="Checksum/SHA1",
                           readonly=True,
                           size=40,
                           index=True)
    content_binary = fields.Binary(string="Content Binary",
                                   attachment=False,
                                   prefetch=False,
                                   invisible=True)
    save_type = fields.Char(
        compute="_compute_save_type",
        string="Current Save Type",
        invisible=True,
        prefetch=False,
    )
    migration = fields.Char(
        compute="_compute_migration",
        string="Migration Status",
        readonly=True,
        prefetch=False,
        compute_sudo=True,
    )
    require_migration = fields.Boolean(
        compute="_compute_migration",
        store=True,
        compute_sudo=True,
    )
    content_file = fields.Binary(attachment=True,
                                 string="Content File",
                                 prefetch=False,
                                 invisible=True)

    def check_access_token(self, access_token=False):
        """Return True when *access_token* grants access to this file,
        either directly or through one of its (ancestor) directories."""
        res = False
        if access_token:
            if self.access_token and self.access_token == access_token:
                return True
            else:
                items = (self.env["dms.directory"].sudo().search([
                    ("access_token", "=", access_token)
                ]))
                if items:
                    item = items[0]
                    if self.directory_id.id == item.id:
                        return True
                    else:
                        # NOTE(review): this loop starts from
                        # self.directory_id and compares against
                        # self.directory_id — it succeeds as soon as the
                        # directory has any parent. It looks like it should
                        # walk up from `item` instead; verify upstream.
                        directory_item = self.directory_id
                        while directory_item.parent_id:
                            if directory_item.id == self.directory_id.id:
                                return True
                            directory_item = directory_item.parent_id
                        # Fix last level
                        if directory_item.id == self.directory_id.id:
                            return True
        return res

    attachment_id = fields.Many2one(
        comodel_name="ir.attachment",
        string="Attachment File",
        prefetch=False,
        invisible=True,
        ondelete="cascade",
    )
    res_model = fields.Char(string="Linked attachments model")
    res_id = fields.Integer(string="Linked attachments record ID")
    record_ref = fields.Reference(
        string="Record Referenced",
        compute="_compute_record_ref",
        selection=[],
        readonly=True,
        store=False,
    )
    storage_id_save_type = fields.Selection(related="storage_id.save_type",
                                            store=False)

    @api.depends("res_model", "res_id")
    def _compute_record_ref(self):
        # Reference to the record this file's attachment is linked to.
        for record in self:
            record.record_ref = False
            if record.res_model and record.res_id:
                record.record_ref = "{},{}".format(record.res_model,
                                                   record.res_id)

    def get_human_size(self):
        return human_size(self.size)

    def _get_share_url(self, redirect=False, signup_partner=False, pid=None):
        # Portal download URL, secured by the portal access token.
        self.ensure_one()
        return "/my/dms/file/{}/download?access_token={}&db={}".format(
            self.id,
            self._portal_ensure_token(),
            self.env.cr.dbname,
        )

    # ----------------------------------------------------------
    # Helper
    # ----------------------------------------------------------

    @api.model
    def _get_checksum(self, binary):
        # SHA1 hex digest of the raw content (empty bytes when falsy).
        return hashlib.sha1(binary or b"").hexdigest()

    @api.model
    def _get_content_inital_vals(self):
        # Reset both storage columns before writing the new content.
        return {"content_binary": False, "content_file": False}

    def _update_content_vals(self, vals, binary):
        """Extend *vals* with checksum/size and route the content to the
        column matching the storage's save_type."""
        new_vals = vals.copy()
        new_vals.update({
            "checksum": self._get_checksum(binary),
            "size": binary and len(binary) or 0,
        })
        if self.storage_id.save_type in ["file", "attachment"]:
            new_vals["content_file"] = self.content
        else:
            new_vals["content_binary"] = self.content and binary
        return new_vals

    @api.model
    def _get_binary_max_size(self):
        # Upload size limit in MB (config parameter, default 25).
        return int(self.env["ir.config_parameter"].sudo().get_param(
            "dms.binary_max_size", default=25))

    @api.model
    def _get_forbidden_extensions(self):
        # Comma-separated blacklist from config parameters.
        get_param = self.env["ir.config_parameter"].sudo().get_param
        extensions = get_param("dms.forbidden_extensions", default="")
        return [extension.strip() for extension in extensions.split(",")]

    def _get_thumbnail_placeholder_name(self):
        return self.extension and "file_%s.svg" % self.extension or ""

    # ----------------------------------------------------------
    # Actions
    # ----------------------------------------------------------

    def action_migrate(self, logging=True):
        """Rewrite each file's content so it lands in the storage backend
        currently configured on its storage."""
        record_count = len(self)
        index = 1
        for dms_file in self:
            if logging:
                info = (index, record_count, dms_file.migration)
                _logger.info(_("Migrate File %s of %s [ %s ]") % info)
                index += 1
            # Re-writing content triggers _inverse_content with the new
            # save_type; empty context avoids bin_size side effects.
            dms_file.write({"content": dms_file.with_context({}).content})

    def action_save_onboarding_file_step(self):
        self.env.user.company_id.set_onboarding_step_done(
            "documents_onboarding_file_state")

    # ----------------------------------------------------------
    # SearchPanel
    # ----------------------------------------------------------

    @api.model
    def _search_panel_directory(self, **kwargs):
        """Extract the (operator, directory_id) filter from search panel
        kwargs, or (None, None) when no directory filter applies."""
        search_domain = (kwargs.get("search_domain", []), )
        category_domain = kwargs.get("category_domain", [])
        if category_domain and len(category_domain):
            return "=", category_domain[0][2]
        if search_domain and len(search_domain):
            for domain in search_domain[0]:
                if domain[0] == "directory_id":
                    return domain[1], domain[2]
        return None, None

    @api.model
    def _search_panel_domain(self, field, operator, directory_id,
                             comodel_domain=False):
        # Restrict a comodel domain to values used by files of the directory.
        if not comodel_domain:
            comodel_domain = []
        files_ids = self.search([("directory_id", operator,
                                  directory_id)]).ids
        return expression.AND([comodel_domain, [(field, "in", files_ids)]])

    @api.model
    def search_panel_select_range(self, field_name, **kwargs):
        operator, directory_id = self._search_panel_directory(**kwargs)
        if directory_id and field_name == "directory_id":
            domain = [("parent_id", operator, directory_id)]
            values = (self.env["dms.directory"].with_context(
                directory_short_name=True).search_read(
                    domain, ["display_name", "parent_id"]))
            return {
                "parent_field": "parent_id",
                "values": values if len(values) > 1 else [],
            }
        context = {}
        if field_name == "directory_id":
            context["directory_short_name"] = True
        return super(File,
                     self.with_context(**context)).search_panel_select_range(
                         field_name, **kwargs)

    @api.model
    def search_panel_select_multi_range(self, field_name, **kwargs):
        operator, directory_id = self._search_panel_directory(**kwargs)
        if field_name == "tag_ids":
            # Tags are grouped by category and counted per file relation.
            sql_query = """
                SELECT t.name AS name, t.id AS id, c.name AS group_name,
                    c.id AS group_id, COUNT(r.fid) AS count
                FROM dms_tag t
                JOIN dms_category c ON t.category_id = c.id
                LEFT JOIN dms_file_tag_rel r ON t.id = r.tid
                {directory_where_clause}
                GROUP BY c.name, c.id, t.name, t.id
                ORDER BY c.name, c.id, t.name, t.id;
            """
            where_clause = ""
            params = []
            if directory_id:
                file_ids = self.search([("directory_id", operator,
                                         directory_id)]).ids
                if file_ids:
                    where_clause = "WHERE r.fid in %s"
                    params.append(tuple(file_ids))
                else:
                    where_clause = "WHERE 1 = 0"
            # pylint: disable=sql-injection
            final_query = sql_query.format(
                directory_where_clause=where_clause)
            self.env.cr.execute(final_query, params)
            return self.env.cr.dictfetchall()
        if directory_id and field_name in ["directory_id", "category_id"]:
            comodel_domain = kwargs.pop("comodel_domain", [])
            directory_comodel_domain = self._search_panel_domain(
                "file_ids", operator, directory_id, comodel_domain)
            return super(File, self.with_context(
                directory_short_name=True)).search_panel_select_multi_range(
                    field_name,
                    comodel_domain=directory_comodel_domain,
                    **kwargs)
        return super(File, self.with_context(
            directory_short_name=True)).search_panel_select_multi_range(
                field_name, **kwargs)

    # ----------------------------------------------------------
    # Read
    # ----------------------------------------------------------

    @api.depends("name", "directory_id", "directory_id.parent_path")
    def _compute_path(self):
        """Build the human path ("a/b/file") and its JSON breadcrumb from
        the directory's materialized parent_path."""
        model = self.env["dms.directory"]
        data = {}  # cache of browsed directories, shared across records
        for record in self:
            path_names = []
            path_json = []
            if record.directory_id.parent_path:
                for directory_id in reversed(
                        list(
                            map(
                                int,
                                record.directory_id.parent_path.split("/")
                                [:-1]))):
                    if not directory_id:
                        break
                    if directory_id not in data:
                        data[directory_id] = model.browse(directory_id)
                    path_names.append(data[directory_id].name)
                    path_json.append({
                        "model": model._name,
                        "name": data[directory_id].name,
                        "id": directory_id,
                    })
            path_names.reverse()
            path_json.reverse()
            name = record.name_get()
            path_names.append(name[0][1])
            path_json.append({
                "model": record._name,
                "name": name[0][1],
                # NewId records get 0 as placeholder id.
                "id": isinstance(record.id, int) and record.id or 0,
            })
            record.update({
                "path_names": "/".join(path_names),
                "path_json": json.dumps(path_json),
            })

    @api.depends("name")
    def _compute_extension(self):
        for record in self:
            record.extension = file.guess_extension(record.name)

    @api.depends("name", "content")
    def _compute_mimetype(self):
        # Sniff the mimetype from the decoded content bytes.
        for record in self:
            binary = base64.b64decode(record.with_context({}).content or "")
            record.res_mimetype = guess_mimetype(
                binary, default="application/octet-stream")

    @api.depends("content_binary", "content_file", "attachment_id")
    def _compute_content(self):
        """Serve the content from whichever backend holds it; honours the
        bin_size context (size string instead of payload)."""
        bin_size = self.env.context.get("bin_size", False)
        for record in self:
            if record.content_file:
                context = {
                    "human_size": True
                } if bin_size else {
                    "base64": True
                }
                record.content = record.with_context(context).content_file
            elif record.content_binary:
                record.content = (record.content_binary if bin_size else
                                  base64.b64encode(record.content_binary))
            elif record.attachment_id:
                context = {
                    "human_size": True
                } if bin_size else {
                    "base64": True
                }
                record.content = record.with_context(
                    context).attachment_id.datas

    @api.depends("content_binary", "content_file")
    def _compute_save_type(self):
        # Where this particular record's bytes currently live.
        for record in self:
            if record.content_file:
                record.save_type = "file"
            else:
                record.save_type = "database"

    @api.depends("storage_id", "storage_id.save_type")
    def _compute_migration(self):
        """Human label of the storage transition ("database > file") and a
        flag telling whether the record needs migrating."""
        storage_model = self.env["dms.storage"]
        save_field = storage_model._fields["save_type"]
        values = save_field._description_selection(self.env)
        selection = {value[0]: value[1] for value in values}
        for record in self:
            storage_type = record.storage_id.save_type
            if storage_type == "attachment" or storage_type == record.save_type:
                record.migration = selection.get(storage_type)
                record.require_migration = False
            else:
                storage_label = selection.get(storage_type)
                file_label = selection.get(record.save_type)
                record.migration = "{} > {}".format(file_label, storage_label)
                record.require_migration = True

    def read(self, fields=None, load="_classic_read"):
        # Enforce directory-level read access on every read.
        self.check_directory_access("read", {}, True)
        return super(File, self).read(fields, load=load)

    # ----------------------------------------------------------
    # View
    # ----------------------------------------------------------

    @api.onchange("category_id")
    def _change_category(self):
        """Restrict selectable tags to the chosen category (or none) and
        drop tags that no longer match."""
        res = {"domain": {"tag_ids": [("category_id", "=", False)]}}
        if self.category_id:
            res.update({
                "domain": {
                    "tag_ids": [
                        "|",
                        ("category_id", "=", False),
                        ("category_id", "child_of", self.category_id.id),
                    ]
                }
            })
        tags = self.tag_ids.filtered(lambda rec: not rec.category_id or rec.
                                     category_id == self.category_id)
        self.tag_ids = tags
        return res

    # ----------------------------------------------------------
    # Security
    # ----------------------------------------------------------

    @api.model
    def _get_directories_from_database(self, file_ids):
        # Directories of the given files (empty recordset for no ids).
        if not file_ids:
            return self.env["dms.directory"]
        return self.env["dms.file"].browse(file_ids).mapped("directory_id")

    @api.model
    def _read_group_process_groupby(self, gb, query):
        # Limit group-by queries to files in directories visible to the user.
        if self.env.user.id == SUPERUSER_ID:
            return super(File, self)._read_group_process_groupby(gb, query)
        directories = (self.env["dms.directory"].with_context(
            prefetch_fields=False).search([]))
        if directories:
            where_clause = '"{table}"."{field}" = ANY (VALUES {ids})'.format(
                table=self._table,
                field="directory_id",
                ids=", ".join(map(lambda id: "(%s)" % id, directories.ids)),
            )
            query.add_where(where_clause)
        # else:
        #     query.where_clause += tuple("0=1")
        return super(File, self)._read_group_process_groupby(gb, query)

    @api.model
    def _search(
        self,
        args,
        offset=0,
        limit=None,
        order=None,
        count=False,
        access_rights_uid=None,
    ):
        """Post-filter search results by directory read access.

        Note: count is computed *after* filtering, so super() is called
        without count and the length is returned instead.
        """
        result = super(File, self)._search(args, offset, limit, order, False,
                                           access_rights_uid)
        if self.env.user.id == SUPERUSER_ID:
            return len(result) if count else result
        # Fix access files with share button (public)
        if self.env.user.has_group("base.group_public"):
            return len(result) if count else result
        # operations
        if not result:
            return 0 if count else []
        file_ids = set(result)
        directories = self._get_directories_from_database(result)
        for directory in directories - directories._filter_access("read"):
            file_ids -= set(directory.sudo().mapped("file_ids").ids)
        return len(file_ids) if count else list(file_ids)

    def _filter_access(self, operation):
        # Remove files whose directory the user cannot read.
        records = super(File, self)._filter_access(operation)
        if self.env.user.id == SUPERUSER_ID:
            return records
        directories = self._get_directories_from_database(records.ids)
        for directory in directories - directories._filter_access("read"):
            records -= self.browse(directory.sudo().mapped("file_ids").ids)
        return records

    def check_access(self, operation, raise_exception=False):
        """Combine model-level access with directory-level access; portal
        users additionally go through the access-group check."""
        res = super(File, self).check_access(operation, raise_exception)
        try:
            if self.env.user.has_group("base.group_portal"):
                res_access = res and self.check_directory_access(operation)
                return res_access and (
                    self.directory_id.id not in self.directory_id.
                    _get_ids_without_access_groups(operation))
            else:
                return res and self.check_directory_access(operation)
        except AccessError:
            if raise_exception:
                raise
            return False

    def check_directory_access(self, operation, vals=False,
                               raise_exception=False):
        """Check *operation* on the file's directory (or on the directory
        given in *vals* for create/write)."""
        if not vals:
            vals = {}
        if self.env.user.id == SUPERUSER_ID:
            return True
        if "directory_id" in vals and vals["directory_id"]:
            records = self.env["dms.directory"].browse(vals["directory_id"])
        else:
            records = self._get_directories_from_database(self.ids)
        return records.check_access(operation, raise_exception)

    # ----------------------------------------------------------
    # Constrains
    # ----------------------------------------------------------

    @api.constrains("name")
    def _check_name(self):
        """Reject invalid filenames and duplicates within the directory."""
        for record in self:
            if not file.check_name(record.name):
                raise ValidationError(_("The file name is invalid."))
            files = record.sudo().directory_id.file_ids.name_get()
            if list(
                    filter(
                        lambda file: file[1] == record.name and file[0] !=
                        record.id, files)):
                raise ValidationError(
                    _("A file with the same name already exists."))

    @api.constrains("extension")
    def _check_extension(self):
        # Reject extensions from the configured blacklist.
        for record in self:
            if (record.extension and record.extension
                    in self._get_forbidden_extensions()):
                raise ValidationError(_("The 
file has a forbidden file extension.")) @api.constrains("size") def _check_size(self): for record in self: if record.size and record.size > self._get_binary_max_size( ) * 1024 * 1024: raise ValidationError( _("The maximum upload size is %s MB).") % self._get_binary_max_size()) @api.constrains("directory_id") def _check_directory_access(self): for record in self: if not record.directory_id.check_access("create", raise_exception=False): raise ValidationError( _("The directory has to have the permission to create files." )) # ---------------------------------------------------------- # Create, Update, Delete # ---------------------------------------------------------- def _inverse_content(self): updates = defaultdict(set) for record in self: values = self._get_content_inital_vals() binary = base64.b64decode(record.content or "") values = record._update_content_vals(values, binary) updates[tools.frozendict(values)].add(record.id) with self.env.norecompute(): for vals, ids in updates.items(): self.browse(ids).write(dict(vals)) def _create_model_attachment(self, vals): res_vals = vals.copy() directory = self.env["dms.directory"].sudo().browse( res_vals["directory_id"]) if directory and directory.res_model and directory.res_id: attachment = (self.env["ir.attachment"].with_context( dms_file=True).create({ "name": vals["name"], "datas": vals["content"], "res_model": directory.res_model, "res_id": directory.res_id, })) res_vals["attachment_id"] = attachment.id res_vals["res_model"] = attachment.res_model res_vals["res_id"] = attachment.res_id del res_vals["content"] return res_vals def copy(self, default=None): self.ensure_one() default = dict(default or []) if "directory_id" in default: model = self.env["dms.directory"] directory = model.browse(default["directory_id"]) names = directory.sudo().file_ids.mapped("name") else: names = self.sudo().directory_id.file_ids.mapped("name") default.update( {"name": file.unique_name(self.name, names, self.extension)}) 
self.check_directory_access("create", default, True) return super(File, self).copy(default) def write(self, vals): self.check_directory_access("write", vals, True) self.check_lock() return super(File, self).write(vals) def unlink(self): self.check_access_rights("unlink") self.check_directory_access("unlink", {}, True) self.check_lock() # We need to do sudo because we don't know when the related groups # will be deleted return super(File, self.sudo()).unlink() @api.model_create_multi def create(self, vals_list): new_vals_list = [] for vals in vals_list: if "res_model" not in vals and "res_id" not in vals: vals = self._create_model_attachment(vals) new_vals_list.append(vals) return super(File, self).create(new_vals_list) # ---------------------------------------------------------- # Locking fields and functions # ---------------------------------------------------------- locked_by = fields.Many2one(comodel_name="res.users", string="Locked by") is_locked = fields.Boolean(compute="_compute_locked", string="Locked") is_lock_editor = fields.Boolean(compute="_compute_locked", string="Editor") # ---------------------------------------------------------- # Locking # ---------------------------------------------------------- def lock(self): self.write({"locked_by": self.env.uid}) def unlock(self): self.write({"locked_by": None}) @api.model def _check_lock_editor(self, lock_uid): return lock_uid in (self.env.uid, SUPERUSER_ID) def check_lock(self): for record in self: if record.locked_by.exists() and not self._check_lock_editor( record.locked_by.id): message = _( "The record (%s [%s]) is locked, by an other user.") raise AccessError(message % (record._description, record.id)) # ---------------------------------------------------------- # Read, View # ---------------------------------------------------------- @api.depends("locked_by") def _compute_locked(self): for record in self: if record.locked_by.exists(): record.update({ "is_locked": True, "is_lock_editor": 
record.locked_by.id == record.env.uid, }) else: record.update({"is_locked": False, "is_lock_editor": False})
class DmsDirectory(models.Model):
    """A directory of the document management system.

    Directories are organized as a tree (``_parent_store``); a root
    directory is attached to a storage, all others inherit their storage
    from their parent.
    """

    _name = "dms.directory"
    _description = "Directory"
    _inherit = [
        "portal.mixin",
        "dms.security.mixin",
        "dms.mixins.thumbnail",
        "mail.thread",
        "mail.activity.mixin",
        "mail.alias.mixin",
    ]

    _rec_name = "complete_name"
    _order = "complete_name"

    _parent_store = True
    _parent_name = "parent_id"

    name = fields.Char(string="Name", required=True, index=True)
    parent_path = fields.Char(index=True)
    is_root_directory = fields.Boolean(
        string="Is Root Directory",
        default=False,
        help="""Indicates if the directory is a root directory.
        A root directory has a settings object, while a directory with a
        set parent inherits the settings from its parent.""",
    )
    root_storage_id = fields.Many2one(
        comodel_name="dms.storage",
        string="Root Storage",
        ondelete="restrict",
        compute="_compute_directory_type",
        store=True,
        readonly=False,
        copy=True,
    )
    storage_id = fields.Many2one(
        compute="_compute_storage",
        comodel_name="dms.storage",
        string="Storage",
        ondelete="restrict",
        auto_join=True,
        store=True,
    )
    parent_id = fields.Many2one(
        comodel_name="dms.directory",
        domain="[('permission_create', '=', True)]",
        string="Parent Directory",
        ondelete="restrict",
        auto_join=True,
        index=True,
        store=True,
        readonly=False,
        compute="_compute_directory_type",
        copy=True,
    )
    complete_name = fields.Char(
        "Complete Name", compute="_compute_complete_name", store=True)
    child_directory_ids = fields.One2many(
        comodel_name="dms.directory",
        inverse_name="parent_id",
        string="Subdirectories",
        auto_join=False,
        copy=False,
    )
    is_hidden = fields.Boolean(
        string="Storage is Hidden",
        related="storage_id.is_hidden",
        readonly=True)
    company_id = fields.Many2one(
        related="storage_id.company_id",
        comodel_name="res.company",
        string="Company",
        readonly=True,
        store=True,
        index=True,
    )
    color = fields.Integer(string="Color", default=0)
    category_id = fields.Many2one(
        comodel_name="dms.category",
        context="{'dms_category_show_path': True}",
        string="Category",
    )
    tag_ids = fields.Many2many(
        comodel_name="dms.tag",
        relation="dms_directory_tag_rel",
        domain="""[
            '|', ['category_id', '=', False],
            ['category_id', 'child_of', category_id]]
        """,
        column1="did",
        column2="tid",
        string="Tags",
        compute="_compute_tags",
        readonly=False,
        store=True,
    )
    user_star_ids = fields.Many2many(
        comodel_name="res.users",
        relation="dms_directory_star_rel",
        column1="did",
        column2="uid",
        string="Stars",
    )
    starred = fields.Boolean(
        compute="_compute_starred",
        inverse="_inverse_starred",
        search="_search_starred",
        string="Starred",
    )
    file_ids = fields.One2many(
        comodel_name="dms.file",
        inverse_name="directory_id",
        string="Files",
        auto_join=False,
        copy=False,
    )
    count_directories = fields.Integer(
        compute="_compute_count_directories",
        string="Count Subdirectories Title")
    count_files = fields.Integer(
        compute="_compute_count_files", string="Count Files Title")
    count_directories_title = fields.Char(
        compute="_compute_count_directories", string="Count Subdirectories")
    count_files_title = fields.Char(
        compute="_compute_count_files", string="Count Files")
    count_elements = fields.Integer(
        compute="_compute_count_elements", string="Count Elements")
    count_total_directories = fields.Integer(
        compute="_compute_count_total_directories",
        string="Total Subdirectories")
    count_total_files = fields.Integer(
        compute="_compute_count_total_files", string="Total Files")
    count_total_elements = fields.Integer(
        compute="_compute_count_total_elements", string="Total Elements")
    size = fields.Integer(compute="_compute_size", string="Size")
    inherit_group_ids = fields.Boolean(string="Inherit Groups", default=True)
    alias_process = fields.Selection(
        selection=[("files", "Single Files"), ("directory", "Subdirectory")],
        required=True,
        default="directory",
        string="Unpack Emails as",
        help="""\
        Define how incoming emails are processed:\n
        - Single Files: The email gets attached to the directory and
        all attachments are created as files.\n
        - Subdirectory: A new subdirectory is created for each email
        and the mail is attached to this subdirectory. The attachments
        are created as files of the subdirectory.
        """,
    )

    def _get_share_url(self, redirect=False, signup_partner=False, pid=None):
        """Return the portal share URL for this directory."""
        self.ensure_one()
        return "/my/dms/directory/{}?access_token={}&db={}".format(
            self.id,
            self._portal_ensure_token(),
            self.env.cr.dbname,
        )

    def check_access_token(self, access_token=False):
        """Return True when ``access_token`` grants access to this
        directory or to one of its ancestors."""
        res = False
        if access_token:
            items = self.env["dms.directory"].search([
                ("access_token", "=", access_token)
            ])
            if items:
                item = items[0]
                if item.id == self.id:
                    return True
                directory_item = self
                while directory_item.parent_id:
                    if directory_item.id == item.id:
                        return True
                    directory_item = directory_item.parent_id
                # Fix last level
                if directory_item.id == item.id:
                    return True
        return res

    def _alias_get_creation_values(self):
        values = super(DmsDirectory, self)._alias_get_creation_values()
        values['alias_model_id'] = self.env['ir.model']._get(
            'dms.directory').id
        if self.id:
            values['alias_defaults'] = defaults = ast.literal_eval(
                self.alias_defaults or "{}")
            defaults['parent_id'] = self.id
        return values

    def _get_parent_categories(self, access_token):
        """Return the ancestor chain (top-down) of this directory.

        With an ``access_token`` the chain stops at the directory the
        token was issued for; otherwise it stops at the last readable
        ancestor.

        Fixes: the previous ``@api.model`` decorator was wrong for a
        method that calls ``ensure_one()`` and has been removed; the
        ``stop`` flag was set to ``False`` on a token match, which made
        it useless and exposed every ancestor.
        """
        self.ensure_one()
        directories = [self]
        current_directory = self
        if access_token:
            # Only show parent categories up to the shared directory.
            stop = False
            while current_directory.parent_id and not stop:
                if current_directory.access_token == access_token:
                    stop = True
                else:
                    directories.append(current_directory.parent_id)
                    current_directory = current_directory.parent_id
        else:
            while (current_directory.parent_id
                   and current_directory.parent_id.check_access(
                       "read", False)):
                directories.append(current_directory.parent_id)
                current_directory = current_directory.parent_id
        return directories[::-1]

    def _get_own_root_directories(self, user_id):
        """Return the ids of the topmost directories readable by
        ``user_id``."""
        ids = []
        items = (self.env["dms.directory"].with_user(user_id).search([
            ("is_hidden", "=", False)
        ]))
        for item in items:
            current_directory = item
            while (current_directory.parent_id
                   and current_directory.parent_id.check_access(
                       "read", False)):
                current_directory = current_directory.parent_id
            if current_directory.id not in ids:
                ids.append(current_directory.id)
        return ids

    def check_access(self, operation, raise_exception=False):
        res = super(DmsDirectory, self).check_access(operation,
                                                     raise_exception)
        if self.env.user.has_group("base.group_portal"):
            if self.id in self._get_ids_without_access_groups(operation):
                res = False
        # Fix show breadcrumb with share button (public)
        if self.env.user.has_group("base.group_public"):
            res = True
        return res

    allowed_model_ids = fields.Many2many(
        compute="_compute_allowed_model_ids",
        comodel_name="ir.model",
        store=False)
    model_id = fields.Many2one(
        comodel_name="ir.model",
        domain="[('id', 'in', allowed_model_ids)]",
        compute="_compute_model_id",
        inverse="_inverse_model_id",
        string="Model",
        store=True,
    )
    res_model = fields.Char(string="Linked attachments model")
    res_id = fields.Integer(string="Linked attachments record ID")
    record_ref = fields.Reference(
        string="Record Referenced",
        compute="_compute_record_ref",
        selection=[])
    storage_id_save_type = fields.Selection(
        related="storage_id.save_type", store=False)

    @api.depends("root_storage_id", "storage_id")
    def _compute_allowed_model_ids(self):
        for record in self:
            record.allowed_model_ids = False
            if record.root_storage_id and record.root_storage_id.model_ids:
                record.allowed_model_ids = \
                    record.root_storage_id.model_ids.ids
            elif record.storage_id and record.storage_id.model_ids:
                record.allowed_model_ids = record.storage_id.model_ids.ids

    @api.depends("res_model")
    def _compute_model_id(self):
        for record in self:
            if not record.res_model:
                record.model_id = False
                continue
            record.model_id = self.env["ir.model"].search([
                ("model", "=", record.res_model)
            ])

    def _inverse_model_id(self):
        for record in self:
            record.res_model = record.model_id.model

    @api.depends("res_model", "res_id")
    def _compute_record_ref(self):
        for record in self:
            record.record_ref = False
            if record.res_model and record.res_id:
                record.record_ref = "{},{}".format(record.res_model,
                                                   record.res_id)
@api.depends("name", "complete_name") def _compute_display_name(self): if not self.env.context.get("directory_short_name", False): return super()._compute_display_name() for record in self: record.display_name = record.name def toggle_starred(self): updates = defaultdict(set) for record in self: vals = {"starred": not record.starred} updates[tools.frozendict(vals)].add(record.id) with self.env.norecompute(): for vals, ids in updates.items(): self.browse(ids).write(dict(vals)) self.recompute() # ---------------------------------------------------------- # Actions # ---------------------------------------------------------- def action_save_onboarding_directory_step(self): self.env.user.company_id.set_onboarding_step_done( "documents_onboarding_directory_state") # ---------------------------------------------------------- # SearchPanel # ---------------------------------------------------------- @api.model def _search_panel_directory(self, **kwargs): search_domain = (kwargs.get("search_domain", []), ) if search_domain and len(search_domain): for domain in search_domain[0]: if domain[0] == "parent_id": return domain[1], domain[2] return None, None # ---------------------------------------------------------- # Search # ---------------------------------------------------------- @api.model def _search( self, args, offset=0, limit=None, order=None, count=False, access_rights_uid=None, ): result = super(DmsDirectory, self)._search(args, offset, limit, order, False, access_rights_uid) if result: directory_ids = set(result) if self.env.user.has_group("base.group_portal"): exclude_ids = self._get_ids_without_access_groups("read") directory_ids -= set(exclude_ids) return directory_ids return result @api.model def _search_starred(self, operator, operand): if operator == "=" and operand: return [("user_star_ids", "in", [self.env.uid])] return [("user_star_ids", "not in", [self.env.uid])] @api.depends("name", "parent_id.complete_name") def _compute_complete_name(self): for 
category in self: if category.parent_id: category.complete_name = "{} / {}".format( category.parent_id.complete_name, category.name, ) else: category.complete_name = category.name @api.depends("root_storage_id", "parent_id") def _compute_storage(self): for record in self: if record.is_root_directory: record.storage_id = record.root_storage_id else: record.storage_id = record.parent_id.storage_id @api.depends("user_star_ids") def _compute_starred(self): for record in self: record.starred = self.env.user in record.user_star_ids @api.depends("child_directory_ids") def _compute_count_directories(self): for record in self: directories = len(record.child_directory_ids) record.count_directories = directories record.count_directories_title = _( "%s Subdirectories") % directories @api.depends("file_ids") def _compute_count_files(self): for record in self: files = len(record.file_ids) record.count_files = files record.count_files_title = _("%s Files") % files @api.depends("child_directory_ids", "file_ids") def _compute_count_elements(self): for record in self: elements = record.count_files elements += record.count_directories record.count_elements = elements def _compute_count_total_directories(self): for record in self: count = self.search_count([("id", "child_of", record.id)]) count = count - 1 if count > 0 else 0 record.count_total_directories = count def _compute_count_total_files(self): model = self.env["dms.file"] for record in self: record.count_total_files = model.search_count([ ("directory_id", "child_of", record.id) ]) def _compute_count_total_elements(self): for record in self: total_elements = record.count_total_files total_elements += record.count_total_directories record.count_total_elements = total_elements def _compute_size(self): sudo_model = self.env["dms.file"].sudo() for record in self: # Avoid NewId if not record.id: record.size = 0 continue recs = sudo_model.search_read( domain=[("directory_id", "child_of", record.id)], fields=["size"], ) record.size = 
sum(rec.get("size", 0) for rec in recs) @api.depends("inherit_group_ids", "parent_path") def _compute_groups(self): records = self.filtered(lambda record: record.parent_path) paths = [ list(map(int, rec.parent_path.split("/")[:-1])) for rec in records ] ids = paths and set(functools.reduce(operator.concat, paths)) or [] read = self.browse(ids).read(["inherit_group_ids", "group_ids"]) data = {entry.pop("id"): entry for entry in read} for record in records: complete_group_ids = set() for directory_id in reversed( list(map(int, record.parent_path.split("/")[:-1]))): if directory_id in data: complete_group_ids |= set(data[directory_id].get( "group_ids", [])) if not data[directory_id].get("inherit_group_ids"): break record.update( {"complete_group_ids": [(6, 0, list(complete_group_ids))]}) for record in self - records: if record.parent_id and record.inherit_group_ids: complete_groups = record.parent_id.complete_group_ids record.complete_group_ids = record.group_ids | complete_groups else: record.complete_group_ids = record.group_ids # ---------------------------------------------------------- # View # ---------------------------------------------------------- @api.depends("is_root_directory") def _compute_directory_type(self): for record in self: if record.is_root_directory: record.parent_id = None else: record.root_storage_id = None @api.depends("category_id") def _compute_tags(self): for record in self: tags = record.tag_ids.filtered( lambda rec: not rec.category_id or rec.category_id == record. category_id) record.tag_ids = tags # ---------------------------------------------------------- # Constrains # ---------------------------------------------------------- @api.constrains("parent_id") def _check_directory_recursion(self): if not self._check_recursion(): raise ValidationError( _("Error! 
You cannot create recursive directories.")) return True @api.constrains("is_root_directory", "root_storage_id", "parent_id") def _check_directory_storage(self): for record in self: if record.is_root_directory and not record.root_storage_id: raise ValidationError( _("A root directory has to have a root storage.")) if not record.is_root_directory and not record.parent_id: raise ValidationError( _("A directory has to have a parent directory.")) if record.parent_id and (record.is_root_directory or record.root_storage_id): raise ValidationError( _("A directory can't be a root and have a parent directory." )) @api.constrains("parent_id") def _check_directory_access(self): for record in self: if not record.parent_id.check_access("create", raise_exception=False): raise ValidationError( _("The parent directory has to have the permission " "to create directories.")) @api.constrains("name") def _check_name(self): for record in self: if not check_name(record.name): raise ValidationError(_("The directory name is invalid.")) if record.is_root_directory: childs = record.sudo( ).root_storage_id.root_directory_ids.name_get() else: childs = record.sudo().parent_id.child_directory_ids.name_get() if list( filter( lambda child: child[1] == record.name and child[0] != record.id, childs, )): raise ValidationError( _("A directory with the same name already exists.")) # ---------------------------------------------------------- # Create, Update, Delete # ---------------------------------------------------------- def _inverse_starred(self): starred_records = self.env["dms.directory"].sudo() not_starred_records = self.env["dms.directory"].sudo() for record in self: if not record.starred and self.env.user in record.user_star_ids: starred_records |= record elif record.starred and self.env.user not in record.user_star_ids: not_starred_records |= record not_starred_records.write({"user_star_ids": [(4, self.env.uid)]}) starred_records.write({"user_star_ids": [(3, self.env.uid)]}) def copy(self, 
default=None): self.ensure_one() default = dict(default or []) if "root_storage_id" in default: storage = self.env["dms.storage"].browse( default["root_storage_id"]) names = storage.sudo().root_directory_ids.mapped("name") elif "parent_id" in default: parent_directory = self.browse(default["parent_id"]) names = parent_directory.sudo().child_directory_ids.mapped("name") elif self.is_root_directory: names = self.sudo().root_storage_id.root_directory_ids.mapped( "name") else: names = self.sudo().parent_id.child_directory_ids.mapped("name") default.update({"name": unique_name(self.name, names)}) new = super().copy(default) for record in self.file_ids: record.copy({"directory_id": new.id}) for record in self.child_directory_ids: record.copy({"parent_id": new.id}) return new @api.model def get_alias_model_name(self, vals): return vals.get("alias_model", "dms.directory") def get_alias_values(self): values = super().get_alias_values() values["alias_defaults"] = {"parent_id": self.id} return values @api.model def message_new(self, msg_dict, custom_values=None): custom_values = custom_values if custom_values is not None else {} parent_directory_id = custom_values.get("parent_id", None) parent_directory = self.sudo().browse(parent_directory_id) if not parent_directory_id or not parent_directory.exists(): raise ValueError("No directory could be found!") if parent_directory.alias_process == "files": parent_directory._process_message(msg_dict) return parent_directory names = parent_directory.child_directory_ids.mapped("name") subject = slugify(msg_dict.get("subject", _("Alias-Mail-Extraction"))) defaults = dict( {"name": unique_name(subject, names, escape_suffix=True)}, **custom_values) directory = super().message_new(msg_dict, custom_values=defaults) directory._process_message(msg_dict) return directory def message_update(self, msg_dict, update_vals=None): self._process_message(msg_dict, extra_values=update_vals) return super().message_update(msg_dict, update_vals=update_vals) 
def _process_message(self, msg_dict, extra_values=False): names = self.sudo().file_ids.mapped("name") for attachment in msg_dict["attachments"]: uname = unique_name(attachment.fname, names, escape_suffix=True) self.env["dms.file"].sudo().create({ "content": base64.b64encode(attachment.content), "directory_id": self.id, "name": uname, }) names.append(uname) @api.model_create_multi def create(self, vals_list): for vals in vals_list: if vals.get("root_storage_id", False): vals["storage_id"] = vals["root_storage_id"] if vals.get("parent_id", False): parent = self.browse([vals["parent_id"]]) data = next(iter(parent.sudo().read(["storage_id"])), {}) vals["storage_id"] = self._convert_to_write(data).get( "storage_id") return super().create(vals_list) def write(self, vals): # Groups part if any(key in vals for key in ["group_ids", "inherit_group_ids"]): with self.env.norecompute(): res = super(DmsDirectory, self).write(vals) domain = [("id", "child_of", self.ids)] records = self.sudo().search(domain) records.modified(["group_ids"]) records.recompute() else: res = super().write(vals) if self and any(field for field in vals if field in ["root_storage_id", "parent_id"]): records = self.sudo().search([("id", "child_of", self.ids)]) - self if "root_storage_id" in vals: records.write({"storage_id": vals["root_storage_id"]}) elif "parent_id" in vals: parent = self.browse([vals["parent_id"]]) data = next(iter(parent.sudo().read(["storage_id"])), {}) records.write({ "storage_id": self._convert_to_write(data).get("storage_id") }) return res def unlink(self): if self and self.check_access("unlink", raise_exception=True): domain = [ "&", ("directory_id", "child_of", self.ids), "&", ("locked_by", "!=", self.env.uid), ("locked_by", "!=", False), ] if self.env["dms.file"].sudo().search(domain): raise AccessError( _("A file is locked, the folder cannot be deleted.")) self.env["dms.file"].sudo().search([("directory_id", "child_of", self.ids)]).unlink() return super(DmsDirectory, 
self.sudo().search([("id", "child_of", self.ids) ])).unlink() return super().unlink()