def do_transfer(self, conn=None):
    """Receive input attachments, process documents, send outputs

    :param conn: optional already-open connection to reuse; when None a
        connection is opened via the document model's ``connect()``.
    :returns: the newly created transfer record. On failure the error is
        recorded on the transfer (``raise_issue``) instead of propagating.
    """
    self.ensure_one()
    # One transfer record tracks this whole run; whether documents are
    # processed immediately depends on the context / automatic flag.
    transfer = self.transfer_ids.create({
        'gateway_id': self.id,
        'allow_process': self._context.get('default_allow_process',
                                           self.automatic),
    })
    self.lock_for_transfer(transfer)
    Model = self.env[self.model_id.model]
    try:
        # pylint: disable=broad-except
        # The gateway refuses to run unless its safety option is both
        # present and truthy in the server configuration file.
        if not self.safety:
            raise UserError(_("Missing safety configuration option"))
        else:
            section, _sep, key = self.safety.rpartition('.')
            # Options without a dotted prefix default to the 'edi' section.
            conf_file_safety = config.get_misc(section or 'edi', key)
            if conf_file_safety is None:
                raise UserError(_("Missing configuration option '%s'")
                                % self.safety)
            if not conf_file_safety:
                raise UserError(_("Gateway disabled via configuration "
                                  "option '%s'") % self.safety)
        # NOTE(review): this condition is always true at this point — every
        # falsy combination raised above already.
        if self.safety and conf_file_safety:
            if conn is not None:
                # Reuse the caller's connection; savepoint +
                # clear_upon_failure roll back cleanly on error.
                with self.env.cr.savepoint(), self.env.clear_upon_failure():
                    transfer.do_transfer(conn)
            else:
                # Open (and automatically close) our own connection.
                with Model.connect(self) as auto_conn,\
                        self.env.cr.savepoint(),\
                        self.env.clear_upon_failure():
                    transfer.do_transfer(auto_conn)
    except Exception as err:
        # Any failure is attached to the transfer as an issue so the
        # transfer record itself is still returned to the caller.
        transfer.raise_issue(_("Transfer failed: %s"), err)
    return transfer
def save_dimensions(self):
    """Copy every analytic dimension field from this record onto the
    target record identified by ``model_name`` / ``rec_id``."""
    if not (self.model_name and self.rec_id):
        return
    target = self.env[self.model_name].browse(self.rec_id)[0]
    dim_count = int(config.get_misc('analytic', 'analytic_size', 10))
    for slot in range(1, dim_count + 1):
        field = self.format_field_name(slot, 'a', 'id')
        target[field] = self[field]
def get_jail_path(self):
    """Jail Path

    Read the ``[edi] jail_path`` option from the configuration file: the
    path to which local filesystem access is restricted. Returns ``None``
    when the option is absent. *None must be handled at the point of use.*
    """
    return config.get_misc('edi', 'jail_path', None)
def order_selection(self):
    """Return the selection list of analysis slots, building and caching
    it on first use as ``_order_selection``."""
    cached = getattr(self, '_order_selection', None)
    if cached is None:
        count = int(config.get_misc('analytic', 'analytic_size', 5))
        cached = [(str(idx), _(u"Analysis {}".format(idx)))
                  for idx in range(1, count + 1)]
        self._order_selection = cached
    return cached
def enabled(self):
    """Return True if a print strategy is enabled, False otherwise.

    When a safety option is configured it must be truthy in the server
    configuration file; a falsy value disables the strategy (logged).
    """
    self.ensure_one()
    if not self.safety:
        return True
    section, _sep, key = self.safety.rpartition('.')
    section = section or self._name
    if config.get_misc(section, key):
        return True
    _logger.info('%s %s disabled, enable by configuring safety %s',
                 self._name, self.name, '.'.join((section, key)))
    return False
def check_edi_notifications_enabled(self):
    """Check the safety config parameter: return True when it is present
    and enabled, otherwise log an informational message and return False.
    """
    self.ensure_one()
    is_enabled = False
    if self.safety:
        section, _sep, key = self.safety.rpartition(".")
        is_enabled = bool(config.get_misc(section or self._name, key))
    if is_enabled:
        return True
    _logger.info(
        "%s %s disabled, enable by configuring safety.",
        self._name,
        self.name,
    )
    return False
def _get_password(self):
    """Get password (from database record or from configuration file)

    :raises UserError: when ``config_password`` names an option that is
        missing from the configuration file.
    """
    self.ensure_one()
    if self.password:
        return self.password
    if not self.config_password:
        return None
    section, _sep, key = self.config_password.rpartition('.')
    secret = config.get_misc(section or 'edi', key)
    if secret is None:
        raise UserError(
            _("Missing configuration option '%s'") % self.config_password)
    return secret
def _onchange_account_id(self):
    """Onchange: recompute which analytic dimensions the account requires.

    Bit ``n - 1`` of the mask is set when an analytic structure with
    ordering ``n`` references one of the account's dimensions. The mask is
    stored in ``d_bin`` as a zero-padded binary string, and
    ``req_dimensions`` flags whether any dimension is required at all.
    """
    if not self.account_id:
        return
    dimensions = self.account_id.nd_ids
    structures = self.env['analytic.structure'].search(
        [('nd_id', 'in', dimensions.ids)])
    # Set membership is O(1); the original list was scanned once per slot.
    used = {int(structure.ordering) for structure in structures}
    mask = 0
    size = int(config.get_misc('analytic', 'analytic_size', 10))
    for n in range(1, size + 1):
        if n in used:
            # Integer bit arithmetic replaces float math.pow(2, n - 1);
            # the dead local `src_data = 1` from the original is removed.
            mask |= 1 << (n - 1)
    self.d_bin = bin(mask)[2:].zfill(10)
    # At least one bit set means dimensions are mandatory.
    self.req_dimensions = bool(mask)
def line_dimension_lines(self, move_line=None):
    """Return a list of dicts describing the analytic dimensions set on
    ``move_line``, one entry per bit set in its ``d_bin`` mask.

    :param move_line: account move line record; an empty list is returned
        when it is missing or carries no dimension mask (the original
        crashed with AttributeError on the default ``None``).
    """
    res = []
    # Guard against the default None and against an empty/zero mask;
    # `or 0` also protects int() from an empty d_bin string.
    if move_line is None or not int(move_line.d_bin or 0):
        return res
    size = int(config.get_misc('analytic', 'analytic_size', 10))
    for n in range(1, size + 1):
        # d_bin is a left-padded binary string: slot n sits at index size - n.
        if int(move_line.d_bin[size - n]):
            src = self.format_field_name(n, 'a', 'id')
            structures = self.env['analytic.structure'].search([
                ('model_name', '=', 'account_move_line'),
                ('ordering', '=', n),
            ])
            res.append({
                'nd_name': structures.nd_id.name,
                'code': move_line[src].code,
                'name': move_line[src].name,
            })
    return res
def _onchange_account_id(self):
    """Onchange: recompute the dimension bitmask for the account and copy
    the matching analytic fields from the source record.

    Bit ``n - 1`` is set when an analytic structure with ordering ``n``
    (for this model) references one of the account's dimensions; the mask
    is stored in ``d_bin`` as a zero-padded binary string.
    """
    if not self.account_id:
        return
    dimensions = self.account_id.nd_ids
    structures = self.env['analytic.structure'].search([
        ('nd_id', 'in', dimensions.ids),
        ('model_name', '=', self.model_db_name),
    ])
    # Set membership is O(1) per slot.
    used = {int(structure.ordering) for structure in structures}
    # Hoist the invariant source-record lookup out of the loop
    # (the original re-tested model_name/rec_id on every iteration).
    rec = None
    if self.model_name and self.rec_id:
        rec = self.env[self.model_name].browse(self.rec_id)[0]
    mask = 0
    size = int(config.get_misc('analytic', 'analytic_size', 10))
    for n in range(1, size + 1):
        if n in used:
            if rec is not None:
                src = self.format_field_name(n, 'a', 'id')
                self[src] = rec[src]
            # Integer bit arithmetic replaces float math.pow(2, n - 1);
            # the dead local `src_data = 1` from the original is removed.
            mask |= 1 << (n - 1)
    self.d_bin = bin(mask)[2:].zfill(10)
# Before loading the module, if the analytic_size option is given, check it. # Its value must be an integer greater or equal to the default value. from odoo.tools import config errors = ["[analytic]"] try: assert int(config.get_misc('analytic', 'analytic_size', 5)) >= 5 except (ValueError, AssertionError): errors.append("analytic_size must be an integer greater/equal to 5.") try: assert config.get_misc('analytic', 'translate', False) in [True, False] except (AssertionError): errors.append("translate must be a boolean value.") if len(errors) > 1: config.parser.error("\n * ".join(errors)) #from . import MetaAnalytic #SARFRAZ from . import analytic_code from . import analytic_dimension from . import analytic_structure
def add_module_dependencies(cr, module_list):
    """ Select (new) dependencies from the modules in the list
    so that we can inject them into the graph at upgrade time.
    Used in the modified OpenUpgrade Server, not to be called from
    migration scripts

    Also take the OpenUpgrade configuration directives 'forced_deps'
    and 'autoinstall' into account. From any additional modules that
    these directives can add, the dependencies are added as well
    (but these directives are not checked for the occurrence of any
    of the dependencies).

    :param cr: database cursor.
    :param module_list: list of module names; returned extended with all
        transitive dependencies, forced deps and autoinstallable modules.
    """
    if not module_list:
        return module_list
    # Remember the caller's original modules: only genuinely new names
    # get their state flipped to 'to install' at the end.
    modules_in = list(module_list)
    # Version-specific directive (e.g. forced_deps_<release>) wins over
    # the generic one; both are literal dict strings in the config file.
    forced_deps = safe_eval(
        config.get_misc(
            'openupgrade', 'forced_deps_' + release.version,
            config.get_misc('openupgrade', 'forced_deps', '{}')))
    autoinstall = safe_eval(
        config.get_misc(
            'openupgrade', 'autoinstall_' + release.version,
            config.get_misc('openupgrade', 'autoinstall', '{}')))
    # Iterate over a copy: module_list grows inside the loop.
    for module in list(module_list):
        module_list += forced_deps.get(module, [])
        module_list += autoinstall.get(module, [])
    module_list = list(set(module_list))
    # Transitively pull in declared dependencies until a fixpoint:
    # each pass selects deps of the last batch not yet in module_list.
    dependencies = module_list
    while dependencies:
        cr.execute(
            """
            SELECT DISTINCT dep.name
            FROM ir_module_module, ir_module_module_dependency dep
            WHERE module_id = ir_module_module.id
            AND ir_module_module.name in %s
            AND dep.name not in %s
            """, (
                tuple(dependencies),
                tuple(module_list),
            ))
        dependencies = [x[0] for x in cr.fetchall()]
        module_list += dependencies
    # Select auto_install modules of which all dependencies
    # are fulfilled based on the modules we know are to be
    # installed
    cr.execute(
        """
        SELECT name from ir_module_module WHERE state IN %s
        """, (('installed', 'to install', 'to upgrade'), ))
    modules = list(set(module_list + [row[0] for row in cr.fetchall()]))
    cr.execute(
        """
        SELECT name from ir_module_module m
        WHERE auto_install IS TRUE
        AND state = 'uninstalled'
        AND NOT EXISTS(
            SELECT id FROM ir_module_module_dependency d
            WHERE d.module_id = m.id
            AND name NOT IN %s)
        """, (tuple(modules), ))
    # Only consider autoinstallable modules actually present on disk.
    auto_modules = [
        row[0] for row in cr.fetchall() if get_module_path(row[0])]
    if auto_modules:
        logger.info(
            "Selecting autoinstallable modules %s", ','.join(auto_modules))
        module_list += auto_modules
    # Set proper state for new dependencies so that any init scripts are run
    cr.execute(
        """
        UPDATE ir_module_module SET state = 'to install'
        WHERE name IN %s
        AND name NOT IN %s
        AND state = 'uninstalled'
        """, (tuple(module_list), tuple(modules_in)))
    return module_list
def _setup_bound_dimension(cls, dimension, defaults, orm_name, name, bases,
                           nmspc):
    """Bind a dimension to the model, creating a code for each record.

    Reads the ``dimension`` declaration (bool / str / dict), injects the
    bound analytic-code column and related fields into the model's
    namespace ``nmspc``, builds a throwaway superclass carrying the
    injected methods (create/write/unlink, etc.), and returns it as a
    one-element bases tuple for the model class to inherit from.
    """
    # Normalize the declaration: True -> defaults, a string -> its name.
    if dimension is True:
        dimension = {}
    elif isinstance(dimension, str):
        dimension = {'name': dimension}
    dimension_name = dimension.get('name', None)
    if dimension_name is None:
        dimension_name = nmspc.get('_description', False) or orm_name
    column = dimension.get('column', 'analytic_id')
    ref_module = dimension.get('ref_module', '')
    ref_id = dimension.get('ref_id', None)
    if ref_id is None:
        ref_id = orm_name.replace('.', '_') + "_analytic_dimension_id"
    # To use an inherited, renamed parent field, you have to give its name.
    sync_parent = dimension.get('sync_parent', False)
    if sync_parent is True:
        sync_parent = nmspc.get('_parent_name', 'parent_id')
    # Each rel_* option becomes a (label, model column) pair; a bare True
    # selects a default label, a bare string a custom one.
    rel_name = dimension.get('rel_name', tuple())
    if rel_name is True:
        rel_name = u"Name"
    if isinstance(rel_name, str):
        rel_name = (rel_name, 'name')
    rel_code = dimension.get('rel_code', tuple())
    if rel_code is True:
        rel_code = u"Code"
    if isinstance(rel_code, str):
        rel_code = (rel_code, 'code')
    rel_description = dimension.get('rel_description', tuple())
    if rel_description is True:
        rel_description = u"Description"
    if isinstance(rel_description, str):
        rel_description = (rel_description, 'description')
    rel_active = dimension.get('rel_active', tuple())
    if rel_active is True:
        rel_active = u"Active"
    if isinstance(rel_active, str):
        rel_active = (rel_active, 'active')
    rel_view_type = dimension.get('rel_view_type', tuple())
    if rel_view_type is True:
        rel_view_type = u"View type"
    if isinstance(rel_view_type, str):
        rel_view_type = (rel_view_type, 'view_type')
    rel_disabled_per_company = dimension.get(
        'rel_disabled_per_company', tuple()
    )
    if rel_disabled_per_company is True:
        rel_disabled_per_company = u"Disabled in my company"
    if isinstance(rel_disabled_per_company, str):
        rel_disabled_per_company = (
            rel_disabled_per_company, 'disabled_per_company'
        )
    # By default, only use inherits if we can be sure there is no conflict
    # on the required fields 'name' and 'nd_id'.
    # There can still be conflicts on analytic_code's optional fields.
    use_inherits = dimension.get('use_inherits', None)
    if use_inherits is None:
        use_inherits = not (
            any(field in nmspc for field in ('name', 'nd_id'))
            or nmspc.get('_inherits', False)
            or nmspc.get('_inherit', False)
        )
    use_code_name_methods = dimension.get('use_code_name_methods', False)
    code_ref_ids = dimension.get('code_ref_ids', False)
    if code_ref_ids is True:
        code_ref_ids = ref_id
    code_ref_module = dimension.get('code_ref_module', '')
    if use_inherits:
        inherits = nmspc.get('_inherits', {})
        inherits['analytic.code'] = column
        nmspc['_inherits'] = inherits
        if column not in nmspc:
            nmspc[column] = fields.Many2one(
                'analytic.code', "Bound Analytic Code", auto_join=True,
                index=True, ondelete="cascade", required=True)
    # Default column for the underlying analytic code.
    else:
        if column not in nmspc:
            nmspc[column] = fields.Many2one(
                'analytic.code', "Bound Analytic Code",
                required=True, ondelete='restrict')
    # Keep only fully-specified related columns: a configured rel_* pair
    # (2 elements) plus the 4 static elements makes a 6-tuple.
    rel_cols = [
        cols
        for cols in [
            rel_name + ('name', 'Char', True, ''),
            rel_code + ('code', 'Char', False, ''),
            rel_description + ('description', 'Char', False, ''),
            rel_active + ('active', 'Boolean', False, True),
            rel_view_type + ('view_type', 'Boolean', False, False),
        ]
        if len(cols) == 6
    ]
    if rel_cols:
        # NOT a method nor a class member. 'self' is the analytic_code OSV.
        def _record_from_code_id(self, cr, uid, ids, context=None):
            """Get the entries to update from the modified codes."""
            osv = self.pool.get(orm_name)
            domain = [(column, 'in', ids)]
            return osv.search(cr, uid, domain, context=context)

        # Inject one related field per configured rel_* column, with a
        # default so creation works before the related value exists.
        for string, model_col, code_col, dtype, req, default in rel_cols:
            nmspc[model_col] = getattr(fields, dtype)(
                string=string,
                related=".".join([column, code_col]),
                relation="analytic.code",
                required=req,
                ondelete='restrict',
                store=True
            )
            if model_col not in defaults:
                defaults[model_col] = default
    # In order to preserve inheritance and possible overrides, work on a
    # new class that inherits the given bases, then make our model class
    # inherit from this class.
    superclass_name = '_{name}_SuperDimension'.format(name=name)
    # Set _register to False in order to prevent its instantiation.
    superclass = type(superclass_name, bases, {'_register': False})

    # We must keep the old api here !!!!!!!
    # If we switch to the new, the method is call through a wrapper
    # then, 'self' is a !#@*ing (!) object of the same type of __cls__
    # but totally temporary.
    # We don't want that cause we set _bound_dimension_id.
    # Keep the old api until we fix all this module.
    @AddMethod(superclass)
    def _get_bound_dimension_id(self):
        # Re-derive the XML id the same way as the enclosing setup code,
        # but from the model's own _dimension declaration.
        ref_module = self._dimension.get('ref_module', '')
        ref_id = self._dimension.get('ref_id', None)
        if ref_id is None:
            ref_id = self._name.replace('.', '_') + "_analytic_dimension_id"
        data_osv = self.env['ir.model.data']
        nd_id = data_osv.sudo().get_object_reference(ref_module, ref_id)[1]
        return nd_id

    @AddMethod(superclass)
    def _setup_complete(self):
        """Load or create the analytic dimension bound to the model."""
        super(superclass, self)._setup_complete()
        data_osv = self.env['ir.model.data']
        try:
            self._bound_dimension_id = data_osv.sudo().get_object_reference(
                ref_module, ref_id)[1]
        except ValueError:
            # NOTE(review): the creation path is commented out below, so a
            # missing reference currently leaves _bound_dimension_id unset.
            vals = {'name': dimension_name, 'validated': True}
            #SARFRAZ#
            #self._bound_dimension_id = data_osv.sudo()._update('analytic.dimension', ref_module, vals, xml_id=ref_id, noupdate=True)

    if code_ref_ids:
        prefix = config.get_misc('analytic', 'code_ref_prefix', False)

        # This function is called as a method and can be overridden.
        @AddMethod(superclass)
        def _generate_code_ref_id(self, cr, uid, ids, context=None):
            # Register an ir.model.data entry for each record's code, named
            # [prefix_][company_]ANC_<code_ref_ids>_<code name>.
            data_osv = self.pool['ir.model.data']
            records = self.browse(cr, uid, ids, context=None)
            if not isinstance(records, list):
                records = [records]
            for record in records:
                code = record[column]
                code_ref_id_builder = [prefix] if prefix else []
                if 'company_id' in record and record.company_id:
                    code_ref_id_builder.append(record.company_id.code)
                code_ref_id_builder.append('ANC')
                code_ref_id_builder.append(code_ref_ids)
                code_ref_id_builder.append(code.name)
                vals = {
                    'name': "_".join(code_ref_id_builder),
                    'module': code_ref_module,
                    'model': 'analytic.code',
                    'res_id': code.id,
                }
                data_osv.create(cr, uid, vals, context=context)

    @AddMethod(superclass)
    @api.model
    @api.returns(orm_name, lambda a: a.id)
    def create(self, vals, **kwargs):
        """Create the analytic code."""
        code_vals = {}
        if sync_parent:
            cp = self._get_code_parent(vals)
            if cp is not None:
                code_vals['code_parent_id'] = cp
        # Direct changes to the 'bound analytic code' field are ignored
        # unless the 'force_code_id' context key is passed as True.
        force_code_id = vals.pop(column, False)
        # Will be set if a new code is created
        new_code = False
        if self._context and self._context.get('force_code_id', False):
            self._force_code(force_code_id, code_vals)
            vals[column] = force_code_id
        else:
            if use_inherits:
                code_vals.update(vals)
            else:
                # Special-case: pre-assign the sale.order sequence so the
                # code gets the real name instead of the default.
                if self._name == 'sale.order' and not vals.get('name', False):
                    if 'company_id' in vals:
                        vals['name'] = self.env['ir.sequence'].with_context(
                            force_company=vals['company_id']
                        ).next_by_code('sale.order') or _('New')
                    else:
                        vals['name'] = self.env['ir.sequence'].next_by_code(
                            'sale.order') or _('New')
                code_vals['name'] = vals.get('name')
                code_vals['code'] = vals.get('code')
            # OpenERP bug: related fields do not work properly on creation.
            for rel in rel_cols:
                model_col, code_col = rel[1:3]
                if model_col in vals:
                    code_vals[code_col] = vals[model_col]
                elif model_col in self._defaults:
                    code_vals[code_col] = self._defaults[model_col]
            # We have to create the code separately, even with inherits.
            code_osv = self.env['analytic.code']
            code_vals['nd_id'] = self._get_bound_dimension_id()
            new_code = code_osv.create(code_vals)
            vals[column] = new_code.id
        res = super(superclass, self).create(vals, **kwargs)
        if code_ref_ids:
            self._generate_code_ref_id(res)
        if new_code:
            # Back-link the code to the record that spawned it.
            new_code.write({
                'origin_id': '{},{}'.format(self._name, res.id),
            })
        return res

    @AddMethod(superclass)
    @api.one
    def write(self, vals, **kwargs):
        """Update the analytic code's name if it is not inherited,
        and its parent code if parent-child relations are synchronized.
        """
        code_vals = {}
        if sync_parent:
            cp = self._get_code_parent(vals)
            if cp is not None:
                code_vals['code_parent_id'] = cp
        # Direct changes to the 'bound analytic code' field are ignored
        # unless the 'force_code_id' context key is passed as True.
        force_code_id = vals.pop(column, False)
        if self._context and self._context.get('force_code_id', False):
            self._force_code(force_code_id, code_vals)
            vals[column] = force_code_id
        elif use_inherits:
            vals.update(code_vals)
        else:
            name_col = rel_name[1] if rel_name else 'name'
            if name_col in vals:
                code_vals['name'] = vals[name_col]
            code_col = rel_code[1] if rel_code else 'code'
            if code_col in vals:
                code_vals['code'] = vals[code_col]
            analytic_code = getattr(self, column)
            # If updating a single record with no code, create it.
            if not analytic_code:
                code_vals['nd_id'] = self._get_bound_dimension_id()
                if 'name' not in code_vals:
                    code_vals['name'] = self[name_col]
                    #self.read(cr, uid, new, [name_col], context=context)[name_col]
                if 'code' not in code_vals:
                    code_vals['code'] = self[code_col]
                    #self.read(cr, uid, new, [code_col], context=context)[code_col]
                code_vals['origin_id'] = '{},{}'.format(self._name, self.id)
                vals[column] = self.env['analytic.code'].create(code_vals).id
            elif code_vals:
                analytic_code.write(code_vals)
        res = super(superclass, self).write(vals, **kwargs)
        # NOTE(review): analytic_code is only bound in the final else branch
        # above; with code_ref_ids enabled, the forced/inherits paths would
        # hit a NameError here — confirm against callers.
        if code_ref_ids and analytic_code:
            self._generate_code_ref_id()
        return res

    @AddMethod(superclass)
    def unlink(self, **kwargs):
        """When removing this object, remove all associated analytic codes
        referenced by this object.

        Note: the method will fail if the code is referenced by any other
        object due to the RESTRICT constraint. That is the intended
        behavior.
        """
        code_obj = self.env['analytic.code']
        # Find all related codes
        code_ids = [record[column] for record in self]
        # Delete the records first so the codes are no longer referenced.
        res = super(superclass, self).unlink()
        for code_id in code_ids:
            code_id.unlink(**kwargs)
        return res

    @AddMethod(superclass)
    def _force_code(self, cr, uid, force_code_id, code_vals, context=None):
        # Validate and apply an explicitly forced analytic code id.
        code_osv = self.pool['analytic.code']
        if not force_code_id:
            raise ValueError(
                "An analytic code ID MUST be specified if the force_code_id "
                "key is enabled in the context")
        force_code_dim = code_osv.read(
            cr, uid, force_code_id, ['nd_id'], context=context)['nd_id'][0]
        if force_code_dim != self._bound_dimension_id:
            raise ValueError(
                "If specified, codes must belong to the bound analytic "
                "dimension {}".format(dimension_name))
        if code_vals:
            code_osv.write(cr, uid, force_code_id, code_vals, context=context)

    if sync_parent:
        # This function is called as a method and can be overridden.
        @AddMethod(superclass)
        def _get_code_parent(self, cr, uid, vals, context=None):
            """If parent_id is in the submitted values, return the analytic
            code of this parent, to be used as the child's code's parent.
            """
            parent_id = vals.get(sync_parent, None)
            if parent_id is not None:
                if parent_id:
                    res = self.read(
                        cr, uid, parent_id, [column], context=context)[column]
                    return res[0] if res else False
                else:
                    # Parent explicitly cleared.
                    return False
            # Parent not mentioned in vals at all.
            return None

    if use_code_name_methods:
        @AddMethod(superclass)
        def name_get(self, cr, uid, ids, context=None):
            """Return the analytic code's name."""
            code_osv = self.pool.get('analytic.code')
            code_reads = self.read(cr, uid, ids, [column], context=context)
            c2m = {  # Code IDs to model IDs
                code_read[column][0]: code_read['id']
                for code_read in code_reads
                if code_read[column] is not False
            }
            names = code_osv.name_get(cr, uid, c2m.keys(), context=context)
            return [(c2m[cid], name) for cid, name in names if cid in c2m]

        @AddMethod(superclass)
        def name_search(self, cr, uid, name, args=None, operator='ilike',
                        context=None, limit=100):
            """Return the records whose analytic code matches the name."""
            code_osv = self.pool.get('analytic.code')
            # Restrict the code search to this model's bound dimension.
            args.append(('nd_id', '=', self._bound_dimension_id))
            names = code_osv.name_search(
                cr, uid, name, args, operator, context, limit)
            if not names:
                return []
            dom = [(column, 'in', zip(*names)[0])]
            ids = self.search(cr, uid, dom, context=context)
            code_reads = self.read(cr, uid, ids, [column], context=context)
            c2m = {  # Code IDs to model IDs
                code_read[column][0]: code_read['id']
                for code_read in code_reads
                if code_read[column] is not False
            }
            return [
                (c2m[cid], cname) for cid, cname in names if cid in c2m
            ]

    return (superclass,)
class AnalyticCode(models.Model):
    # Hierarchical code referential shared by all analytic dimensions;
    # parent/child structure is maintained via code_parent_id.
    _name = 'analytic.code'
    _description = 'Analytic Code'
    _parent_name = 'code_parent_id'
    _parent_store = True
    _parent_order = 'name'
    _order = 'code'

    @api.depends('blacklist_ids')
    def _read_disabled_per_company(self):
        """Mark the code as disabled when it is in the blacklist
        (depending on the current user's company)."""
        company_id = self.env.user.company_id.id
        for anc in self:
            blacklist = (company.id for company in anc.blacklist_ids)
            anc.disabled_per_company = company_id in blacklist

    def _write_disabled_per_company(self):
        """Update the blacklist depending on the current user's company."""
        company_id = self.env.user.company_id.id
        for anc in self:
            # NOTE: blacklist is a single-use generator; the two membership
            # tests below are mutually exclusive thanks to the `and`
            # short-circuit, so it is consumed at most once per record.
            blacklist = (company.id for company in anc.blacklist_ids)
            to_write = None
            if anc.disabled_per_company and company_id not in blacklist:
                to_write = [(4, company_id)]  # Link.
            elif not anc.disabled_per_company and company_id in blacklist:
                to_write = [(3, company_id)]  # Unlink.
            if to_write:
                anc.write({'blacklist_ids': to_write})
        return True

    def _search_disabled_per_company(self, operator, value):
        """Update the domain to take the blacklist into account
        (depending on the current user's company)."""
        company_id = self.env.user.company_id.id
        # We assume the criterion was "disabled_per_company = False".
        dom = [
            '|',
            ('blacklist_ids', '=', False),
            ('blacklist_ids', '!=', company_id),
        ]
        # Negate the domain when searching for disabled codes instead.
        if value is True:
            dom = ['!'] + dom
        return dom

    def _get_origin_id_selection(self):
        """Looks up the list of models that define dimensions"""
        registry = self.env.registry
        # Models opt in by carrying a _dimension declaration.
        models = [
            model for name, model in registry.items()
            if getattr(model, '_dimension', False)
        ]
        res = [(model._name, model._description or model._name)
               for model in models]
        return res

    # Whether name/code/description are translatable is decided once at
    # module load from the [analytic] translate configuration option.
    name = fields.Char(
        "Name",
        size=128,
        translate=config.get_misc('analytic', 'translate', False),
        required=True,
    )
    code = fields.Char(
        "Code", size=16,
        translate=config.get_misc('analytic', 'translate', False))
    nd_id = fields.Many2one(
        'analytic.dimension',
        string="Dimension",
        ondelete='cascade',
        required=True,
    )
    origin_id = fields.Reference(
        _get_origin_id_selection,
        string="Original Object",
        help="The object that created this code",
        ondelete='cascade',
    )
    active = fields.Boolean(
        "Active",
        help=("Determines whether an analytic code is in the referential."),
        default=lambda *a: True)
    view_type = fields.Boolean(
        "View type",
        help=(
            "Determines whether an analytic code is not selectable (but "
            "still in the referential)."
        ),
        default=lambda *a: False)
    blacklist_ids = fields.Many2many(
        'res.company',
        'analytic_code_company_rel',
        'code_id',
        'company_id',
        "Blacklist",
        help=u"Companies the code is hidden in.",
    )
    disabled_per_company = fields.Boolean(
        string="Disable in my company",
        compute=_read_disabled_per_company,
        inverse=_write_disabled_per_company,
        search=_search_disabled_per_company,
        help=(
            "Determines whether an analytic code is disabled for the "
            "current company."
        ),
        default=lambda *a: False)
    nd_name = fields.Char(
        related='nd_id.name', string="Dimension Name", store=False)
    description = fields.Char(
        "Description",
        size=512,
        translate=config.get_misc('analytic', 'translate', False),
    )
    code_parent_id = fields.Many2one(
        'analytic.code',
        "Parent Code",
        index=True,
        ondelete='restrict',
    )
    child_ids = fields.One2many(
        'analytic.code',
        'code_parent_id',
        "Child Codes",
    )
    parent_left = fields.Integer("Left parent", index=True)
    parent_right = fields.Integer("Right parent", index=True)
    parent_path = fields.Char(index=True)

    _constraints = [
        # very useful base class constraint
        # NOTE(review): the reported field list says 'parent_id' but the
        # actual parent field is 'code_parent_id' — confirm intent.
        (models.Model._check_recursion,
         "Error ! You can not create recursive analytic codes.",
         ['parent_id']),
    ]

    @api.multi
    def name_get(self):
        # Display codes as "<code>-<name>", substituting a space for
        # either part when it is empty.
        res = []
        for rec in self:
            name = (rec.code or ' ') + '-' + (rec.name or ' ')
            res += [(rec.id, name)]
        return res

    @api.model
    def name_search(self, name='', args=None, operator='ilike', limit=100):
        # Match against both code and name; negative operators must
        # exclude records matching on either field, hence the '&'.
        args = args or []
        connector = '|'
        if operator in expression.NEGATIVE_TERM_OPERATORS:
            connector = '&'
        recs = self.search(
            [connector, ('code', operator, name), ('name', operator, name)]
            + args, limit=limit)
        return recs.name_get()