def order_selection(self, cr, uid, context=None):
    """Return the cached list of (value, label) analysis selections.

    Built lazily from the 'analytic_size' server option and memoized on
    the class under '_order_selection'.
    """
    cached = getattr(self, '_order_selection', None)
    if cached is not None:
        return cached
    count = int(config.get_misc('analytic', 'analytic_size', 5))
    cached = [
        (str(idx), _(u"Analysis {}".format(idx)))
        for idx in xrange(1, count + 1)
    ]
    setattr(self, '_order_selection', cached)
    return cached
 def order_selection(self, cr, uid, context=None):
     """Build (lazily) and return the analysis selection list.

     The result is memoized on the class as '_order_selection'; its size
     comes from the 'analytic_size' option (default 5).
     """
     selections = getattr(self, "_order_selection", None)
     if selections is None:
         limit = int(config.get_misc("analytic", "analytic_size", 5))
         selections = []
         index = 1
         while index <= limit:
             selections.append((str(index), _(u"Analysis {}".format(index))))
             index += 1
         setattr(self, "_order_selection", selections)
     return selections
def add_module_dependencies(cr, module_list):
    """
    Select (new) dependencies from the modules in the list
    so that we can inject them into the graph at upgrade
    time. Used in the modified OpenUpgrade Server,
    not to be called from migration scripts

    Also take the OpenUpgrade configuration directives 'forced_deps'
    and 'autoinstall' into account. From any additional modules
    that these directives can add, the dependencies are added as
    well (but these directives are not checked for the occurrence
    of any of the dependencies).

    :param cr: database cursor
    :param module_list: list of module names; NOT modified in place
    :return: new, deduplicated list of module names including dependencies
    """
    if not module_list:
        return module_list

    # Work on a copy: the previous version extended the caller's list in
    # place with `+=`, leaking the forced_deps/autoinstall additions back
    # to the caller as a side effect.
    modules = list(module_list)

    # Version-specific directives take precedence over the generic ones.
    forced_deps = safe_eval.safe_eval(
        config.get_misc(
            'openupgrade', 'forced_deps_' + release.version,
            config.get_misc('openupgrade', 'forced_deps', '{}')))

    autoinstall = safe_eval.safe_eval(
        config.get_misc(
            'openupgrade', 'autoinstall_' + release.version,
            config.get_misc('openupgrade', 'autoinstall', '{}')))

    # Iterate over a snapshot so that modules added by the directives are
    # not themselves checked for directives (matches the docstring).
    for module in list(modules):
        modules += forced_deps.get(module, [])
        modules += autoinstall.get(module, [])

    cr.execute("""
        SELECT ir_module_module_dependency.name
        FROM
            ir_module_module,
            ir_module_module_dependency
        WHERE
            module_id = ir_module_module.id
            AND ir_module_module.name in %s
        """, (tuple(modules),))

    # Merge the direct dependencies found in the DB and deduplicate.
    return list(set(modules + [x[0] for x in cr.fetchall()]))
Example #4
0
 def _prepare_context(self, cr, uid, nctx, context=None):
     """Prefill the WebDAV node context *nctx* with cheap per-user data.

     Sets the node file class, the configured WebDAV root path, and the
     current user's login and language. Returns nothing.
     """
     nctx.node_file_class = nodes.node_file
     # We can fill some more fields, but avoid any expensive function
     # that might be not worth preparing.
     # Root path comes from the [webdav] vdir option (default 'webdav').
     nctx.extra_ctx['webdav_path'] = '/'+config.get_misc('webdav','vdir','webdav')
     usr_obj = self.pool.get('res.users')
     res = usr_obj.read(cr, uid, uid, ['login','lang'])
     if res:
         nctx.extra_ctx['username'] = res['login']
         nctx.extra_ctx['lang'] = res['lang']
     # TODO group
     return
Example #5
0
def add_module_dependencies(cr, module_list):
    """
    Select (new) dependencies from the modules in the list
    so that we can inject them into the graph at upgrade
    time. Used in the modified OpenUpgrade Server,
    not to be called from migration scripts

    Also take the OpenUpgrade configuration directives 'forced_deps'
    and 'autoinstall' into account. From any additional modules
    that these directives can add, the dependencies are added as
    well (but these directives are not checked for the occurrence
    of any of the dependencies).

    :param cr: database cursor
    :param module_list: list of module names; NOT modified in place
    :return: new, deduplicated list of module names including dependencies
    """
    if not module_list:
        return module_list

    # Work on a copy: the previous version extended the caller's list in
    # place with `+=`, leaking the forced_deps/autoinstall additions back
    # to the caller as a side effect.
    modules = list(module_list)

    # Version-specific directives take precedence over the generic ones.
    forced_deps = safe_eval.safe_eval(
        config.get_misc('openupgrade', 'forced_deps_' + release.version,
                        config.get_misc('openupgrade', 'forced_deps', '{}')))

    autoinstall = safe_eval.safe_eval(
        config.get_misc('openupgrade', 'autoinstall_' + release.version,
                        config.get_misc('openupgrade', 'autoinstall', '{}')))

    # Iterate over a snapshot so that modules added by the directives are
    # not themselves checked for directives (matches the docstring).
    for module in list(modules):
        modules += forced_deps.get(module, [])
        modules += autoinstall.get(module, [])

    cr.execute(
        """
        SELECT ir_module_module_dependency.name
        FROM
            ir_module_module,
            ir_module_module_dependency
        WHERE
            module_id = ir_module_module.id
            AND ir_module_module.name in %s
        """, (tuple(modules), ))

    # Merge the direct dependencies found in the DB and deduplicate.
    return list(set(modules + [x[0] for x in cr.fetchall()]))
Example #6
0
 def __init__(self, user=None, passwd=None, dbg=0, use_ssl=False, useragent=False, timeout=None):
     """Resolve host/port from the server configuration and store settings.

     Raises if WebDAV is disabled in the [webdav] configuration section.
     """
     if use_ssl:
         host = config.get_misc("httpsd", "interface", False)
         port = config.get_misc("httpsd", "port", 8071)
         if not host:
             host = config.get("xmlrpcs_interface")
             port = config.get("xmlrpcs_port")
     else:
         host = config.get_misc("httpd", "interface")
         port = config.get_misc("httpd", "port", 8069)
         if not host:
             host = config.get("xmlrpc_interface")
             port = config.get("xmlrpc_port") or port
     # A wildcard or missing interface is reachable through loopback.
     if not host or host == "0.0.0.0":
         host = "127.0.0.1"
     self.host = host
     self.port = int(port)
     if not config.get_misc("webdav", "enable", True):
         raise Exception("WebDAV is disabled, cannot continue")
     self.davpath = "/" + config.get_misc("webdav", "vdir", "webdav")
     self.user = user
     self.passwd = passwd
     self.dbg = dbg
     # seconds, tests need to respond pretty fast!
     self.timeout = timeout or 5.0
     self.hdrs = {}
     if useragent:
         self.set_useragent(useragent)
Example #7
0
 def __init__(self, user=None, passwd=None, dbg=0, use_ssl=False, useragent=False, timeout=None):
     """Initialise the WebDAV test client from the server configuration."""
     # Pick the config section and default port for the chosen scheme.
     section = 'httpsd' if use_ssl else 'httpd'
     self.host = config.get_misc(section, 'interface', False)
     self.port = config.get_misc(section, 'port', 8071 if use_ssl else 8069)
     if not self.host:
         # Fall back to the legacy top-level xmlrpc options.
         if use_ssl:
             self.host = config.get('xmlrpcs_interface')
             self.port = config.get('xmlrpcs_port')
         else:
             self.host = config.get('xmlrpc_interface')
             self.port = config.get('xmlrpc_port') or self.port
     if not self.host or self.host == '0.0.0.0':
         self.host = '127.0.0.1'
     self.port = int(self.port)
     if not config.get_misc('webdav','enable',True):
         raise Exception("WebDAV is disabled, cannot continue")
     self.davpath = '/' + config.get_misc('webdav','vdir','webdav')
     self.user = user
     self.passwd = passwd
     self.dbg = dbg
     self.timeout = timeout or 5.0 # seconds, tests need to respond pretty fast!
     self.hdrs = {}
     if useragent:
         self.set_useragent(useragent)
Example #8
0
 def __init__(self,
              user=None,
              passwd=None,
              dbg=0,
              use_ssl=False,
              useragent=False,
              timeout=None):
     """Configure the client endpoint from the server's configuration.

     Refuses to proceed when WebDAV is disabled in the [webdav] section.
     """
     if not use_ssl:
         self.host = config.get_misc('httpd', 'interface')
         self.port = config.get_misc('httpd', 'port', 8069)
         if not self.host:
             self.host = config.get('xmlrpc_interface')
             self.port = config.get('xmlrpc_port') or self.port
     else:
         self.host = config.get_misc('httpsd', 'interface', False)
         self.port = config.get_misc('httpsd', 'port', 8071)
         if not self.host:
             self.host = config.get('xmlrpcs_interface')
             self.port = config.get('xmlrpcs_port')
     # Wildcard/unset interfaces are reached via the loopback address.
     if not self.host or self.host == '0.0.0.0':
         self.host = '127.0.0.1'
     self.port = int(self.port)
     if not config.get_misc('webdav', 'enable', True):
         raise Exception("WebDAV is disabled, cannot continue")
     self.davpath = '/' + config.get_misc('webdav', 'vdir', 'webdav')
     self.user = user
     self.passwd = passwd
     self.dbg = dbg
     # Keep the timeout short: tests need the server to answer quickly.
     self.timeout = timeout or 5.0
     self.hdrs = {}
     if useragent:
         self.set_useragent(useragent)
Example #9
0
    def __new__(cls, name, bases, nmspc):
        """Inject one generated 'ns{n}_id' one2many column per analysis axis
        before creating the class."""
        size = int(config.get_misc('analytic', 'analytic_size', 5))
        cols = nmspc['_columns']
        for axis in xrange(1, size + 1):
            cols['ns{}_id'.format(axis)] = fields.one2many(
                'analytic.structure',
                'nd_id',
                "Generated Subset of Structures",
                domain=[('ordering', '=', axis)],
                auto_join=True,
            )
        return super(_dimension_meta, cls).__new__(cls, name, bases, nmspc)
    def __new__(cls, name, bases, nmspc):
        """Create the class after adding the generated structure columns
        ('ns1_id' .. 'ns<size>_id') to its _columns mapping."""
        analytic_size = int(config.get_misc('analytic', 'analytic_size', 5))
        for n in xrange(1, analytic_size + 1):
            nmspc['_columns']['ns{}_id'.format(n)] = fields.one2many(
                'analytic.structure',
                'nd_id',
                "Generated Subset of Structures",
                domain=[('ordering', '=', n)],
                auto_join=True,
            )
        return super(_dimension_meta, cls).__new__(cls, name, bases, nmspc)
Example #11
0
class analytic_dimension(osv.Model):
    """Analytic dimension: a uniquely named axis that groups analytic codes
    and structures."""

    # The metaclass generates one 'ns{n}_id' one2many column per analysis
    # axis (see _dimension_meta.__new__).
    __metaclass__ = _dimension_meta
    _name = 'analytic.dimension'
    _description = u"Analytic Dimension"

    _columns = {
        # Dimension label; translatable only if enabled in the server config.
        'name':
        fields.char(
            u"Name",
            size=128,
            translate=config.get_misc('analytic', 'translate', False),
            required=True,
        ),
        # Analytic codes belonging to this dimension.
        'nc_ids':
        fields.one2many('analytic.code', 'nd_id', u"Codes"),
        # Analytic structures referencing this dimension.
        'ns_id':
        fields.one2many('analytic.structure', 'nd_id', u"Structures"),
    }

    _sql_constraints = [
        # Dimension names are globally unique (enforced in SQL).
        ('unique_name', 'unique(name)', u"Name must be unique"),
    ]
    def _setup_analytic_fields(cls, analytic, para, columns, defaults, orm_name, name, bases, nmspc):
        """Generate analytic and para-analytic fields on the model.

        :param analytic: True, a model name, or a prefix->model-name mapping
            describing which analytic code fields to generate.
        :param para: mapping of (prefix, suffix) -> field description dicts
            used to generate para-analytic fields.
        :param columns: the model's _columns dict, modified in place.
        :param defaults: the model's _defaults dict, modified in place.
        :return: a one-element tuple holding a generated superclass (with
            fields_get/fields_view_get overrides) for the model to inherit.
        """

        # If _analytic uses a shortcut, convert it into a prefix-model mapping.
        if analytic is True:
            analytic = {"a": orm_name.replace(".", "_")}
        elif isinstance(analytic, basestring):
            analytic = {"a": analytic}

        # Create a field that will be used for replacement in the view
        if analytic:
            columns["analytic_dimensions"] = fields.function(
                lambda self, cr, uid, ids, *a: {i: "" for i in ids},
                string=u"Analytic Dimensions",
                readonly=True,
                store=False,
            )

        col_pattern = "{pre}{n}_{suf}"
        size = int(config.get_misc("analytic", "analytic_size", 5))

        # Generate the fields directly into the _columns attribute.
        # (model_name, prefix, suffix) triples consumed by the overrides below.
        all_analytic = []

        for prefix, model_name in analytic.iteritems():
            # Analytic fields
            all_analytic.append((model_name, prefix, "id"))

            # One generated many2one per analysis axis (1..size).
            for n in xrange(1, size + 1):
                col_name = col_pattern.format(pre=prefix, n=n, suf="id")
                domain_field = "nd_id.ns{n}_id.model_name".format(n=n)
                columns[col_name] = fields.many2one(
                    "analytic.code",
                    "Generated Analytic Field",
                    domain=[
                        (domain_field, "=", model_name),
                        ("view_type", "=", False),
                        ("disabled_per_company", "=", False),
                    ],
                    track_visibility="onchange",
                )

        for key, value in para.iteritems():
            # Para-analytic fields
            prefix, suffix = key
            model_name = value["model"]
            all_analytic.append((model_name, prefix, suffix))
            if suffix == "id":
                raise ValueError("Para-analytic suffix cannot be 'id'")

            # Field type, constructor arguments and optional default come
            # from the para description.
            field_type = value["type"]
            args = value["args"]
            kwargs = value["kwargs"]
            for n in xrange(1, size + 1):
                col_name = col_pattern.format(pre=prefix, n=n, suf=suffix)
                columns[col_name] = field_type(*args, **kwargs)
                if "default" in value:
                    defaults[col_name] = value["default"]

        # In order to preserve inheritance, possible overrides, and OEMetaSL's
        # expected behavior, work on a new class that inherits the given bases,
        # then make our model class inherit from this class.
        superclass_name = "_{name}_SuperAnalytic".format(name=name)
        # Set _register to False in order to prevent its instantiation.
        superclass = type(superclass_name, bases, {"_register": False})

        @AddMethod(superclass)
        def fields_get(self, cr, uid, allfields=None, context=None, write_access=True):
            """Override this method to rename analytic fields."""

            res = super(superclass, self).fields_get(
                cr, uid, allfields=allfields, context=context, write_access=write_access
            )

            analytic_osv = self.pool.get("analytic.structure")

            for model_name, prefix, suffix in all_analytic:
                res = analytic_osv.analytic_fields_get(cr, uid, model_name, res, prefix, suffix, context=context)

            return res

        @AddMethod(superclass)
        def fields_view_get(self, cr, uid, view_id=None, view_type="form", context=None, toolbar=False, submenu=False):
            """Override this method to hide unused analytic fields."""

            res = super(superclass, self).fields_view_get(
                cr, uid, view_id=view_id, view_type=view_type, context=context, toolbar=toolbar, submenu=submenu
            )

            analytic_osv = self.pool.get("analytic.structure")

            for model_name, prefix, suffix in all_analytic:
                res = analytic_osv.analytic_fields_view_get(cr, uid, model_name, res, prefix, suffix, context=context)

            return res

        return (superclass,)
    def _setup_bound_dimension(cls, dimension, columns, defaults, orm_name, name, bases, nmspc):
        """Bind a dimension to the model, creating a code for each record.

        Reads the *dimension* description (True, a name, or an options
        dict), adds the analytic-code column and optional related columns
        to *columns*/*defaults* in place, and returns a one-element tuple
        holding a generated superclass whose overrides (__init__, create,
        write, ...) keep each record synchronized with its analytic code.
        """

        if dimension is True:
            dimension = {}
        elif isinstance(dimension, basestring):
            dimension = {"name": dimension}

        # Dimension display name: explicit, else model description or name.
        dimension_name = dimension.get("name", None)
        if dimension_name is None:
            dimension_name = nmspc.get("_description", False) or orm_name

        # Column on the model that holds the bound analytic code.
        column = dimension.get("column", "analytic_id")

        ref_module = dimension.get("ref_module", "")

        # XML ID used in __init__ to load/create the dimension record.
        ref_id = dimension.get("ref_id", None)
        if ref_id is None:
            ref_id = orm_name.replace(".", "_") + "_analytic_dimension_id"

        # To use an inherited, renamed parent field, you have to give its name.
        sync_parent = dimension.get("sync_parent", False)
        if sync_parent is True:
            sync_parent = nmspc.get("_parent_name", "parent_id")

        # rel_* options: True or a label string enables a related column on
        # the model mirroring the corresponding analytic.code field.
        rel_name = dimension.get("rel_name", tuple())
        if rel_name is True:
            rel_name = u"Name"
        if isinstance(rel_name, basestring):
            rel_name = (rel_name, "name")

        rel_description = dimension.get("rel_description", tuple())
        if rel_description is True:
            rel_description = u"Description"
        if isinstance(rel_description, basestring):
            rel_description = (rel_description, "description")

        rel_active = dimension.get("rel_active", tuple())
        if rel_active is True:
            rel_active = u"Active"
        if isinstance(rel_active, basestring):
            rel_active = (rel_active, "active")

        rel_view_type = dimension.get("rel_view_type", tuple())
        if rel_view_type is True:
            rel_view_type = u"View type"
        if isinstance(rel_view_type, basestring):
            rel_view_type = (rel_view_type, "view_type")

        # NOTE(review): rel_disabled_per_company is normalized here but never
        # added to rel_cols below — confirm whether that is intentional.
        rel_disabled_per_company = dimension.get("rel_disabled_per_company", tuple())
        if rel_disabled_per_company is True:
            rel_disabled_per_company = u"Disabled in my company"
        if isinstance(rel_disabled_per_company, basestring):
            rel_disabled_per_company = (rel_disabled_per_company, "disabled_per_company")

        # By default, only use inherits if we can be sure there is no conflict
        # on the required fields 'name' and 'nd_id'.
        # There can still be conflicts on analytic_code's optional fields.
        use_inherits = dimension.get("use_inherits", None)
        if use_inherits is None:
            use_inherits = not (
                any(col in columns for col in ("name", "nd_id"))
                or nmspc.get("_inherits", False)
                or nmspc.get("_inherit", False)
            )

        use_code_name_methods = dimension.get("use_code_name_methods", False)

        # code_ref_ids=True means "use ref_id as the reference stem".
        code_ref_ids = dimension.get("code_ref_ids", False)
        if code_ref_ids is True:
            code_ref_ids = ref_id

        code_ref_module = dimension.get("code_ref_module", "")

        if use_inherits:
            inherits = nmspc.get("_inherits", {})
            inherits["analytic.code"] = column
            nmspc["_inherits"] = inherits

        # Default column for the underlying analytic code.
        if column not in columns:
            columns[column] = fields.many2one(
                "analytic.code", u"Bound Analytic Code", required=True, ondelete="restrict"
            )

        # Keep only the enabled rel_* options: an enabled option is a
        # 2-tuple, which concatenated with the 4 defaults gives 6 elements;
        # disabled ones stay () and are filtered out by the len check.
        rel_cols = [
            cols
            for cols in [
                rel_name + ("name", "char", True, ""),
                rel_description + ("description", "char", False, ""),
                rel_active + ("active", "boolean", False, True),
                rel_view_type + ("view_type", "boolean", False, False),
            ]
            if len(cols) == 6
        ]

        if rel_cols:
            # NOT a method nor a class member. 'self' is the analytic_code OSV.
            def _record_from_code_id(self, cr, uid, ids, context=None):
                """Get the entries to update from the modified codes."""
                osv = self.pool.get(orm_name)
                domain = [(column, "in", ids)]
                return osv.search(cr, uid, domain, context=context)

            for string, model_col, code_col, dtype, req, default in rel_cols:
                columns[model_col] = fields.related(
                    column,
                    code_col,
                    string=string,
                    type=dtype,
                    relation="analytic.code",
                    required=req,
                    store={"analytic.code": (_record_from_code_id, [code_col], 10)},
                )
                if model_col not in defaults:
                    defaults[model_col] = default

        # In order to preserve inheritance, possible overrides, and OEMetaSL's
        # expected behavior, work on a new class that inherits the given bases,
        # then make our model class inherit from this class.
        superclass_name = "_{name}_SuperDimension".format(name=name)
        # Set _register to False in order to prevent its instantiation.
        superclass = type(superclass_name, bases, {"_register": False})

        @AddMethod(superclass)
        def __init__(self, pool, cr):
            """Load or create the analytic dimension bound to the model."""

            super(superclass, self).__init__(pool, cr)

            data_osv = self.pool["ir.model.data"]
            try:
                self._bound_dimension_id = data_osv.get_object_reference(cr, SUPERUSER_ID, ref_module, ref_id)[1]
            except ValueError:
                # Reference not found: create the dimension record (noupdate).
                vals = {"name": dimension_name, "validated": True}
                self._bound_dimension_id = data_osv._update(
                    cr, SUPERUSER_ID, "analytic.dimension", ref_module, vals, xml_id=ref_id, noupdate=True
                )

        if code_ref_ids:
            prefix = config.get_misc("analytic", "code_ref_prefix", False)

            # This function is called as a method and can be overridden.
            @AddMethod(superclass)
            def _generate_code_ref_id(self, cr, uid, ids, context=None):
                """Create an ir.model.data entry for each record's code."""
                data_osv = self.pool["ir.model.data"]
                # NOTE(review): context is not forwarded to browse here —
                # confirm whether context=None is intentional.
                records = self.browse(cr, uid, ids, context=None)
                if not isinstance(records, list):
                    records = [records]

                for record in records:
                    code = record[column]
                    code_ref_id_builder = [prefix] if prefix else []
                    if "company_id" in record and record.company_id:
                        code_ref_id_builder.append(record.company_id.code)
                    code_ref_id_builder.append("ANC")
                    code_ref_id_builder.append(code_ref_ids)
                    code_ref_id_builder.append(code.name)

                    vals = {
                        "name": "_".join(code_ref_id_builder),
                        "module": code_ref_module,
                        "model": "analytic.code",
                        "res_id": code.id,
                    }
                    data_osv.create(cr, uid, vals, context=context)

        @AddMethod(superclass)
        def create(self, cr, uid, vals, context=None):
            """Create the analytic code."""

            code_vals = {}

            if sync_parent:
                cp = self._get_code_parent(cr, uid, vals, context=context)
                if cp is not None:
                    code_vals["code_parent_id"] = cp

            # Direct changes to the 'bound analytic code' field are ignored
            # unless the 'force_code_id' context key is passed as True.
            force_code_id = vals.pop(column, False)

            if context and context.get("force_code_id", False) == True:
                self._force_code(cr, uid, force_code_id, code_vals, context)
                vals[column] = force_code_id

            else:
                if use_inherits:
                    code_vals.update(vals)
                else:
                    code_vals["name"] = vals.get("name")

                # OpenERP bug: related fields do not work properly on creation.
                for rel in rel_cols:
                    model_col, code_col = rel[1:3]
                    if model_col in vals:
                        code_vals[code_col] = vals[model_col]
                    elif model_col in self._defaults:
                        code_vals[code_col] = self._defaults[model_col]

                # We have to create the code separately, even with inherits.
                code_osv = self.pool["analytic.code"]
                code_vals["nd_id"] = self._bound_dimension_id
                code_id = code_osv.create(cr, uid, code_vals, context=context)
                vals[column] = code_id

            res = super(superclass, self).create(cr, uid, vals, context=context)

            if code_ref_ids:
                self._generate_code_ref_id(cr, uid, res, context=context)

            return res

        @AddMethod(superclass)
        def write(self, cr, uid, ids, vals, context=None):
            """Update the analytic code's name if it is not inherited,
            and its parent code if parent-child relations are synchronized.
            """

            code_vals = {}
            new = False

            if not isinstance(ids, (list, tuple)):
                ids = [ids]

            if sync_parent:
                cp = self._get_code_parent(cr, uid, vals, context=context)
                if cp is not None:
                    code_vals["code_parent_id"] = cp

            # Direct changes to the 'bound analytic code' field are ignored
            # unless the 'force_code_id' context key is passed as True.
            force_code_id = vals.pop(column, False)

            if context and context.get("force_code_id", False) == True:
                self._force_code(cr, uid, force_code_id, code_vals, context)
                vals[column] = force_code_id

            elif use_inherits:
                vals.update(code_vals)

            else:
                name_col = rel_name[1] if rel_name else "name"
                if name_col in vals:
                    code_vals["name"] = vals[name_col]
                records = self.browse(cr, uid, ids, context=context)
                code_ids = [getattr(rec, column).id for rec in records]

                # If updating a single record with no code, create it.
                code_osv = self.pool["analytic.code"]
                if code_ids == [False]:
                    new = ids[0]
                    code_vals["nd_id"] = self._bound_dimension_id
                    if "name" not in code_vals:
                        code_vals["name"] = self.read(cr, uid, new, [name_col], context=context)[name_col]
                    vals[column] = code_osv.create(cr, uid, code_vals, context=context)
                elif code_vals:
                    code_osv.write(cr, uid, code_ids, code_vals, context=context)

            res = super(superclass, self).write(cr, uid, ids, vals, context=context)

            if code_ref_ids and new is not False:
                self._generate_code_ref_id(cr, uid, new, context=context)

            return res

        @AddMethod(superclass)
        def _force_code(self, cr, uid, force_code_id, code_vals, context=None):
            """Validate a forced code ID and apply pending code values.

            Raises ValueError when no code ID is given or when the code
            does not belong to the bound dimension.
            """

            code_osv = self.pool["analytic.code"]

            if not force_code_id:
                raise ValueError(
                    "An analytic code ID MUST be specified if the " "force_code_id key is enabled in the context"
                )
            force_code_dim = code_osv.read(cr, uid, force_code_id, ["nd_id"], context=context)["nd_id"][0]
            if force_code_dim != self._bound_dimension_id:
                raise ValueError(
                    "If specified, codes must belong to the bound " "analytic dimension {}".format(dimension_name)
                )
            if code_vals:
                code_osv.write(cr, uid, force_code_id, code_vals, context=context)

        if sync_parent:
            # This function is called as a method and can be overridden.
            @AddMethod(superclass)
            def _get_code_parent(self, cr, uid, vals, context=None):
                """If parent_id is in the submitted values, return the analytic
                code of this parent, to be used as the child's code's parent.
                """
                parent_id = vals.get(sync_parent, None)
                if parent_id is not None:
                    if parent_id:
                        res = self.read(cr, uid, parent_id, [column], context=context)[column]
                        return res[0] if res else False
                    else:
                        return False
                return None

        if use_code_name_methods:

            @AddMethod(superclass)
            def name_get(self, cr, uid, ids, context=None):
                """Return the analytic code's name."""

                code_osv = self.pool.get("analytic.code")
                code_reads = self.read(cr, uid, ids, [column], context=context)
                c2m = {  # Code IDs to model IDs
                    code_read[column][0]: code_read["id"] for code_read in code_reads if code_read[column] is not False
                }
                names = code_osv.name_get(cr, uid, c2m.keys(), context=context)
                return [(c2m[cid], name) for cid, name in names if cid in c2m]

            @AddMethod(superclass)
            def name_search(self, cr, uid, name, args=None, operator="ilike", context=None, limit=100):
                """Return the records whose analytic code matches the name."""

                code_osv = self.pool.get("analytic.code")
                args.append(("nd_id", "=", self._bound_dimension_id))
                names = code_osv.name_search(cr, uid, name, args, operator, context, limit)
                if not names:
                    return []
                dom = [(column, "in", zip(*names)[0])]
                ids = self.search(cr, uid, dom, context=context)
                code_reads = self.read(cr, uid, ids, [column], context=context)
                c2m = {  # Code IDs to model IDs
                    code_read[column][0]: code_read["id"] for code_read in code_reads if code_read[column] is not False
                }
                return [(c2m[cid], cname) for cid, cname in names if cid in c2m]

        return (superclass,)
Example #14
0
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################


# Before loading the module, if the analytic_size option is given, check it.
# Its value must be an integer greater or equal to the default value.
from openerp.tools import config
# Collect configuration error messages; the first entry is the section
# header used when formatting the final error output.
errors = ["[analytic]"]
try:
    # int() raises ValueError on non-numeric input; the assert rejects
    # values below the default of 5.
    assert int(config.get_misc('analytic', 'analytic_size', 5)) >= 5
except (ValueError, AssertionError):
    errors.append("analytic_size must be an integer greater/equal to 5.")
try:
    assert config.get_misc('analytic', 'translate', False) in [True, False]
except (AssertionError):
    errors.append("translate must be a boolean value.")
# More than the header entry means at least one check failed: abort startup
# with a formatted option-parser error.
if len(errors) > 1:
    config.parser.error("\n * ".join(errors))


import MetaAnalytic
import analytic_code
import analytic_dimension
import analytic_structure
Example #15
0
    def _setup_analytic_fields(cls, analytic, para, columns, defaults,
                               orm_name, name, bases, nmspc):
        """Generate analytic and para-analytic fields on the model.

        :param analytic: ``True``, a model name string, or a
            ``{prefix: model_name}`` dict selecting the analytic fields to
            generate (shortcuts are normalized below).
        :param para: ``{(prefix, suffix): spec}`` dict of para-analytic
            fields; each spec provides 'model', 'type', 'args', 'kwargs'
            and optionally 'default'.
        :param columns: the model's ``_columns`` dict, updated in place.
        :param defaults: the model's ``_defaults`` dict, updated in place.
        :param orm_name: dotted OpenERP model name.
        :param name: class name; :param bases: base classes;
            :param nmspc: class namespace (the metaclass triple).
        :return: 1-tuple holding a generated superclass the model must
            inherit from, which renames/hides the generated fields.
        """

        # If _analytic uses a shortcut, convert it into a prefix-model mapping.
        if analytic is True:
            analytic = {'a': orm_name.replace('.', '_')}
        elif isinstance(analytic, basestring):
            analytic = {'a': analytic}

        # Create a field that will be used for replacement in the view
        if analytic:
            columns['analytic_dimensions'] = fields.function(
                lambda self, cr, uid, ids, *a: {i: ''
                                                for i in ids},
                string=u"Analytic Dimensions",
                readonly=True,
                store=False,
            )

        col_pattern = '{pre}{n}_{suf}'
        size = int(config.get_misc('analytic', 'analytic_size', 5))

        # Generate the fields directly into the _columns attribute.
        # all_analytic collects (model_name, prefix, suffix) triples used by
        # the fields_get/fields_view_get overrides defined further down.
        all_analytic = []

        for prefix, model_name in analytic.iteritems():
            # Analytic fields
            all_analytic.append((model_name, prefix, 'id'))

            # One many2one to analytic.code per dimension slot 1..size.
            for n in xrange(1, size + 1):
                col_name = col_pattern.format(pre=prefix, n=n, suf='id')
                domain_field = 'nd_id.ns{n}_id.model_name'.format(n=n)
                columns[col_name] = fields.many2one(
                    'analytic.code',
                    "Generated Analytic Field",
                    domain=[
                        (domain_field, '=', model_name),
                        ('view_type', '=', False),
                        ('disabled_per_company', '=', False),
                    ],
                    track_visibility='onchange',
                )

        for key, value in para.iteritems():
            # Para-analytic fields
            prefix, suffix = key
            model_name = value['model']
            all_analytic.append((model_name, prefix, suffix))
            # 'id' is reserved for the plain analytic fields generated above.
            if suffix == 'id':
                raise ValueError("Para-analytic suffix cannot be 'id'")

            field_type = value['type']
            args = value['args']
            kwargs = value['kwargs']
            for n in xrange(1, size + 1):
                col_name = col_pattern.format(pre=prefix, n=n, suf=suffix)
                columns[col_name] = field_type(*args, **kwargs)
                if 'default' in value:
                    defaults[col_name] = value['default']

        # In order to preserve inheritance, possible overrides, and OEMetaSL's
        # expected behavior, work on a new class that inherits the given bases,
        # then make our model class inherit from this class.
        superclass_name = '_{name}_SuperAnalytic'.format(name=name)
        # Set _register to False in order to prevent its instantiation.
        superclass = type(superclass_name, bases, {'_register': False})

        @AddMethod(superclass)
        def fields_get(self,
                       cr,
                       uid,
                       allfields=None,
                       context=None,
                       write_access=True):
            """Override this method to rename analytic fields."""

            res = super(superclass, self).fields_get(cr,
                                                     uid,
                                                     allfields=allfields,
                                                     context=context,
                                                     write_access=write_access)

            analytic_osv = self.pool.get('analytic.structure')

            # Let analytic.structure relabel each generated field group.
            for model_name, prefix, suffix in all_analytic:
                res = analytic_osv.analytic_fields_get(cr,
                                                       uid,
                                                       model_name,
                                                       res,
                                                       prefix,
                                                       suffix,
                                                       context=context)

            return res

        @AddMethod(superclass)
        def fields_view_get(self,
                            cr,
                            uid,
                            view_id=None,
                            view_type='form',
                            context=None,
                            toolbar=False,
                            submenu=False):
            """Override this method to hide unused analytic fields."""

            res = super(superclass, self).fields_view_get(cr,
                                                          uid,
                                                          view_id=view_id,
                                                          view_type=view_type,
                                                          context=context,
                                                          toolbar=toolbar,
                                                          submenu=submenu)

            analytic_osv = self.pool.get('analytic.structure')

            for model_name, prefix, suffix in all_analytic:
                res = analytic_osv.analytic_fields_view_get(cr,
                                                            uid,
                                                            model_name,
                                                            res,
                                                            prefix,
                                                            suffix,
                                                            context=context)

            return res

        return (superclass, )
Example #16
0
    def _setup_bound_dimension(cls, dimension, columns, defaults, orm_name,
                               name, bases, nmspc):
        """Bind a dimension to the model, creating a code for each record.

        ``dimension`` may be ``True``, a dimension name string, or an
        options dict (keys: name, column, ref_module, ref_id, sync_parent,
        rel_name, rel_description, rel_active, rel_view_type,
        rel_disabled_per_company, use_inherits, use_code_name_methods,
        code_ref_ids, code_ref_module).  ``columns`` and ``defaults`` are
        the model's ``_columns``/``_defaults`` dicts, updated in place.

        :return: 1-tuple holding a generated superclass the model must
            inherit from; it loads/creates the bound dimension and keeps
            each record's ``analytic.code`` in sync on create/write.
        """

        # Normalize the shortcut forms of the dimension declaration.
        if dimension is True:
            dimension = {}
        elif isinstance(dimension, basestring):
            dimension = {'name': dimension}

        dimension_name = dimension.get('name', None)
        if dimension_name is None:
            dimension_name = nmspc.get('_description', False) or orm_name

        # Name of the many2one column pointing at the bound analytic code.
        column = dimension.get('column', 'analytic_id')

        ref_module = dimension.get('ref_module', '')

        # XML ID under which the dimension record is stored/looked up.
        ref_id = dimension.get('ref_id', None)
        if ref_id is None:
            ref_id = orm_name.replace('.', '_') + "_analytic_dimension_id"

        # To use an inherited, renamed parent field, you have to give its name.
        sync_parent = dimension.get('sync_parent', False)
        if sync_parent is True:
            sync_parent = nmspc.get('_parent_name', 'parent_id')

        # Each rel_* option may be True (use default label), a label string,
        # or a (label, field_name) tuple; normalize everything to tuples.
        rel_name = dimension.get('rel_name', tuple())
        if rel_name is True:
            rel_name = u"Name"
        if isinstance(rel_name, basestring):
            rel_name = (rel_name, 'name')

        rel_description = dimension.get('rel_description', tuple())
        if rel_description is True:
            rel_description = u"Description"
        if isinstance(rel_description, basestring):
            rel_description = (rel_description, 'description')

        rel_active = dimension.get('rel_active', tuple())
        if rel_active is True:
            rel_active = u"Active"
        if isinstance(rel_active, basestring):
            rel_active = (rel_active, 'active')

        rel_view_type = dimension.get('rel_view_type', tuple())
        if rel_view_type is True:
            rel_view_type = u"View type"
        if isinstance(rel_view_type, basestring):
            rel_view_type = (rel_view_type, 'view_type')

        rel_disabled_per_company = dimension.get('rel_disabled_per_company',
                                                 tuple())
        if rel_disabled_per_company is True:
            rel_disabled_per_company = u"Disabled in my company"
        if isinstance(rel_disabled_per_company, basestring):
            rel_disabled_per_company = (rel_disabled_per_company,
                                        'disabled_per_company')

        # By default, only use inherits if we can be sure there is no conflict
        # on the required fields 'name' and 'nd_id'.
        # There can still be conflicts on analytic_code's optional fields.
        use_inherits = dimension.get('use_inherits', None)
        if use_inherits is None:
            use_inherits = not (any(col in columns
                                    for col in ('name', 'nd_id'))
                                or nmspc.get('_inherits', False)
                                or nmspc.get('_inherit', False))

        use_code_name_methods = dimension.get('use_code_name_methods', False)

        code_ref_ids = dimension.get('code_ref_ids', False)
        if code_ref_ids is True:
            code_ref_ids = ref_id

        code_ref_module = dimension.get('code_ref_module', '')

        if use_inherits:
            inherits = nmspc.get('_inherits', {})
            inherits['analytic.code'] = column
            nmspc['_inherits'] = inherits

        # Default column for the underlying analytic code.
        if column not in columns:
            columns[column] = fields.many2one('analytic.code',
                                              u"Bound Analytic Code",
                                              required=True,
                                              ondelete='restrict')

        # Keep only the rel_* entries actually requested: a requested entry
        # is a 2-tuple + the 4-element template, i.e. 6 elements in total;
        # unrequested ones are an empty tuple + 4 and are filtered out.
        rel_cols = [
            cols for cols in [
                rel_name + ('name', 'char', True, ''),
                rel_description + ('description', 'char', False, ''),
                rel_active + ('active', 'boolean', False, True),
                rel_view_type + ('view_type', 'boolean', False, False),
            ] if len(cols) == 6
        ]

        if rel_cols:
            # NOT a method nor a class member. 'self' is the analytic_code OSV.
            def _record_from_code_id(self, cr, uid, ids, context=None):
                """Get the entries to update from the modified codes."""
                osv = self.pool.get(orm_name)
                domain = [(column, 'in', ids)]
                return osv.search(cr, uid, domain, context=context)

            # Mirror the requested analytic.code fields on the model via
            # stored related fields, refreshed when the code changes.
            for string, model_col, code_col, dtype, req, default in rel_cols:
                columns[model_col] = fields.related(column,
                                                    code_col,
                                                    string=string,
                                                    type=dtype,
                                                    relation="analytic.code",
                                                    required=req,
                                                    store={
                                                        'analytic.code':
                                                        (_record_from_code_id,
                                                         [code_col], 10)
                                                    })
                if model_col not in defaults:
                    defaults[model_col] = default

        # In order to preserve inheritance, possible overrides, and OEMetaSL's
        # expected behavior, work on a new class that inherits the given bases,
        # then make our model class inherit from this class.
        superclass_name = '_{name}_SuperDimension'.format(name=name)
        # Set _register to False in order to prevent its instantiation.
        superclass = type(superclass_name, bases, {'_register': False})

        @AddMethod(superclass)
        def __init__(self, pool, cr):
            """Load or create the analytic dimension bound to the model."""

            super(superclass, self).__init__(pool, cr)

            # Look the dimension up by XML ID; create it on first load.
            data_osv = self.pool['ir.model.data']
            try:
                self._bound_dimension_id = data_osv.get_object_reference(
                    cr, SUPERUSER_ID, ref_module, ref_id)[1]
            except ValueError:
                vals = {'name': dimension_name, 'validated': True}
                self._bound_dimension_id = data_osv._update(
                    cr,
                    SUPERUSER_ID,
                    'analytic.dimension',
                    ref_module,
                    vals,
                    xml_id=ref_id,
                    noupdate=True)

        if code_ref_ids:
            prefix = config.get_misc('analytic', 'code_ref_prefix', False)

            # This function is called as a method and can be overridden.
            @AddMethod(superclass)
            def _generate_code_ref_id(self, cr, uid, ids, context=None):
                """Create an ir.model.data entry for each record's code."""
                data_osv = self.pool['ir.model.data']
                records = self.browse(cr, uid, ids, context=None)
                if not isinstance(records, list):
                    records = [records]

                for record in records:
                    code = record[column]
                    # XML ID parts: [prefix_]company-code_ANC_ref_codename
                    code_ref_id_builder = [prefix] if prefix else []
                    if 'company_id' in record and record.company_id:
                        code_ref_id_builder.append(record.company_id.code)
                    code_ref_id_builder.append('ANC')
                    code_ref_id_builder.append(code_ref_ids)
                    code_ref_id_builder.append(code.name)

                    vals = {
                        'name': "_".join(code_ref_id_builder),
                        'module': code_ref_module,
                        'model': 'analytic.code',
                        'res_id': code.id,
                    }
                    data_osv.create(cr, uid, vals, context=context)

        @AddMethod(superclass)
        def create(self, cr, uid, vals, context=None):
            """Create the analytic code."""

            code_vals = {}

            if sync_parent:
                cp = self._get_code_parent(cr, uid, vals, context=context)
                if cp is not None:
                    code_vals['code_parent_id'] = cp

            # Direct changes to the 'bound analytic code' field are ignored
            # unless the 'force_code_id' context key is passed as True.
            force_code_id = vals.pop(column, False)

            if context and context.get('force_code_id', False) == True:
                self._force_code(cr, uid, force_code_id, code_vals, context)
                vals[column] = force_code_id

            else:
                if use_inherits:
                    code_vals.update(vals)
                else:
                    code_vals['name'] = vals.get('name')

                # OpenERP bug: related fields do not work properly on creation.
                for rel in rel_cols:
                    model_col, code_col = rel[1:3]
                    if model_col in vals:
                        code_vals[code_col] = vals[model_col]
                    elif model_col in self._defaults:
                        code_vals[code_col] = self._defaults[model_col]

                # We have to create the code separately, even with inherits.
                code_osv = self.pool['analytic.code']
                code_vals['nd_id'] = self._bound_dimension_id
                code_id = code_osv.create(cr, uid, code_vals, context=context)
                vals[column] = code_id

            res = super(superclass, self).create(cr,
                                                 uid,
                                                 vals,
                                                 context=context)

            if code_ref_ids:
                self._generate_code_ref_id(cr, uid, res, context=context)

            return res

        @AddMethod(superclass)
        def write(self, cr, uid, ids, vals, context=None):
            """Update the analytic code's name if it is not inherited,
            and its parent code if parent-child relations are synchronized.
            """

            code_vals = {}
            # 'new' holds the single record ID whose code was just created,
            # or False; used to generate its XML ID after the super() write.
            new = False

            if not isinstance(ids, (list, tuple)):
                ids = [ids]

            if sync_parent:
                cp = self._get_code_parent(cr, uid, vals, context=context)
                if cp is not None:
                    code_vals['code_parent_id'] = cp

            # Direct changes to the 'bound analytic code' field are ignored
            # unless the 'force_code_id' context key is passed as True.
            force_code_id = vals.pop(column, False)

            if context and context.get('force_code_id', False) == True:
                self._force_code(cr, uid, force_code_id, code_vals, context)
                vals[column] = force_code_id

            elif use_inherits:
                vals.update(code_vals)

            else:
                name_col = rel_name[1] if rel_name else 'name'
                if name_col in vals:
                    code_vals['name'] = vals[name_col]
                records = self.browse(cr, uid, ids, context=context)
                code_ids = [getattr(rec, column).id for rec in records]

                # If updating a single record with no code, create it.
                code_osv = self.pool['analytic.code']
                if code_ids == [False]:
                    new = ids[0]
                    code_vals['nd_id'] = self._bound_dimension_id
                    if 'name' not in code_vals:
                        code_vals['name'] = self.read(
                            cr, uid, new, [name_col],
                            context=context)[name_col]
                    vals[column] = code_osv.create(cr,
                                                   uid,
                                                   code_vals,
                                                   context=context)
                elif code_vals:
                    code_osv.write(cr,
                                   uid,
                                   code_ids,
                                   code_vals,
                                   context=context)

            res = super(superclass, self).write(cr,
                                                uid,
                                                ids,
                                                vals,
                                                context=context)

            if code_ref_ids and new is not False:
                self._generate_code_ref_id(cr, uid, new, context=context)

            return res

        @AddMethod(superclass)
        def _force_code(self, cr, uid, force_code_id, code_vals, context=None):
            """Validate a forced code ID and apply pending code values."""

            code_osv = self.pool['analytic.code']

            if not force_code_id:
                raise ValueError(
                    "An analytic code ID MUST be specified if the " \
                    "force_code_id key is enabled in the context"
                )
            # The forced code must belong to the model's bound dimension.
            force_code_dim = code_osv.read(cr,
                                           uid,
                                           force_code_id, ['nd_id'],
                                           context=context)['nd_id'][0]
            if force_code_dim != self._bound_dimension_id:
                raise ValueError(
                    "If specified, codes must belong to the bound " \
                    "analytic dimension {}".format(dimension_name)
                )
            if code_vals:
                code_osv.write(cr,
                               uid,
                               force_code_id,
                               code_vals,
                               context=context)

        if sync_parent:
            # This function is called as a method and can be overridden.
            @AddMethod(superclass)
            def _get_code_parent(self, cr, uid, vals, context=None):
                """If parent_id is in the submitted values, return the analytic
                code of this parent, to be used as the child's code's parent.
                Returns False to clear the parent, None when untouched.
                """
                parent_id = vals.get(sync_parent, None)
                if parent_id is not None:
                    if parent_id:
                        res = self.read(cr,
                                        uid,
                                        parent_id, [column],
                                        context=context)[column]
                        return res[0] if res else False
                    else:
                        return False
                return None

        if use_code_name_methods:

            @AddMethod(superclass)
            def name_get(self, cr, uid, ids, context=None):
                """Return the analytic code's name."""

                code_osv = self.pool.get('analytic.code')
                code_reads = self.read(cr, uid, ids, [column], context=context)
                c2m = {  # Code IDs to model IDs
                    code_read[column][0]: code_read['id']
                    for code_read in code_reads
                    if code_read[column] is not False
                }
                names = code_osv.name_get(cr, uid, c2m.keys(), context=context)
                return [(c2m[cid], name) for cid, name in names if cid in c2m]

            @AddMethod(superclass)
            def name_search(self,
                            cr,
                            uid,
                            name,
                            args=None,
                            operator='ilike',
                            context=None,
                            limit=100):
                """Return the records whose analytic code matches the name."""

                code_osv = self.pool.get('analytic.code')
                # NOTE(review): this mutates the caller's ``args`` and raises
                # AttributeError when the default args=None is used — TODO
                # confirm all callers pass a list.
                args.append(('nd_id', '=', self._bound_dimension_id))
                names = code_osv.name_search(cr, uid, name, args, operator,
                                             context, limit)
                if not names:
                    return []
                dom = [(column, 'in', zip(*names)[0])]
                ids = self.search(cr, uid, dom, context=context)
                code_reads = self.read(cr, uid, ids, [column], context=context)
                c2m = {  # Code IDs to model IDs
                    code_read[column][0]: code_read['id']
                    for code_read in code_reads
                    if code_read[column] is not False
                }
                return [(c2m[cid], cname) for cid, cname in names
                        if cid in c2m]

        return (superclass, )
Example #17
0
def add_module_dependencies(cr, module_list):
    """Expand ``module_list`` with everything it pulls in at upgrade time.

    Used by the modified OpenUpgrade Server to inject new dependencies
    into the module graph; not meant to be called from migration scripts.

    Honours the OpenUpgrade configuration directives 'forced_deps' and
    'autoinstall' (version-specific variants take precedence).  Modules
    added by those directives contribute their own dependencies as well,
    but the directives themselves are not re-applied to the additions.

    :param cr: database cursor
    :param module_list: list of module names; returned expanded.
    """
    if not module_list:
        return module_list

    def read_directive(key):
        # Version-specific setting wins over the generic one; both are
        # evaluated from their string form into a dict.
        return safe_eval.safe_eval(
            config.get_misc(
                'openupgrade', key + '_' + release.version,
                config.get_misc('openupgrade', key, '{}')))

    forced = read_directive('forced_deps')
    auto = read_directive('autoinstall')

    # Apply the directives to a snapshot of the initial list only.
    for module in list(module_list):
        module_list += forced.get(module, [])
        module_list += auto.get(module, [])

    module_list = list(set(module_list))

    # Transitively pull in declared dependencies until a fixpoint.
    frontier = module_list
    while frontier:
        cr.execute("""
            SELECT DISTINCT dep.name
            FROM
                ir_module_module,
                ir_module_module_dependency dep
            WHERE
                module_id = ir_module_module.id
                AND ir_module_module.name in %s
                AND dep.name not in %s
            """, (tuple(frontier), tuple(module_list),))

        frontier = [row[0] for row in cr.fetchall()]
        module_list += frontier

    # Select auto_install modules of which all dependencies
    # are fulfilled based on the modules we know are to be
    # installed
    cr.execute("""
        SELECT name from ir_module_module WHERE state IN %s
        """, (('installed', 'to install', 'to upgrade'),))
    known = [row[0] for row in cr.fetchall()]
    modules = list(set(module_list + known))
    cr.execute("""
        SELECT name from ir_module_module m
        WHERE auto_install IS TRUE
            AND state = 'uninstalled'
            AND NOT EXISTS(
                SELECT id FROM ir_module_module_dependency d
                WHERE d.module_id = m.id
                AND name NOT IN %s)
         """, (tuple(modules),))
    auto_modules = [row[0] for row in cr.fetchall()]
    if auto_modules:
        logger.info(
            "Selecting autoinstallable modules %s", ','.join(auto_modules))
        module_list += auto_modules

    return module_list
Example #18
0
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

# Before loading the module, if the analytic_size option is given, check it.
# Its value must be an integer greater or equal to the default value.
from openerp.tools import config

# Collect configuration errors; the "[analytic]" header is always present,
# so any appended message makes len(errors) > 1 below.
errors = ["[analytic]"]
# Do not use ``assert`` for validation: asserts are stripped when Python
# runs with -O, which would silently disable these startup checks.
try:
    size_ok = int(config.get_misc('analytic', 'analytic_size', 5)) >= 5
except ValueError:
    size_ok = False
if not size_ok:
    errors.append("analytic_size must be an integer greater/equal to 5.")
if config.get_misc('analytic', 'translate', False) not in (True, False):
    errors.append("translate must be a boolean value.")
if len(errors) > 1:
    config.parser.error("\n * ".join(errors))

import MetaAnalytic
import analytic_code
import analytic_dimension
import analytic_structure
def add_module_dependencies(cr, module_list):
    """
    Select (new) dependencies from the modules in the list
    so that we can inject them into the graph at upgrade
    time. Used in the modified OpenUpgrade Server,
    not to be called from migration scripts

    Also take the OpenUpgrade configuration directives 'forced_deps'
    and 'autoinstall' into account. From any additional modules
    that these directives can add, the dependencies are added as
    well (but these directives are not checked for the occurrence
    of any of the dependencies).

    Side effect: any newly added module that is still 'uninstalled'
    is switched to state 'to install' so its init scripts run.

    :param cr: database cursor
    :param module_list: list of module names; returned expanded.
    """
    if not module_list:
        return module_list

    # Snapshot of the caller-supplied modules, used at the end to decide
    # which modules were *added* by this function.
    modules_in = list(module_list)
    # Version-specific directive wins over the generic one.
    forced_deps = safe_eval(
        config.get_misc('openupgrade', 'forced_deps_' + release.version,
                        config.get_misc('openupgrade', 'forced_deps', '{}')))

    autoinstall = safe_eval(
        config.get_misc('openupgrade', 'autoinstall_' + release.version,
                        config.get_misc('openupgrade', 'autoinstall', '{}')))

    # Apply the directives to a snapshot of the initial list only.
    for module in list(module_list):
        module_list += forced_deps.get(module, [])
        module_list += autoinstall.get(module, [])

    module_list = list(set(module_list))

    # Transitively pull in declared dependencies until a fixpoint.
    dependencies = module_list
    while dependencies:
        cr.execute(
            """
            SELECT DISTINCT dep.name
            FROM
                ir_module_module,
                ir_module_module_dependency dep
            WHERE
                module_id = ir_module_module.id
                AND ir_module_module.name in %s
                AND dep.name not in %s
            """, (
                tuple(dependencies),
                tuple(module_list),
            ))

        dependencies = [x[0] for x in cr.fetchall()]
        module_list += dependencies

    # Select auto_install modules of which all dependencies
    # are fulfilled based on the modules we know are to be
    # installed
    cr.execute(
        """
        SELECT name from ir_module_module WHERE state IN %s
        """, (('installed', 'to install', 'to upgrade'), ))
    modules = list(set(module_list + [row[0] for row in cr.fetchall()]))
    cr.execute(
        """
        SELECT name from ir_module_module m
        WHERE auto_install IS TRUE
            AND state = 'uninstalled'
            AND NOT EXISTS(
                SELECT id FROM ir_module_module_dependency d
                WHERE d.module_id = m.id
                AND name NOT IN %s)
         """, (tuple(modules), ))
    # get_module_path filter: presumably skips modules that are not
    # present on disk — TODO confirm against get_module_path's contract.
    auto_modules = [row[0] for row in cr.fetchall() if get_module_path(row[0])]
    if auto_modules:
        logger.info("Selecting autoinstallable modules %s",
                    ','.join(auto_modules))
        module_list += auto_modules

    # Set proper state for new dependencies so that any init scripts are run
    cr.execute(
        """
        UPDATE ir_module_module SET state = 'to install'
        WHERE name IN %s AND name NOT IN %s AND state = 'uninstalled'
        """, (tuple(module_list), tuple(modules_in)))
    return module_list
Example #20
0
# OpenERP (v7-era, old API) model holding analytic codes, organised as a
# tree.  Nested-set columns (parent_left/parent_right) are maintained by the
# ORM because _parent_store is enabled, making subtree reads fast.
class analytic_code(osv.Model):
    _name = 'analytic.code'
    _description = u"Analytic Code"

    # Hierarchy wiring: the parent link is 'code_parent_id' (not the usual
    # 'parent_id'), siblings are ordered by name, and records list in
    # nested-set (depth-first) order.
    _parent_name = 'code_parent_id'
    _parent_store = True
    _parent_order = 'name'
    _order = 'parent_left'

    def _read_disabled_per_company(self, cr, uid, ids, field_name, arg,
                                   context):
        """Function-field getter: mark the code as disabled when it is in
        the blacklist of the current user's company.

        :param ids: analytic.code ids to evaluate.
        :return: dict mapping each code id to a boolean (True = the code is
                 blacklisted for, i.e. disabled in, the user's company).
        """

        anc_obj = self.pool['analytic.code']
        user_obj = self.pool['res.users']

        # Resolve the calling user's company once for all requested records.
        company_id = user_obj.read(cr,
                                   uid, [uid], ['company_id'],
                                   context=context)[0]['company_id'][0]

        ret = {}

        for anc in anc_obj.browse(cr, uid, ids, context=context):
            # Use a set rather than the original generator: a generator is
            # one-shot and O(n) per membership test; a set is reusable and
            # O(1), and makes the intent (a lookup table) explicit.
            blacklist = set(company.id for company in anc.blacklist_ids)
            ret[anc.id] = company_id in blacklist

        return ret

    def _write_disabled_per_company(self, cr, uid, anc_id, field_name,
                                    field_value, arg, context):
        """Function-field setter: update the company blacklist of the code
        depending on the current user's company.

        :param anc_id: id of the analytic.code being written.
        :param field_value: truthy -> blacklist (disable) the code for the
                            user's company; falsy -> remove it from the
                            blacklist.
        :return: True (old-API write convention).
        """

        anc_obj = self.pool['analytic.code']
        user_obj = self.pool['res.users']

        company_id = user_obj.read(cr,
                                   uid, [uid], ['company_id'],
                                   context=context)[0]['company_id'][0]

        anc = anc_obj.browse(cr, uid, anc_id, context=context)
        # Bug-risk fix: the original built a GENERATOR here and tested
        # membership against it twice below.  A generator is consumed by the
        # first test; the code only behaved correctly because `and`
        # short-circuiting guaranteed at most one test ran.  A set is safe
        # under any evaluation order.
        blacklist = set(company.id for company in anc.blacklist_ids)

        to_write = None
        if field_value and company_id not in blacklist:
            to_write = [(4, company_id)]  # Link.
        elif not field_value and company_id in blacklist:
            to_write = [(3, company_id)]  # Unlink.

        if to_write:
            anc_obj.write(cr,
                          uid, [anc_id], {'blacklist_ids': to_write},
                          context=context)

        return True

    def _search_disabled_per_company(self, cr, uid, model_again, field_name,
                                     criterion, context):
        """Function-field search: translate a search on the virtual
        'disabled_per_company' field into a domain on blacklist_ids,
        relative to the current user's company.
        """

        user_obj = self.pool['res.users']

        user_data = user_obj.read(cr,
                                  uid, [uid], ['company_id'],
                                  context=context)
        company_id = user_data[0]['company_id'][0]

        # A code is ENABLED for this company when it has no blacklist at
        # all, or when its blacklist does not reference the company.
        # (Compare against the ids directly, not blacklist_ids.id.)
        domain = [
            '|',
            ('blacklist_ids', '=', False),
            ('blacklist_ids', '!=', company_id),
        ]
        # Searching for "disabled_per_company = True" is the negation of
        # the enabled-codes domain above.
        if criterion[0][2] is True:
            domain = ['!'] + domain
        return domain

    _columns = {
        # Code label; translation is optional, driven by the server config
        # (misc section [analytic], key 'translate').
        'name':
        fields.char(
            u"Name",
            size=128,
            translate=config.get_misc('analytic', 'translate', False),
            required=True,
        ),
        # Dimension this code belongs to; deleting the dimension removes
        # its codes (ondelete='cascade').
        'nd_id':
        fields.many2one(
            'analytic.dimension',
            string=u"Dimension",
            ondelete='cascade',
            required=True,
        ),
        'active':
        fields.boolean(
            u"Active",
            help=(
                u"Determines whether an analytic code is in the referential."),
        ),
        'view_type':
        fields.boolean(
            u"View type",
            help=(
                u"Determines whether an analytic code is not selectable (but "
                u"still in the referential)."),
        ),
        # Companies for which this code is hidden; backs the per-company
        # 'disabled_per_company' function field below.
        'blacklist_ids':
        fields.many2many(
            'res.company',
            'analytic_code_company_rel',
            'code_id',
            'company_id',
            u"Blacklist",
            help=u"Companies the code is hidden in.",
        ),
        # Virtual boolean over blacklist_ids, evaluated against the current
        # user's company; see the _read/_write/_search helpers above.
        'disabled_per_company':
        fields.function(
            _read_disabled_per_company,
            fnct_inv=_write_disabled_per_company,
            fnct_search=_search_disabled_per_company,
            method=True,
            type='boolean',
            store=False,  # Not persistent as it depends on the company.
            string=u"Disabled in my company",
            help=(u"Determines whether an analytic code is disabled for the "
                  u"current company."),
        ),
        # Convenience read-through to the dimension's name.
        'nd_name':
        fields.related('nd_id',
                       'name',
                       type='char',
                       string=u"Dimension Name",
                       store=False),
        'description':
        fields.char(
            u"Description",
            size=512,
            translate=config.get_misc('analytic', 'translate', False),
        ),
        # Tree structure: parent link (restrict deletion of parents that
        # still have children) and the reverse one2many.
        'code_parent_id':
        fields.many2one(
            'analytic.code',
            u"Parent Code",
            select=True,
            ondelete='restrict',
        ),
        'child_ids':
        fields.one2many(
            'analytic.code',
            'code_parent_id',
            u"Child Codes",
        ),
        # Nested-set columns maintained by the ORM (_parent_store).
        'parent_left':
        fields.integer(u"Left parent", select=True),
        'parent_right':
        fields.integer(u"Right parent", select=True),
    }

    _defaults = {
        # New codes are active and selectable by default, and not
        # blacklisted for the creating user's company.
        'active': lambda *a: True,
        'view_type': lambda *a: False,
        'disabled_per_company': lambda *a: False,
    }

    _constraints = [
        # Reuse the base-class recursion check on the hierarchy; it follows
        # _parent_name ('code_parent_id') internally.
        (osv.Model._check_recursion,
         u"Error ! You can not create recursive analytic codes.",
         # Fixed: the parent field on this model is 'code_parent_id'
         # (see _parent_name); 'parent_id' does not exist here, so the
         # constraint could not highlight the offending field.
         ['code_parent_id']),
    ]

    def name_get(self, cr, uid, ids, context=None):
        """Build display names: the code's name, followed by its
        description when one is set ("NAME DESCRIPTION").

        :return: list of (id, display_name) pairs.
        """
        if not ids:
            return []

        # Accept a single id as well as a list (Python 2: int or long).
        if isinstance(ids, (int, long)):
            ids = [ids]

        records = self.read(cr,
                            uid,
                            ids, ['name', 'description'],
                            context=context)

        result = []
        for rec in records:
            parts = [rec['name']]
            if rec['description']:
                parts.append(rec['description'])
            result.append((rec['id'], ' '.join(parts)))

        return result

    def name_search(self,
                    cr,
                    uid,
                    name,
                    args=None,
                    operator='ilike',
                    context=None,
                    limit=100):
        """Search codes by name OR description, falling back to the default
        behaviour when no name is given.

        :param name: the text the user typed; matched with `operator`
                     against both 'name' and 'description'.
        :return: list of (id, display_name) pairs.
        """
        if not args:
            args = []

        if not context:
            context = {}

        if name:
            ids = self.search(
                cr,
                uid,
                [
                    '|', ('name', operator, name),
                    ('description', operator, name)
                ] + args,
                limit=limit,
                context=context,
            )
            # Bug fix: propagate the context (translations, company, ...)
            # to name_get; the original call dropped it.  Also removed the
            # pointless `ids = []` + extend scaffolding.
            return self.name_get(cr, uid, ids, context=context)
        else:
            return super(analytic_code, self).name_search(cr,
                                                          uid,
                                                          name=name,
                                                          args=args,
                                                          operator=operator,
                                                          context=context,
                                                          limit=limit)