def merge_default_ir_values(cr):
    """Migrate 'default' ir.values rows into ir_default.

    Only non-empty 'default' entries are considered; each migrated
    ir_values row is deleted right after its ir_default twin is created.

    :param cr: database cursor
    :return: True (migration-step convention)
    """
    cr.execute("""
        SELECT id, create_date, write_date, create_uid, write_uid,
               user_id, company_id, name, model, value, key2
        FROM ir_values
        WHERE key = 'default' AND value IS NOT NULL;""")
    insert_and_delete = """
        INSERT INTO ir_default (
            create_date, write_date, create_uid, write_uid, user_id,
            company_id, field_id, json_value, condition)
        VALUES %s;
        DELETE FROM ir_values WHERE id = %s;"""
    for row in cr.fetchall():
        (vid, create_date, write_date, create_uid, write_uid,
         user_id, company_id, name, model, raw_value, key2) = row
        cr.execute(
            "SELECT id FROM ir_model_fields WHERE name = %s AND model = %s;",
            (name, model))
        field = cr.fetchone()
        if not (field and field[0]):
            # the field no longer exists: nothing to migrate
            continue
        # taken from odoo 10 to get pickled value and odoo 11 to store
        # JSON value.  NOTE: pickle.loads on DB content — trusted data only.
        value = pickle.loads(bytes(raw_value, 'utf-8'))
        json_value = json.dumps(value, ensure_ascii=False)
        cr.execute(insert_and_delete, (
            (create_date, write_date, create_uid, write_uid, user_id,
             company_id, field[0], json_value, key2),
            vid))
    return True
def get_cache(self, domain, fields):
    """Return the unpickled product cache, refreshing it first when the
    requested ``domain``/``fields`` differ from the ones the cache was
    built for, or when no cache exists yet."""
    cache_is_stale = (
        not self.cache
        or domain != self.get_product_domain()
        or fields != self.get_product_fields()
    )
    if cache_is_stale:
        # remember what this cache was built for, then rebuild it
        self.product_domain = str(domain)
        self.product_fields = str(fields)
        self.refresh_cache()
    return cPickle.loads(self.cache)
def _value_unpickle(self): for record in self: value = record.value if record.key == 'default' and value: # default values are pickled on the fly with tools.ignore(Exception): value = str(pickle.loads(value)) record.value_unpickle = value
def get_cache(self, domain, fields):
    """Return the cached product data, rebuilding the cache whenever it
    is empty or was built for a different ``domain``/``fields`` pair."""
    if self.cache and domain == self.get_product_domain() \
            and fields == self.get_product_fields():
        # cache hit: same query as last time
        return cPickle.loads(self.cache)
    self.product_domain = str(domain)
    self.product_fields = str(fields)
    self.refresh_cache()
    return cPickle.loads(self.cache)
def get_cache(self, domain, fields):
    """Return the unpickled partner cache, refreshing it first when the
    requested ``domain``/``fields`` differ from the ones the cache was
    built for, or when no cache exists yet.

    ``self.cache`` holds a base64-encoded pickle payload.

    :param domain: search domain the caller wants the cache for
    :param fields: field list the caller wants the cache for
    :return: the cached (unpickled) payload
    """
    if not self.cache or domain != self.get_partner_domain() \
            or fields != self.get_partner_fields():
        self.partner_domain = str(domain)
        self.partner_fields = str(fields)
        self.refresh_cache()
    # base64.decodestring() was removed in Python 3.9;
    # decodebytes() is its direct replacement (same semantics).
    cache = base64.decodebytes(self.cache)
    # NOTE: pickle.loads on our own cache — trusted data only.
    return cPickle.loads(cache)
def get_defaults(self, model, condition=False):
    """Returns any default values that are defined for the current model and
    user, (and match ``condition``, if specified), previously registered via
    :meth:`~.set_default`.

    Defaults are global to a model, not field-specific, but an optional
    ``condition`` can be provided to restrict matching default values to
    those that were defined for the same condition (usually based on another
    field's value).

    Default values also have priorities depending on whom they apply to:
    only the highest priority value will be returned for any field. See
    :meth:`~.set_default` for more details.

    :param string model: model name
    :param string condition: optional condition specification that can be
                             used to restrict the applicability of the
                             default values (e.g. based on another field's
                             value). This is an opaque string as far as the
                             API is concerned, but client stacks typically
                             use single-field conditions in the form
                             ``'key=stringified_value'``. (Currently, the
                             condition is trimmed to 200 characters, so
                             values that share the same first 200 characters
                             always match)
    :return: list of default values tuples of the form ``(id, field_name,
             value)`` (``id`` is the ID of the default entry, usually
             irrelevant)
    """
    # use a direct SQL query for performance reasons,
    # this is called very often
    # NOTE: the %%s placeholders survive the `query % <clause>` formatting
    # below as %s psycopg2 parameters; the single bare %s receives the
    # key2 clause chosen from `condition`.
    query = """ SELECT v.id, v.name, v.value FROM ir_values v LEFT JOIN res_users u ON (v.user_id = u.id) WHERE v.key = %%s AND v.model = %%s AND (v.user_id = %%s OR v.user_id IS NULL) AND (v.company_id IS NULL OR v.company_id = (SELECT company_id FROM res_users WHERE id = %%s) ) %s ORDER BY v.user_id, u.company_id"""
    params = ('default', model, self._uid, self._uid)
    if condition:
        query = query % 'AND v.key2 = %s'
        # conditions are stored truncated to 200 chars (see docstring)
        params += (condition[:200], )
    else:
        query = query % 'AND v.key2 IS NULL'
    self._cr.execute(query, params)

    # keep only the highest priority default for each field: rows arrive
    # user-specific first (ORDER BY v.user_id, u.company_id), and
    # setdefault keeps the first row seen per field name
    defaults = {}
    for row in self._cr.dictfetchall():
        # values are stored pickled; NOTE(review): pickle.loads on DB
        # content — trusted data only
        value = pickle.loads(row['value'].encode('utf-8'))
        defaults.setdefault(row['name'], (row['id'], row['name'], value))
    return defaults.values()
def get_defaults(self, model, condition=False):
    """Return the default values registered via :meth:`~.set_default` for
    ``model`` and the current user, optionally restricted to ``condition``.

    Only the highest-priority entry is kept per field (user-specific
    before global, then by company).  ``condition`` is an opaque string,
    trimmed to 200 characters before matching.

    :param string model: model name
    :param string condition: optional condition specification
    :return: list of ``(id, field_name, value)`` tuples
    """
    # direct SQL for performance reasons — this is called very often.
    # The %%s placeholders become %s psycopg2 parameters after the
    # `query % clause` formatting below.
    query = """ SELECT v.id, v.name, v.value FROM ir_values v LEFT JOIN res_users u ON (v.user_id = u.id) WHERE v.key = %%s AND v.model = %%s AND (v.user_id = %%s OR v.user_id IS NULL) AND (v.company_id IS NULL OR v.company_id = (SELECT company_id FROM res_users WHERE id = %%s) ) %s ORDER BY v.user_id, u.company_id"""
    if condition:
        key2_clause = 'AND v.key2 = %s'
        params = ('default', model, self._uid, self._uid, condition[:200])
    else:
        key2_clause = 'AND v.key2 IS NULL'
        params = ('default', model, self._uid, self._uid)
    self._cr.execute(query % key2_clause, params)

    # rows come back highest priority first; keep the first one per field
    defaults = {}
    for row in self._cr.dictfetchall():
        if row['name'] not in defaults:
            # stored values are pickled strings
            defaults[row['name']] = (
                row['id'], row['name'],
                pickle.loads(row['value'].encode('utf-8')))
    return defaults.values()
def get_default(self, model, field_name, for_all_users=True,
                company_id=False, condition=False):
    """Return the default value defined for model, field_name, users,
    company and condition.  Return ``None`` if no such default exists."""
    domain = [
        ('key', '=', 'default'),
        # conditions are stored truncated to 200 characters
        ('key2', '=', condition and condition[:200]),
        ('model', '=', model),
        ('name', '=', field_name),
        ('user_id', '=', False if for_all_users else self._uid),
        ('company_id', '=', company_id),
    ]
    defaults = self.search(domain)
    if not defaults:
        return None
    # stored values are pickled strings
    return pickle.loads(defaults.value.encode('utf-8'))
def _dj_value_to_xmlid(self, field, rec): if rec.get(self._value_key) and rec.get('key') == 'action': # relation to actions rec[self._value_key] = property_to_xmlid( self.env, rec[self._value_key]) elif rec.get(self._value_key) and rec.get('key') == 'default': # handle relation fields field = self._dj_get_relation_field(rec) if field: # DO NOT USE `value_unpickle` + browse record # otherwise when changing `value` we are going # to break `value_unpickle` computation rec_id = int(pickle.loads(rec[self._value_key])) model = self.env[field['relation']] rec[self._value_key] = \ model.browse(rec_id)._dj_export_xmlid()
def read(self, fields=None, load='_classic_read'):
    """Convert values to xmlid."""
    result = super(IRValues, self).read(fields=fields, load=load)
    if not self.env.context.get('xmlid_value_reference'):
        return result
    # wipe cache otherwise we gonna get the std value in any case
    self.invalidate_cache(['value'])
    for rec in result:
        value = rec.get('value')
        if not value:
            continue
        if rec.get('key') == 'action':
            # relation to actions
            rec['value'] = property_to_xmlid(self.env, value)
        elif rec.get('key') == 'default':
            # handle relation fields
            field = self._get_relation_field(rec)
            if field:
                # DO NOT USE `value_unpickle` + browse record
                # otherwise when changing `value` we are going
                # to break `value_unpickle` computation
                rec_id = int(pickle.loads(value))
                target = self.env[field['relation']].browse(rec_id)
                rec['value'] = target._dj_export_xmlid()
    return result
def reverse_anonymize_database(self):
    """Set the 'clear' state to defined fields.

    Loads the anonymization export file (a pickled list of lines with
    ``model_id``/``field_id``/``id``/``value`` keys), restores every
    original value — through version-specific fix queries when defined —
    marks all anonymization fields as 'clear' and records a history entry.

    :return: action dict reopening the wizard form
    :raises UserError: when the database is not anonymized, is in an
        unstable state, or no export file was supplied.
    """
    self.ensure_one()
    IrModelFieldsAnonymization = self.env['ir.model.fields.anonymization']

    # check that all the defined fields are in the 'anonymized' state
    state = IrModelFieldsAnonymization._get_global_state()
    if state == 'clear':
        raise UserError(_("The database is not currently anonymized, you cannot reverse the anonymization."))
    elif state == 'unstable':
        raise UserError(_("The database anonymization is currently in an unstable state. Some fields are anonymized,"
                          " while some fields are not anonymized. You should try to solve this problem before trying to do anything."))

    if not self.file_import:
        raise UserError('%s: %s' % (_('Error !'), _("It is not possible to reverse the anonymization process without supplying the anonymization export file.")))

    # reverse the anonymization:
    # load the pickle file content into a data structure:
    # NOTE(review): pickle.loads on an uploaded file — acceptable only
    # because this wizard is restricted to administrators.
    # base64.decodestring() was removed in Python 3.9; decodebytes() is
    # its direct replacement.
    data = pickle.loads(base64.decodebytes(self.file_import))

    fixes = self.env['ir.model.fields.anonymization.migration.fix'].search_read([
        ('target_version', '=', '.'.join(str(v) for v in version_info[:2]))
    ], ['model_name', 'field_name', 'query', 'query_type', 'sequence'])
    fixes = group(fixes, ('model_name', 'field_name'))

    for line in data:
        queries = []
        table_name = self.env[line['model_id']]._table if line['model_id'] in self.env else None

        # check if custom sql exists:
        key = (line['model_id'], line['field_id'])
        custom_updates = fixes.get(key)
        if custom_updates:
            custom_updates.sort(key=itemgetter('sequence'))
            queries = [(record['query'], record['query_type']) for record in custom_updates if record['query_type']]
        elif table_name:
            queries = [('update "%(table)s" set "%(field)s" = %%(value)s where id = %%(id)s' % {
                'table': table_name,
                'field': line['field_id'],
            }, 'sql')]

        for query in queries:
            if query[1] == 'sql':
                self.env.cr.execute(query[0], {
                    'value': line['value'],
                    'id': line['id']
                })
            elif query[1] == 'python':
                safe_eval(query[0] % line)
            else:
                # BUGFIX: `query` is a (query, query_type) tuple, so the
                # original `query['query_type']` raised a TypeError
                # instead of the intended message.
                raise Exception("Unknown query type '%s'. Valid types are: sql, python." % (query[1], ))

    # update the anonymization fields:
    ano_fields = IrModelFieldsAnonymization.search([('state', '!=', 'not_existing')])
    ano_fields.write({'state': 'clear'})

    # add a result message in the wizard:
    self.msg = '\n'.join(["Successfully reversed the anonymization.", ""])

    # create a new history record:
    self.env['ir.model.fields.anonymization.history'].create({
        'date': fields.Datetime.now(),
        'field_ids': [[6, 0, ano_fields.ids]],
        'msg': self.msg,
        'filepath': False,
        'direction': 'anonymized -> clear',
        'state': 'done'
    })

    return {
        'res_id': self.id,
        'view_id': self.env.ref('anonymization.view_ir_model_fields_anonymize_wizard_form').ids,
        'view_type': 'form',
        "view_mode": 'form',
        'res_model': 'ir.model.fields.anonymize.wizard',
        'type': 'ir.actions.act_window',
        'context': {'step': 'just_desanonymized'},
        'target': 'new'
    }
def reverse_anonymize_database(self):
    """Set the 'clear' state to defined fields.

    Reads the anonymization export file (a pickled list of per-record
    lines), writes every original value back — using version-specific
    migration fix queries when available — then flags the anonymization
    fields as 'clear' and logs a history record.

    :return: action dict reopening the wizard form
    :raises UserError: when the database is not anonymized, is unstable,
        or no export file was supplied.
    """
    self.ensure_one()
    IrModelFieldsAnonymization = self.env['ir.model.fields.anonymization']

    # check that all the defined fields are in the 'anonymized' state
    state = IrModelFieldsAnonymization._get_global_state()
    if state == 'clear':
        raise UserError(
            _("The database is not currently anonymized, you cannot reverse the anonymization."))
    elif state == 'unstable':
        raise UserError(
            _("The database anonymization is currently in an unstable state. Some fields are anonymized,"
              " while some fields are not anonymized. You should try to solve this problem before trying to do anything."))

    if not self.file_import:
        raise UserError('%s: %s' % (
            _('Error !'),
            _("It is not possible to reverse the anonymization process without supplying the anonymization export file.")))

    # reverse the anonymization:
    # load the pickle file content into a data structure:
    # NOTE(review): pickle.loads on an uploaded file — admin-only wizard.
    # base64.decodestring() was removed in Python 3.9; decodebytes() is
    # the direct replacement.
    data = pickle.loads(base64.decodebytes(self.file_import))

    fixes = self.env['ir.model.fields.anonymization.migration.fix'].search_read(
        [('target_version', '=', '.'.join(str(v) for v in version_info[:2]))],
        ['model_name', 'field_name', 'query', 'query_type', 'sequence'])
    fixes = group(fixes, ('model_name', 'field_name'))

    for line in data:
        queries = []
        table_name = self.env[line['model_id']]._table if line['model_id'] in self.env else None

        # check if custom sql exists:
        key = (line['model_id'], line['field_id'])
        custom_updates = fixes.get(key)
        if custom_updates:
            custom_updates.sort(key=itemgetter('sequence'))
            queries = [(record['query'], record['query_type'])
                       for record in custom_updates if record['query_type']]
        elif table_name:
            queries = [(
                'update "%(table)s" set "%(field)s" = %%(value)s where id = %%(id)s' % {
                    'table': table_name,
                    'field': line['field_id'],
                }, 'sql')]

        for query in queries:
            if query[1] == 'sql':
                self.env.cr.execute(query[0], {
                    'value': line['value'],
                    'id': line['id']
                })
            elif query[1] == 'python':
                safe_eval(query[0] % line)
            else:
                # BUGFIX: `query` is a (query, query_type) tuple; the
                # original `query['query_type']` indexed it with a string
                # and raised a TypeError instead of this message.
                raise Exception(
                    "Unknown query type '%s'. Valid types are: sql, python." % (query[1], ))

    # update the anonymization fields:
    ano_fields = IrModelFieldsAnonymization.search(
        [('state', '!=', 'not_existing')])
    ano_fields.write({'state': 'clear'})

    # add a result message in the wizard:
    self.msg = '\n'.join(["Successfully reversed the anonymization.", ""])

    # create a new history record:
    self.env['ir.model.fields.anonymization.history'].create({
        'date': fields.Datetime.now(),
        'field_ids': [[6, 0, ano_fields.ids]],
        'msg': self.msg,
        'filepath': False,
        'direction': 'anonymized -> clear',
        'state': 'done'
    })

    return {
        'res_id': self.id,
        'view_id': self.env.ref(
            'anonymization.view_ir_model_fields_anonymize_wizard_form').ids,
        'view_type': 'form',
        "view_mode": 'form',
        'res_model': 'ir.model.fields.anonymize.wizard',
        'type': 'ir.actions.act_window',
        'context': {'step': 'just_desanonymized'},
        'target': 'new'
    }