def test_frozendict_hash(self):
    """ Ensure that a frozendict is hashable. """
    samples = [
        # dict with simple values
        {'name': 'Joe', 'age': 42},
        # dict with tuples, lists, and embedded dicts
        {'user_id': (42, 'Joe'), 'line_ids': [(0, 0, {'values': [42]})]},
    ]
    for sample in samples:
        hash(frozendict(sample))
def action_view_invoice(self, invoices=False):
    """Open the invoice action with the default payment method in context.

    Fixes the original definition, which was misspelled
    ``action_view_invoie`` and therefore never actually overrode
    ``action_view_invoice`` (the super() call already targeted the
    correctly-spelled method).

    :param invoices: optional invoices recordset forwarded to super()
    :return: the action dict with the patched context
    """
    res = super().action_view_invoice(invoices)
    ctx = dict(self._context)
    ctx.update({"default_payment_method": self.payment_method.id})
    # Patch the environment context. The original assigned the frozendict
    # to ``env.args`` — a tuple slot, see sibling methods that assign
    # ``env.context`` — which would have corrupted the environment.
    self.env.context = frozendict(ctx)
    res["context"] = ctx
    return res
def _recompute(self, field, recs):
    """Recompute ``field`` and its stored siblings on ``recs``, flushing
    each distinct set of values with a single batched write."""
    computed = self.env[field.model_name]._field_computed[field]
    stored_names = [f.name for f in computed if f.store]

    # Group record ids by identical write values so each distinct update
    # is flushed with one _write() call.
    grouped = defaultdict(set)
    for record in recs:
        try:
            raw = {name: record[name] for name in stored_names}
        except MissingError:
            continue
        grouped[frozendict(record._convert_to_write(raw))].add(record.id)

    with recs.env.norecompute():
        for values, ids in grouped.items():
            batch = recs.browse(ids)
            try:
                batch._write(dict(values))
            except MissingError:
                # some records vanished meanwhile: retry on survivors only
                batch.exists()._write(dict(values))

    # mark computed fields as done
    for f in computed:
        recs._recompute_done(f)
def message_route_process(self, message, message_dict, routes):
    """Expose the computed routes and the recipient address in the
    environment context before delegating to the standard processing."""
    extra = {
        "incoming_routes": routes,
        "incoming_to": message_dict.get("to"),
    }
    self.env.context = frozendict(dict(self.env.context, **extra))
    return super(MailThread, self).message_route_process(
        message, message_dict, routes)
def test_frozendict_immutable(self):
    """ Ensure that a frozendict is immutable. """
    frozen_vals = frozendict({'name': 'Joe', 'age': 42})

    def assert_frozen(fn, *args):
        # every mutating operation must raise
        with self.assertRaises(Exception):
            fn(*args)

    # item assignment and deletion
    assert_frozen(frozen_vals.__setitem__, 'surname', 'Jack')
    assert_frozen(frozen_vals.__setitem__, 'name', 'Jack')
    assert_frozen(frozen_vals.__delitem__, 'name')
    # mutating dict API: update, setdefault, pop, popitem, clear
    assert_frozen(frozen_vals.update, {'surname': 'Jack'})
    assert_frozen(frozen_vals.update, {'name': 'Jack'})
    assert_frozen(frozen_vals.setdefault, 'surname', 'Jack')
    assert_frozen(frozen_vals.pop, 'surname', 'Jack')
    assert_frozen(frozen_vals.pop, 'name', 'Jack')
    assert_frozen(frozen_vals.popitem)
    assert_frozen(frozen_vals.clear)
def inter_call(self, data):
    """Create the module from the recorder ``data`` and open the resulting
    record in the "Module Recording" form view.

    NOTE(review): mutates ``env.args`` to inject an empty ``depends`` dict
    into the context; ``_get_id`` later fills that dict in place — confirm
    this seeding happens before any ``_get_id`` call.
    """
    cr, uid, context = self.env.args
    context = dict(context)
    context.update({'depends': {}})
    # re-freeze the patched context onto the environment
    self.env.args = cr, uid, frozendict(context)
    res = base_module_save._create_module(self, data)
    mod_obj = self.env['ir.model.data']
    # locate the form view to open the result with
    model_data_ids = mod_obj.\
        search([('model', '=', 'ir.ui.view'),
                ('name', '=', 'module_create_form_view')])
    resource_id = model_data_ids.read(fields=['res_id'])[0]['res_id']
    context.update(res)
    # persist the generated module payload on the wizard record
    res_id = self.create({
        'module_filename': ustr(res['module_filename']),
        'module_file': ustr(res['module_file']),
        'name': ustr(res['name']),
        'directory_name': ustr(res['directory_name']),
        'version': ustr(res['version']),
        'author': ustr(res['author']),
        'website': ustr(res['website']),
        'category': ustr(res['category']),
        'description': ustr(res['description']),
    })
    return {
        'name': _('Module Recording'),
        'view_type': 'form',
        'view_mode': 'form',
        'res_id': res_id.id,
        'res_model': 'base.module.record.objects',
        'views': [(resource_id, 'form')],
        'type': 'ir.actions.act_window',
        'target': 'new',
    }
def _compute_qty(self):
    """Compute on-hand and forecast quantities, grouping orderpoints that
    share the same product context so availability is fetched in batch."""
    by_context = defaultdict(
        lambda: self.env['stock.warehouse.orderpoint'])
    for op in self:
        if not op.product_id or not op.location_id:
            op.qty_on_hand = False
            op.qty_forecast = False
            continue
        ctx_key = frozendict({
            **self.env.context,
            **op._get_product_context(),
        })
        by_context[ctx_key] |= op

    for ctx_key, orderpoints in by_context.items():
        quantities = orderpoints.product_id.with_context(
            ctx_key)._product_available()
        in_progress = orderpoints._quantity_in_progress()
        for op in orderpoints:
            product_qty = quantities[op.product_id.id]
            op.qty_on_hand = product_qty['qty_available']
            op.qty_forecast = (
                product_qty['virtual_available'] + in_progress[op.id])
def _is_end_of_seq_chain(self):
    """Tells whether or not these elements are the last ones of the sequence chain.

    :return: True if self are the last elements of the chain.
    """
    groups = defaultdict(lambda: {'last_rec': self.browse(), 'seq_list': []})
    for rec in self.filtered(lambda r: r[r._sequence_field]):
        fmt, fmt_values = rec._get_sequence_format_param(
            rec[rec._sequence_field])
        seq_number = fmt_values.pop('seq')
        group = groups[(fmt, frozendict(fmt_values))]
        group['seq_list'].append(seq_number)
        if group['last_rec'].sequence_number < rec.sequence_number:
            group['last_rec'] = rec

    for group in groups.values():
        seqs = group['seq_list']
        # a hole in the numbers means these are not the trailing elements
        if max(seqs) - min(seqs) != len(seqs) - 1:
            return False
        # the highest-numbered record must be the last of its whole chain
        if not group['last_rec']._is_last_from_seq_chain():
            return False
    return True
def _set_lang(self, lang, obj=None):
    """Switch the report rendering language, patching the target's
    environment context when it differs.

    :param lang: language code to activate (e.g. ``"fr_FR"``)
    :param obj: optional recordset; defaults to ``localcontext['objects']``
    """
    self.localcontext.update(lang=lang)
    if obj is None and 'objects' in self.localcontext:
        obj = self.localcontext['objects']
    # Use .get(): the context may not carry a 'lang' key yet; the original
    # subscript access raised KeyError in that case.
    if obj and obj.env.context.get('lang') != lang:
        ctx_copy = dict(self.env.context)
        ctx_copy.update(lang=lang)
        obj.env.context = frozendict(ctx_copy)
def _set_images(self):
    """Load the test images (memoized in ``_file_cache``), extract their
    logo colors, and expose the whole cache as ``company_imgs``."""
    for fname in self._get_images_for_test():
        cache_key = fname.split('.')[0]
        if cache_key in _file_cache:
            continue
        with Image.open(os.path.join(dir_path, fname), 'r') as img:
            base64_img = image_to_base64(img, 'JPEG')
            layout = self.env['base.document.layout'].create({})
            primary, secondary = layout._parse_logo_colors(base64_img)
            _file_cache[cache_key] = frozendict({
                'img': base64_img,
                'colors': {
                    'primary_color': primary,
                    'secondary_color': secondary,
                },
            })
    self.company_imgs = frozendict(_file_cache)
def toggle_starred(self):
    """Invert the ``starred`` flag of every record, writing in batches of
    identical values."""
    batches = defaultdict(set)
    for record in self:
        new_vals = {"starred": not record.starred}
        batches[tools.frozendict(new_vals)].add(record.id)
    with self.env.norecompute():
        for values, ids in batches.items():
            self.browse(ids).write(dict(values))
    self.recompute()
def set_lang(self, lang, obj=None):
    """Switch the Aeroo report rendering language.

    :param lang: language code to activate
    :param obj: optional recordset; defaults to ``localcontext['objects']``
    """
    # The original called _logger.exception() with eager %-interpolation,
    # which logs a spurious traceback outside of any exception handler;
    # use a lazily-formatted debug message instead.
    _logger.debug('AEROO setLang======================= %s', lang)
    self.localcontext.update(lang=lang)
    if obj is None and 'objects' in self.localcontext:
        obj = self.localcontext['objects']
    # .get() avoids a KeyError when the context has no 'lang' key yet
    if obj and obj.env.context.get('lang') != lang:
        ctx_copy = dict(self.env.context)
        ctx_copy.update(lang=lang)
        obj.env.context = frozendict(ctx_copy)
        obj.invalidate_cache()
def _inverse_content(self):
    """Persist the binary ``content`` of each record, grouping records
    that produce identical write values into a single batched write."""
    batches = defaultdict(set)
    for record in self:
        payload = base64.b64decode(record.content or "")
        content_vals = record._update_content_vals(
            self._get_content_inital_vals(), payload)
        batches[tools.frozendict(content_vals)].add(record.id)
    with self.env.norecompute():
        for values, ids in batches.items():
            self.browse(ids).write(dict(values))
def test_company_sticky_with_context(self):
    """Check that 'allowed_company_ids' survives context switches."""
    base_ctx = frozendict({'nothing_to_see_here': True})
    ctx_companies_1 = frozendict({'allowed_company_ids': [1]})
    ctx_companies_2 = frozendict({'allowed_company_ids': [2]})

    users = self.env['res.users'].with_context(base_ctx)
    self.assertEqual(users.env.context, base_ctx)

    users = users.with_context(**ctx_companies_1)
    self.assertEqual(users.env.context, dict(base_ctx, **ctx_companies_1))

    # 'allowed_company_ids' is replaced if present in keys
    users = users.with_context(**ctx_companies_2)
    self.assertEqual(users.env.context, dict(base_ctx, **ctx_companies_2))

    # 'allowed_company_ids' is replaced if present in new context
    users = users.with_context(ctx_companies_1)
    self.assertEqual(users.env.context, ctx_companies_1)

    # 'allowed_company_ids' is sticky
    users = users.with_context(base_ctx)
    self.assertEqual(users.env.context, dict(base_ctx, **ctx_companies_1))
def portal_order_page(self, order_id, report_type=None, access_token=None,
                      message=False, download=False, **kw):
    """Render the portal order page with the order's partner id exposed in
    the request context as ``sale_privilege_level_partner_id``."""
    order = request.env["sale.order"].sudo().browse(order_id)
    request.env.context = frozendict(
        request.env.context,
        sale_privilege_level_partner_id=order.partner_id.id,
    )
    return super().portal_order_page(
        order_id, report_type, access_token, message, download, **kw)
def _inverse_content(self):
    """Store the content in ``content_binary`` together with its decoded
    size, batching records that share identical write values."""
    batches = defaultdict(set)
    for record in self:
        values = dict(
            self._get_content_vals(),
            content_binary=record.content,
            size=len(base64.b64decode(record.content or "")),
        )
        batches[tools.frozendict(values)].add(record.id)
    with self.env.norecompute():
        for values, ids in batches.items():
            self.browse(ids).write(dict(values))
    self.recompute()
def process(model, id_vals):
    """Write ``vals`` on the given record ids, batching identical updates.

    :param model: recordset/model used to browse the ids
    :param id_vals: mapping {record_id: vals_dict}
    """
    # group record ids by vals, to update in batch when possible
    updates = defaultdict(list)
    for rid, vals in id_vals.items():
        updates[frozendict(vals)].append(rid)
    for vals, ids in updates.items():
        recs = model.browse(ids)
        try:
            # pass a plain dict to _write, consistent with the other
            # batched writers in this file
            recs._write(dict(vals))
        except MissingError:
            # retry without the records that disappeared meanwhile
            recs.exists()._write(dict(vals))
def _query(self, with_clause="", fields=frozendict(), groupby="", from_clause=""):
    """Extend the report query with the event reservation type column,
    both in the selected fields and in the GROUP BY clause."""
    extra_fields = dict(fields)
    extra_fields['event_reservation_type_id'] = """ , t.event_reservation_type_id as event_reservation_type_id """
    extended_groupby = groupby + ", t.event_reservation_type_id"
    return super()._query(with_clause, extra_fields, extended_groupby, from_clause)
def _inverse_content(self):
    """Store content as a PostgreSQL large object for 'lobject' storages,
    delegating all other records to the parent implementation."""
    lobject_records = self.filtered(
        lambda rec: rec.storage.save_type == 'lobject')
    batches = defaultdict(set)
    for record in lobject_records:
        values = dict(
            self._get_content_vals(),
            content_lobject=record.content,
            size=len(base64.b64decode(record.content or "")),
        )
        batches[tools.frozendict(values)].add(record.id)
    with self.env.norecompute():
        for values, ids in batches.items():
            self.browse(ids).write(dict(values))
    super(File, self - lobject_records)._inverse_content()
def _inverse_content(self):
    """Store content in the filestore for 'file' storages, delegating all
    other records to the parent implementation."""
    file_records = self.filtered(
        lambda rec: rec.storage.save_type == 'file')
    batches = defaultdict(set)
    for record in file_records:
        payload = base64.b64decode(record.content or "")
        values = self._update_content_vals(
            record, self._get_content_inital_vals(), payload)
        values['content_file'] = record.content and payload
        batches[tools.frozendict(values)].add(record.id)
    with self.env.norecompute():
        for values, ids in batches.items():
            self.browse(ids).write(dict(values))
    super(File, self - file_records)._inverse_content()
def _recompute(self, field, recs):
    """Recompute ``field`` and its stored siblings on ``recs`` in batch."""
    # determine the fields to recompute
    fs = self.env[field.model_name]._field_computed[field]
    ns = [f.name for f in fs if f.store]
    # evaluate fields, and group record ids by update
    updates = defaultdict(set)
    for rec in recs.exists():
        vals = rec._convert_to_write({n: rec[n] for n in ns})
        updates[frozendict(vals)].add(rec.id)
    # update records in batch when possible
    with recs.env.norecompute():
        # dict.iteritems() does not exist in Python 3; use items()
        for vals, ids in updates.items():
            recs.browse(ids)._write(dict(vals))
    # mark computed fields as done; the original used map(), which is
    # lazy in Python 3 and therefore never invoked _recompute_done
    for f in fs:
        recs._recompute_done(f)
def _get_id(self, model, id):
    """Return ``(xml_id, noupdate)`` for the record ``(model, id)``, or
    ``(False, None)`` when no ir.model.data entry exists.

    Also flags the owning module in ``context['depends']`` so the module
    recorder knows which modules the export depends on.
    """
    if isinstance(id, tuple):
        # many2one values may come in as (id, display_name) pairs
        id = id[0]
    if (model, id) in self.blank_dict:
        res_id = self.blank_dict[(model, id)]
        return res_id, False
    dt = self.env['ir.model.data']
    obj = dt.search([('model', '=', model), ('res_id', '=', id)])
    if not obj:
        return False, None
    obj = obj[0]
    cr, uid, context = self.env.args
    context = dict(context)
    # re-freeze a copy of the context onto the environment
    self.env.args = cr, uid, frozendict(context)
    # NOTE(review): assumes 'depends' was seeded into the context (see
    # inter_call, which sets context['depends'] = {}); if the key is
    # missing, the next line raises TypeError — confirm all callers seed it
    depends = context.get('depends')
    depends[obj.module] = True
    return obj.module + '.' + obj.name, obj.noupdate
def _compute_available_actions(self):
    """Compute the actions applicable to each file record.

    Actions are pre-filtered by directory / category / tags criteria,
    then each action's own ``filter_domain`` is applied; multi-file
    actions are additionally collected in ``actions_multi``.
    """
    # base domains: actions with no criteria of a kind match everything
    tags_domain = [('criteria_tags', '=', False)]
    category_domain = [('criteria_category', '=', False)]
    directory_domain = [('criteria_directory', '=', False)]
    # prefetch_fields=False: only ids are needed here
    no_prefetch = self.with_context(prefetch_fields=False)
    directory_ids = no_prefetch.mapped('directory.id')
    category_ids = self.mapped('category.id')
    tags_ids = self.mapped('tags.id')
    if directory_ids:
        directory_domain = expression.OR([
            directory_domain,
            [('criteria_directory', 'parent_of', directory_ids)],
        ])
    if category_ids:
        category_domain = expression.OR([
            category_domain,
            [('criteria_category', 'parent_of', category_ids)],
        ])
    if tags_ids:
        tags_domain = expression.OR([
            tags_domain,
            [('criteria_tags', 'in', tags_ids)],
        ])
    updatesdict = defaultdict(set)
    # {file_id: [all_action_ids, multi_file_action_ids]}
    actiondict = defaultdict(lambda: [set(), set()])
    action_model = self.env['muk_dms_actions.action']
    actions = action_model.search(
        expression.AND([directory_domain, category_domain, tags_domain]))
    for action in actions:
        is_single = action.is_single_file_action
        # filter_domain is parsed with literal_eval, so it must be a
        # plain Python literal
        domain = literal_eval(action.filter_domain)
        action_files = self.search(
            expression.AND([[['id', 'in', self.ids]], domain]))
        for file_id in action_files.ids:
            if not is_single:
                actiondict[file_id][1].add(action.id)
            actiondict[file_id][0].add(action.id)
    # group identical values together to update records in batch
    for id, vals in actiondict.items():
        actions_values = {
            'actions': [(6, 0, list(vals[0]))],
            'actions_multi': [(6, 0, list(vals[1]))]
        }
        updatesdict[frozendict(actions_values)].add(id)
    for values, ids in updatesdict.items():
        self.browse(ids).update(dict(values))
def __new__(cls, cr, uid, context):
    """Return the environment for ``(cr, uid, context)``.

    Environments are interned: if one with the exact same arguments is
    already registered in ``cls.envs``, it is reused instead of creating
    a new instance.
    """
    assert context is not None
    args = (cr, uid, context)
    # if env already exists, return it
    env, envs = None, cls.envs
    for env in envs:
        if env.args == args:
            return env
    # otherwise create environment, and add it in the set
    self = object.__new__(cls)
    # the context is frozen so the cached args tuple stays immutable
    self.cr, self.uid, self.context = self.args = (cr, uid, frozendict(context))
    self.registry = Registry(cr.dbname)
    # the cache is shared between all environments of the set
    self.cache = envs.cache
    self._protected = StackMap()  # {field: ids, ...}
    self.dirty = defaultdict(set)  # {record: set(field_name), ...}
    self.all = envs
    envs.add(self)
    return self
def _update_automatic_thumbnail(self):
    """(Re)generate the automatic thumbnail of each record in all sizes,
    writing records with identical values in one batch."""
    updates = defaultdict(set)
    for record in self:
        try:
            thumbnail = self._make_thumbnail(record)
        except Exception:
            # logger.exception appends the traceback itself; pass lazy
            # %-args instead of pre-interpolating, and fix the original
            # "Thumnail" typo in the message
            _logger.exception(
                "Thumbnail creation failed for file %s with ID %s.",
                record.name, record.id)
            thumbnail = None
        if thumbnail:
            values = {'automatic_thumbnail': thumbnail}
            tools.image_resize_images(
                values,
                big_name='automatic_thumbnail',
                medium_name='automatic_thumbnail_medium',
                small_name='automatic_thumbnail_small')
            updates[tools.frozendict(values)].add(record.id)
    with self.env.norecompute():
        for vals, ids in updates.items():
            self.browse(ids).write(dict(vals))
def _bulk_recompute(self):
    """ Recompute stored function fields. The fields and records to
        recompute have been determined by method :meth:`modified`.
    """
    while self.env.has_todo():
        field, recs = self.env.get_todo()
        # determine the fields to recompute
        fs = self.env[field.model_name]._field_computed[field]
        ns = [f.name for f in fs if f.store]
        # evaluate fields, and group record ids by update
        updates = defaultdict(set)
        for rec in recs.exists():
            vals = rec._convert_to_write({n: rec[n] for n in ns})
            updates[frozendict(vals)].add(rec.id)
        # update records in batch when possible
        with recs.env.norecompute():
            # dict.iteritems() does not exist in Python 3; use items()
            for vals, ids in updates.items():
                recs.browse(ids)._write(dict(vals))
        # mark computed fields as done; the original used map(), which is
        # lazy in Python 3 and therefore never invoked _recompute_done
        for f in fs:
            recs._recompute_done(f)
def __new__(cls, cr, uid, context, su=False):
    """Return the environment for ``(cr, uid, context, su)``.

    Environments are interned: identical arguments yield the same
    instance from ``cls.envs``.
    """
    if uid == SUPERUSER_ID:
        # the superuser always runs in superuser mode
        su = True
    assert context is not None
    args = (cr, uid, context, su)
    # if env already exists, return it
    env, envs = None, cls.envs
    for env in envs:
        if env.args == args:
            return env
    # otherwise create environment, and add it in the set
    self = object.__new__(cls)
    # freeze the context so the cached args tuple stays immutable
    args = (cr, uid, frozendict(context), su)
    self.cr, self.uid, self.context, self.su = self.args = args
    self.registry = Registry(cr.dbname)
    # the cache is shared between all environments of the set
    self.cache = envs.cache
    self._protected = envs.protected  # proxy to shared data structure
    self.all = envs
    envs.add(self)
    return self
def test_skip_if_no_connector_export(self):
    """Check that a listener decorated with
    ``@skip_if(... no_connector_export ...)`` is skipped when the
    ``no_connector_export`` flag is set in the environment context,
    while an undecorated listener still runs.
    """
    class MyEventListener(Component):
        _name = "my.event.listener"
        _inherit = "base.event.listener"

        def on_record_create(self, record, fields=None):
            # plain listener: must always be notified
            assert True

    class MyOtherEventListener(Component):
        _name = "my.other.event.listener"
        _inherit = "base.connector.listener"

        @skip_if(lambda self, record, fields=None:
                 self.no_connector_export(record))
        def on_record_create(self, record, fields=None):
            # guarded listener: must be skipped thanks to the context flag
            raise AssertionError()

    # set the flag that the skip_if guard checks
    self.env.context = frozendict(self.env.context, no_connector_export=True)
    work = EventWorkContext(
        model_name="res.users", env=self.env,
        components_registry=self.comp_registry
    )
    # get the collecter to notify the event
    # we don't mind about the collection and the model here,
    # the events we test are global
    self.collecter = self.comp_registry["base.event.collecter"](work)
    self._build_components(
        components.core.BaseConnectorComponent,
        components.listener.ConnectorListener,
        MyEventListener,
        MyOtherEventListener,
    )
    # collect the event and notify it
    record = mock.Mock(name="record")
    collected = self.collecter.collect_events("on_record_create")
    # both listeners are collected; only the unguarded one may act
    self.assertEqual(2, len(collected.events))
    collected.notify(record)
def _compute_amount(self):
    """ Inject the product price with proper rounding in the context from
    which account.tax::compute_all() is able to retrieve it. The alternate
    context is patched onto self because it can be a NewId passed in the
    onchange the env of which does not support `with_context`.
    """
    for line in self:
        orig_context = None
        # This is always executed for allowing other modules to use this
        # with different conditions than discount != 0
        discounted_price_unit = line._get_discounted_price_unit()
        if discounted_price_unit != line.price_unit:
            precision = line.order_id.currency_id.decimal_places
            company = line.company_id or self.env.user.company_id
            if company.tax_calculation_rounding_method == 'round_globally':
                # extra digits to limit intermediate rounding artifacts
                precision += 5
            # remember the original context so it can be restored below
            orig_context = self.env.context
            price = round(line.product_qty * discounted_price_unit, precision)
            # expose the rounded price to compute_all() via the context
            self.env.context = frozendict(
                self.env.context, base_values=(price, price, price))
        super(PurchaseOrderLine, line)._compute_amount()
        if orig_context is not None:
            # restore the environment context patched above
            self.env.context = orig_context
def _onchange_name(self):
    """On vendor bills, try to predict the product and/or account from the
    manually-typed line label."""
    if self.invoice_id.type == 'in_invoice' and self.name:
        # don't call prediction when the name change is triggered by a change of product
        if self.name != self._get_invoice_line_name_from_product():
            # don't predict the account if it has already be filled
            predict_account = not bool(self.account_id)
            if self.env.user.has_group('account.group_products_in_bills') and not self.product_id:
                predicted_product_id = self._predict_product(self.name)
                # We only change the product if we manage to predict its value
                if predicted_product_id:
                    # We pass a context key to tell that we don't want the product
                    # onchange function to override the description that was entered by the user
                    self.env.context = frozendict(self.env.context, skip_product_onchange_rename=True)
                    self.product_id = predicted_product_id
                    # the account has been set via the onchange, there's no need to predict it any longer
                    predict_account = False
            if predict_account:
                predicted_account_id = self._predict_account(self.name, self.partner_id)
                # We only change the account if we manage to predict its value
                if predicted_account_id:
                    self.account_id = predicted_account_id
                else:
                    # fall back to the default account for this journal
                    self.account_id = self.with_context(set_default_account=True, journal_id=self.invoice_id.journal_id.id)._default_account()