def generate_recipients(self, results, res_ids):
    """Generates the recipients of the template.

    Default values can be generated instead of the template values if
    requested by template or context.  Emails (email_to, email_cc) can be
    transformed into partners if requested in the context.

    :param dict results: rendered template values per res_id; updated in place
    :param list res_ids: ids of the documents the mails relate to
    :return: ``results`` with a ``partner_ids`` list set for every res_id
    """
    self.ensure_one()

    # either the template or the caller asked for the documents' default
    # recipients instead of the template-rendered ones
    if self.use_default_to or self._context.get('tpl_force_default_to'):
        default_recipients = self.env['mail.thread'].message_get_default_recipients(res_model=self.model, res_ids=res_ids)
        for res_id, recipients in pycompat.items(default_recipients):
            results[res_id].pop('partner_to', None)
            results[res_id].update(recipients)

    for res_id, values in pycompat.items(results):
        partner_ids = values.get('partner_ids', list())
        if self._context.get('tpl_partners_only'):
            # turn raw email addresses into res.partner ids
            mails = tools.email_split(values.pop('email_to', '')) + tools.email_split(values.pop('email_cc', ''))
            for mail in mails:
                partner_id = self.env['res.partner'].find_or_create(mail)
                partner_ids.append(partner_id)
        partner_to = values.pop('partner_to', '')
        if partner_to:
            # placeholders could generate '', 3, 2 due to some empty field values
            tpl_partner_ids = [int(pid) for pid in partner_to.split(',') if pid]
            # sudo() + exists(): keep only ids of partners that still exist
            partner_ids += self.env['res.partner'].sudo().browse(tpl_partner_ids).exists().ids
        results[res_id]['partner_ids'] = partner_ids
    return results
def check_cache(self):
    """ Check the cache consistency.

    Dumps the whole field cache, invalidates it, re-reads every cached
    field from the database and raises a ``UserError`` listing every
    (field, record) pair whose re-fetched value differs from its cache.
    """
    from odoo.fields import SpecialValue
    # make a full copy of the cache, and invalidate it
    cache_dump = dict(
        (field, dict(field_cache))
        for field, field_cache in pycompat.items(self.cache)
    )
    self.invalidate_all()

    # re-fetch the records, and compare with their former cache
    invalids = []
    for field, field_dump in pycompat.items(cache_dump):
        # skip falsy record ids, which cannot be browsed
        records = self[field.model_name].browse(f for f in field_dump if f)
        for record in records:
            try:
                cached = field_dump[record.id]
                # SpecialValue wraps deferred cache entries; unwrap via get()
                cached = cached.get() if isinstance(cached, SpecialValue) else cached
                value = field.convert_to_record(cached, record)
                fetched = record[field.name]
                if fetched != value:
                    info = {'cached': value, 'fetched': fetched}
                    invalids.append((field, record, info))
            except (AccessError, MissingError):
                # unreadable or vanished records are not cache errors
                pass

    if invalids:
        raise UserError('Invalid cache for fields\n' + pformat(invalids))
def __call__(self, path=None, path_args=None, **kw):
    """Build a URL from this object's defaults plus the given overrides.

    :param str path: base path; defaults to ``self.path``
    :param path_args: names of keyword values to embed as path segments
        (merged with ``self.path_args``); the rest go to the query string
    :param kw: values to place in the URL; merged over ``self.args``
    :return: the assembled URL string
    """
    path = path or self.path
    for key, value in pycompat.items(self.args):
        kw.setdefault(key, value)
    path_args = OrderedSet(path_args or []) | self.path_args
    paths, fragments = {}, []

    # dispatch each truthy value: path-arg keys become URL segments,
    # everything else is url-encoded into the query string
    for key, value in pycompat.items(kw):
        if value and key in path_args:
            if isinstance(value, models.BaseModel):
                paths[key] = slug(value)
            else:
                paths[key] = u"%s" % value
        elif value:
            # idiom fix: single isinstance with a tuple of types
            if isinstance(value, (list, set)):
                fragments.append(
                    werkzeug.url_encode([(key, item) for item in value]))
            else:
                fragments.append(werkzeug.url_encode([(key, value)]))

    # append path segments in path_args order as /key/value
    for key in path_args:
        value = paths.get(key)
        if value is not None:
            path += '/' + key + '/' + value

    if fragments:
        path += '?' + '&'.join(fragments)

    return path
def modules_to_install(self):
    """ selects all modules to install:

    * checked boolean fields
    * return values of hook methods. Hook methods are of the form
      ``_if_%(addon_name)s``, and are called if the corresponding addon
      is marked for installation. They take the arguments cr, uid, ids
      and context, and return an iterable of addon names
    * additionals, additionals are setup through the ``_install_if``
      class variable. ``_install_if`` is a dict of {iterable:iterable}
      where key and value are iterables of addon names.

      If all the addons in the key are selected for installation
      (warning: addons added through hooks don't count), then the addons
      in the value are added to the set of modules to install
    * not already installed
    """
    # boolean fields checked on the installer record(s)
    base = set(module_name
               for installer in self.read()
               for module_name, to_install in pycompat.items(installer)
               if self._fields[module_name].type == 'boolean' and to_install)

    # run the _if_<module> hooks for every directly-selected module
    hooks_results = set()
    for module in base:
        hook = getattr(self, '_if_%s' % module, None)
        if hook:
            hooks_results.update(hook() or set())

    # _install_if consequences whose requirements are all in `base`
    additionals = set(module
                      for requirements, consequences in pycompat.items(self._install_if)
                      if base.issuperset(requirements)
                      for module in consequences)

    return (base | hooks_results | additionals) - set(self.already_installed())
def check_cache(self):
    """ Check the cache consistency. """
    from odoo.fields import SpecialValue

    # snapshot the whole cache, then wipe it
    snapshot = {}
    for field, field_cache in pycompat.items(self.cache):
        snapshot[field] = dict(field_cache)
    self.invalidate_all()

    # re-read every previously cached record and diff against the snapshot
    mismatches = []
    for field, dumped in pycompat.items(snapshot):
        candidates = self[field.model_name].browse(rid for rid in dumped if rid)
        for rec in candidates:
            try:
                raw = dumped[rec.id]
                if isinstance(raw, SpecialValue):
                    raw = raw.get()
                expected = field.convert_to_record(raw, rec)
                actual = rec[field.name]
                if actual != expected:
                    mismatches.append((field, rec, {'cached': expected, 'fetched': actual}))
            except (AccessError, MissingError):
                # records we may not read, or that are gone, are skipped
                pass

    if mismatches:
        raise UserError('Invalid cache for fields\n' + pformat(mismatches))
def render_template(self, template_txt, model, res_ids, post_process=False):
    """ Render the given template text, replace mako expressions ``${expr}``
    with the result of evaluating these expressions with an evaluation
    context containing:

        - ``user``: Model of the current user
        - ``object``: record of the document record this mail is related to
        - ``context``: the context passed to the mail composition wizard

    :param str template_txt: the template text to render
    :param str model: model name of the document record this mail is related to.
    :param int res_ids: list of ids of document records those mails are related to.
    """
    multi_mode = True
    if isinstance(res_ids, pycompat.integer_types):
        # single id given: render only for it and return a bare string
        multi_mode = False
        res_ids = [res_ids]

    results = dict.fromkeys(res_ids, u"")

    # try to load the template
    try:
        # the 'safe' context flag selects the sandboxed mako environment
        mako_env = mako_safe_template_env if self.env.context.get('safe') else mako_template_env
        template = mako_env.from_string(tools.ustr(template_txt))
    except Exception:
        _logger.info("Failed to load template %r", template_txt, exc_info=True)
        return multi_mode and results or results[res_ids[0]]

    # prepare template variables
    records = self.env[model].browse(it for it in res_ids if it)  # filter to avoid browsing [None]
    res_to_rec = dict.fromkeys(res_ids, None)
    for record in records:
        res_to_rec[record.id] = record
    variables = {
        'format_date': lambda date, format=False, context=self._context: format_date(self.env, date, format),
        'format_tz': lambda dt, tz=False, format=False, context=self._context: format_tz(self.env, dt, tz, format),
        'format_amount': lambda amount, currency, context=self._context: format_amount(self.env, amount, currency),
        'user': self.env.user,
        'ctx': self._context,  # context kw would clash with mako internals
    }
    # render once per document, rebinding 'object' each time
    for res_id, record in pycompat.items(res_to_rec):
        variables['object'] = record
        try:
            render_result = template.render(variables)
        except Exception:
            _logger.info("Failed to render template %r using values %r" % (template, variables), exc_info=True)
            raise UserError(_("Failed to render template %r using values %r") % (template, variables))
        if render_result == u"False":
            render_result = u""
        results[res_id] = render_result

    if post_process:
        for res_id, result in pycompat.items(results):
            results[res_id] = self.render_post_process(result)

    return multi_mode and results or results[res_ids[0]]
def enumerate_pages(self, query_string=None):
    """ Available pages in the website/CMS. This is mostly used for links
        generation and can be overridden by modules setting up new HTML
        controllers for dynamic pages (e.g. blog).

        By default, returns template views marked as pages.

        :param str query_string: a (user-provided) string, fetches pages
                                 matching the string
        :returns: a list of mappings with two keys: ``name`` is the displayable
                  name of the resource (page), ``url`` is the absolute URL
                  of the same.
        :rtype: list({name: str, url: str})
    """
    router = request.httprequest.app.get_db_router(request.db)
    # Force enumeration to be performed as public user
    url_set = set()
    for rule in router.iter_rules():
        if not self.rule_is_enumerable(rule):
            continue

        converters = rule._converters or {}
        # static rule (no converters): a plain substring test is enough
        if query_string and not converters and (
                query_string not in rule.build([{}], append_unknown=False)[1]):
            continue

        values = [{}]
        # converters with a domain are processed after the other ones
        convitems = sorted(pycompat.items(converters),
                           key=lambda x: hasattr(x[1], 'domain') and (x[1].domain != '[]'))

        # expand the cartesian product of all converter-generated values
        for (i, (name, converter)) in enumerate(convitems):
            newval = []
            for val in values:
                # the query string is only passed to the last converter
                query = i == len(convitems) - 1 and query_string
                for value_dict in converter.generate(uid=self.env.uid, query=query, args=val):
                    newval.append(val.copy())
                    value_dict[name] = value_dict['loc']
                    del value_dict['loc']
                    newval[-1].update(value_dict)
            values = newval

        for value in values:
            domain_part, url = rule.build(value, append_unknown=False)
            page = {'loc': url}
            # '__'-prefixed keys are exposed as page attributes (prefix stripped)
            for key, val in pycompat.items(value):
                if key.startswith('__'):
                    page[key[2:]] = val
            if url in ('/sitemap.xml', ):
                continue
            if url in url_set:
                continue
            url_set.add(url)

            yield page
def render_template(self, template_txt, model, res_ids, post_process=False):
    """ Render the given template text, replace mako expressions ``${expr}``
    with the result of evaluating these expressions with an evaluation
    context containing:

        - ``user``: browse_record of the current user
        - ``object``: record of the document record this mail is related to
        - ``context``: the context passed to the mail composition wizard

    :param str template_txt: the template text to render
    :param str model: model name of the document record this mail is related to.
    :param int res_ids: list of ids of document records those mails are related to.
    """
    multi_mode = True
    if isinstance(res_ids, pycompat.integer_types):
        # single id given: render only for it and return a bare string
        multi_mode = False
        res_ids = [res_ids]

    results = dict.fromkeys(res_ids, u"")

    # try to load the template
    try:
        # the 'safe' context flag selects the sandboxed mako environment
        mako_env = mako_safe_template_env if self.env.context.get('safe') else mako_template_env
        template = mako_env.from_string(tools.ustr(template_txt))
    except Exception:
        _logger.info("Failed to load template %r", template_txt, exc_info=True)
        return multi_mode and results or results[res_ids[0]]

    # prepare template variables
    records = self.env[model].browse(it for it in res_ids if it)  # filter to avoid browsing [None]
    res_to_rec = dict.fromkeys(res_ids, None)
    for record in records:
        res_to_rec[record.id] = record
    variables = {
        'format_date': lambda date, format=False, context=self._context: format_date(self.env, date, format),
        'format_tz': lambda dt, tz=False, format=False, context=self._context: format_tz(self.env, dt, tz, format),
        'user': self.env.user,
        'ctx': self._context,  # context kw would clash with mako internals
    }
    # render once per document, rebinding 'object' each time
    for res_id, record in pycompat.items(res_to_rec):
        variables['object'] = record
        try:
            render_result = template.render(variables)
        except Exception:
            _logger.info("Failed to render template %r using values %r" % (template, variables), exc_info=True)
            raise UserError(_("Failed to render template %r using values %r") % (template, variables))
        if render_result == u"False":
            render_result = u""
        results[res_id] = render_result

    if post_process:
        for res_id, result in pycompat.items(results):
            results[res_id] = self.render_post_process(result)

    return multi_mode and results or results[res_ids[0]]
def get_details_by_rule_category(self, payslip_lines):
    """Organize payslip lines per payslip and salary-rule category.

    :param payslip_lines: hr.payslip.line recordset to organize
    :return: dict mapping slip_id to a list of row dicts: one header row
             per category ancestor, then one row per line, each carrying
             a ``level`` for display indentation
    """
    PayslipLine = self.env['hr.payslip.line']
    RuleCateg = self.env['hr.salary.rule.category']

    def get_recursive_parent(current_rule_category, rule_categories=None):
        # accumulate the category chain; each ancestor is prepended via
        # the recordset union, so ancestors come first when iterating
        if rule_categories:
            rule_categories = current_rule_category | rule_categories
        else:
            rule_categories = current_rule_category

        if current_rule_category.parent_id:
            return get_recursive_parent(current_rule_category.parent_id, rule_categories)
        else:
            return rule_categories

    res = {}
    result = {}

    if payslip_lines:
        # fetch (line id, category id, slip id), ordered for display
        self.env.cr.execute("""
            SELECT pl.id, pl.category_id, pl.slip_id
            FROM hr_payslip_line as pl
            LEFT JOIN hr_salary_rule_category AS rc on (pl.category_id = rc.id)
            WHERE pl.id in %s
            GROUP BY rc.parent_id, pl.sequence, pl.id, pl.category_id
            ORDER BY pl.sequence, rc.parent_id""", (tuple(payslip_lines.ids),))
        for x in self.env.cr.fetchall():
            # nest as result[slip_id][category_id] -> [line ids]
            result.setdefault(x[2], {})
            result[x[2]].setdefault(x[1], [])
            result[x[2]][x[1]].append(x[0])

        for payslip_id, lines_dict in pycompat.items(result):
            res.setdefault(payslip_id, [])
            for rule_categ_id, line_ids in pycompat.items(lines_dict):
                rule_categories = RuleCateg.browse(rule_categ_id)
                lines = PayslipLine.browse(line_ids)
                level = 0
                # one header row per category ancestor, each one level deeper
                for parent in get_recursive_parent(rule_categories):
                    res[payslip_id].append({
                        'rule_category': parent.name,
                        'name': parent.name,
                        'code': parent.code,
                        'level': level,
                        'total': sum(lines.mapped('total')),
                    })
                    level += 1
                # then the individual lines, indented below the headers
                for line in lines:
                    res[payslip_id].append({
                        'rule_category': line.name,
                        'name': line.name,
                        'code': line.code,
                        'total': line.total,
                        'level': level
                    })
    return res
def process(self, products, ppg=PPG):
    """Place ``products`` on the PPR-column grid and return it as rows.

    :param products: iterable of product records carrying
        ``website_size_x`` / ``website_size_y`` / ``website_style_ids``
    :param int ppg: products per grid page; beyond it products are 1x1
    :return: list of rows, each a list of cell dicts
        ``{'product', 'x', 'y', 'class'}``

    Fix: row indices are computed with floor division (``//``).  Under
    Python 3, ``pos / PPR`` is true division and would produce float
    table keys/rows; ``//`` restores the Python 2 integer behavior.
    """
    # Compute products positions on the grid
    minpos = 0
    index = 0
    maxy = 0
    x = 0
    for p in products:
        # clamp requested size to the grid width
        x = min(max(p.website_size_x, 1), PPR)
        y = min(max(p.website_size_y, 1), PPR)
        if index >= ppg:
            x = y = 1

        # find the first free slot that fits an x-by-y block
        pos = minpos
        while not self._check_place(pos % PPR, pos // PPR, x, y):
            pos += 1
        # if 21st products (index 20) and the last line is full (PPR products in it), break
        # (pos + 1.0) / PPR is the line where the product would be inserted
        # maxy is the number of existing lines
        # + 1.0 is because pos begins at 0, thus pos 20 is actually the 21st block
        # and to force python to not round the division operation
        if index >= ppg and ((pos + 1.0) / PPR) > maxy:
            break

        if x == 1 and y == 1:  # simple heuristic for CPU optimization
            minpos = pos // PPR

        # mark every covered cell as occupied, then store the product
        # in its top-left cell
        for y2 in range(y):
            for x2 in range(x):
                self.table[(pos // PPR) + y2][(pos % PPR) + x2] = False
        self.table[pos // PPR][pos % PPR] = {
            'product': p, 'x': x, 'y': y,
            'class': " ".join(x.html_class for x in p.website_style_ids if x.html_class)
        }
        if index <= ppg:
            maxy = max(maxy, y + (pos // PPR))
        index += 1

    # Format table according to HTML needs
    rows = sorted(pycompat.items(self.table))
    rows = [r[1] for r in rows]
    for col in range(len(rows)):
        cols = sorted(pycompat.items(rows[col]))
        x += len(cols)
        # drop the False placeholders; keep only real product cells
        rows[col] = [r[1] for r in cols if r[1]]

    return rows
def enumerate_pages(self, query_string=None):
    """ Available pages in the website/CMS. This is mostly used for links
        generation and can be overridden by modules setting up new HTML
        controllers for dynamic pages (e.g. blog).

        By default, returns template views marked as pages.

        :param str query_string: a (user-provided) string, fetches pages
                                 matching the string
        :returns: a list of mappings with two keys: ``name`` is the displayable
                  name of the resource (page), ``url`` is the absolute URL
                  of the same.
        :rtype: list({name: str, url: str})
    """
    router = request.httprequest.app.get_db_router(request.db)
    # Force enumeration to be performed as public user
    seen_urls = set()
    for rule in router.iter_rules():
        if not self.rule_is_enumerable(rule):
            continue

        converters = rule._converters or {}
        # static rule: filter on a plain substring match of the built URL
        if query_string and not converters and (query_string not in rule.build([{}], append_unknown=False)[1]):
            continue

        # converters with a domain are processed after the other ones
        ordered = sorted(pycompat.items(converters),
                         key=lambda it: hasattr(it[1], 'domain') and (it[1].domain != '[]'))

        candidates = [{}]
        for pos, (name, converter) in enumerate(ordered):
            expanded = []
            for partial in candidates:
                # only the last converter receives the query string
                query = pos == len(ordered) - 1 and query_string
                for generated in converter.generate(uid=self.env.uid, query=query, args=partial):
                    entry = partial.copy()
                    generated[name] = generated['loc']
                    del generated['loc']
                    entry.update(generated)
                    expanded.append(entry)
            candidates = expanded

        for candidate in candidates:
            domain_part, url = rule.build(candidate, append_unknown=False)
            if url in ('/sitemap.xml',):
                continue
            if url in seen_urls:
                continue
            seen_urls.add(url)

            page = {'loc': url}
            # keys prefixed with '__' become page attributes, prefix stripped
            for key, val in pycompat.items(candidate):
                if key.startswith('__'):
                    page[key[2:]] = val
            yield page
def __new__(typ, name, parents, attrs):
    """Metaclass hook: split ``attrs`` into data attributes and callables,
    and synthesize an ``__init__`` that applies the data attributes as
    defaults then overrides them from keyword arguments (which must name
    existing data attributes).  Item access (``obj['x']``) is aliased to
    attribute access.
    """
    methods = {}
    data = {}
    for key, val in pycompat.items(attrs):
        (methods if callable(val) else data)[key] = val

    def init(self, **kw):
        # class-level defaults first, then keyword overrides
        for key, val in pycompat.items(data):
            setattr(self, key, val)
        for key, val in pycompat.items(kw):
            assert key in data
            setattr(self, key, val)

    methods['__init__'] = init
    methods['__getitem__'] = getattr
    return type.__new__(typ, name, parents, methods)
def _process_registration_details(self, details):
    """Process data posted from the attendee details form.

    Keys look like ``<counter>-<field>``; counter '0' carries values
    shared by every attendee, any other counter identifies one attendee.
    Returns the list of per-attendee value dicts.
    """
    per_attendee = {}
    shared = {}
    for key, value in pycompat.items(details):
        counter, field_name = key.split('-', 1)
        if counter == '0':
            shared[field_name] = value
        else:
            per_attendee.setdefault(counter, dict())[field_name] = value

    # propagate the shared values onto each individual registration
    for key, value in pycompat.items(shared):
        for registration in pycompat.values(per_attendee):
            registration[key] = value

    return list(pycompat.values(per_attendee))
def test_render_static_xml_with_t_call(self):
    """t-call on a namespaced QWeb view: the callee's xmlns declarations
    must be hoisted onto the root element and emitted only once."""
    view1 = self.env['ir.ui.view'].create({
        'name': "dummy",
        'type': 'qweb',
        'arch': """
            <t t-name="base.dummy">
                <cac:fruit xmlns:cac="urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2"
                           xmlns:cbc="urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2">
                    <cac:table>
                        <cbc:td>Appel</cbc:td>
                        <cbc:td>Pineappel</cbc:td>
                    </cac:table>
                </cac:fruit>
            </t>
        """
    })
    # register an xml id so view2 can t-call "base.dummy"
    # fix: pass the id as a query parameter instead of %-interpolating it
    # into the SQL string
    self.env.cr.execute(
        "INSERT INTO ir_model_data(name, model, res_id, module) "
        "VALUES ('dummy', 'ir.ui.view', %s, 'base')", (view1.id,))

    # view2 will t-call view1
    view2 = self.env['ir.ui.view'].create({
        'name': "dummy2",
        'type': 'qweb',
        'arch': """
            <t t-name="base.dummy2">
                <root xmlns="urn:oasis:names:specification:ubl:schema:xsd:Invoice-2"
                      xmlns:cac="urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2"
                      xmlns:cbc="urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2">
                    <cac:line t-foreach="[1, 2]" t-as="i" t-call="base.dummy"/>
                </root>
            </t>
        """
    })

    result = view2.render()
    result_etree = etree.fromstring(result)

    # check that the root tag has all its xmlns
    expected_ns = {
        None: 'urn:oasis:names:specification:ubl:schema:xsd:Invoice-2',
        'cac': 'urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2',
        'cbc': 'urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2'
    }
    self.assertSetEqual(
        set(pycompat.items(expected_ns)) - set(pycompat.items(result_etree.nsmap)),
        set())

    # check that the t-call did its work
    cac_lines = result_etree.findall(
        './/cac:line',
        namespaces={'cac': 'urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2'})
    self.assertEquals(len(cac_lines), 2)
    self.assertEquals(result.count('Appel'), 2)

    # check that the t-call did not output again the xmlns declaration
    self.assertEquals(
        result.count('xmlns:cbc="urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2"'),
        1)
def _set_pad_value(self, vals):
    """Keep pad fields and their companion content fields in sync.

    :param dict vals: values being written on the record; mutated in place
        to also update the ``pad_content_field`` of any modified pad field
    """
    # Update the pad if the `pad_content_field` is modified
    for k, field in pycompat.items(self._fields):
        if hasattr(field, 'pad_content_field') and vals.get(field.pad_content_field):
            company = self.env.user.sudo().company_id
            myPad = EtherpadLiteClient(company.pad_key, company.pad_server + '/api')
            # pad id is the path segment after '/p/' in the stored pad URL
            path = self[k].split('/p/')[1]
            myPad.setText(path, (html2plaintext(vals[field.pad_content_field]).encode('utf-8')))

    # Update the `pad_content_field` if the pad is modified
    # list(...) because vals is mutated while being iterated
    for k, v in list(pycompat.items(vals)):
        field = self._fields[k]
        if hasattr(field, 'pad_content_field'):
            vals[field.pad_content_field] = self.pad_get_content(v)
def get_groups_by_application(self):
    """ Return all groups classified by application (module category), as a list::

            [(app, kind, groups), ...],

        where ``app`` and ``groups`` are recordsets, and ``kind`` is either
        ``'boolean'`` or ``'selection'``. Applications are given in sequence
        order.  If ``kind`` is ``'selection'``, ``groups`` are given in
        reverse implication order.
    """
    def linearize(app, gs):
        # determine sequence order: a group appears after its implied groups
        order = {g: len(g.trans_implied_ids & gs) for g in gs}
        # check whether order is total, i.e., sequence orders are distinct
        if len(set(pycompat.values(order))) == len(gs):
            return (app, 'selection', gs.sorted(key=order.get))
        else:
            return (app, 'boolean', gs)

    # classify all groups by application
    by_app, others = defaultdict(self.browse), self.browse()
    for g in self.get_application_groups([]):
        if g.category_id:
            by_app[g.category_id] += g
        else:
            others += g
    # build the result
    res = []
    for app, gs in sorted(pycompat.items(by_app), key=lambda it: it[0].sequence or 0):
        res.append(linearize(app, gs))
    if others:
        # category-less groups go into a pseudo-application at the end
        res.append((self.env['ir.module.category'], 'boolean', others))
    return res
def _get_line_description(self, order_id, product_id, attributes=None):
    """Build the order-line description for a product.

    :param int order_id: sale order the line belongs to
    :param int product_id: product to describe
    :param dict attributes: posted attribute values (may be None)
    :return: display name, with non-variant-creating attribute values and
             the sale description appended on separate lines
    """
    if not attributes:
        attributes = {}

    order = self.sudo().browse(order_id)
    product_context = dict(self.env.context)
    # render the product name in the customer's language
    product_context.setdefault('lang', order.partner_id.lang)
    product = self.env['product.product'].with_context(product_context).browse(product_id)

    name = product.display_name

    # add untracked attributes in the name
    untracked_attributes = []
    for k, v in pycompat.items(attributes):
        # attribute should be like 'attribute-48-1' where 48 is the product_id, 1 is the attribute_id and v is the attribute value
        attribute_value = self.env['product.attribute.value'].sudo().browse(int(v))
        if attribute_value and not attribute_value.attribute_id.create_variant:
            untracked_attributes.append(attribute_value.name)
    if untracked_attributes:
        name += '\n%s' % (', '.join(untracked_attributes))

    if product.description_sale:
        name += '\n%s' % (product.description_sale)

    return name
def test_08_survey_urls(self):
    """Check the absolute/relative public, print and result URLs of a
    survey, plus the public HTML link snippet."""

    def validate_url(url):
        """ Reference: https://github.com/django/django/blob/master/django/core/validators.py """
        url_regex = re.compile(
            r'^https?://'  # http:// or https://
            r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|'  # domain...
            r'localhost|'  # localhost...
            r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}|'  # ...or ipv4
            r'\[?[A-F0-9]*:[A-F0-9:]+\]?)'  # ...or ipv6
            r'(?::\d+)?'  # optional port
            r'(?:/?|[/?]\S+)$', re.IGNORECASE)
        # idiom fix: bool(match) instead of `True if match else False`
        return bool(url_regex.match(url))

    base_url = self.env['ir.config_parameter'].get_param('web.base.url')
    urltypes = {'public': 'start', 'print': 'print', 'result': 'results'}

    for urltype, urltxt in pycompat.items(urltypes):
        survey_url = getattr(self.survey1, urltype + '_url')
        survey_url_relative = getattr(self.survey1.with_context({'relative_url': True}), urltype + '_url')
        self.assertTrue(validate_url(survey_url))

        url = "survey/%s/%s" % (urltxt, slug(self.survey1))
        full_url = urls.url_join(base_url, url)
        self.assertEqual(full_url, survey_url)
        self.assertEqual('/' + url, survey_url_relative)

        if urltype == 'public':
            url_html = '<a href="%s">Click here to start survey</a>'
            self.assertEqual(url_html % full_url,
                             getattr(self.survey1, urltype + '_url_html'),
                             msg="Public URL is incorrect")
            self.assertEqual(url_html % ('/' + url),
                             getattr(self.survey1.with_context({'relative_url': True}), urltype + '_url_html'),
                             msg="Public URL is incorrect.")
def _parse_import_data(self, data, import_fields, options):
    """Pre-process raw import rows before loading.

    Converts date/datetime cells from the user-chosen format to the server
    format, and normalizes float/monetary cells.

    :param list data: rows of raw cell values; mutated in place
    :param list import_fields: field name per column, aligned with the rows
    :param dict options: may carry 'date_format' / 'datetime_format'
    :return: the (mutated) ``data``
    :raise ValueError: on unparsable date cells
    """
    # Get fields of type date/datetime
    all_fields = self.env[self.res_model].fields_get()
    for name, field in pycompat.items(all_fields):
        if field['type'] in ('date', 'datetime') and name in import_fields:
            # Parse date
            index = import_fields.index(name)
            dt = datetime.datetime
            server_format = DEFAULT_SERVER_DATE_FORMAT if field['type'] == 'date' else DEFAULT_SERVER_DATETIME_FORMAT

            # only reformat when the user format differs from the server's
            if options.get('%s_format' % field['type'], server_format) != server_format:
                # NOTE(review): ustr(...).encode('utf-8') yields bytes, and
                # strptime() on Python 3 expects str — confirm this path on py3
                user_format = ustr(options.get('%s_format' % field['type'])).encode('utf-8')
                for num, line in enumerate(data):
                    if line[index]:
                        try:
                            line[index] = dt.strftime(dt.strptime(ustr(line[index]).encode('utf-8'), user_format), server_format)
                        except ValueError as e:
                            raise ValueError(_("Column %s contains incorrect values. Error in line %d: %s") % (name, num + 1, e))
                        except Exception as e:
                            raise ValueError(_("Error Parsing Date [%s:L%d]: %s") % (name, num + 1, e))
        elif field['type'] in ('float', 'monetary') and name in import_fields:
            # Parse float, sometimes float values from file have currency symbol or () to denote a negative value
            # We should be able to manage both case
            index = import_fields.index(name)
            self._parse_float_from_data(data, index, name, options)
    return data
def update_from_db(self, cr):
    """Refresh each package of the graph with its state in the database.

    :param cr: database cursor used to query ``ir_module_module``
    """
    if not len(self):
        return
    # update the graph with values from the database (if exist)
    ## First, we set the default values for each package in graph
    additional_data = {
        key: {
            'id': 0,
            'state': 'uninstalled',
            'dbdemo': False,
            'installed_version': None
        }
        for key in pycompat.keys(self)
    }
    ## Then we get the values from the database
    cr.execute('SELECT name, id, state, demo AS dbdemo, latest_version AS installed_version'
               '  FROM ir_module_module'
               ' WHERE name IN %s', (tuple(additional_data),))
    ## and we update the default values with values from the database
    additional_data.update((x['name'], x) for x in cr.dictfetchall())

    # push the merged values onto the package objects as attributes
    for package in pycompat.values(self):
        for k, v in pycompat.items(additional_data[package.name]):
            setattr(package, k, v)
def _create_account_move_line(self, move, credit_account_id, debit_account_id, journal_id):
    """Post one account move per distinct quant cost found in ``self``.

    Quants are grouped by unit cost; for each (cost, total qty) group an
    account move is created with the lines prepared by the stock move,
    then immediately posted.
    """
    # group quants by cost
    cost_to_qty = defaultdict(lambda: 0.0)
    for quant in self:
        cost_to_qty[quant.cost] += quant.qty

    AccountMove = self.env['account.move']
    for cost, qty in pycompat.items(cost_to_qty):
        move_lines = move._prepare_account_move_line(qty, cost, credit_account_id, debit_account_id)
        if not move_lines:
            continue
        # allow the caller to force the accounting date via context
        date = self._context.get('force_period_date', fields.Date.context_today(self))
        account_move = AccountMove.create({
            'journal_id': journal_id,
            'line_ids': move_lines,
            'date': date,
            'ref': move.picking_id.name,
        })
        account_move.post()
def _postprocess_args(cls, arguments, rule):
    """ post process arg to set uid on browse records """
    for name, arg in list(pycompat.items(arguments)):
        if not isinstance(arg, models.BaseModel) or arg._uid is not UID_PLACEHOLDER:
            continue
        # rebind the recordset to the request user
        arguments[name] = arg.sudo(request.uid)
        # existence is checked on the original recordset, as before
        if not arg.exists():
            return cls._handle_exception(werkzeug.exceptions.NotFound())
def import_zipfile(self, module_file, force=False):
    """Import every module contained in a zip archive.

    :param module_file: file-like object holding the zip archive
    :param bool force: forwarded to ``import_module``
    :return: tuple (report text, list of module names found in the archive)
    :raise UserError: if the file is not a zip, or a member exceeds
        ``MAX_FILE_SIZE``
    """
    if not module_file:
        raise Exception(_("No file sent."))
    if not zipfile.is_zipfile(module_file):
        raise UserError(_('File is not a zip file!'))

    success = []
    errors = dict()
    module_names = []
    with zipfile.ZipFile(module_file, "r") as z:
        # reject oversized members before extracting anything
        for zf in z.filelist:
            if zf.file_size > MAX_FILE_SIZE:
                raise UserError(_("File '%s' exceed maximum allowed file size") % zf.filename)

        with tempdir() as module_dir:
            z.extractall(module_dir)
            # each top-level directory of the archive is one module
            dirs = [d for d in os.listdir(module_dir) if os.path.isdir(opj(module_dir, d))]
            for mod_name in dirs:
                module_names.append(mod_name)
                try:
                    # assert mod_name.startswith('theme_')
                    path = opj(module_dir, mod_name)
                    self.import_module(mod_name, path, force=force)
                    success.append(mod_name)
                except Exception as e:
                    # best effort: record the failure and keep importing
                    _logger.exception('Error while importing module')
                    errors[mod_name] = exception_to_unicode(e)
    r = ["Successfully imported module '%s'" % mod for mod in success]
    for mod, error in pycompat.items(errors):
        r.append("Error while importing module '%s': %r" % (mod, error))
    return '\n'.join(r), module_names
def check(self, mode):
    """ Check access rights of operation ``mode`` on ``self`` for the
        current user. Raise an AccessError in case conditions are not met.
    """
    if self.env.user._is_admin():
        # admin bypasses all access checks
        return

    # collect translated field records (model_ids) and other translations
    trans_ids = []
    model_ids = defaultdict(list)
    model_fields = defaultdict(list)
    for trans in self:
        if trans.type == 'model':
            # 'model'-type translations store their target as '<model>,<field>'
            mname, fname = trans.name.split(',')
            model_ids[mname].append(trans.res_id)
            model_fields[mname].append(fname)
        else:
            trans_ids.append(trans.id)

    # check for regular access rights on other translations
    if trans_ids:
        records = self.browse(trans_ids)
        records.check_access_rights(mode)
        records.check_access_rule(mode)

    # check for read/write access on translated field records
    fmode = 'read' if mode == 'read' else 'write'
    for mname, ids in pycompat.items(model_ids):
        records = self.env[mname].browse(ids)
        records.check_access_rights(fmode)
        records.check_field_access_rights(fmode, model_fields[mname])
        records.check_access_rule(fmode)
def onchange_template_id_wrapper(self):
    """Apply the results of onchange_template_id() onto the record."""
    self.ensure_one()
    onchange_values = self.onchange_template_id(
        self.template_id.id, self.composition_mode, self.model, self.res_id)['value']
    for field_name, field_value in pycompat.items(onchange_values):
        setattr(self, field_name, field_value)
def buckaroo_return(self, **post):
    """ Buckaroo."""
    _logger.info('Buckaroo: entering form_feedback with post data %s', pprint.pformat(post))  # debug
    request.env['payment.transaction'].sudo().form_feedback(post, 'buckaroo')
    # Buckaroo key casing varies; normalize everything to upper case
    normalized = {}
    for key, value in pycompat.items(post):
        normalized[key.upper()] = value
    post = normalized
    return_url = post.get('ADD_RETURNDATA') or '/'
    return werkzeug.utils.redirect(return_url)
def create(self, vals):
    """Create a link tracker, reusing an existing identical one if any.

    :param dict vals: creation values; must contain 'url'
    :return: the new (or matching existing) link.tracker record
    :raise ValueError: when 'url' is missing
    """
    create_vals = vals.copy()
    if 'url' not in create_vals:
        raise ValueError('URL field required')
    else:
        create_vals['url'] = VALIDATE_URL(vals['url'])

    # look for an existing tracker matching every provided value
    search_domain = []
    for fname, value in pycompat.items(create_vals):
        search_domain.append((fname, '=', value))

    result = self.search(search_domain, limit=1)

    if result:
        return result

    if not create_vals.get('title'):
        create_vals['title'] = self._get_title_from_url(create_vals['url'])

    # Prevent the UTMs to be set by the values of UTM cookies
    for (key, fname, cook) in self.env['utm.mixin'].tracking_fields():
        if fname not in create_vals:
            create_vals[fname] = False

    link = super(link_tracker, self).create(create_vals)

    # attach a freshly generated short code to the new tracker
    code = self.env['link.tracker.code'].get_random_code_string()
    self.env['link.tracker.code'].create({'code': code, 'link_id': link.id})

    return link
def scheduler_manage_contract_expiration(self):
    """Cron entry point managing vehicle contract states.

    Posts a warning message on each vehicle having contracts expiring
    within 15 days, then updates contract states: 'diesoon' for nearly
    expired, 'expired' for past-due, 'futur' for not-yet-started and
    'open' for started ones.
    """
    # This method is called by a cron task
    # It manages the state of a contract, possibly by posting a message on
    # the vehicle concerned and updating its status
    date_today = fields.Date.from_string(fields.Date.today())
    in_fifteen_days = fields.Date.to_string(date_today + relativedelta(days=+15))
    nearly_expired_contracts = self.search(
        [('state', '=', 'open'), ('expiration_date', '<', in_fifteen_days)])

    # count soon-to-expire contracts per vehicle (dict.get idiom replaces
    # the hand-rolled if/else counter)
    res = {}
    for contract in nearly_expired_contracts:
        res[contract.vehicle_id.id] = res.get(contract.vehicle_id.id, 0) + 1

    Vehicle = self.env['fleet.vehicle']
    for vehicle, value in pycompat.items(res):
        Vehicle.browse(vehicle).message_post(
            body=_('%s contract(s) will expire soon and should be renewed and/or closed!') % value)
    nearly_expired_contracts.write({'state': 'diesoon'})

    expired_contracts = self.search(
        [('state', '!=', 'expired'), ('expiration_date', '<', fields.Date.today())])
    expired_contracts.write({'state': 'expired'})

    futur_contracts = self.search(
        [('state', 'not in', ['futur', 'closed']), ('start_date', '>', fields.Date.today())])
    futur_contracts.write({'state': 'futur'})

    now_running_contracts = self.search(
        [('state', '=', 'futur'), ('start_date', '<=', fields.Date.today())])
    now_running_contracts.write({'state': 'open'})
def __str__(self):
    """Render positional args as reprs, then keyword args sorted by name."""
    rendered = [repr(arg) for arg in self.args]
    rendered.extend("%s=%r" % item for item in sorted(pycompat.items(self.kwargs)))
    return ', '.join(rendered)
def transfer_field_to_modifiers(field, modifiers):
    """Translate a field description's 'invisible'/'readonly'/'required'
    attributes (and its per-state overrides) into view modifiers.

    When some states override an attribute's default, the modifier becomes
    a domain on the 'state' field; otherwise it is the plain boolean.
    ``modifiers`` is updated in place.
    """
    attrs = ('invisible', 'readonly', 'required')
    defaults = {attr: bool(field.get(attr)) for attr in attrs}
    exceptions = {attr: [] for attr in attrs}

    # collect, per attribute, the states whose value differs from the default
    for state, modifs in pycompat.items(field.get("states", {})):
        for modif in modifs:
            if defaults[modif[0]] != modif[1]:
                exceptions[modif[0]].append(state)

    for attr, default in pycompat.items(defaults):
        states = exceptions[attr]
        if states:
            modifiers[attr] = [("state", "not in" if default else "in", states)]
        else:
            modifiers[attr] = default
def _search_res_name(self, operator, operand):
    """Returns a domain with ids of workitem whose `operator` matches
    with the given `operand`"""
    if not operand:
        return []
    # placeholder domain leaf: the field name is filled in per model below
    condition_name = [None, operator, operand]

    # NOTE(review): this fetches ALL workitems with their target model and
    # res_id, then filters in Python via search() — confirm this is intended
    self.env.cr.execute("""SELECT w.id, w.res_id, m.model  \
                            FROM marketing_campaign_workitem w \
                                LEFT JOIN marketing_campaign_activity a ON (a.id=w.activity_id)\
                                LEFT JOIN marketing_campaign c ON (c.id=a.campaign_id)\
                                LEFT JOIN ir_model m ON (m.id=c.object_id)
                            """)
    res = self.env.cr.fetchall()
    workitem_map = {}
    matching_workitems = []
    # group workitem ids as workitem_map[model][res_id] -> set(workitem ids)
    for id, res_id, model in res:
        workitem_map.setdefault(model, {}).setdefault(res_id, set()).add(id)
    for model, id_map in pycompat.items(workitem_map):
        Model = self.env[model]
        # match the operand against the target model's display field
        condition_name[0] = Model._rec_name
        condition = [('id', 'in', list(id_map)), condition_name]
        for record in Model.search(condition):
            matching_workitems.extend(id_map[record.id])
    return [('id', 'in', list(set(matching_workitems)))]
def cart_options_update_json(self, product_id, add_qty=1, set_qty=0, goto_shop=None, lang=None, **kw):
    """Update the cart with a product and its selected optional products.

    :param product_id: id of the main product.product to add
    :param add_qty: quantity to add to the cart line
    :param set_qty: absolute quantity to set (takes precedence when truthy)
    :param goto_shop: unused here; presumably kept for route compatibility
        — TODO confirm against the route definition
    :param lang: optional language code used to localize the order
    :param kw: posted form values; 'optional-product-*' keys carry optional
        product ids, matching 'optional-add-*' keys act as checkboxes, and
        remaining keys may carry attribute values
    :return: the new cart quantity, as a string
    """
    if lang:
        # Localize the website (and thus the order lines) in the requested language.
        request.website = request.website.with_context(lang=lang)

    order = request.website.sale_get_order(force_create=1)
    product = request.env['product.product'].browse(int(product_id))
    # Variants that are valid optional products for this product.
    option_ids = product.optional_product_ids.mapped('product_variant_ids').ids
    optional_product_ids = []
    for k, v in pycompat.items(kw):
        # Keep an optional product only when its matching 'optional-add-*'
        # checkbox was ticked and it is a valid option for this product.
        if "optional-product-" in k and int(kw.get(k.replace("product", "add"))) and int(v) in option_ids:
            optional_product_ids.append(int(v))

    attributes = self._filter_attributes(**kw)

    value = {}
    if add_qty or set_qty:
        value = order._cart_update(
            product_id=int(product_id),
            add_qty=int(add_qty),
            set_qty=int(set_qty),
            attributes=attributes,
            optional_product_ids=optional_product_ids
        )

    # options have all time the same quantity
    for option_id in optional_product_ids:
        order._cart_update(
            product_id=option_id,
            set_qty=value.get('quantity'),
            attributes=attributes,
            linked_line_id=value.get('line_id')
        )

    return str(order.cart_quantity)
def price_get(self, prod_id, qty, partner=None):
    """ Multi pricelist, mono product - returns price per pricelist """
    result = {}
    rules = self.price_rule_get(prod_id, qty, partner=partner)
    for pricelist_id, price_rule in pycompat.items(rules):
        # price_rule is a (price, rule) pair; keep only the price.
        result[pricelist_id] = price_rule[0]
    return result
def _get_inventory_lines_values(self):
    """Build the values for inventory lines from current stock.quant data,
    filtered according to the wizard options (company, owner, lot, product,
    package, product category), optionally including exhausted products.

    :return: list of dicts suitable for stock.inventory.line creation
    """
    # TDE CLEANME: is sql really necessary ? I don't think so
    locations = self.env['stock.location'].search([('id', 'child_of', [self.location_id.id])])
    # The WHERE clause is assembled incrementally; `domain` only ever
    # receives the fixed fragments below, while every value goes through
    # the psycopg2 parameter tuple `args` — so the final %-interpolation
    # of `domain` into the query is not injectable.
    domain = ' location_id in %s'
    args = (tuple(locations.ids),)

    vals = []
    Product = self.env['product.product']
    # Empty recordset of products available in stock_quants
    quant_products = self.env['product.product']
    # Empty recordset of products to filter
    products_to_filter = self.env['product.product']

    # case 0: Filter on company
    if self.company_id:
        domain += ' AND company_id = %s'
        args += (self.company_id.id,)

    #case 1: Filter on One owner only or One product for a specific owner
    if self.partner_id:
        domain += ' AND owner_id = %s'
        args += (self.partner_id.id,)
    #case 2: Filter on One Lot/Serial Number
    if self.lot_id:
        domain += ' AND lot_id = %s'
        args += (self.lot_id.id,)
    #case 3: Filter on One product
    if self.product_id:
        domain += ' AND product_id = %s'
        args += (self.product_id.id,)
        products_to_filter |= self.product_id
    #case 4: Filter on A Pack
    if self.package_id:
        domain += ' AND package_id = %s'
        args += (self.package_id.id,)
    #case 5: Filter on One product category + Exahausted Products
    if self.category_id:
        categ_products = Product.search([('categ_id', '=', self.category_id.id)])
        domain += ' AND product_id = ANY (%s)'
        args += (categ_products.ids,)
        products_to_filter |= categ_products

    self.env.cr.execute("""SELECT product_id, sum(quantity) as product_qty, location_id, lot_id as prod_lot_id, package_id, owner_id as partner_id
        FROM stock_quant
        WHERE %s
        GROUP BY product_id, location_id, lot_id, package_id, partner_id """ % domain, args)

    for product_data in self.env.cr.dictfetchall():
        # replace the None the dictionary by False, because falsy values are tested later on
        for void_field in [item[0] for item in pycompat.items(product_data) if item[1] is None]:
            product_data[void_field] = False
        product_data['theoretical_qty'] = product_data['product_qty']
        if product_data['product_id']:
            product_data['product_uom_id'] = Product.browse(product_data['product_id']).uom_id.id
            quant_products |= Product.browse(product_data['product_id'])
        vals.append(product_data)
    if self.exhausted:
        # Also add zero-quantity lines for products not present in quants.
        exhausted_vals = self._get_exhausted_inventory_line(products_to_filter, quant_products)
        vals.extend(exhausted_vals)
    return vals
def _compute_analytic(self, domain=None):
    """Recompute qty_delivered on sale order lines from analytic lines.

    Aggregates account.analytic.line amounts grouped by (SO line, UoM),
    converts quantities to each SO line's UoM when the categories are
    compatible, and writes the summed quantity back to qty_delivered.

    :param domain: optional domain on account.analytic.line; defaults to
        the negative-amount lines linked to this recordset's SO lines
    :return: True
    """
    lines = {}
    force_so_lines = self.env.context.get("force_so_lines")
    if not domain:
        if not self.ids and not force_so_lines:
            return True
        # NOTE: the previously computed 'account.data_account_type_expenses'
        # xmlid lookup was dead code (never used in the domain) and has
        # been removed.
        domain = [('so_line', 'in', self.ids), ('amount', '<=', 0.0)]
    data = self.env['account.analytic.line'].read_group(
        domain,
        ['so_line', 'unit_amount', 'product_uom_id'],
        ['product_uom_id', 'so_line'], lazy=False
    )
    # If the unlinked analytic line was the last one on the SO line, the
    # qty would otherwise never be reset: seed those lines with 0.0.
    if force_so_lines:
        for line in force_so_lines:
            lines.setdefault(line, 0.0)
    for d in data:
        if not d['product_uom_id']:
            continue
        line = self.browse(d['so_line'][0])
        lines.setdefault(line, 0.0)
        uom = self.env['product.uom'].browse(d['product_uom_id'][0])
        if line.product_uom.category_id == uom.category_id:
            # Convert to the SO line's UoM when both are comparable.
            qty = uom._compute_quantity(d['unit_amount'], line.product_uom)
        else:
            qty = d['unit_amount']
        lines[line] += qty
    for line, qty in pycompat.items(lines):
        line.qty_delivered = qty
    return True
def _compute_analytic(self, domain=None):
    """Recompute qty_delivered on sale order lines from analytic lines.

    Aggregates account.analytic.line amounts grouped by (SO line, UoM),
    converts quantities to each SO line's UoM when the categories are
    compatible, and writes the summed quantity back to qty_delivered.

    :param domain: optional domain on account.analytic.line; defaults to
        the negative-amount lines linked to this recordset's SO lines
    :return: True
    """
    lines = {}
    force_so_lines = self.env.context.get("force_so_lines")
    if not domain:
        if not self.ids and not force_so_lines:
            return True
        # NOTE: the previously computed 'account.data_account_type_expenses'
        # xmlid lookup was dead code (never used in the domain) and has
        # been removed.
        domain = [('so_line', 'in', self.ids), ('amount', '<=', 0.0)]
    data = self.env['account.analytic.line'].read_group(
        domain,
        ['so_line', 'unit_amount', 'product_uom_id'],
        ['product_uom_id', 'so_line'], lazy=False)
    # If the unlinked analytic line was the last one on the SO line, the
    # qty would otherwise never be reset: seed those lines with 0.0.
    if force_so_lines:
        for line in force_so_lines:
            lines.setdefault(line, 0.0)
    for d in data:
        if not d['product_uom_id']:
            continue
        line = self.browse(d['so_line'][0])
        lines.setdefault(line, 0.0)
        uom = self.env['product.uom'].browse(d['product_uom_id'][0])
        if line.product_uom.category_id == uom.category_id:
            # Convert to the SO line's UoM when both are comparable.
            qty = uom._compute_quantity(d['unit_amount'], line.product_uom)
        else:
            qty = d['unit_amount']
        lines[line] += qty
    for line, qty in pycompat.items(lines):
        line.qty_delivered = qty
    return True
def _get_classified_fields(self):
    """ return a dictionary with the fields classified by category::

            {   'default': [('default_foo', 'model', 'foo'), ...],
                'group':   [('group_bar', [browse_group], browse_implied_group), ...],
                'module':  [('module_baz', browse_module), ...],
                'other':   ['other_field', ...],
            }
    """
    IrModule = self.env['ir.module.module']
    Groups = self.env['res.groups']
    ref = self.env.ref

    defaults, groups, modules, others = [], [], [], []
    for name, field in pycompat.items(self._fields):
        if name.startswith('default_') and hasattr(field, 'default_model'):
            defaults.append((name, field.default_model, name[len('default_'):]))
        elif (name.startswith('group_')
                and field.type in ('boolean', 'selection')
                and hasattr(field, 'implied_group')):
            # A 'group_*' field toggles 'implied_group' on the groups listed
            # in its 'group' attribute (employees group by default).
            xmlids = getattr(field, 'group', 'base.group_user').split(',')
            field_groups = Groups.concat(*(ref(xmlid) for xmlid in xmlids))
            groups.append((name, field_groups, ref(field.implied_group)))
        elif name.startswith('module_') and field.type in ('boolean', 'selection'):
            module = IrModule.sudo().search([('name', '=', name[len('module_'):])], limit=1)
            modules.append((name, module))
        else:
            others.append(name)

    return {'default': defaults, 'group': groups, 'module': modules, 'other': others}
def _update_values(self, src_partners, dst_partner):
    """ Update values of dst_partner with the ones from the src_partners.
        :param src_partners : recordset of source res.partner
        :param dst_partner : record of destination res.partner
    """
    _logger.debug('_update_values for dst_partner: %s for src_partners: %r', dst_partner.id, src_partners.ids)

    def serialize(value):
        # Store records by id; everything else is written as-is.
        return value.id if isinstance(value, models.BaseModel) else value

    # Collect values of all simple (non-x2many, non-computed) fields.
    # Later records override earlier ones; dst_partner comes last, so its
    # own truthy values always win.
    values = dict()
    for fname, field in pycompat.items(dst_partner._fields):
        if field.type in ('many2many', 'one2many') or field.compute is not None:
            continue
        for record in itertools.chain(src_partners, [dst_partner]):
            if record[fname]:
                values[fname] = serialize(record[fname])

    # 'id' cannot be written at all and 'parent_id' needs special care.
    values.pop('id', None)
    parent_id = values.pop('parent_id', None)
    dst_partner.write(values)

    # Write parent_id separately: it may create a recursive hierarchy.
    if parent_id and parent_id != dst_partner.id:
        try:
            dst_partner.write({'parent_id': parent_id})
        except ValidationError:
            _logger.info('Skip recursive partner hierarchies for parent_id %s of partner: %s', parent_id, dst_partner.id)
def _get_line_description(self, order_id, product_id, attributes=None):
    """Build an order-line description: the product's display name, the
    untracked attribute values (if any), then the sale description."""
    attributes = attributes or {}

    order = self.sudo().browse(order_id)
    # Render the product in the customer's language.
    ctx = dict(self.env.context)
    ctx.setdefault('lang', order.partner_id.lang)
    product = self.env['product.product'].with_context(ctx).browse(product_id)

    name = product.display_name

    # add untracked attributes in the name
    untracked = []
    # attribute should be like 'attribute-48-1' where 48 is the product_id, 1 is the attribute_id and v is the attribute value
    for key, value in pycompat.items(attributes):
        attr_value = self.env['product.attribute.value'].sudo().browse(int(value))
        if attr_value and not attr_value.attribute_id.create_variant:
            untracked.append(attr_value.name)
    if untracked:
        name += '\n%s' % (', '.join(untracked))

    if product.description_sale:
        name += '\n%s' % (product.description_sale)
    return name
def _postprocess_args(cls, arguments, rule):
    """Post-process routing arguments and enforce canonical multilang URLs.

    Replaces record arguments bound to a RequestUID placeholder with
    records using the real request uid, then 301-redirects to the
    canonical (language-prefixed) path when the requested URL does not
    match the one rebuilt from the rule.
    """
    super(Http, cls)._postprocess_args(arguments, rule)

    for key, val in pycompat.items(arguments):
        # Replace uid placeholder by the current request.uid
        if isinstance(val, models.BaseModel) and isinstance(val._uid, RequestUID):
            arguments[key] = val.sudo(request.uid)

    try:
        # Rebuild the canonical path for these arguments from the rule.
        _, path = rule.build(arguments)
        assert path is not None
    except Exception as e:
        return cls._handle_exception(e, code=404)

    if getattr(request, 'website_multilang', False) and request.httprequest.method in ('GET', 'HEAD'):
        # Compare unquoted forms so percent-encoding differences don't
        # trigger spurious redirects.
        generated_path = werkzeug.url_unquote_plus(path)
        current_path = werkzeug.url_unquote_plus(request.httprequest.path)
        if generated_path != current_path:
            # Prefix with the language code unless it is the default one,
            # keep the query string, and redirect permanently.
            if request.lang != request.website.default_lang_code:
                path = '/' + request.lang + path
            if request.httprequest.query_string:
                path += '?' + request.httprequest.query_string
            return werkzeug.utils.redirect(path, code=301)
def _test_get_pricelist_available_promocode(self):
    """Check availability of the christmas pricelist per visitor country."""
    christmas_pl = self.ref('website_sale.list_christmas')
    public_pl = self.ref('product.list0')

    self.args = {
        'show': False,
        'current_pl': public_pl,
    }
    # Expected availability of the christmas pricelist by country code
    # (False stands for "no country detected").
    expectations = {
        False: True,
        'BE': True,
        'IT': True,
        'US': True,
        'CA': False,
    }
    for country, expected in pycompat.items(expectations):
        self.args['country'] = country
        # mock patch method could not pass env context
        available = self.website.is_pricelist_available(christmas_pl)
        if expected:
            self.assertTrue(available, 'AssertTrue failed for %s' % country)
        else:
            self.assertFalse(available, 'AssertFalse failed for %s' % country)
def extra_info(self, **post):
    """Checkout 'extra info' step: render the form, or save posted values
    and move on to payment."""
    # Skip the step entirely when the option is disabled.
    extra_step = request.env.ref('website_sale.extra_info_option')
    if not extra_step.active:
        return request.redirect("/shop/payment")

    # A valid order is required to proceed with checkout.
    order = request.website.sale_get_order()
    redirection = self.checkout_redirection(order)
    if redirection:
        return redirection

    # Form submission: persist only custom ('x_' prefixed) sale.order fields.
    if 'post_values' in post:
        values = {
            field_name: field_value
            for field_name, field_value in pycompat.items(post)
            if field_name in request.env['sale.order']._fields and field_name.startswith('x_')
        }
        if values:
            order.write(values)
        return request.redirect("/shop/payment")

    values = {
        'website_sale_order': order,
        'post': post,
        'escape': lambda x: x.replace("'", r"\'")
    }
    values.update(request.env['sale.order']._get_website_data(order))
    return request.render("website_sale.extra_info", values)
def values_postprocess(self, order, mode, values, errors, error_msg):
    """Filter posted address values and inject website-specific defaults.

    Keeps only form-writable res.partner fields (non-None values, so an
    empty string can still reset a field), then forces customer/team/lang
    defaults and adjusts partner type depending on the checkout mode.

    :param order: current sale.order record
    :param mode: (operation, address_type) tuple, e.g. ('edit', 'billing')
    :param values: dict of posted form values
    :param errors: dict of field errors, passed through unchanged
    :param error_msg: list of error messages, passed through unchanged
    :return: (new_values, errors, error_msg)
    """
    new_values = {}
    authorized_fields = request.env['ir.model']._get('res.partner')._get_form_writable_fields()
    for k, v in pycompat.items(values):
        # don't drop empty value, it could be a field to reset
        if k in authorized_fields and v is not None:
            new_values[k] = v
        else:  # DEBUG ONLY
            if k not in ('field_required', 'partner_id', 'callback', 'submitted'):  # classic case
                # Lazy %-args: the message is only rendered when debug
                # logging is actually enabled.
                _logger.debug("website_sale postprocess: %s value has been dropped (empty or not writable)", k)

    new_values['customer'] = True
    new_values['team_id'] = request.website.salesteam_id and request.website.salesteam_id.id

    # Only keep the visitor's language when the website supports it.
    lang = request.lang if request.lang in request.website.mapped('language_ids.code') else None
    if lang:
        new_values['lang'] = lang
    # Editing the billing address of a 'contact' partner detaches it.
    if mode == ('edit', 'billing') and order.partner_id.type == 'contact':
        new_values['type'] = 'other'
    # Shipping addresses are attached to the commercial partner.
    if mode[1] == 'shipping':
        new_values['parent_id'] = order.partner_id.commercial_partner_id.id
        new_values['type'] = 'delivery'
    return new_values, errors, error_msg
def compute_landed_cost(self):
    """(Re)compute the valuation adjustment lines of each landed cost.

    Deletes any previously computed adjustment lines, recreates one per
    (valuation line, cost line) pair, then distributes each cost line's
    amount over its adjustment lines according to the line's split method
    (by quantity, weight, volume, equally, or by current cost price).

    :return: True
    """
    AdjustementLines = self.env['stock.valuation.adjustment.lines']
    # Start from a clean state: drop previously computed adjustments.
    AdjustementLines.search([('cost_id', 'in', self.ids)]).unlink()

    digits = dp.get_precision('Product Price')(self._cr)
    towrite_dict = {}
    for cost in self.filtered(lambda cost: cost.picking_ids):
        # Totals used as denominators by the split methods below.
        total_qty = 0.0
        total_cost = 0.0
        total_weight = 0.0
        total_volume = 0.0
        total_line = 0.0
        all_val_line_values = cost.get_valuation_lines()
        for val_line_values in all_val_line_values:
            # One adjustment line per (valuation line, cost line) pair.
            for cost_line in cost.cost_lines:
                val_line_values.update({'cost_id': cost.id, 'cost_line_id': cost_line.id})
                self.env['stock.valuation.adjustment.lines'].create(val_line_values)
            total_qty += val_line_values.get('quantity', 0.0)
            total_cost += val_line_values.get('former_cost', 0.0)
            total_weight += val_line_values.get('weight', 0.0)
            total_volume += val_line_values.get('volume', 0.0)
            total_line += 1

        for line in cost.cost_lines:
            # Running total of the shares already handed out, used to cap
            # rounded values so they never exceed the cost line's amount.
            value_split = 0.0
            for valuation in cost.valuation_adjustment_lines:
                value = 0.0
                if valuation.cost_line_id and valuation.cost_line_id.id == line.id:
                    # Share of this cost line assigned to this valuation
                    # line, proportional to the chosen split criterion.
                    if line.split_method == 'by_quantity' and total_qty:
                        per_unit = (line.price_unit / total_qty)
                        value = valuation.quantity * per_unit
                    elif line.split_method == 'by_weight' and total_weight:
                        per_unit = (line.price_unit / total_weight)
                        value = valuation.weight * per_unit
                    elif line.split_method == 'by_volume' and total_volume:
                        per_unit = (line.price_unit / total_volume)
                        value = valuation.volume * per_unit
                    elif line.split_method == 'equal':
                        value = (line.price_unit / total_line)
                    elif line.split_method == 'by_current_cost_price' and total_cost:
                        per_unit = (line.price_unit / total_cost)
                        value = valuation.former_cost * per_unit
                    else:
                        # Fallback: split equally when the denominator of
                        # the chosen method is zero.
                        value = (line.price_unit / total_line)

                    if digits:
                        # Round UP to the price precision.
                        value = tools.float_round(value, precision_digits=digits[1], rounding_method='UP')
                    # Cap with the remaining amount (min for positive
                    # costs, max for negative) so the distributed shares
                    # never overshoot the cost line's total.
                    fnc = min if line.price_unit > 0 else max
                    value = fnc(value, line.price_unit - value_split)
                    value_split += value

                    if valuation.id not in towrite_dict:
                        towrite_dict[valuation.id] = value
                    else:
                        towrite_dict[valuation.id] += value
    if towrite_dict:
        for key, value in pycompat.items(towrite_dict):
            AdjustementLines.browse(key).write({'additional_landed_cost': value})
    return True