class CommerceVariant(models.Model):
    _inherit = "commerce.variant"

    packaging = Serialized(
        compute="_compute_packaging",
        help="Technical field to store packaging for the shop",
    )

    @api.depends("record_id.packaging_ids.qty")
    def _compute_packaging(self):
        for variant in self:
            variant.packaging = variant._get_variant_packaging()

    def _get_variant_packaging(self):
        """Collect serializable packaging info for the shop."""
        record = self.record_id.with_context(self._get_variant_packaging_ctx())
        contained_by_pkg = record.packaging_contained_mapping or {}
        result = []
        for pkg in record._ordered_packaging():
            info = pkg._asdict()
            # serialized mapping keys are strings, hence str() lookup
            info["contained"] = contained_by_pkg.get(str(pkg.id))
            result.append(info)
        return result

    def _get_variant_packaging_ctx(self):
        """Context keys tweaking how packaging is computed for the shop."""
        return {
            "lang": self.lang_id.code,
            # consider only packaging that can be sold
            "_packaging_filter": lambda x: x.can_be_sold,
            # to support multilang shop we rely on packaging type's name
            # which is already translatable.
            "_packaging_name_getter": lambda x: x.packaging_type_id.name,
        }
class ShopinvaderVariant(models.Model):
    _inherit = "shopinvader.variant"

    manufactured_for_partners = Serialized(
        compute="_compute_manufactured_for_partners")

    def _compute_manufactured_for_partners(self):
        """Compute the partner ids each variant is manufactured for.

        Reads the m2m relation table with raw SQL because browsing the
        m2m field on many records can be very slow.
        """
        pids = self.mapped("product_tmpl_id").ids
        mapping = {}
        # Guard against an empty recordset: `IN ()` with an empty tuple
        # is a SQL syntax error, so only query when there is data.
        if pids:
            # Use raw sql because accessing the m2m can be very slow
            query = """
            SELECT product_template_id,ARRAY_AGG(res_partner_id)
            FROM %s
            WHERE product_template_id in %s
            GROUP BY product_template_id
        """
            rel_table = (self.env["product.template"].
                         _fields["manufactured_for_partner_ids"].relation)
            self.env.cr.execute(query, (AsIs(rel_table), tuple(pids)))
            mapping = dict(self.env.cr.fetchall())
        for record in self:
            # No value hack to make Algolia search via facets work properly.
            # We must lookup products that have this field valued or empty.
            # On algolia you cannot filter by given value and EMPTY value
            # at the same time. Hence, we set a default value for no value.
            record.manufactured_for_partners = mapping.get(
                record.product_tmpl_id.id, ["_NOVALUE_"])
class ShopinvaderProductLinkMixin(models.AbstractModel):
    _name = "shopinvader.product.link.mixin"
    _description = "Shopinvader product link mixin"

    product_links = Serialized(
        string="Product template links", compute="_compute_product_links"
    )

    def _compute_product_links(self):
        for record in self:
            active_links = record._get_product_links().filtered(
                lambda x: x.is_link_active
            )
            record.product_links = record._get_product_links_by_type(
                active_links
            )

    def _get_product_links(self):
        """Links to consider; hook for overrides."""
        return self.product_template_link_ids

    def _get_product_links_by_type(self, links):
        """Retrieve variants as list of ids by link type.

        Eg: {"up_selling": [{id: 1}, {id: 2}, {id: 3}]}

        :return: dict
        """
        self.ensure_one()
        # group links per normalized type code, in id order
        by_code = defaultdict(self.env["product.template.link"].browse)
        for link in links.sorted(lambda x: x.id):
            by_code[self._product_link_code(link)] |= link
        result = defaultdict(list)
        for code, grouped_links in by_code.items():
            for link in grouped_links:
                link_data = self._get_product_link_data(link)
                if link_data:
                    result[code].append(link_data)
        return result

    def _product_link_code(self, link):
        """Normalize link code, default to `generic` when missing."""
        return slugify(link.type_id.code or "generic").replace("-", "_")

    def _get_product_link_data(self, link):
        """Serializable data for one link, {} when no target/variant."""
        target = self._product_link_target(link)
        if not target:
            return {}
        variant = self._product_link_target_variant(target)
        return {"id": variant.record_id.id} if variant else {}

    def _product_link_target(self, link):
        """Retrieve the target of the link."""
        raise NotImplementedError()

    def _product_link_target_variant(self, target):
        """Retrieve variant ids for given target product

        :return: set
        """
        raise NotImplementedError()
class ShopinvaderVariant(models.Model):
    _inherit = "shopinvader.variant"

    stock_data = Serialized(compute="_compute_stock_data")

    def _get_stock_export_key(self):
        """Return the index key used to export stock data.

        Uses the export line target (``name:target`` notation) when set,
        the line name otherwise.
        """
        self.ensure_one()
        # limit=1: several matching export lines would make the field
        # access below raise an "expected singleton" error.
        line = self.env["ir.exports.line"].search([
            ("export_id", "=", self.index_id.exporter_id.id),
            ("name", "=", "stock_data"),
        ], limit=1)
        if line.target:
            return line.target.split(":")[1]
        return line.name

    def _prepare_stock_data(self):
        """Stock info exported for one variant (qty from the configured
        backend stock field, evaluated in the current warehouse context).
        """
        stock_field = self.backend_id.product_stock_field_id.name
        return {"qty": self[stock_field]}

    def _compute_stock_data(self):
        # Collect per-warehouse stock data, grouping records by backend
        # since the warehouse list is a backend configuration.
        result = defaultdict(dict)
        for backend in self.mapped("backend_id"):
            loc_records = self.filtered(lambda s: s.backend_id == backend)
            wh_map = backend._get_warehouse_list_for_export()
            for wh_key, wh_ids in wh_map.items():
                # context warehouse drives the stock field computation
                for loc_record in loc_records.with_context(warehouse=wh_ids):
                    result[loc_record.id][wh_key] = (
                        loc_record._prepare_stock_data()
                    )
        for record in self:
            record.stock_data = result[record.id]
class ShopinvaderVariant(models.Model):
    _inherit = "shopinvader.variant"

    # Stored so the shop index does not recompute packaging on every
    # export; recomputed through the dynamic depends list below.
    packaging = Serialized(
        compute="_compute_packaging",
        help="Technical field to store packaging for the shop",
        store=True,
    )

    @api.depends(lambda self: self._compute_packaging_depends())
    def _compute_packaging(self):
        for rec in self:
            rec.packaging = rec._get_variant_packaging()

    def _compute_packaging_depends(self):
        # Dependencies kept in a dedicated method so extension modules
        # can add their own triggers.
        return (
            "lang_id",
            "record_id.sell_only_by_packaging",
            "record_id.packaging_ids.qty",
            "record_id.packaging_ids.can_be_sold",
            "record_id.packaging_ids.shopinvader_display",
            "record_id.packaging_ids.barcode",
            "record_id.packaging_ids.packaging_type_id.name",
        )

    def _get_variant_packaging(self):
        """Return the serializable packaging info exported to the shop."""
        res = []
        ctx = self._get_variant_packaging_ctx(self.backend_id)
        rec = self.record_id.with_context(ctx)
        contained_mapping = rec.packaging_contained_mapping or {}
        packaging = rec._ordered_packaging()
        # read() in one batch instead of per-packaging field access
        can_be_sold_info = {
            x["id"]: x["can_be_sold"]
            for x in self.packaging_ids.read(["can_be_sold"])
        }
        for pkg in packaging:
            pkg_info = self._prepare_qty_by_packaging_values(pkg, pkg.qty)
            # serialized mapping keys are strings, hence str() lookup
            pkg_info["contained"] = contained_mapping.get(str(pkg.id))
            pkg_info["can_be_sold"] = can_be_sold_info.get(pkg.id, False)
            if pkg.is_unit:
                # the "unit" pseudo-packaging is sellable unless the
                # product must be sold by packaging only
                pkg_info["can_be_sold"] = not self.sell_only_by_packaging
            res.append(pkg_info)
        return res

    def _get_variant_packaging_ctx(self, backend):
        # NOTE(review): ``backend`` is not used in this implementation —
        # presumably kept as a hook for overrides; confirm before removing.
        return {
            "lang": self.lang_id.code,
            # consider only packaging that can be displayed
            "_packaging_filter": lambda x: x.shopinvader_display,
            "_packaging_values_handler": self._prepare_qty_by_packaging_values,
        }

    def _prepare_qty_by_packaging_values(self, packaging, qty_per_pkg):
        """Serializable values for one packaging entry."""
        return {
            "id": packaging.id,
            "qty": qty_per_pkg,
            "name": packaging.name,
            "is_unit": packaging.is_unit,
            "barcode": packaging.barcode,
        }
class ShopinvaderBrand(models.Model):
    _inherit = "shopinvader.brand"

    serialized_tag_ids = Serialized(compute="_compute_serialized_tag_ids")

    @api.depends("tag_ids")
    def _compute_serialized_tag_ids(self):
        """Expose the tag ids as a plain serializable list."""
        for record in self:
            record.serialized_tag_ids = record.tag_ids.ids
class StockMoveLine(models.Model):
    _inherit = "stock.move.line"

    tray_source_matrix = Serialized(string="Source Cell",
                                    compute="_compute_tray_matrix")
    tray_dest_matrix = Serialized(string="Destination Cell",
                                  compute="_compute_tray_matrix")

    @api.depends("location_id", "location_dest_id")
    def _compute_tray_matrix(self):
        """Mirror the tray matrix of both locations on the move line."""
        for line in self:
            line.tray_source_matrix = line.location_id.tray_matrix
            line.tray_dest_matrix = line.location_dest_id.tray_matrix

    def _action_show_tray(self, location_from):
        """Open a dialog showing the tray of the source or destination."""
        assert location_from in ("source", "dest")
        self.ensure_one()
        view = self.env.ref("stock_location_tray.view_stock_move_line_tray")
        context = self.env.context.copy()
        if location_from == "source":
            context["show_source_tray"] = True
            name = _("Source Tray")
        else:
            context["show_dest_tray"] = True
            name = _("Destination Tray")
        action = {
            "name": name,
            "type": "ir.actions.act_window",
            "view_type": "form",
            "view_mode": "form",
            "res_model": "stock.move.line",
            "views": [(view.id, "form")],
            "view_id": view.id,
            "target": "new",
            "res_id": self.id,
            "context": context,
        }
        return action

    def action_show_source_tray(self):
        return self._action_show_tray("source")

    def action_show_dest_tray(self):
        return self._action_show_tray("dest")
class AccountInvoiceLine(models.Model):
    _inherit = "account.invoice.line"

    # JSON container column: every field below is declared with
    # ``sparse="facturae_fields"`` and is stored inside this serialized
    # blob instead of a dedicated database column.
    facturae_fields = Serialized()
    # Receiver-side references and dates
    receiver_contract_reference = fields.Char(sparse="facturae_fields")
    receiver_contract_date = fields.Date(sparse="facturae_fields")
    receiver_transaction_reference = fields.Char(sparse="facturae_fields")
    receiver_transaction_date = fields.Date(sparse="facturae_fields")
    # Issuer-side references and dates
    issuer_contract_reference = fields.Char(sparse="facturae_fields")
    issuer_contract_date = fields.Date(sparse="facturae_fields")
    issuer_transaction_reference = fields.Char(sparse="facturae_fields")
    issuer_transaction_date = fields.Date(sparse="facturae_fields")
    # Related file reference and date
    file_reference = fields.Char(sparse="facturae_fields")
    file_date = fields.Date(sparse="facturae_fields")
class GithubEvent(models.Model):
    _name = "github.event"
    _description = "Github Event"
    _order = 'id desc'

    payload = fields.Text()
    payload_serialized = Serialized(
        compute='_compute_payload_serialized'
    )
    action = fields.Char()

    @api.depends('payload')
    def _compute_payload_serialized(self):
        """Deserialize the raw JSON payload.

        A compute method must assign a value to every record of ``self``;
        events without a payload get an empty dict (the previous version
        left them unassigned, which raises a cache error).
        """
        for event in self:
            event.payload_serialized = (
                json.loads(event.payload) if event.payload else {}
            )

    def _get_value_from_payload(self, path):
        """Get a value from the payload.

        :param path: a doted notation of the path to access the value.
        :return: the value contained at the given path.
        :raise ValidationError: if any path component is missing.
        """
        section = self.payload_serialized
        # Validate every key including the last one, so a missing final
        # key raises ValidationError instead of a bare KeyError.
        for key in path.split('.'):
            if not isinstance(section, dict) or key not in section:
                raise ValidationError(_(
                    "The payload does not contain a value at the path {}."
                ).format(path))
            section = section[key]
        return section

    def process(self):
        """Process a github event.

        This method is intended to be inherited by other modules
        to add extra behavior when processing a github event.
        """
        self.action = self._get_value_from_payload('action')
class SeIndexConfig(models.Model):
    _name = "se.index.config"
    _description = "Elasticsearch index configuration"

    name = fields.Char(required=True)
    # Callable default: a plain ``default={}`` would share one mutable
    # dict instance across every record using the default.
    body = Serialized(required=True, default=lambda self: {})
    # This field is used since no widget exists to edit a serialized field
    # into the web frontend
    body_str = fields.Text(
        compute="_compute_body_str", inverse="_inverse_body_str", default="{}"
    )

    @api.depends("body")
    def _compute_body_str(self):
        """Render the serialized body as an editable JSON string."""
        for rec in self:
            rec.body_str = json.dumps(rec.body)

    def _inverse_body_str(self):
        """Store the edited JSON string back into the serialized field."""
        for rec in self:
            rec.body = json.loads(rec.body_str or "{}")
class ProductImageImportWizard(models.Model):
    _name = "storage.import.product_image"
    _description = "Handle import of storage product images"

    @api.model
    def _default_csv_header(self):
        """Default CSV header shown to the user: identifier, tag, path."""
        product_identifier = self._get_product_identifier_field()
        flds = [product_identifier, "tag", "path"]
        return ",".join(flds)

    storage_backend_id = fields.Many2one("storage.backend", "Storage Backend",
                                         required=True)
    product_model = fields.Selection(
        [
            ("product.template", "Product template"),
            ("product.product", "Product variants"),
        ],
        string="Product Model",
        required=True,
    )
    source_type = fields.Selection(
        [
            ("url", "URL"),
            ("zip_file", "Zip file"),
            ("external_storage", "External storage"),
        ],
        string="Source type",
        required=True,
        default="url",
    )
    filename = fields.Char()
    filename_zip = fields.Char()
    file_csv = fields.Binary(string="CSV file", required=True)
    # NOTE: this field was declared twice in a previous revision;
    # a single declaration is kept.
    csv_delimiter = fields.Char(
        string="CSV file delimiter",
        default=",",
        required=True,
    )
    csv_column_default_code = fields.Char(
        string="Product Reference column",
        help="The CSV File column name that holds the product reference.",
        default="default_code",
        required=True,
    )
    csv_column_tag_name = fields.Char(
        string="Image Tag Name column",
        help="The CSV File column name that holds the image tag name.",
        default="tag",
        required=True,
    )
    csv_column_file_path = fields.Char(
        string="Image file path column",
        help="The CSV File column name that holds the image file path or url.",
        default="path",
        required=True,
    )
    source_zipfile = fields.Binary("ZIP with images", required=False)
    source_storage_backend_id = fields.Many2one(
        "storage.backend", "Storage Backend with images")
    external_csv_path = fields.Char(
        string="Path to CSV file",
        help="Relative path of the CSV file located in the external storage",
    )
    # Container for the sparse options below (overwrite, tags, chunking)
    options = Serialized(readonly=True)
    overwrite = fields.Boolean("Overwrite image with same name",
                               sparse="options", default=False)
    create_missing_tags = fields.Boolean(sparse="options", default=False)
    chunk_size = fields.Integer(
        sparse="options",
        default=10,
        help="How many lines will be handled in each job.",
    )
    report = Serialized(readonly=True)
    report_html = fields.Html(readonly=True, compute="_compute_report_html")
    state = fields.Selection(
        [("new", "New"), ("scheduled", "Scheduled"), ("done", "Done")],
        string="Import state",
        default="new",
    )
    done_on = fields.Datetime()

    @api.depends("report")
    def _compute_report_html(self):
        # TODO: add tests
        tmpl = self.env.ref("storage_import_image_advanced.report_html")
        for record in self:
            if not record.report:
                record.report_html = ""
                continue
            report_html = tmpl._render({"record": record})
            record.report_html = report_html

    @api.model
    def _get_base64(self, file_path):
        """Read a file via the configured source and return its content.

        :return: dict with ``mimetype`` and ``b64`` keys,
                 empty dict when the file cannot be read.
        """
        res = {}
        binary = getattr(self, "_read_from_" + self.source_type)(file_path)
        if binary:
            mimetype = magic.from_buffer(binary, mime=True)
            # base64.encodestring was deprecated and removed in Python 3.9;
            # encodebytes is its direct replacement.
            res = {"mimetype": mimetype, "b64": base64.encodebytes(binary)}
        return res

    def _read_from_url(self, file_path):
        """Fetch the image bytes from a URL, None for invalid URLs."""
        if validators.url(file_path):
            return urlopen(file_path).read()
        return None

    def _read_from_zip_file(self, file_path):
        """Extract the image bytes from the uploaded zip file."""
        if not self.source_zipfile:
            raise exceptions.UserError(_("No zip file provided!"))
        file_content = base64.b64decode(self.source_zipfile)
        with closing(io.BytesIO(file_content)) as zip_file:
            with ZipFile(zip_file, "r") as z:
                try:
                    return z.read(file_path)
                except KeyError:
                    # File missing
                    return None

    def _read_from_external_storage(self, file_path):
        """Fetch the image bytes from the configured external backend."""
        if not self.source_storage_backend_id:
            raise exceptions.UserError(_("No storage backend provided!"))
        return self.source_storage_backend_id._get_bin_data(file_path)

    def _read_csv(self):
        """Return raw CSV bytes, from the upload or the external storage."""
        if self.file_csv:
            return base64.b64decode(self.file_csv)
        elif self.external_csv_path:
            return self.source_storage_backend_id._get_bin_data(
                self.external_csv_path)

    def _get_lines(self):
        """Parse the CSV into normalized dicts (identifier/tag/path).

        :raise UserError: when a configured column is missing in the CSV.
        """
        lines = []
        product_identifier_field = self._get_product_identifier_field()
        # map internal keys -> user-configured CSV column names
        mapping = {
            product_identifier_field: self.csv_column_default_code,
            "tag_name": self.csv_column_tag_name,
            "file_path": self.csv_column_file_path,
        }
        with closing(io.BytesIO(self._read_csv())) as binary_file:
            csv_file = (line.decode("utf8") for line in binary_file)
            reader = csv.DictReader(csv_file, delimiter=self.csv_delimiter)
            csv.field_size_limit(sys.maxsize)
            for row in reader:
                try:
                    line = {
                        key: row[column] for key, column in mapping.items()
                    }
                except KeyError as e:
                    _logger.error(e)
                    raise exceptions.UserError(_("CSV Schema Incompatible"))
                lines.append(line)
        return lines

    def _get_options(self):
        return self.options or {}

    def action_import(self):
        """Schedule the import: one queued job per chunk of lines."""
        self.report = self.report_html = False
        self.state = "scheduled"
        # Generate N chunks to split in several jobs. Fall back to the
        # field default when the option is missing from the blob.
        chunk_size = self._get_options().get("chunk_size") or 10
        chunks = gen_chunks(self._get_lines(), chunksize=chunk_size)
        for i, (chunk, is_last_chunk) in enumerate(chunks, 1):
            self.with_delay().do_import(lines=chunk, last_chunk=is_last_chunk)
            _logger.info(
                "Generated job for chunk nr %d. Is last: %s.",
                i,
                "yes" if is_last_chunk else "no",
            )

    def do_import(self, lines=None, last_chunk=False):
        """Run the import for one chunk and merge results into the report."""
        lines = lines or self._get_lines()
        report = self._do_import(lines, self.product_model,
                                 options=self._get_options())
        # Refresh report: merge this chunk's results with previous chunks
        extendable_keys = [
            "created",
            "file_not_found",
            "missing",
            "missing_tags",
        ]
        prev_report = self.report or {}
        for k, v in report.items():
            if k in extendable_keys and prev_report.get(k):
                report[k] = sorted(set(prev_report[k] + v))
        # Lock as writing can come from several jobs
        sql = "SELECT id FROM %s WHERE ID IN %%s FOR UPDATE" % self._table
        self.env.cr.execute(sql, (tuple(self.ids), ), log_exceptions=False)
        self.write({
            "report": report,
            "state": "done" if last_chunk else self.state,
            "done_on": fields.Datetime.now() if last_chunk else False,
        })
        return report

    def _get_product_identifier_field(self):
        """Override if you want to use another field as product identifier"""
        return "default_code"

    def _do_import(self, lines, product_model, options=None):
        """Create images and product relations for the given CSV lines.

        :return: report dict with created / file_not_found / missing /
                 missing_tags entries (sorted lists).
        """
        tag_obj = self.env["image.tag"]
        image_obj = self.env["storage.image"]
        relation_obj = self.env["product.image.relation"]
        prod_tmpl_attr_value_obj = self.env["product.template.attribute.value"]
        product_identifier_field = self._get_product_identifier_field()
        report = {
            "created": set(),
            "file_not_found": set(),
            "missing": [],
            "missing_tags": [],
        }
        options = options or {}
        # do all query at once
        lines_by_code = {x[product_identifier_field]: x for x in lines}
        all_codes = list(lines_by_code.keys())
        _fields = [product_identifier_field, "product_tmpl_id"]
        if product_model == "product.template":
            # exclude template id
            _fields = _fields[:1]
        else:
            _fields.append("product_template_attribute_value_ids")
        products = self.env[product_model].search_read(
            [(product_identifier_field, "in", all_codes)], _fields)
        existing_by_code = {x[product_identifier_field]: x for x in products}
        report["missing"] = sorted(
            [code for code in all_codes if not existing_by_code.get(code)])
        # resolve all tags in one query; optionally create missing ones
        all_tags = [x["tag_name"] for x in lines if x["tag_name"]]
        tags = tag_obj.search_read([("name", "in", all_tags)], ["name"])
        tag_by_name = {x["name"]: x["id"] for x in tags}
        missing_tags = set(all_tags).difference(set(tag_by_name.keys()))
        if missing_tags:
            if options.get("create_missing_tags"):
                for tag_name in missing_tags:
                    tag_by_name[tag_name] = tag_obj.create({
                        "name": tag_name
                    }).id
            else:
                report["missing_tags"] = sorted(missing_tags)
        for prod in products:
            line = lines_by_code[prod[product_identifier_field]]
            file_path = line["file_path"]
            file_vals = self._prepare_file_values(file_path)
            if not file_vals:
                report["file_not_found"].add(prod[product_identifier_field])
                continue
            file_vals.update({
                "name": file_vals["name"],
                "alt_name": file_vals["name"]
            })
            tag_id = tag_by_name.get(line["tag_name"])
            if product_model == "product.template":
                tmpl_id = prod["id"]
            elif product_model == "product.product":
                # TODO: test product.product import
                tmpl_id = prod["product_tmpl_id"][0]
            image = image_obj.create(file_vals)
            if options.get("overwrite"):
                # drop any existing relation for the same image/tag/template
                domain = [
                    ("image_id.name", "=", image.name),
                    ("tag_id", "=", tag_id),
                    ("product_tmpl_id", "=", tmpl_id),
                ]
                relation_obj.search(domain).unlink()
            img_relation_values = {
                "image_id": image.id,
                "tag_id": tag_id,
                "product_tmpl_id": tmpl_id,
            }
            # Assign specific product attribute values
            if (product_model == "product.product"
                    and prod["product_template_attribute_value_ids"]):
                attr_values = prod_tmpl_attr_value_obj.browse(
                    prod["product_template_attribute_value_ids"])
                img_relation_values["attribute_value_ids"] = [(
                    6,
                    0,
                    attr_values.mapped("product_attribute_value_id").ids,
                )]
            relation_obj.create(img_relation_values)
            image._compute_main_thumbs()
            report["created"].add(prod[product_identifier_field])
        report["created"] = sorted(report["created"])
        report["file_not_found"] = sorted(report["file_not_found"])
        return report

    def _prepare_file_values(self, file_path, filetype="image"):
        """Values to create a storage file from ``file_path``, {} if
        the file content cannot be retrieved.
        """
        name = os.path.basename(file_path)
        file_data = self._get_base64(file_path)
        if not file_data:
            return {}
        vals = {
            "data": file_data["b64"],
            "name": name,
            "file_type": filetype,
            "mimetype": file_data["mimetype"],
            "backend_id": self.storage_backend_id.id,
        }
        return vals

    @api.model
    def _cron_cleanup_obsolete(self, days=7):
        """Delete finished wizards older than ``days`` days."""
        from_date = fields.Datetime.now().replace(hour=23, minute=59,
                                                  second=59)
        limit_date = date_utils.subtract(from_date, days)
        records = self.search([("state", "=", "done"),
                               ("done_on", "<=", limit_date)])
        records.unlink()
        _logger.info("Cleanup obsolete images import. %d records found.",
                     len(records))

    def _report_label_for(self, key):
        """Human readable label for a report key (used by the template)."""
        labels = {
            "created": _("Created"),
            "file_not_found": _("Image file not found"),
            "missing": _("Product not found"),
            "missing_tags": _("Tags not found"),
        }
        return labels.get(key, key)
class Product(models.Model):
    _inherit = "product.product"

    packaging_contained_mapping = Serialized(
        compute="_compute_packaging_contained_mapping",
        help="Technical field to store contained packaging. ",
    )

    @api.depends("packaging_ids.qty")
    def _compute_packaging_contained_mapping(self):
        for rec in self:
            rec.packaging_contained_mapping = (
                rec._packaging_contained_mapping()
            )

    def _packaging_contained_mapping(self):
        """Produce a mapping of packaging and contained packagings.

        Used mainly for `product_qty_by_packaging`
        but can be used to display info as you prefer.

        :returns: a dictionary in the form {pkg.id: [contained packages]}
        """
        res = {}
        packaging = self._ordered_packaging()
        for i, pkg in enumerate(packaging):
            if pkg.is_unit:
                # skip minimal unit
                continue
            # decompose one package of this size into the smaller ones
            res[pkg.id] = self._product_qty_by_packaging(
                packaging[i + 1:], pkg.qty)
        return res

    def product_qty_by_packaging(self, prod_qty, with_contained=False):
        """Calculate quantity by packaging.

        The minimal quantity is always represented by the UoM of the product.

        Limitation: fractional quantities are lost.

        :prod_qty: total qty to satisfy.
        :with_contained: include calculation of contained packagings.

            eg: 1 pallet contains 4 big boxes and 6 little boxes.

        :returns: list of dict in the form

            [{id: 1, qty: qty_per_package, name: package_name}]

            If `with_contained` is passed, each element will include
            the quantity of smaller packaging, like:

            {contained: [{id: 1, qty: 4, name: "Big box"}]}
        """
        self.ensure_one()
        return self._product_qty_by_packaging(
            self._ordered_packaging(),
            prod_qty,
            with_contained=with_contained,
        )

    def _ordered_packaging(self):
        """Prepare packaging ordered by qty and exclude empty ones.

        Use ctx key `_packaging_filter` to pass a function to filter
        packaging to be considered.

        Use ctx key `_packaging_name_getter` to pass a function to change
        the display name of the packaging.
        """
        custom_filter = self.env.context.get("_packaging_filter", lambda x: x)
        name_getter = self.env.context.get("_packaging_name_getter",
                                           lambda x: x.name)
        packagings = sorted(
            [
                Packaging(x.id, name_getter(x), x.qty, False)
                for x in self.packaging_ids.filtered(custom_filter)
                # Exclude the ones w/ zero qty as they are useless for the math
                if x.qty
            ],
            reverse=True,
            key=lambda x: x.qty,
        )
        # Add minimal unit
        packagings.append(
            # NOTE: the ID here could clash w/ one of the packaging's.
            # If you create a mapping based on IDs, keep this in mind.
            # You can use `is_unit` to check this.
            Packaging(self.uom_id.id, self.uom_id.name, self.uom_id.factor,
                      True))
        return packagings

    def _product_qty_by_packaging(self, pkg_by_qty, qty, with_contained=False):
        """Produce a list of dictionaries of packaging info."""
        # TODO: refactor to handle fractional quantities (eg: 0.5 Kg)
        res = []
        prepare_values = self.env.context.get(
            "_packaging_values_handler",
            self._prepare_qty_by_packaging_values)
        for pkg in pkg_by_qty:
            qty_per_pkg, qty = self._qty_by_pkg(pkg.qty, qty)
            if qty_per_pkg:
                value = prepare_values(pkg, qty_per_pkg)
                if with_contained:
                    contained = None
                    if not pkg.is_unit:
                        mapping = self.packaging_contained_mapping
                        # integer keys are serialized as strings :/
                        contained = mapping.get(str(pkg.id))
                    value["contained"] = contained
                res.append(value)
            if not qty:
                break
        return res

    def _qty_by_pkg(self, pkg_qty, qty):
        """Calculate qty needed for given package qty."""
        qty_per_pkg = 0
        # ``uom.rounding`` is a precision value (eg: 0.01), not a number
        # of digits: it must be passed as ``precision_rounding``, not
        # ``precision_digits``.
        while (float_compare(
                qty - pkg_qty, 0.0,
                precision_rounding=self.uom_id.rounding) >= 0.0):
            qty -= pkg_qty
            qty_per_pkg += 1
        return qty_per_pkg, qty

    def _prepare_qty_by_packaging_values(self, packaging, qty_per_pkg):
        """Serializable values for one packaging entry."""
        return {
            "id": packaging.id,
            "qty": qty_per_pkg,
            "name": packaging.name,
            "is_unit": packaging.is_unit,
        }
class StockLocation(models.Model):
    _inherit = "stock.location"

    tray_type_id = fields.Many2one(
        comodel_name="stock.location.tray.type", ondelete="restrict"
    )
    # Set (via the related) when this location is a cell inside a tray.
    cell_in_tray_type_id = fields.Many2one(
        string="Cell Tray Type",
        related="location_id.tray_type_id",
        readonly=True
    )
    tray_cell_contains_stock = fields.Boolean(
        compute="_compute_tray_cell_contains_stock",
        help="Used to know if a cell of a Tray location is empty.",
    )
    # Payload consumed by the tray widget, see _tray_matrix_for_widget().
    tray_matrix = Serialized(string="Cells", compute="_compute_tray_matrix")
    cell_name_format = fields.Char(
        string="Name Format for Cells",
        default=lambda self: self._default_cell_name_format(),
        help="Cells sub-locations generated in a tray will be named"
        " after this format. Replacement fields between curly braces are used"
        " to inject positions. {x}, {y}, and {z} will be replaced by their"
        " corresponding position. Complex formatting (such as padding, ...)"
        " can be done using the format specification at "
        " https://docs.python.org/3/library/string.html#formatstrings",
    )

    def _default_cell_name_format(self):
        return "x{x:0>2}y{y:0>2}"

    @api.depends("quant_ids.quantity")
    def _compute_tray_cell_contains_stock(self):
        for location in self:
            if not location.cell_in_tray_type_id:
                # Not a tray cell so the value is irrelevant,
                # best to skip them for performance.
                location.tray_cell_contains_stock = False
                continue
            quants = location.quant_ids.filtered(lambda r: r.quantity > 0)
            location.tray_cell_contains_stock = bool(quants)

    @api.depends("quant_ids.quantity", "tray_type_id",
                 "location_id.tray_type_id")
    def _compute_tray_matrix(self):
        for location in self:
            # Only trays and tray cells get a matrix.
            if not (location.tray_type_id or location.cell_in_tray_type_id):
                location.tray_matrix = {}
                continue
            location.tray_matrix = location._tray_matrix_for_widget()

    def _tray_matrix_for_widget(self):
        """Serializable payload rendered by the tray matrix widget."""
        selected = self._tray_cell_coords()
        cells = self._tray_cell_matrix()
        return {
            # x, y: position of the selected cell
            "selected": selected,
            # 0 is empty, 1 is not
            "cells": cells,
        }

    def action_tray_matrix_click(self, coordX, coordY):
        """Open the form view of the cell clicked in the matrix widget."""
        self.ensure_one()
        if self.cell_in_tray_type_id:
            tray = self.location_id
        else:
            tray = self
        location = self.search(
            [
                ("id", "child_of", tray.ids),
                # we receive positions counting from 0 but they are stored
                # in the "human" format starting from 1
                ("posx", "=", coordX + 1),
                ("posy", "=", coordY + 1),
            ]
        )
        location.ensure_one()
        view = self.env.ref("stock.view_location_form")
        action = self.env.ref("stock.action_location_form").read()[0]
        action.update(
            {
                "res_id": location.id,
                "view_mode": "form",
                "view_type": "form",
                "view_id": view.id,
                "views": [(view.id, "form")],
            }
        )
        return action

    @api.model_create_multi
    def create(self, vals_list):
        records = super().create(vals_list)
        # Generate the cell sub-locations for newly created trays.
        records._update_tray_sublocations()
        return records

    def _check_before_add_tray_type(self):
        # A location with regular sub-locations cannot become a tray:
        # its existing children would clash with the generated cells.
        if not self.tray_type_id and self.child_ids:
            raise exceptions.UserError(
                _("Location %s has sub-locations, it cannot be converted to a tray.")
                % (self.display_name)
            )

    def write(self, vals):
        for location in self:
            trays_to_update = False
            if "tray_type_id" in vals:
                location._check_before_add_tray_type()
                new_tray_type_id = vals.get("tray_type_id")
                trays_to_update = location.tray_type_id.id != new_tray_type_id
            # short-circuit this check if we already know that we have to
            # update trays
            if not trays_to_update and "cell_name_format" in vals:
                new_format = vals.get("cell_name_format")
                trays_to_update = location.cell_name_format != new_format
            super(StockLocation, location).write(vals)
            if trays_to_update:
                # NOTE(review): called on ``self`` (the whole recordset)
                # rather than on ``location`` — regenerates sublocations
                # for every record each time; confirm whether intended.
                self._update_tray_sublocations()
            elif "posz" in vals and location.tray_type_id:
                # On initial generation (when tray_to_update is true),
                # the sublocations are already generated with the pos z.
                location.child_ids.write({"posz": vals["posz"]})
        return True

    def tray_cell_center_position(self):
        """Return the center position in mm of a cell

        The returned position is a tuple with the number of millimeters
        from the bottom-left corner. Tuple: (left, bottom)
        """
        if not self.cell_in_tray_type_id:
            return 0, 0
        posx = self.posx
        posy = self.posy
        cell_width = self.cell_in_tray_type_id.width_per_cell
        cell_depth = self.cell_in_tray_type_id.depth_per_cell
        # posx and posy start at one, we want to count from 0
        from_left = (posx - 1) * cell_width + (cell_width / 2)
        from_bottom = (posy - 1) * cell_depth + (cell_depth / 2)
        return from_left, from_bottom

    def _tray_cell_coords(self):
        # [] when this location is not a cell; 0-based coords otherwise.
        if not self.cell_in_tray_type_id:
            return []
        return [self.posx - 1, self.posy - 1]

    def _tray_cell_matrix(self):
        assert self.tray_type_id or self.cell_in_tray_type_id
        if self.tray_type_id:
            location = self
        else:  # cell
            location = self.location_id
        cells = location.tray_type_id._generate_cells_matrix()
        for cell in location.child_ids:
            if cell.tray_cell_contains_stock:
                # 1 means used
                cells[cell.posy - 1][cell.posx - 1] = 1
        return cells

    def _format_tray_sublocation_name(self, x, y, z):
        template = self.cell_name_format or self._default_cell_name_format()
        # using format_map allows to have missing replacement strings
        return template.format_map(defaultdict(str, x=x, y=y, z=z))

    def _update_tray_sublocations(self):
        """(Re)generate the cell sub-locations of each tray.

        Existing children are archived first; new cells are created in
        one batch at the end.
        """
        values = []
        for location in self:
            tray_type = location.tray_type_id
            location.child_ids.write({"active": False})
            if not tray_type:
                continue
            # create accepts several records now
            posz = location.posz or 0
            for row in range(1, tray_type.rows + 1):
                for col in range(1, tray_type.cols + 1):
                    cell_name = location._format_tray_sublocation_name(
                        col, row, posz)
                    subloc_values = {
                        "name": cell_name,
                        "posx": col,
                        "posy": row,
                        "posz": posz,
                        "location_id": location.id,
                        "company_id": location.company_id.id,
                    }
                    values.append(subloc_values)
        if values:
            self.create(values)

    def _create_tray_xmlids(self, module):
        """Create external IDs for generated cells

        If the tray location has one. Used for the demo/test data. It will
        not handle properly changing the tray format as the former cells
        will keep the original xmlid built on x and y, the new ones will
        not be able to use them. As these xmlids are meant for the demo data
        and the tests, it is not a problem and should not be used for other
        purposes.

        Called from stock_location_tray/demo/stock_location_demo.xml.
        """
        xmlids_to_create = []

        def has_ref(xmlid):
            ModelData = self.env["ir.model.data"]
            __, res_id = ModelData.xmlid_to_res_model_res_id(xmlid)
            return bool(res_id)

        for location in self:
            if not location.cell_in_tray_type_id:
                continue
            tray = location.location_id
            tray_external_id = tray.get_external_id().get(tray.id)
            if not tray_external_id:
                continue
            # This will never happen as both name and module are required
            # in ir.model.data
            # if "." not in tray_external_id:
            #     continue
            namespace, tray_name = tray_external_id.split(".")
            if module != namespace:
                continue
            tray_external = self.env["ir.model.data"].browse(
                self.env["ir.model.data"]._get_id(module, tray_name)
            )
            # cell xmlid derives from the tray's xmlid + x/y position
            cell_external_id = "{}_x{}y{}".format(
                tray_name, location.posx, location.posy
            )
            cell_xmlid = "{}.{}".format(module, cell_external_id)
            if not has_ref(cell_xmlid):
                xmlids_to_create.append(
                    {
                        "name": cell_external_id,
                        "module": module,
                        "model": self._name,
                        "res_id": location.id,
                        "noupdate": tray_external.noupdate,
                    }
                )
        self.env["ir.model.data"].create(xmlids_to_create)
class ServerConfiguration(models.TransientModel):
    """Display server configuration."""

    _name = "server.config"
    _description = "Display server configuration"
    # per-column default callables, filled in by _add_columns()
    _conf_defaults = _Defaults()

    # sparse-storage target for the dynamically added Char columns
    config = Serialized()

    @classmethod
    def _build_model(cls, pool, cr):
        """Add columns to model dynamically
        and init some properties
        """
        ModelClass = super(ServerConfiguration, cls)._build_model(pool, cr)
        ModelClass._add_columns()
        ModelClass.running_env = system_base_config["running_env"]
        # Only show passwords in development
        ModelClass.show_passwords = ModelClass.running_env in ("dev", )
        ModelClass._arch = None
        ModelClass._build_osv()
        return ModelClass

    @classmethod
    def _format_key(cls, section, key):
        # technical field name built from config section + option key
        return "{}_I_{}".format(section, key)

    @classmethod
    def _format_key_display_name(cls, key_name):
        # reverse of _format_key, for human-readable field labels
        return key_name.replace("_I_", " | ")

    @classmethod
    def _add_columns(cls):
        """Add columns to model dynamically"""
        cols = chain(
            list(cls._get_base_cols().items()),
            list(cls._get_env_cols().items()),
            list(cls._get_system_cols().items()),
        )
        for col, value in cols:
            col_name = col.replace(".", "_")
            # attach a readonly sparse Char field for every config option
            setattr(
                ServerConfiguration,
                col_name,
                fields.Char(
                    string=cls._format_key_display_name(col_name),
                    sparse="config",
                    readonly=True,
                ),
            )
            cls._conf_defaults[col_name] = value

    @classmethod
    def _get_base_cols(cls):
        """ Compute base fields"""
        res = {}
        for col, item in list(system_base_config.options.items()):
            key = cls._format_key("odoo", col)
            res[key] = item
        return res

    @classmethod
    def _get_env_cols(cls, sections=None):
        """ Compute base fields"""
        res = {}
        sections = sections if sections else serv_config.sections()
        for section in sections:
            for col, item in serv_config.items(section):
                key = cls._format_key(section, col)
                res[key] = item
        return res

    @classmethod
    def _get_system_cols(cls):
        """ Compute system fields"""
        res = {}
        for col, item in get_server_environment():
            key = cls._format_key("system", col)
            res[key] = item
        return res

    @classmethod
    def _group(cls, items):
        """Return an XML chunk which represents a group of fields."""
        names = []
        for key in sorted(items):
            names.append(key.replace(".", "_"))
        return ('<group col="2" colspan="4">' + "".join([
            '<field name="%s" readonly="1"/>' % _escape(name)
            for name in names
        ]) + "</group>")

    @classmethod
    def _build_osv(cls):
        """Build the view for the current configuration."""
        arch = ('<form string="Configuration Form">'
                '<notebook colspan="4">')
        # Odoo server configuration
        rcfile = system_base_config.rcfile
        items = cls._get_base_cols()
        arch += '<page string="Odoo">'
        arch += '<separator string="%s" colspan="4"/>' % _escape(rcfile)
        arch += cls._group(items)
        arch += '<separator colspan="4"/></page>'
        # one separator + group per configparser section
        arch += '<page string="Environment based configurations">'
        for section in sorted(serv_config.sections()):
            items = cls._get_env_cols(sections=[section])
            arch += '<separator string="[%s]" colspan="4"/>' % _escape(section)
            arch += cls._group(items)
        arch += '<separator colspan="4"/></page>'
        # System information
        arch += '<page string="System">'
        arch += '<separator string="Server Environment" colspan="4"/>'
        arch += cls._group(cls._get_system_cols())
        arch += '<separator colspan="4"/></page>'
        arch += "</notebook></form>"
        cls._arch = etree.fromstring(arch)

    @api.model
    def fields_view_get(self, view_id=None, view_type="form", toolbar=False,
                        submenu=False):
        """Overwrite the default method to render the custom view."""
        res = super().fields_view_get(view_id, view_type, toolbar)
        View = self.env["ir.ui.view"]
        if view_type == "form":
            # replace the arch by the one built in _build_osv()
            arch_node = self._arch
            xarch, xfields = View.postprocess_and_fields(
                self._name, arch_node, view_id)
            res["arch"] = xarch
            res["fields"] = xfields
        return res

    @api.model
    def _is_secret(self, key):
        """ This method is intended to be inherited to defined which keywords
        should be secret.

        :return: list of secret keywords
        """
        secret_keys = ["passw", "key", "secret", "token"]
        return any(secret_key in key for secret_key in secret_keys)

    @api.model
    def default_get(self, fields_list):
        res = super().default_get(fields_list)
        # hide everything from users without the dedicated access group
        if not self.env.user.has_group(
                "server_environment.has_server_configuration_access"):
            return res
        for key in self._conf_defaults:
            # mask secret-looking values outside the dev environment
            if not self.show_passwords and self._is_secret(key=key):
                res[key] = "**********"
            else:
                res[key] = self._conf_defaults[key]()
        return res
class ServerConfiguration(models.TransientModel):
    """Display server configuration."""

    _name = 'server.config'
    _description = 'Display server configuration'
    # per-column default callables, filled in by _add_columns()
    _conf_defaults = _Defaults()

    # sparse-storage target for the dynamically added Char columns
    config = Serialized()

    @classmethod
    def _build_model(cls, pool, cr):
        """Add columns to model dynamically
        and init some properties
        """
        ModelClass = super(ServerConfiguration, cls)._build_model(pool, cr)
        ModelClass._add_columns()
        ModelClass._arch = None
        ModelClass._build_osv()
        return ModelClass

    @classmethod
    def _format_key(cls, section, key):
        # technical field name built from config section + option key
        return '%s_I_%s' % (section, key)

    @property
    def show_passwords(self):
        # secrets are only displayed in the development environment
        return system_base_config["running_env"] in ("dev", )

    @classmethod
    def _format_key_display_name(cls, key_name):
        # reverse of _format_key, for human-readable field labels
        return key_name.replace('_I_', ' | ')

    @classmethod
    def _add_columns(cls):
        """Add columns to model dynamically"""
        cols = chain(list(cls._get_base_cols().items()),
                     list(cls._get_env_cols().items()),
                     list(cls._get_system_cols().items()))
        for col, value in cols:
            col_name = col.replace('.', '_')
            # attach a readonly sparse Char field for every config option
            setattr(
                ServerConfiguration,
                col_name,
                fields.Char(string=cls._format_key_display_name(col_name),
                            sparse='config', readonly=True))
            cls._conf_defaults[col_name] = value

    @classmethod
    def _get_base_cols(cls):
        """ Compute base fields"""
        res = {}
        for col, item in list(system_base_config.options.items()):
            key = cls._format_key('odoo', col)
            res[key] = item
        return res

    @classmethod
    def _get_env_cols(cls, sections=None):
        """ Compute base fields"""
        res = {}
        sections = sections if sections else serv_config.sections()
        for section in sections:
            for col, item in serv_config.items(section):
                key = cls._format_key(section, col)
                res[key] = item
        return res

    @classmethod
    def _get_system_cols(cls):
        """ Compute system fields"""
        res = {}
        for col, item in get_server_environment():
            key = cls._format_key('system', col)
            res[key] = item
        return res

    @classmethod
    def _group(cls, items):
        """Return an XML chunk which represents a group of fields."""
        names = []
        for key in sorted(items):
            names.append(key.replace('.', '_'))
        return ('<group col="2" colspan="4">' + ''.join([
            '<field name="%s" readonly="1"/>' % _escape(name)
            for name in names
        ]) + '</group>')

    @classmethod
    def _build_osv(cls):
        """Build the view for the current configuration."""
        arch = ('<form string="Configuration Form">'
                '<notebook colspan="4">')
        # Odoo server configuration
        rcfile = system_base_config.rcfile
        items = cls._get_base_cols()
        arch += '<page string="Odoo">'
        arch += '<separator string="%s" colspan="4"/>' % _escape(rcfile)
        arch += cls._group(items)
        arch += '<separator colspan="4"/></page>'
        # one separator + group per configparser section
        arch += '<page string="Environment based configurations">'
        for section in sorted(serv_config.sections()):
            items = cls._get_env_cols(sections=[section])
            arch += '<separator string="[%s]" colspan="4"/>' % _escape(section)
            arch += cls._group(items)
        arch += '<separator colspan="4"/></page>'
        # System information
        arch += '<page string="System">'
        arch += '<separator string="Server Environment" colspan="4"/>'
        arch += cls._group(cls._get_system_cols())
        arch += '<separator colspan="4"/></page>'
        arch += '</notebook></form>'
        cls._arch = etree.fromstring(arch)

    @api.model
    def fields_view_get(self, view_id=None, view_type='form',
                        toolbar=False, submenu=False):
        """Overwrite the default method to render the custom view."""
        res = super(ServerConfiguration, self).fields_view_get(view_id,
                                                               view_type,
                                                               toolbar)
        View = self.env['ir.ui.view']
        if view_type == 'form':
            # replace the arch by the one built in _build_osv()
            arch_node = self._arch
            xarch, xfields = View.postprocess_and_fields(
                self._name, arch_node, view_id)
            res['arch'] = xarch
            res['fields'] = xfields
        return res

    @api.model
    def _is_secret(self, key):
        """ This method is intended to be inherited to defined which keywords
        should be secret.

        :return: list of secret keywords
        """
        secret_keys = ['passw', 'key', 'secret', 'token']
        return any(secret_key in key for secret_key in secret_keys)

    @api.model
    def default_get(self, fields_list):
        # NOTE(review): unlike the sibling implementation, this variant does
        # not call super().default_get() — defaults come only from the
        # collected configuration values.
        res = {}
        # hide everything from users without the dedicated access group
        if not self.env.user.has_group(
                'server_environment.has_server_configuration_access'):
            return res
        for key in self._conf_defaults:
            # mask secret-looking values outside the dev environment
            if not self.show_passwords and self._is_secret(key=key):
                res[key] = '**********'
            else:
                res[key] = self._conf_defaults[key]()
        return res
class VerticalLiftOperationTransfer(models.AbstractModel):
    """Base model for shuttle pick and put operations"""

    _name = "vertical.lift.operation.transfer"
    _inherit = "vertical.lift.operation.base"
    _description = "Vertical Lift Operation - Transfer"

    current_move_line_id = fields.Many2one(
        comodel_name="stock.move.line", readonly=True
    )

    # data about the tray cell targeted by the current move line
    tray_location_id = fields.Many2one(
        comodel_name="stock.location",
        compute="_compute_tray_data",
        string="Tray Location",
    )
    tray_name = fields.Char(compute="_compute_tray_data", string="Tray Name")
    tray_type_id = fields.Many2one(
        comodel_name="stock.location.tray.type",
        compute="_compute_tray_data",
        string="Tray Type",
    )
    tray_type_code = fields.Char(compute="_compute_tray_data", string="Tray Code")
    tray_x = fields.Integer(string="X", compute="_compute_tray_data")
    tray_y = fields.Integer(string="Y", compute="_compute_tray_data")
    tray_matrix = Serialized(string="Cells", compute="_compute_tray_data")
    tray_qty = fields.Float(string="Stock Quantity", compute="_compute_tray_qty")

    # current operation information
    picking_id = fields.Many2one(
        related="current_move_line_id.picking_id", readonly=True
    )
    picking_origin = fields.Char(
        related="current_move_line_id.picking_id.origin", readonly=True
    )
    picking_partner_id = fields.Many2one(
        related="current_move_line_id.picking_id.partner_id", readonly=True
    )
    product_id = fields.Many2one(
        related="current_move_line_id.product_id", readonly=True
    )
    product_uom_id = fields.Many2one(
        related="current_move_line_id.product_uom_id", readonly=True
    )
    product_uom_qty = fields.Float(
        related="current_move_line_id.product_uom_qty", readonly=True
    )
    product_packagings = fields.Html(
        string="Packaging", compute="_compute_product_packagings"
    )
    qty_done = fields.Float(
        related="current_move_line_id.qty_done", readonly=True
    )
    lot_id = fields.Many2one(
        related="current_move_line_id.lot_id", readonly=True
    )
    location_dest_id = fields.Many2one(
        string="Destination",
        related="current_move_line_id.location_dest_id",
        readonly=False,
    )
    # TODO add a glue addon with product_expiry to add the field

    def on_barcode_scanned(self, barcode):
        """Default scan handler: only notify, no processing implemented."""
        self.ensure_one()
        message = "Scanned barcode: {}. Not implemented.".format(barcode)
        self.env.user.notify_info(message)

    @api.depends("current_move_line_id.product_id.packaging_ids")
    def _compute_product_packagings(self):
        """Render the HTML packaging table for the current product."""
        for operation in self:
            product = operation.current_move_line_id.product_id
            if not product:
                operation.product_packagings = ""
                continue
            operation.product_packagings = operation._render_product_packagings(
                product
            )

    @api.depends()
    def _compute_number_of_ops(self):
        """Number of remaining operations for the current shuttle."""
        for operation in self:
            operation.number_of_ops = operation.count_move_lines_to_do()

    @api.depends()
    def _compute_number_of_ops_all(self):
        """Number of remaining operations across all shuttles."""
        for operation in self:
            operation.number_of_ops_all = operation.count_move_lines_to_do_all()

    @api.depends("tray_location_id", "current_move_line_id.product_id")
    def _compute_tray_qty(self):
        """Quantity of the current product stored in the current cell."""
        for operation in self:
            line = operation.current_move_line_id
            cell = operation.tray_location_id
            if cell and line:
                operation.tray_qty = self._get_tray_qty(line.product_id, cell)
            else:
                operation.tray_qty = 0.0

    @api.depends("current_move_line_id")
    def _compute_tray_data(self):
        """Expose the tray cell and tray type of the current move line.

        In "pick" mode the cell is the line's source location, in "put"
        mode its destination location.
        """
        field_by_mode = {"pick": "location_id", "put": "location_dest_id"}
        for operation in self:
            cell = operation.current_move_line_id[field_by_mode[operation.mode]]
            tray_type = cell.location_id.tray_type_id
            # this is the current cell
            operation.tray_location_id = cell.id
            # name of the tray where the cell is
            operation.tray_name = cell.location_id.name
            operation.tray_type_id = tray_type.id
            operation.tray_type_code = tray_type.code
            operation.tray_x = cell.posx
            operation.tray_y = cell.posy
            operation.tray_matrix = cell.tray_matrix

    def _domain_move_lines_to_do(self):
        # to implement in sub-classes
        return [("id", "=", 0)]

    def _domain_move_lines_to_do_all(self):
        # to implement in sub-classes
        return [("id", "=", 0)]

    def count_move_lines_to_do(self):
        """Count move lines to process in current shuttles"""
        self.ensure_one()
        domain = self._domain_move_lines_to_do()
        return self.env["stock.move.line"].search_count(domain)

    def count_move_lines_to_do_all(self):
        """Count move lines to process in all shuttles"""
        self.ensure_one()
        domain = self._domain_move_lines_to_do_all()
        return self.env["stock.move.line"].search_count(domain)

    def process_current(self):
        """Validate the current move line and mark its move as done."""
        line = self.current_move_line_id
        if line.state in ("assigned", "partially_available"):
            line.qty_done = line.product_qty
            line.move_id._action_done()
        return True

    def fetch_tray(self):
        # hardware-specific, provided by concrete shuttle implementations
        raise NotImplementedError

    def reset_steps(self):
        """Forget the current line before resetting the step workflow."""
        self.clear_current_move_line()
        super().reset_steps()

    def clear_current_move_line(self):
        """Drop the reference to the line currently being processed."""
        self.current_move_line_id = False
        return True
class EDIExchangeConsumerMixin(models.AbstractModel):
    """Record that might have related EDI Exchange records"""

    _name = "edi.exchange.consumer.mixin"
    _description = "Abstract record where exchange records can be assigned"

    exchange_record_ids = fields.One2many(
        "edi.exchange.record",
        inverse_name="res_id",
        # restrict to exchange records pointing at the concrete model
        domain=lambda r: [("model", "=", r._name)],
    )
    exchange_record_count = fields.Integer(compute="_compute_exchange_record_count")
    # {exchange_type_id: display_name} of the types enabled for this record
    expected_edi_configuration = Serialized(
        compute="_compute_expected_edi_configuration",
        default={},
    )
    has_expected_edi_configuration = fields.Boolean(
        compute="_compute_expected_edi_configuration"
    )

    def _compute_expected_edi_configuration(self):
        for record in self:
            configurations = record._get_expected_edi_configuration()
            record.expected_edi_configuration = configurations
            record.has_expected_edi_configuration = bool(configurations)

    def _get_expected_edi_configuration(self):
        """Collect the exchange types applicable to this record.

        A type applies when its `enable_domain` matches the record and,
        if set, its `enable_snippet` sets `result` truthy in the eval context.

        :return: dict {exchange_type_id: display_name}
        """
        exchange_types = (
            self.env["edi.exchange.type"]
            .sudo()
            .search([("model_ids.model", "=", self._name)])
        )
        result = {}
        for exchange_type in exchange_types:
            eval_ctx = dict(
                self._get_eval_context(), record=self, exchange_type=exchange_type
            )
            domain = safe_eval.safe_eval(
                exchange_type.enable_domain or "[]", eval_ctx
            )
            if not self.filtered_domain(domain):
                continue
            if exchange_type.enable_snippet:
                # snippet communicates its verdict via `result` in eval_ctx
                safe_eval.safe_eval(
                    exchange_type.enable_snippet, eval_ctx, mode="exec", nocopy=True
                )
                if not eval_ctx.get("result", False):
                    continue
            result[exchange_type.id] = exchange_type.display_name
        return result

    def _get_eval_context(self):
        """Prepare the context used when evaluating python code

        :returns: dict -- evaluation context given to safe_eval
        """
        return {
            "datetime": safe_eval.datetime,
            "dateutil": safe_eval.dateutil,
            "time": safe_eval.time,
            "uid": self.env.uid,
            "user": self.env.user,
        }

    @api.model
    def fields_view_get(
        self, view_id=None, view_type="form", toolbar=False, submenu=False
    ):
        # Inject the EDI buttons before every <sheet> of form views.
        res = super().fields_view_get(
            view_id=view_id, view_type=view_type, toolbar=toolbar, submenu=submenu
        )
        if view_type == "form":
            doc = etree.XML(res["arch"])
            for node in doc.xpath("//sheet"):
                group = False
                if hasattr(self, "_edi_generate_group"):
                    group = self._edi_generate_group
                str_element = self.env["ir.qweb"]._render(
                    "edi_oca.edi_exchange_consumer_mixin_buttons",
                    {"group": group},
                )
                node.addprevious(etree.fromstring(str_element))
            View = self.env["ir.ui.view"]
            # Override context for postprocessing
            if view_id and res.get("base_model", self._name) != self._name:
                View = View.with_context(base_model_name=res["base_model"])
            new_arch, new_fields = View.postprocess_and_fields(doc, self._name)
            res["arch"] = new_arch
            # We don't want to lose previous configuration, so, we only
            # want to add the new fields
            new_fields.update(res["fields"])
            res["fields"] = new_fields
        return res

    def _edi_create_exchange_record_vals(self, exchange_type):
        # values used to link the new exchange record back to this record
        return {
            "model": self._name,
            "res_id": self.id,
        }

    def _edi_create_exchange_record(self, exchange_type, backend):
        """Create an exchange record on `backend` and open its form view."""
        exchange_record = backend.create_record(
            exchange_type.code, self._edi_create_exchange_record_vals(exchange_type)
        )
        self._event("on_edi_generate_manual").notify(self, exchange_record)
        return exchange_record.get_formview_action()

    def edi_create_exchange_record(self, exchange_type_id):
        """Entry point (e.g. from UI) to create an exchange record.

        When the type has no backend but exactly one backend of its type
        exists, that backend is used; otherwise a wizard action is
        returned so the user can pick one.
        """
        self.ensure_one()
        exchange_type = self.env["edi.exchange.type"].browse(exchange_type_id)
        backend = exchange_type.backend_id
        if (
            not backend
            and self.env["edi.backend"].search_count(
                [("backend_type_id", "=", exchange_type.backend_type_id.id)]
            )
            == 1
        ):
            backend = self.env["edi.backend"].search(
                [("backend_type_id", "=", exchange_type.backend_type_id.id)]
            )
        if backend:
            return self._edi_create_exchange_record(exchange_type, backend)
        action = self.env.ref(
            "edi_oca.edi_exchange_record_create_act_window"
        ).read()[0]
        action["context"] = {
            "default_res_id": self.id,
            "default_model": self._name,
            "default_exchange_type_id": exchange_type_id,
        }
        return action

    def _has_exchange_record(self, exchange_type, backend=False, extra_domain=False):
        """Check if there is a related exchange record following with
        a specific exchange type"""
        return bool(
            self.env["edi.exchange.record"].search_count(
                self._has_exchange_record_domain(
                    exchange_type, backend=backend, extra_domain=extra_domain
                )
            )
        )

    def _has_exchange_record_domain(
        self, exchange_type, backend=False, extra_domain=False
    ):
        # base domain: exchange records of the given type linked to `self`
        domain = [
            ("model", "=", self._name),
            ("res_id", "=", self.id),
            ("type_id.code", "=", exchange_type),
        ]
        if backend:
            domain.append(("backend_id", "=", backend.id))
        if extra_domain:
            domain += extra_domain
        return domain

    def _get_exchange_record(self, exchange_type, backend=False, extra_domain=False):
        """Obtain all the exchange record related to this record with the
        expected exchange type"""
        return self.env["edi.exchange.record"].search(
            self._has_exchange_record_domain(
                exchange_type, backend=backend, extra_domain=extra_domain
            )
        )

    @api.depends("exchange_record_ids")
    def _compute_exchange_record_count(self):
        for record in self:
            record.exchange_record_count = len(record.exchange_record_ids)

    def action_view_edi_records(self):
        """Open the list of exchange records linked to this record."""
        self.ensure_one()
        action = self.env.ref(
            "edi_oca.act_open_edi_exchange_record_view"
        ).read()[0]
        action["domain"] = [("model", "=", self._name), ("res_id", "=", self.id)]
        return action

    @api.model
    def get_edi_access(self, doc_ids, operation, model_name=False):
        """Retrieve access policy.

        The behavior is similar to `mail.thread` and `mail.message`
        and it relies on the access rules defines on the related record.
        The behavior can be customized on the related model
        by defining `_edi_exchange_record_access`.

        By default `write`, otherwise the custom permission is returned.
        """
        DocModel = self.env[model_name] if model_name else self
        create_allow = getattr(DocModel, "_edi_exchange_record_access", "write")
        if operation in ["write", "unlink"]:
            check_operation = "write"
        elif operation == "create" and create_allow in [
            "create",
            "read",
            "write",
            "unlink",
        ]:
            check_operation = create_allow
        elif operation == "create":
            check_operation = "write"
        else:
            check_operation = operation
        return check_operation
class QueueJob(models.Model):
    """ Job status and result """

    _name = 'queue.job'
    _description = 'Queue Job'
    _inherit = ['mail.thread', 'mail.activity.mixin']
    _log_access = False
    _order = 'date_created DESC, date_done DESC'

    # days kept before `autovacuum` deletes a done job
    _removal_interval = 30
    _default_related_action = 'related_action_open_record'

    uuid = fields.Char(string='UUID', readonly=True, index=True, required=True)
    user_id = fields.Many2one(comodel_name='res.users', string='User ID',
                              required=True)
    company_id = fields.Many2one(comodel_name='res.company', string='Company',
                                 index=True)
    name = fields.Char(string='Description', readonly=True)
    model_name = fields.Char(string='Model', readonly=True)
    method_name = fields.Char(readonly=True)
    # ids of the records the job's method is invoked on
    record_ids = Serialized(readonly=True)
    args = JobSerialized(readonly=True)
    kwargs = JobSerialized(readonly=True)
    func_string = fields.Char(string='Task', compute='_compute_func_string',
                              readonly=True, store=True)
    state = fields.Selection(STATES, readonly=True, required=True, index=True)
    priority = fields.Integer()
    exc_info = fields.Text(string='Exception Info', readonly=True)
    result = fields.Text(readonly=True)
    date_created = fields.Datetime(string='Created Date', readonly=True)
    date_started = fields.Datetime(string='Start Date', readonly=True)
    date_enqueued = fields.Datetime(string='Enqueue Time', readonly=True)
    date_done = fields.Datetime(readonly=True)
    eta = fields.Datetime(string='Execute only after')
    retry = fields.Integer(string='Current try')
    max_retries = fields.Integer(
        string='Max. retries',
        help="The job will fail if the number of tries reach the "
             "max. retries.\n"
             "Retries are infinite when empty.",
    )
    channel_method_name = fields.Char(readonly=True,
                                      compute='_compute_job_function',
                                      store=True)
    job_function_id = fields.Many2one(comodel_name='queue.job.function',
                                      compute='_compute_job_function',
                                      string='Job Function',
                                      readonly=True,
                                      store=True)
    channel = fields.Char(compute='_compute_channel',
                          inverse='_inverse_channel',
                          store=True,
                          index=True)

    @api.multi
    def _inverse_channel(self):
        # recompute the channel for jobs where it was manually emptied
        self.filtered(lambda a: not a.channel)._compute_channel()

    @api.multi
    @api.depends('job_function_id.channel_id')
    def _compute_channel(self):
        for record in self:
            record.channel = record.job_function_id.channel

    @api.multi
    @api.depends('model_name', 'method_name', 'job_function_id.channel_id')
    def _compute_job_function(self):
        # link the job to its queue.job.function by "model.method" name
        for record in self:
            model = self.env[record.model_name]
            method = getattr(model, record.method_name)
            channel_method_name = channel_func_name(model, method)
            func_model = self.env['queue.job.function']
            function = func_model.search([('name', '=', channel_method_name)])
            record.channel_method_name = channel_method_name
            record.job_function_id = function

    @api.multi
    @api.depends('model_name', 'method_name', 'record_ids',
                 'args', 'kwargs')
    def _compute_func_string(self):
        # human-readable representation such as
        # "product.product(1, 2).do_stuff(1, k=2)"
        for record in self:
            record_ids = record.record_ids
            model = repr(self.env[record.model_name].browse(record_ids))
            args = [repr(arg) for arg in record.args]
            kwargs = ['%s=%r' % (key, val) for key, val
                      in record.kwargs.items()]
            all_args = ', '.join(args + kwargs)
            record.func_string = (
                "%s.%s(%s)" % (model, record.method_name, all_args)
            )

    @api.multi
    def open_related_action(self):
        """ Open the related action associated to the job """
        self.ensure_one()
        job = Job.load(self.env, self.uuid)
        action = job.related_action()
        if action is None:
            raise exceptions.UserError(_('No action available for this job'))
        return action

    @api.multi
    def _change_job_state(self, state, result=None):
        """ Change the state of the `Job` object itself so it will change
        the other fields (date, result, ...)
        """
        for record in self:
            job_ = Job.load(record.env, record.uuid)
            if state == DONE:
                job_.set_done(result=result)
            elif state == PENDING:
                job_.set_pending(result=result)
            else:
                raise ValueError('State not supported: %s' % state)
            # persist the Job back to the queue.job row
            job_.store()

    @api.multi
    def button_done(self):
        result = _('Manually set to done by %s') % self.env.user.name
        self._change_job_state(DONE, result=result)
        return True

    @api.multi
    def requeue(self):
        self._change_job_state(PENDING)
        return True

    @api.multi
    def write(self, vals):
        res = super(QueueJob, self).write(vals)
        if vals.get('state') == 'failed':
            # subscribe the users now to avoid to subscribe them
            # at every job creation
            domain = self._subscribe_users_domain()
            users = self.env['res.users'].search(domain)
            self.message_subscribe_users(user_ids=users.ids)
            for record in self:
                msg = record._message_failed_job()
                if msg:
                    record.message_post(body=msg,
                                        subtype='queue_job.mt_job_failed')
        return res

    @api.multi
    def _subscribe_users_domain(self):
        """ Subscribe all users having the 'Queue Job Manager' group """
        group = self.env.ref('queue_job.group_queue_job_manager')
        if not group:
            return
        companies = self.mapped('company_id')
        domain = [('groups_id', '=', group.id)]
        if companies:
            domain.append(('company_id', 'child_of', companies.ids))
        return domain

    @api.multi
    def _message_failed_job(self):
        """ Return a message which will be posted on the job when it is failed.

        It can be inherited to allow more precise messages based on the
        exception informations.

        If nothing is returned, no message will be posted.
        """
        self.ensure_one()
        return _("Something bad happened during the execution of the job. "
                 "More details in the 'Exception Information' section.")

    @api.model
    def _needaction_domain_get(self):
        """ Returns the domain to filter records that require an action

        :return: domain or False is no action
        """
        return [('state', '=', 'failed')]

    @api.model
    def autovacuum(self):
        """ Delete all jobs done since more than ``_removal_interval`` days.

        Called from a cron.
        """
        deadline = datetime.now() - timedelta(days=self._removal_interval)
        jobs = self.search(
            [('date_done', '<=', fields.Datetime.to_string(deadline))],
        )
        jobs.unlink()
        return True

    @api.multi
    def related_action_open_record(self):
        """Open a form view with the record(s) of the job.

        For instance, for a job on a ``product.product``, it will open a
        ``product.product`` form view with the product record(s) concerned by
        the job. If the job concerns more than one record, it opens them in a
        list.

        This is the default related action.

        """
        self.ensure_one()
        model_name = self.model_name
        records = self.env[model_name].browse(self.record_ids).exists()
        if not records:
            return None
        action = {
            'name': _('Related Record'),
            'type': 'ir.actions.act_window',
            'view_type': 'form',
            'view_mode': 'form',
            'res_model': records._name,
        }
        if len(records) == 1:
            action['res_id'] = records.id
        else:
            action.update({
                'name': _('Related Records'),
                'view_mode': 'tree,form',
                'domain': [('id', 'in', records.ids)],
            })
        return action
class Product(models.Model):
    _inherit = "product.product"

    packaging_contained_mapping = Serialized(
        compute="_compute_packaging_contained_mapping",
        help="Technical field to store contained packaging. ",
    )

    @api.depends_context("lang")
    @api.depends("packaging_ids.qty")
    def _compute_packaging_contained_mapping(self):
        for rec in self:
            rec.packaging_contained_mapping = rec._packaging_contained_mapping()

    def _packaging_contained_mapping(self):
        """Produce a mapping of packaging and contained packagings.

        Used mainly for `product_qty_by_packaging`
        but can be used to display info as you prefer.

        :returns: a dictionary in the form {pkg.id: [contained packages]}
        """
        res = {}
        packaging = self._ordered_packaging()
        for i, pkg in enumerate(packaging):
            if pkg.is_unit:
                # skip minimal unit
                continue
            # express one qty of `pkg` in terms of all smaller packagings
            res[pkg.id] = self._product_qty_by_packaging(packaging[i + 1 :], pkg.qty)
        return res

    def product_qty_by_packaging(self, prod_qty, with_contained=False):
        """Calculate quantity by packaging.

        The minimal quantity is always represented by the UoM of the product.

        Limitation: fractional quantities are lost.

        :prod_qty: total qty to satisfy.
        :with_contained: include calculation of contained packagings.

            eg: 1 pallet contains 4 big boxes and 6 little boxes.

        :returns: list of dict in the form

            [{id: 1, qty: qty_per_package, name: package_name}]

            If `with_contained` is passed, each element will include
            the quantity of smaller packaging, like:

            {contained: [{id: 1, qty: 4, name: "Big box"}]}
        """
        self.ensure_one()
        return self._product_qty_by_packaging(
            self._ordered_packaging(),
            prod_qty,
            with_contained=with_contained,
        )

    def _ordered_packaging(self):
        """Prepare packaging ordered by qty and exclude empty ones.

        Use ctx key `_packaging_filter` to pass a function to filter packaging
        to be considered.

        Use ctx key `_packaging_name_getter` to pass a function to change
        the display name of the packaging.
        """
        custom_filter = self.env.context.get("_packaging_filter", lambda x: x)
        name_getter = self.env.context.get(
            "_packaging_name_getter", self._packaging_name_getter
        )
        packagings = sorted(
            [
                Packaging(x.id, name_getter(x), x.qty, x.barcode, False)
                for x in self.packaging_ids.filtered(custom_filter)
                # Exclude the ones w/ zero qty as they are useless for the math
                if x.qty
            ],
            reverse=True,
            key=lambda x: x.qty,
        )
        # Add minimal unit
        packagings.append(
            # NOTE: the ID here could clash w/ one of the packaging's.
            # If you create a mapping based on IDs, keep this in mind.
            # You can use `is_unit` to check this.
            Packaging(self.uom_id.id, self.uom_id.name, self.uom_id.factor, None, True)
        )
        return packagings

    def _packaging_name_getter(self, packaging):
        # default display name; overridable via ctx `_packaging_name_getter`
        return packaging.name

    def _product_qty_by_packaging(self, pkg_by_qty, qty, with_contained=False):
        """Produce a list of dictionaries of packaging info.

        :param pkg_by_qty: `Packaging` tuples sorted by descending qty
        :param qty: remaining product quantity to spread over the packagings
        :param with_contained: also include contained packaging breakdown
        """
        # TODO: refactor to handle fractional quantities (eg: 0.5 Kg)
        res = []
        prepare_values = self.env.context.get(
            "_packaging_values_handler", self._prepare_qty_by_packaging_values
        )
        for pkg in pkg_by_qty:
            qty_per_pkg, qty = self._qty_by_pkg(pkg.qty, qty)
            if qty_per_pkg:
                value = prepare_values(pkg, qty_per_pkg)
                if with_contained:
                    contained = None
                    if not pkg.is_unit:
                        mapping = self.packaging_contained_mapping
                        # integer keys are serialized as strings :/
                        contained = mapping.get(str(pkg.id))
                    value["contained"] = contained
                res.append(value)
            if not qty:
                break
        return res

    def _qty_by_pkg(self, pkg_qty, qty):
        """Calculate qty needed for given package qty.

        :return: (number of packages, remaining quantity)
        """
        qty_per_pkg = 0
        # FIX: `self.uom_id.rounding` is a rounding value (eg: 0.01), not a
        # number of digits, so it must be passed as `precision_rounding`;
        # `precision_digits` expects an int count of decimal digits.
        while (
            float_compare(
                qty - pkg_qty, 0.0, precision_rounding=self.uom_id.rounding
            )
            >= 0.0
        ):
            qty -= pkg_qty
            qty_per_pkg += 1
        return qty_per_pkg, qty

    def _prepare_qty_by_packaging_values(self, packaging, qty_per_pkg):
        # serializable representation of one packaging line
        return {
            "id": packaging.id,
            "qty": qty_per_pkg,
            "name": packaging.name,
            "is_unit": packaging.is_unit,
            "barcode": packaging.barcode,
        }

    def product_qty_by_packaging_as_str(
        self, prod_qty, include_total_units=False, only_packaging=False
    ):
        """Return a string representing the qty of each packaging.

        :param prod_qty: the qty of current product to translate to pkg qty
        :param include_total_units: includes total qty required initially
        :param only_packaging: exclude units if you have only units.
            IOW: if the qty does not match any packaging and this flag is true
            you'll get an empty string instead of `N units`.
        """
        self.ensure_one()
        if not prod_qty:
            return ""
        qty_by_packaging = self.product_qty_by_packaging(prod_qty)
        if not qty_by_packaging:
            return ""
        # Exclude unit qty and reuse it later
        unit_qty = None
        has_only_units = True
        _qty_by_packaging = []
        for pkg_qty in qty_by_packaging:
            if pkg_qty["is_unit"]:
                unit_qty = pkg_qty["qty"]
                continue
            has_only_units = False
            _qty_by_packaging.append(pkg_qty)
        # Browse them all at once
        records = self.env["product.packaging"].browse(
            [x["id"] for x in _qty_by_packaging]
        )
        _qty_by_packaging_as_str = self.env.context.get(
            "_qty_by_packaging_as_str", self._qty_by_packaging_as_str
        )
        # Collect all strings representations
        as_string = []
        for record, info in zip(records, _qty_by_packaging):
            bit = _qty_by_packaging_as_str(record, info["qty"])
            if bit:
                as_string.append(bit)
        # Restore unit information if any.
        include_units = (has_only_units and not only_packaging) or not has_only_units
        if unit_qty and include_units:
            as_string.append(f"{unit_qty} {self.uom_id.name}")
        # We want to avoid line break here as this string
        # can be used by reports
        res = f",{NO_BREAK_SPACE_CHAR}".join(as_string)
        if include_total_units and not has_only_units:
            res += " " + self._qty_by_packaging_total_units(prod_qty)
        return res

    def _qty_by_packaging_as_str(self, packaging, qty):
        return f"{qty} {packaging.name}"

    def _qty_by_packaging_total_units(self, prod_qty):
        return f"({prod_qty} {self.uom_id.name})"
class StockLocationTrayType(models.Model):
    """Definition of a tray: its physical size and its grid (rows x cols)."""

    _name = "stock.location.tray.type"
    _description = "Stock Location Tray Type"

    name = fields.Char(required=True)
    code = fields.Char(required=True)
    rows = fields.Integer(required=True)
    cols = fields.Integer(required=True)
    width = fields.Integer(help="Width of the tray in mm")
    depth = fields.Integer(help="Depth of the tray in mm")
    height = fields.Integer(help="Height of the tray in mm")
    width_per_cell = fields.Float(compute="_compute_width_per_cell")
    depth_per_cell = fields.Float(compute="_compute_depth_per_cell")
    active = fields.Boolean(default=True)
    # Matrix rendered by the web widget: {"selected": [...], "cells": [[...]]}
    tray_matrix = Serialized(compute="_compute_tray_matrix")
    location_ids = fields.One2many(
        comodel_name="stock.location", inverse_name="tray_type_id"
    )

    @api.depends("width", "cols")
    def _compute_width_per_cell(self):
        for record in self:
            width = record.width
            # Guard both width and cols (cols may still be 0 on a new
            # record) to avoid a ZeroDivisionError.
            if not width or not record.cols:
                record.width_per_cell = 0.0
                continue
            record.width_per_cell = width / record.cols

    @api.depends("depth", "rows")
    def _compute_depth_per_cell(self):
        for record in self:
            depth = record.depth
            # Same guard as for the width: avoid dividing by zero rows.
            if not depth or not record.rows:
                record.depth_per_cell = 0.0
                continue
            record.depth_per_cell = depth / record.rows

    @api.depends("rows", "cols")
    def _compute_tray_matrix(self):
        for record in self:
            # As we only want to show the disposition of
            # the tray, we generate a "full" tray, we'll
            # see all the boxes on the web widget.
            # (0 means empty, 1 means used)
            # Call on `record`, not `self`: `_generate_cells_matrix` reads
            # `rows`/`cols` on a single record and `self` may hold several.
            cells = record._generate_cells_matrix(default_state=1)
            record.tray_matrix = {"selected": [], "cells": cells}

    def _name_search(self, name, args=None, operator="ilike", limit=100,
                     name_get_uid=None):
        """Search trays on both ``name`` and ``code``."""
        args = args or []
        domain = []
        if name:
            domain = ["|", ("name", operator, name), ("code", operator, name)]
        tray_ids = self._search(
            expression.AND([domain, args]),
            limit=limit,
            access_rights_uid=name_get_uid,
        )
        return self.browse(tray_ids).name_get()

    def _generate_cells_matrix(self, default_state=0):
        """Return a rows x cols matrix filled with ``default_state``."""
        return [[default_state] * self.cols for __ in range(self.rows)]

    @api.constrains("active")
    def _location_check_active(self):
        """Forbid archiving a tray type still used by locations."""
        for record in self:
            if record.active:
                continue
            if record.location_ids:
                location_bullets = [
                    " - {}".format(location.display_name)
                    for location in record.location_ids
                ]
                raise exceptions.ValidationError(
                    _("The tray type {} is used by the following locations "
                      "and cannot be archived:\n\n{}").format(
                        record.name, "\n".join(location_bullets)))

    @api.constrains("rows", "cols")
    def _location_check_rows_cols(self):
        """Forbid changing the grid of a tray type still used by locations."""
        for record in self:
            if record.location_ids:
                location_bullets = [
                    " - {}".format(location.display_name)
                    for location in record.location_ids
                ]
                raise exceptions.ValidationError(
                    _("The tray type {} is used by the following locations, "
                      "it's size cannot be changed:\n\n{}").format(
                        record.name, "\n".join(location_bullets)))

    def open_locations(self):
        """Open the list of locations using this tray type."""
        action = self.env.ref("stock.action_location_form").read()[0]
        action["domain"] = [("tray_type_id", "in", self.ids)]
        if len(self.ids) == 1:
            action["context"] = {"default_tray_type_id": self.id}
        return action
class CommerceImageMixin(models.AbstractModel):
    """Mixin exposing a serialized `images` payload built from thumbnails."""

    _name = "commerce.image.mixin"
    _description = "Commerce Image Mixin"
    # Name of the x2many field holding the image relations;
    # must be set by the inheriting model.
    _image_field = None

    images = Serialized(
        compute="_compute_images",
        string="Commerce Image",
        compute_sudo=True,
    )
    # Tech field to store images data.
    # It cannot be computed because the computation
    # might require generating thumbs
    # which requires access to the storage files
    # which requires components registry to be available
    # which is not the case when Odoo starts.
    images_stored = Serialized()
    # Hash of the inputs used to build `images_stored`; when it no longer
    # matches the current inputs, the stored data is regenerated.
    images_store_hash = fields.Char()

    def _compute_images(self):
        """Expose the stored image data, refreshing it first if stale."""
        # Force computation if needed
        self.filtered(
            lambda x: x._images_must_recompute())._compute_images_stored()
        for record in self:
            record.images = record.images_stored

    def _compute_images_stored(self):
        """Generate the image payload and its freshness hash, then store both."""
        for record in self:
            record.images_stored = record._get_image_data_for_record()
            record.images_store_hash = record._get_images_store_hash()

    def _images_must_recompute(self):
        # Stale when any hashed input (scales, timestamps, url key) changed.
        return self.images_store_hash != self._get_images_store_hash()

    @property
    def _resize_scales_field(self):
        # Eg: "commerce.image.mixin" -> "commerce_image_mixin_resize_ids"
        return "%s_resize_ids" % self._name.replace(".", "_")

    def _resize_scales(self):
        """Return the resize scales configured on the backend for this model."""
        return self.backend_id[self._resize_scales_field]

    def _get_images_store_hash(self):
        # NOTE(review): `hash()` on a tuple containing strings is salted per
        # interpreter run (PYTHONHASHSEED), so stored hashes presumably only
        # stay valid within one server lifetime -- confirm this is intended.
        return str(hash(self._get_images_store_hash_tuple()))

    def _get_images_store_hash_tuple(self):
        """Collect the tuple of inputs whose change must trigger a refresh."""
        resize_scales = tuple(self._resize_scales().mapped(
            lambda r: (r.key, r.size_x, r.size_y)))
        images_timestamp = self[self._image_field].mapped(
            "image_id.write_date")
        # fmt: off
        # FIXME: may vary by _get_image_url_key -> is this needed for real?
        url_key = (self.display_name, )
        # fmt: on
        # TODO: any other bit to consider here?
        return resize_scales + tuple(images_timestamp) + url_key

    def _get_image_url_key(self, image_relation):
        # You can inherit this method to change the name of the image of
        # your website.
        # By default we use the name of the product or category
        # linked to the image processed.
        # Note the url will be slugified by `get_or_create_thumbnail`.
        self.ensure_one()
        return self.display_name

    def _get_image_data_for_record(self):
        """Build the serialized image payload for this record.

        :return: list of dicts, one per image relation, mapping each resize
            scale key to the thumbnail data (see `_prepare_data_resize`).
        """
        self.ensure_one()
        res = []
        resizes = self._resize_scales()
        for image_relation in self[self._image_field]:
            url_key = self._get_image_url_key(image_relation)
            image_data = {}
            for resize in resizes:
                thumbnail = image_relation.image_id.get_or_create_thumbnail(
                    resize.size_x, resize.size_y, url_key=url_key)
                image_data[resize.key] = self._prepare_data_resize(
                    thumbnail, image_relation)
            res.append(image_data)
        return res

    def _prepare_data_resize(self, thumbnail, image_relation):
        """
        Prepare data to fill images serialized field
        :param thumbnail: storage.thumbnail recordset
        :param image_relation: product.image.relation recordset
        :return: dict
        """
        self.ensure_one()
        tag = ""
        if image_relation.tag_id:
            tag = image_relation.tag_id.name
        return {"src": thumbnail.url, "alt": self.name, "tag": tag}
class ImportRecordset(models.Model, JobRelatedMixin):
    """Set of records, together with their configuration.

    A recordset can be considered as an "import session".
    Here you declare:

    * what you want to import (via "Import type")
    * where you get records from (via "Source" configuration)

    A recordset is also responsible to hold and display some meaningful
    information about imports:

    * required fields, translatable fields, defaults
    * import stats (created|updated|skipped|errored counters, latest run)
    * fully customizable HTML report to provide more details
    * downloadable report file (via reporters)
    * global states of running jobs

    When you run the import of a recordset this is what it does:

    * ask the source to provide all the records (chunked)
    * create an import record for each chunk
    * schedule the import job for each import record
    """

    _name = "import.recordset"
    _inherit = "import.source.consumer.mixin"
    _description = "Import recordset"
    _order = "sequence ASC, create_date DESC"
    _backend_type = "import_backend"

    backend_id = fields.Many2one("import.backend", string="Import Backend")
    sequence = fields.Integer(
        "Sequence", help="Sequence for the handle.", default=10
    )
    import_type_id = fields.Many2one(
        string="Import type", comodel_name="import.type", required=True
    )
    override_existing = fields.Boolean(
        string="Override existing items",
        help="Enable to update existing items w/ new values. "
             "If disabled, matching records will be skipped.",
        default=True,
    )
    name = fields.Char(string="Name", compute="_compute_name")
    create_date = fields.Datetime("Create date")
    record_ids = fields.One2many(
        "import.record", "recordset_id", string="Records"
    )
    # store info about imports report
    report_data = Serialized()
    shared_data = Serialized()
    report_html = fields.Html("Report summary", compute="_compute_report_html")
    full_report_url = fields.Char(
        "Full report url", compute="_compute_full_report_url"
    )
    jobs_global_state = fields.Selection(
        string="Jobs global state",
        selection=STATES,
        compute="_compute_jobs_global_state",
        help=("Tells you if a job is running for this recordset. "
              "If any of the sub jobs is not DONE or FAILED "
              "we assume the global state is PENDING."),
        readonly=True,
    )
    report_file = fields.Binary("Report file")
    report_filename = fields.Char("Report filename")
    docs_html = fields.Html(string="Docs", compute="_compute_docs_html")
    notes = fields.Html("Notes", help="Useful info for your users")

    def unlink(self):
        # inheritance of non-model mixin - like JobRelatedMixin -
        # does not work w/out this
        return super().unlink()

    @api.depends("backend_id.name")
    def _compute_name(self):
        """Name the recordset `<backend name> #<id>`."""
        for item in self:
            # Be defensive: `backend_id` is not required, and
            # `False.strip()` would raise an AttributeError.
            backend_name = item.backend_id.name or ""
            names = [backend_name.strip(), "#" + str(item.id)]
            item.name = " ".join(filter(None, names))

    def get_records(self):
        """Retrieve importable records and keep ordering."""
        return self.env["import.record"].search(
            [("recordset_id", "=", self.id)]
        )

    def _set_serialized(self, fname, values, reset=False):
        """Update serialized data.

        :param fname: name of the Serialized field to update
        :param values: dict of values to merge in (or replace with)
        :param reset: discard current content instead of merging
        """
        _values = {}
        if not reset:
            _values = self[fname]
        _values.update(values)
        self[fname] = _values
        # Without invalidating cache we will have a bug because of Serialized
        # field in odoo. It uses json.loads on convert_to_cache, which leads
        # to all of our int dict keys converted to strings. Except for the
        # first value get, where we get not from cache yet.
        # SO if you plan on using integers as your dict keys for a serialized
        # field beware that they will be converted to strings.
        # In order to streamline this I invalidate cache right away so the
        # values are converted right away
        # TL/DR integer dict keys will always be converted to strings, beware
        self.invalidate_cache((fname, ))

    def set_report(self, values, reset=False):
        """Update import report values."""
        self.ensure_one()
        self._set_serialized("report_data", values, reset=reset)

    def get_report(self):
        """Return current report data as a dict."""
        self.ensure_one()
        return self.report_data or {}

    def set_shared(self, values, reset=False):
        """Update shared data values."""
        self.ensure_one()
        self._set_serialized("shared_data", values, reset=reset)

    def get_shared(self):
        """Return current shared data as a dict."""
        self.ensure_one()
        return self.shared_data or {}

    def _get_report_html_data(self):
        """Prepare data for HTML report.

        :return dict: containing data for HTML report. Keys:
            ``recordset``: current recordset
            ``last_start``: last time import ran
            ``report_by_model``: report data grouped by model. Like:
                data['report_by_model'] = {
                    ir.model(res.parner): {
                        'errored': 1,
                        'skipped': 4,
                        'created': 10,
                        'updated': 8,
                    }
                }
        """
        report = self.get_report()
        data = {
            "recordset": self,
            "last_start": report.pop("_last_start"),
            "report_by_model": OrderedDict(),
        }
        # count keys by model
        for item in self.available_models():
            _model = item[0]
            model = self.env["ir.model"]._get(_model)
            data["report_by_model"][model] = {}
            # be defensive here. At some point
            # we could decide to skip models on demand.
            for k, v in report.get(_model, {}).items():
                data["report_by_model"][model][k] = len(v)
        return data

    @api.depends("report_data")
    def _compute_report_html(self):
        template = self.env.ref("connector_importer.recordset_report")
        for item in self:
            item.report_html = False
            if not item.report_data:
                continue
            data = item._get_report_html_data()
            item.report_html = template.render(data)

    def _compute_full_report_url(self):
        for item in self:
            item.full_report_url = "/importer/import-recordset/{}".format(
                item.id)

    def debug_mode(self):
        """Debug is on when enabled on the backend or via env variable."""
        return self.backend_id.debug_mode or os.getenv("IMPORTER_DEBUG_MODE")

    @api.depends("job_id.state", "record_ids.job_id.state")
    def _compute_jobs_global_state(self):
        for item in self:
            item.jobs_global_state = item._get_global_state()

    @api.model
    def _get_global_state(self):
        """Aggregate the state of all record jobs (first non-DONE wins)."""
        if not self.job_id:
            return DONE
        res = DONE
        for item in self.record_ids:
            if not item.job_id:
                # TODO: investigate how this is possible
                continue
            # TODO: check why `item.job_state` does not reflect the job state
            if item.job_id.state != DONE:
                res = item.job_id.state
                break
        return res

    def available_models(self):
        """Return models made available by the recordset's import type."""
        return self.import_type_id.available_models()

    @job(default_channel="root.connector_importer")
    def import_recordset(self):
        """This job will import a recordset."""
        with self.backend_id.work_on(self._name) as work:
            importer = work.component(usage="recordset.importer")
            return importer.run(self)

    def run_import(self):
        """ queue a job for creating records (import.record items)
        """
        job_method = self.with_delay().import_recordset
        if self.debug_mode():
            # `Logger.warn` is a deprecated alias of `warning`
            logger.warning("### DEBUG MODE ACTIVE: WILL NOT USE QUEUE ###")
            job_method = self.import_recordset
        for item in self:
            result = job_method()
            if self.debug_mode():
                # debug mode, no job here: reset it!
                item.write({"job_id": False})
            else:
                # link the job
                item.write({"job_id": result.db_record().id})
        if self.debug_mode():
            # TODO: port this
            # the "after_all" job needs to be fired manually when in debug mode
            # since the event handler in .events.chunk_finished_subscriber
            # cannot estimate when all the chunks have been processed.
            # for model, importer in self.import_type_id.available_models():
            #     import_record_after_all(
            #         session,
            #         self.backend_id.id,
            #         model,
            #     )
            pass

    def generate_report(self):
        """Build the report file via the source's reporter and store it."""
        self.ensure_one()
        reporter = self.get_source().get_reporter()
        if reporter is None:
            logger.debug("No reporter found...")
            return
        metadata, content = reporter.report_get(self)
        self.write({
            # `base64.encodestring` was deprecated and removed in Py 3.9:
            # `encodebytes` is the drop-in replacement.
            "report_file": base64.encodebytes(content.encode()),
            "report_filename": metadata["complete_filename"],
        })
        logger.info(("Report file updated on recordset={}. "
                     "Filename: {}").format(
            self.id, metadata["complete_filename"]))

    def _get_importers(self):
        """Map each available ir.model record to its importer component."""
        importers = OrderedDict()
        for model_name, importer, __ in self.available_models():
            model = self.env["ir.model"]._get(model_name)
            with self.backend_id.work_on(self._name) as work:
                importers[model] = work.component_by_name(
                    importer, model_name=model_name)
        return importers

    @api.depends("import_type_id")
    def _compute_docs_html(self):
        template = self.env.ref("connector_importer.recordset_docs")
        for item in self:
            item.docs_html = False
            if isinstance(item.id, models.NewId):
                continue
            importers = item._get_importers()
            data = {"recordset": item, "importers": importers}
            item.docs_html = template.render(data)
class VerticalLiftOperationInventory(models.Model):
    """State machine driving inventory counts on a vertical lift shuttle."""

    _name = "vertical.lift.operation.inventory"
    _inherit = "vertical.lift.operation.base"
    _description = "Vertical Lift Operation Inventory"

    _initial_state = "noop"

    def _selection_states(self):
        return [
            ("noop", "No inventory in progress"),
            ("quantity", "Inventory, please enter the amount"),
            ("confirm_wrong_quantity", "The quantity does not match, are you sure?"),
            # save is never visible, but save and go to the next or noop directly
            ("save", "Save"),
            # no need for release and save button here?
            # ("release", "Release"),
        ]

    def _transitions(self):
        return (
            self.Transition(
                "noop",
                "quantity",
                # transition only if inventory lines are found
                lambda self: self.select_next_inventory_line(),
            ),
            self.Transition(
                "quantity", "save", lambda self: self._has_identical_quantity(),
            ),
            self.Transition(
                "quantity",
                "confirm_wrong_quantity",
                lambda self: self._start_confirm_wrong_quantity(),
            ),
            self.Transition(
                "confirm_wrong_quantity",
                "save",
                lambda self: self.quantity_input == self.last_quantity_input,
            ),
            # if the confirmation of the quantity is different, cycle back to
            # the 'quantity' step
            self.Transition(
                "confirm_wrong_quantity",
                "quantity",
                lambda self: self._go_back_to_quantity_input(),
            ),
            # go to quantity if we have lines in queue, otherwise, go to noop
            self.Transition(
                "save",
                "quantity",
                lambda self: self.process_current()
                and self.select_next_inventory_line(),
                # when we reach 'save', this transition is directly
                # evaluated
                direct_eval=True,
            ),
            self.Transition(
                "save",
                "noop",
                lambda self: self.process_current()
                and self.clear_current_inventory_line(),
                # when we reach 'save', this transition is directly
                # evaluated
                direct_eval=True,
            ),
        )

    current_inventory_line_id = fields.Many2one(
        comodel_name="stock.inventory.line", readonly=True
    )
    quantity_input = fields.Float()
    # if the quantity is wrong, user has to write 2 times
    # the same quantity to really confirm it's correct
    last_quantity_input = fields.Float()
    tray_location_id = fields.Many2one(
        comodel_name="stock.location",
        compute="_compute_tray_data",
        string="Tray Location",
    )
    tray_name = fields.Char(compute="_compute_tray_data", string="Tray Name")
    tray_type_id = fields.Many2one(
        comodel_name="stock.location.tray.type",
        compute="_compute_tray_data",
        string="Tray Type",
    )
    tray_type_code = fields.Char(compute="_compute_tray_data", string="Tray Code")
    tray_x = fields.Integer(string="X", compute="_compute_tray_data")
    tray_y = fields.Integer(string="Y", compute="_compute_tray_data")
    tray_matrix = Serialized(string="Cells", compute="_compute_tray_data")
    tray_qty = fields.Float(string="Stock Quantity", compute="_compute_tray_qty")
    # current operation information
    inventory_id = fields.Many2one(
        related="current_inventory_line_id.inventory_id", readonly=True
    )
    product_id = fields.Many2one(
        related="current_inventory_line_id.product_id", readonly=True
    )
    product_uom_id = fields.Many2one(
        related="current_inventory_line_id.product_uom_id", readonly=True
    )
    product_qty = fields.Float(
        related="current_inventory_line_id.product_qty", readonly=True
    )
    product_packagings = fields.Html(
        string="Packaging", compute="_compute_product_packagings"
    )
    package_id = fields.Many2one(
        related="current_inventory_line_id.package_id", readonly=True
    )
    lot_id = fields.Many2one(
        related="current_inventory_line_id.prod_lot_id", readonly=True
    )

    @api.depends("current_inventory_line_id")
    def _compute_tray_data(self):
        for record in self:
            location = record.current_inventory_line_id.location_id
            tray_type = location.location_id.tray_type_id
            # this is the current cell
            record.tray_location_id = location.id
            # name of the tray where the cell is
            record.tray_name = location.location_id.name
            record.tray_type_id = tray_type.id
            record.tray_type_code = tray_type.code
            record.tray_x = location.posx
            record.tray_y = location.posy
            record.tray_matrix = location.tray_matrix

    @api.depends("current_inventory_line_id.product_id.packaging_ids")
    def _compute_product_packagings(self):
        for record in self:
            product = record.current_inventory_line_id.product_id
            if not product:
                record.product_packagings = ""
                continue
            content = self._render_product_packagings(product)
            record.product_packagings = content

    @api.depends("tray_location_id", "current_inventory_line_id.product_id")
    def _compute_tray_qty(self):
        for record in self:
            if not (record.tray_location_id and record.current_inventory_line_id):
                record.tray_qty = 0.0
                continue
            product = record.current_inventory_line_id.product_id
            location = record.tray_location_id
            record.tray_qty = self._get_tray_qty(product, location)

    def _compute_number_of_ops(self):
        for record in self:
            line_model = self.env["stock.inventory.line"]
            # Build the domain from `record`, not `self`:
            # `_domain_inventory_lines_to_do` reads `location_id` on a
            # single record, so a multi-record `self` would raise a
            # singleton error here.
            record.number_of_ops = line_model.search_count(
                record._domain_inventory_lines_to_do()
            )

    def _compute_number_of_ops_all(self):
        for record in self:
            line_model = self.env["stock.inventory.line"]
            record.number_of_ops_all = line_model.search_count(
                record._domain_inventory_lines_to_do_all()
            )

    def _domain_inventory_lines_to_do(self):
        """Domain of the pending lines for this shuttle's location."""
        return [
            ("location_id", "child_of", self.location_id.id),
            ("state", "=", "confirm"),
            ("vertical_lift_done", "=", False),
        ]

    def _domain_inventory_lines_to_do_all(self):
        """Domain of the pending lines across all vertical lift views."""
        shuttle_locations = self.env["stock.location"].search(
            [("vertical_lift_kind", "=", "view")]
        )
        return [
            ("location_id", "child_of", shuttle_locations.ids),
            ("state", "=", "confirm"),
            ("vertical_lift_done", "=", False),
        ]

    def reset_steps(self):
        self.clear_current_inventory_line()
        super().reset_steps()

    def _has_identical_quantity(self):
        """True when the input matches the theoretical qty (UoM rounding)."""
        line = self.current_inventory_line_id
        return (
            float_compare(
                line.theoretical_qty,
                self.quantity_input,
                precision_rounding=line.product_uom_id.rounding,
            )
            == 0
        )

    def _start_confirm_wrong_quantity(self):
        # keep the first input aside and ask the user to type it again
        self.last_quantity_input = self.quantity_input
        self.quantity_input = 0.0
        return True

    def _go_back_to_quantity_input(self):
        self.last_quantity_input = self.quantity_input
        self.quantity_input = 0.0
        return True

    def clear_current_inventory_line(self):
        self.write(
            {
                "quantity_input": 0.0,
                "last_quantity_input": 0.0,
                "current_inventory_line_id": False,
            }
        )
        return True

    def fetch_tray(self):
        """Ask the hardware to bring the tray of the current line."""
        location = self.current_inventory_line_id.location_id
        location.fetch_vertical_lift_tray()

    def select_next_inventory_line(self):
        """Pick the next pending line (and fetch its tray), if any."""
        self.ensure_one()
        next_line = self.env["stock.inventory.line"].search(
            self._domain_inventory_lines_to_do(),
            limit=1,
            order="vertical_lift_tray_id, location_id, id",
        )
        self.current_inventory_line_id = next_line
        if next_line:
            self.fetch_tray()
        return bool(next_line)

    def process_current(self):
        """Validate the counted qty on the current line.

        When every line of the inventory is done, validate the inventory.
        """
        line = self.current_inventory_line_id
        if not line.vertical_lift_done:
            line.vertical_lift_done = True
            # NOTE(review): exact float equality; a difference below the UoM
            # rounding still triggers a write -- harmless, but worth knowing.
            if self.quantity_input != line.product_qty:
                line.product_qty = self.quantity_input
            inventory = line.inventory_id
            if all(line.vertical_lift_done for line in inventory.line_ids):
                inventory.action_validate()
        self.quantity_input = self.last_quantity_input = 0.0
        return True

    def button_save(self):
        self.ensure_one()
        # `not in` reads better than `not ... in`
        if self.step() not in ("quantity", "confirm_wrong_quantity"):
            return
        self.next_step()
        if self.step() == "noop":
            # close the tray once everything is inventoried
            self.shuttle_id.release_vertical_lift_tray()
            # sorry not sorry
            return self._rainbow_man()
class QueueJob(models.Model):
    """Model storing the jobs to be executed."""
    _name = 'queue.job'
    _description = 'Queue Job'
    _inherit = ['mail.thread', 'mail.activity.mixin']
    _log_access = False
    _order = 'date_created DESC, date_done DESC'

    _removal_interval = 30  # days
    _default_related_action = 'related_action_open_record'

    uuid = fields.Char(string='UUID', readonly=True, index=True, required=True)
    user_id = fields.Many2one(comodel_name='res.users', string='User ID',
                              required=True)
    company_id = fields.Many2one(comodel_name='res.company', string='Company',
                                 index=True)
    name = fields.Char(string='Description', readonly=True)
    model_name = fields.Char(string='Model', readonly=True)
    method_name = fields.Char(readonly=True)
    # ids of the records the job's method was invoked on
    record_ids = Serialized(readonly=True)
    args = JobSerialized(readonly=True)
    kwargs = JobSerialized(readonly=True)
    # human-readable representation of the job call, eg "model.method(args)"
    func_string = fields.Char(string='Task', compute='_compute_func_string',
                              readonly=True, store=True)
    state = fields.Selection(STATES, readonly=True, required=True, index=True)
    priority = fields.Integer()
    exc_info = fields.Text(string='Exception Info', readonly=True)
    result = fields.Text(readonly=True)
    date_created = fields.Datetime(string='Created Date', readonly=True)
    date_started = fields.Datetime(string='Start Date', readonly=True)
    date_enqueued = fields.Datetime(string='Enqueue Time', readonly=True)
    date_done = fields.Datetime(readonly=True)
    eta = fields.Datetime(string='Execute only after')
    retry = fields.Integer(string='Current try')
    max_retries = fields.Integer(
        string='Max. retries',
        help="The job will fail if the number of tries reach the "
             "max. retries.\n"
             "Retries are infinite when empty.",
    )
    channel_method_name = fields.Char(readonly=True,
                                      compute='_compute_job_function',
                                      store=True)
    job_function_id = fields.Many2one(comodel_name='queue.job.function',
                                      compute='_compute_job_function',
                                      string='Job Function',
                                      readonly=True,
                                      store=True)
    # manual channel override; takes precedence over the function's channel
    override_channel = fields.Char()
    channel = fields.Char(compute='_compute_channel',
                          inverse='_inverse_channel',
                          store=True,
                          index=True)
    identity_key = fields.Char()

    @api.model_cr
    def init(self):
        """Create a partial index used for identity-key deduplication."""
        self._cr.execute(
            'SELECT indexname FROM pg_indexes WHERE indexname = %s ',
            ('queue_job_identity_key_state_partial_index',)
        )
        if not self._cr.fetchone():
            self._cr.execute(
                "CREATE INDEX queue_job_identity_key_state_partial_index "
                "ON queue_job (identity_key) WHERE state in ('pending', "
                "'enqueued') AND identity_key IS NOT NULL;"
            )

    @api.multi
    def _inverse_channel(self):
        # writing on `channel` is stored as a manual override
        for record in self:
            record.override_channel = record.channel

    @api.multi
    @api.depends('job_function_id.channel_id')
    def _compute_channel(self):
        # the override, when set, wins over the job function's channel
        for record in self:
            record.channel = (record.override_channel or
                              record.job_function_id.channel)

    @api.multi
    @api.depends('model_name', 'method_name', 'job_function_id.channel_id')
    def _compute_job_function(self):
        """Link the job to its queue.job.function by model + method name."""
        for record in self:
            model = self.env[record.model_name]
            method = getattr(model, record.method_name)
            channel_method_name = channel_func_name(model, method)
            func_model = self.env['queue.job.function']
            function = func_model.search([('name', '=', channel_method_name)],
                                         limit=1)
            record.channel_method_name = channel_method_name
            record.job_function_id = function

    @api.multi
    @api.depends('model_name', 'method_name', 'record_ids', 'args', 'kwargs')
    def _compute_func_string(self):
        """Build a readable `model(ids).method(args, kwargs)` string."""
        for record in self:
            record_ids = record.record_ids
            model = repr(self.env[record.model_name].browse(record_ids))
            args = [repr(arg) for arg in record.args]
            kwargs = ['%s=%r' % (key, val)
                      for key, val in record.kwargs.items()]
            all_args = ', '.join(args + kwargs)
            record.func_string = (
                "%s.%s(%s)" % (model, record.method_name, all_args)
            )

    @api.multi
    def open_related_action(self):
        """Open the related action associated to the job"""
        self.ensure_one()
        job = Job.load(self.env, self.uuid)
        action = job.related_action()
        if action is None:
            raise exceptions.UserError(_('No action available for this job'))
        return action

    @api.multi
    def _change_job_state(self, state, result=None):
        """Change the state of the `Job` object

        Changing the state of the Job will automatically change some fields
        (date, result, ...).
        """
        for record in self:
            job_ = Job.load(record.env, record.uuid)
            if state == DONE:
                job_.set_done(result=result)
            elif state == PENDING:
                job_.set_pending(result=result)
            else:
                raise ValueError('State not supported: %s' % state)
            job_.store()

    @api.multi
    def button_done(self):
        """Manually mark the job(s) as done, recording who did it."""
        result = _('Manually set to done by %s') % self.env.user.name
        self._change_job_state(DONE, result=result)
        return True

    @api.multi
    def requeue(self):
        """Put the job(s) back in the pending state."""
        self._change_job_state(PENDING)
        return True

    def _message_post_on_failure(self):
        # subscribe the users now to avoid to subscribe them
        # at every job creation
        domain = self._subscribe_users_domain()
        users = self.env['res.users'].search(domain)
        self.message_subscribe(partner_ids=users.mapped('partner_id').ids)
        for record in self:
            msg = record._message_failed_job()
            if msg:
                record.message_post(body=msg,
                                    subtype='queue_job.mt_job_failed')

    @api.multi
    def write(self, vals):
        res = super(QueueJob, self).write(vals)
        # notify subscribers as soon as a job moves to the failed state
        if vals.get('state') == 'failed':
            self._message_post_on_failure()
        return res

    @api.multi
    def _subscribe_users_domain(self):
        """Subscribe all users having the 'Queue Job Manager' group"""
        group = self.env.ref('queue_job.group_queue_job_manager')
        if not group:
            return None
        companies = self.mapped('company_id')
        domain = [('groups_id', '=', group.id)]
        if companies:
            domain.append(('company_id', 'child_of', companies.ids))
        return domain

    @api.multi
    def _message_failed_job(self):
        """Return a message which will be posted on the job when it is failed.

        It can be inherited to allow more precise messages based on the
        exception information.

        If nothing is returned, no message will be posted.
        """
        self.ensure_one()
        return _("Something bad happened during the execution of the job. "
                 "More details in the 'Exception Information' section.")

    @api.model
    def _needaction_domain_get(self):
        """Returns the domain to filter records that require an action

        :return: domain or False if no action
        """
        return [('state', '=', 'failed')]

    @api.model
    def autovacuum(self):
        """Delete all jobs done based on the removal interval defined on the
        channel

        Called from a cron.
        """
        for channel in self.env['queue.job.channel'].search([]):
            deadline = datetime.now() - timedelta(
                days=int(channel.removal_interval))
            jobs = self.search(
                [('date_done', '<=', deadline),
                 ('channel', '=', channel.complete_name)],
            )
            if jobs:
                jobs.unlink()
        return True

    @api.model
    def requeue_stuck_jobs(self, enqueued_delta=5, started_delta=0):
        """Fix jobs that are in a bad states

        :param enqueued_delta: lookup time in minutes for jobs
            that are in enqueued state

        :param started_delta: lookup time in minutes for jobs
            that are in started state,
            0 means that it is not checked
        """
        self._get_stuck_jobs_to_requeue(
            enqueued_delta=enqueued_delta,
            started_delta=started_delta
        ).requeue()
        return True

    @api.model
    def _get_stuck_jobs_domain(self, queue_dl, started_dl):
        """Build the OR-domain of stuck jobs; raise if no deadline given."""
        domain = []
        now = fields.datetime.now()
        if queue_dl:
            queue_dl = now - timedelta(minutes=queue_dl)
            domain.append([
                '&',
                ('date_enqueued', '<=', fields.Datetime.to_string(queue_dl)),
                ('state', '=', 'enqueued'),
            ])
        if started_dl:
            started_dl = now - timedelta(minutes=started_dl)
            domain.append([
                '&',
                ('date_started', '<=', fields.Datetime.to_string(started_dl)),
                ('state', '=', 'started'),
            ])
        if not domain:
            raise exceptions.ValidationError(
                _("If both parameters are 0, ALL jobs will be requeued!")
            )
        return expression.OR(domain)

    @api.model
    def _get_stuck_jobs_to_requeue(self, enqueued_delta, started_delta):
        job_model = self.env['queue.job']
        stuck_jobs = job_model.search(self._get_stuck_jobs_domain(
            enqueued_delta,
            started_delta,
        ))
        return stuck_jobs

    @api.multi
    def related_action_open_record(self):
        """Open a form view with the record(s) of the job.

        For instance, for a job on a ``product.product``, it will open a
        ``product.product`` form view with the product record(s) concerned by
        the job. If the job concerns more than one record, it opens them in a
        list.

        This is the default related action.

        """
        self.ensure_one()
        model_name = self.model_name
        records = self.env[model_name].browse(self.record_ids).exists()
        if not records:
            return None
        action = {
            'name': _('Related Record'),
            'type': 'ir.actions.act_window',
            'view_type': 'form',
            'view_mode': 'form',
            'res_model': records._name,
        }
        if len(records) == 1:
            action['res_id'] = records.id
        else:
            action.update({
                'name': _('Related Records'),
                'view_mode': 'tree,form',
                'domain': [('id', 'in', records.ids)],
            })
        return action
class BveView(models.Model):
    """BI View Editor: design a query and materialize it as a SQL view,
    a manual model, access rules and reporting UI views."""

    _name = 'bve.view'
    _description = 'BI View Editor'

    @api.depends('group_ids')
    @api.multi
    def _compute_users(self):
        # Users allowed to see the generated report: members of the selected
        # groups, or every user when no group is configured.
        for bve_view in self:
            group_ids = bve_view.sudo().group_ids
            if group_ids:
                bve_view.user_ids = group_ids.mapped('users')
            else:
                bve_view.user_ids = self.env['res.users'].sudo().search([])

    @api.depends('name')
    @api.multi
    def _compute_model_name(self):
        # Derive the technical model name ("x_bve.<slug>") from the report
        # name, keeping only alphanumeric characters.
        for bve_view in self:
            name = [x for x in bve_view.name.lower() if x.isalnum()]
            model_name = ''.join(name).replace('_', '.').replace(' ', '.')
            bve_view.model_name = 'x_bve.' + model_name

    name = fields.Char(required=True, copy=False)
    # Technical name of the generated manual model (see compute above).
    model_name = fields.Char(compute='_compute_model_name', store=True)
    note = fields.Text(string='Notes')
    state = fields.Selection([('draft', 'Draft'), ('created', 'Created')],
                             default='draft',
                             copy=False)
    # Serialized JSON description of the query (fields, joins, flags).
    data = Serialized(
        help="Use the special query builder to define the query "
             "to generate your report dataset. "
             "NOTE: To be edited, the query should be in 'Draft' status.")
    action_id = fields.Many2one('ir.actions.act_window', string='Action')
    view_id = fields.Many2one('ir.ui.view', string='View')
    group_ids = fields.Many2many(
        'res.groups',
        string='Groups',
        help="User groups allowed to see the generated report; "
             "if NO groups are specified the report will be public "
             "for everyone.")
    user_ids = fields.Many2many('res.users',
                                string='Users',
                                compute='_compute_users',
                                store=True)

    _sql_constraints = [
        ('name_uniq', 'unique(name)',
         _('Custom BI View names must be unique!')),
    ]

    @api.multi
    def _create_view_arch(self):
        """Return the list of ``<field>`` arch nodes used by the pivot,
        graph and search views, based on the serialized query in ``data``."""
        self.ensure_one()

        def _get_field_def(name, def_type=''):
            # One arch node per field; an empty type means "not displayed".
            if not def_type:
                return ''
            return """<field name="x_{}" type="{}" />""".format(
                name, def_type)

        def _get_field_type(field_info):
            # A field may be flagged row, column or measure; first flag wins.
            row = field_info['row'] and 'row'
            column = field_info['column'] and 'col'
            measure = field_info['measure'] and 'measure'
            return row or column or measure

        def _get_field_list(fields_info):
            view_fields = []
            for field_info in fields_info:
                field_name = field_info['name']
                def_type = _get_field_type(field_info)
                if def_type:
                    field_def = _get_field_def(field_name, def_type)
                    view_fields.append(field_def)
            return view_fields

        fields_info = json.loads(self.data)
        view_fields = _get_field_list(fields_info)
        return view_fields

    @api.multi
    def _create_tree_view_arch(self):
        """Return the list of ``<field>`` arch nodes for the tree view."""
        self.ensure_one()

        def _get_field_def(name):
            return """<field name="x_{}" />""".format(name)

        def _get_field_list(fields_info):
            view_fields = []
            for field_info in fields_info:
                field_name = field_info['name']
                # Join nodes are technical: never shown in the list view.
                if field_info['list'] and 'join_node' not in field_info:
                    field_def = _get_field_def(field_name)
                    view_fields.append(field_def)
            return view_fields

        fields_info = json.loads(self.data)
        view_fields = _get_field_list(fields_info)
        return view_fields

    @api.multi
    def _create_bve_view(self):
        """Create pivot/graph/search/tree views plus the window action for
        the generated model, then mark the record as 'created'."""
        self.ensure_one()

        # create views
        View = self.env['ir.ui.view']
        # Drop any leftover views for this model before recreating them.
        old_views = View.sudo().search([('model', '=', self.model_name)])
        old_views.unlink()

        view_vals = [{
            'name': 'Pivot Analysis',
            'type': 'pivot',
            'model': self.model_name,
            'priority': 16,
            'arch': """<?xml version="1.0"?>
                       <pivot string="Pivot Analysis">
                       {}
                       </pivot>
                    """.format("".join(self._create_view_arch()))
        }, {
            'name': 'Graph Analysis',
            'type': 'graph',
            'model': self.model_name,
            'priority': 16,
            'arch': """<?xml version="1.0"?>
                       <graph string="Graph Analysis"
                              type="bar" stacked="True">
                       {}
                       </graph>
                    """.format("".join(self._create_view_arch()))
        }, {
            'name': 'Search BI View',
            'type': 'search',
            'model': self.model_name,
            'priority': 16,
            'arch': """<?xml version="1.0"?>
                       <search string="Search BI View">
                       {}
                       </search>
                    """.format("".join(self._create_view_arch()))
        }]

        for vals in view_vals:
            View.sudo().create(vals)

        # create Tree view
        tree_view = View.sudo().create({
            'name': 'Tree Analysis',
            'type': 'tree',
            'model': self.model_name,
            'priority': 16,
            'arch': """<?xml version="1.0"?>
                       <tree string="List Analysis" create="false">
                       {}
                       </tree>
                    """.format("".join(self._create_tree_view_arch()))
        })

        # set the Tree view as the default one
        action_vals = {
            'name': self.name,
            'res_model': self.model_name,
            'type': 'ir.actions.act_window',
            'view_type': 'form',
            'view_mode': 'tree,graph,pivot',
            'view_id': tree_view.id,
            'context': "{'service_name': '%s'}" % self.name,
        }

        ActWindow = self.env['ir.actions.act_window']
        action_id = ActWindow.sudo().create(action_vals)
        self.write({
            'action_id': action_id.id,
            'view_id': tree_view.id,
            'state': 'created'
        })

    @api.multi
    def _build_access_rules(self, model):
        """Create ir.model.access rules for the generated model.

        Read access goes to groups that can read *every* source model;
        read/write access goes to the groups picked on the security tab.

        :param model: ir.model record of the generated model
        :raises UserError: when nobody would be able to read the view
        """
        self.ensure_one()

        def group_ids_with_access(model_name, access_mode):
            # ``access_mode`` is one of our own literals ('read'), never user
            # input, hence the disabled check.
            # pylint: disable=sql-injection
            self.env.cr.execute('''SELECT g.id
                FROM ir_model_access a
                    JOIN ir_model m ON (a.model_id=m.id)
                    JOIN res_groups g ON (a.group_id=g.id)
                WHERE m.model=%s AND
                      a.active = true AND
                      a.perm_''' + access_mode, (model_name, ))
            res = self.env.cr.fetchall()
            return [x[0] for x in res]

        info = json.loads(self.data)
        model_names = list(set([f['model'] for f in info]))
        # Only groups able to read *all* source models get read access.
        read_groups = set.intersection(*[
            set(group_ids_with_access(model_name, 'read'))
            for model_name in model_names
        ])

        if not read_groups and not self.group_ids:
            raise UserError(
                _('Please select at least one group'
                  ' on the security tab.'))

        # read access
        for group in read_groups:
            self.env['ir.model.access'].sudo().create({
                'name': 'read access to ' + self.model_name,
                'model_id': model.id,
                'group_id': group,
                'perm_read': True,
            })

        # read and write access
        for group in self.group_ids:
            self.env['ir.model.access'].sudo().create({
                'name': 'read-write access to ' + self.model_name,
                'model_id': model.id,
                'group_id': group.id,
                'perm_read': True,
                'perm_write': True,
            })

    @api.model
    def _create_sql_view(self):
        """(Re)create the SQL view backing the generated model from the
        serialized query definition in ``data``.

        :raises UserError: when ``data`` is empty
        """

        def get_fields_info(fields_data):
            # Resolve each serialized field entry to its table/column info.
            fields_info = []
            for field_data in fields_data:
                field = self.env['ir.model.fields'].browse(field_data['id'])
                vals = {
                    'table': self.env[field.model_id.model]._table,
                    'table_alias': field_data['table_alias'],
                    'select_field': field.name,
                    'as_field': 'x_' + field_data['name'],
                    'join': False,
                    'model': field.model_id.model
                }
                if field_data.get('join_node'):
                    vals.update({'join': field_data['join_node']})
                fields_info.append(vals)
            return fields_info

        def get_join_nodes(info):
            join_nodes = [(f['table_alias'], f['join'], f['select_field'])
                          for f in info
                          if f['join'] is not False]
            return join_nodes

        def get_tables(info):
            tables = set([(f['table'], f['table_alias']) for f in info])
            return tables

        def get_fields(info):
            return [("{}.{}".format(f['table_alias'], f['select_field']),
                     f['as_field'])
                    for f in info
                    if 'join_node' not in f]

        def check_empty_data(data):
            if not data or data == '[]':
                raise UserError(_('No data to process.'))

        check_empty_data(self.data)

        formatted_data = json.loads(self.data)
        info = get_fields_info(formatted_data)
        select_fields = get_fields(info)
        tables = get_tables(info)
        join_nodes = get_join_nodes(info)

        table_name = self.model_name.replace('.', '_')

        # robustness in case something went wrong
        # pylint: disable=sql-injection
        self._cr.execute('DROP TABLE IF EXISTS "%s"' % table_name)

        # ``t0.id`` provides the mandatory ``id`` column of the view.
        basic_fields = [("t0.id", "id")]

        # pylint: disable=sql-injection
        q = """CREATE or REPLACE VIEW %s as (
            SELECT %s
            FROM %s
            WHERE %s
            )""" % (table_name, ','.join(
            ["{} AS {}".format(f[0], f[1])
             for f in basic_fields + select_fields]), ','.join(
            ["{} AS {}".format(t[0], t[1])
             for t in list(tables)]), " AND ".join(
            ["{}.{} = {}.id".format(j[0], j[2], j[1])
             for j in join_nodes] + ["TRUE"]))

        self.env.cr.execute(q)

    @api.multi
    def action_translations(self):
        """Seed translations for the generated model and its fields, then
        open the translation list filtered on them."""
        self.ensure_one()
        model = self.env['ir.model'].sudo().search([('model', '=',
                                                     self.model_name)])
        translation_obj = self.env['ir.translation'].sudo()
        translation_obj.translate_fields('ir.model', model.id)
        for field_id in model.field_id.ids:
            translation_obj.translate_fields('ir.model.fields', field_id)
        return {
            'name': 'Translations',
            'res_model': 'ir.translation',
            'type': 'ir.actions.act_window',
            'view_mode': 'tree',
            'view_id': self.env.ref('base.view_translation_dialog_tree').id,
            'target': 'current',
            'flags': {'search_view': True, 'action_buttons': True},
            'domain': [
                '|',
                '&', ('res_id', 'in', model.field_id.ids),
                ('name', '=', 'ir.model.fields,field_description'),
                '&', ('res_id', '=', model.id),
                ('name', '=', 'ir.model,name')
            ],
        }

    @api.multi
    def action_create(self):
        """Materialize the BI view: SQL view, manual model, access rules
        and UI views. Always resets first to clean any leftovers."""
        self.ensure_one()

        def _prepare_field(field_data):
            # Build the ir.model.fields values for a non-custom field.
            if not field_data['custom']:
                field = self.env['ir.model.fields'].browse(field_data['id'])
                vals = {
                    'name': 'x_' + field_data['name'],
                    'complete_name': field.complete_name,
                    'model': self.model_name,
                    'relation': field.relation,
                    'field_description': field_data.get(
                        'description', field.field_description),
                    'ttype': field.ttype,
                    'selection': field.selection,
                    'size': field.size,
                    'state': 'manual',
                    'readonly': True
                }
                # Monetary needs a currency field: store it as plain float.
                if vals['ttype'] == 'monetary':
                    vals.update({'ttype': 'float'})
                # Resolve dynamic (callable) selections to a literal list.
                if field.ttype == 'selection' and not field.selection:
                    model_obj = self.env[field.model_id.model]
                    selection = model_obj._fields[field.name].selection
                    if callable(selection):
                        selection_domain = selection(model_obj)
                    else:
                        selection_domain = selection
                    vals.update({'selection': str(selection_domain)})
                return vals

        # clean dirty view (in case something went wrong)
        self.action_reset()

        # create sql view
        self._create_sql_view()

        # create model and fields
        data = json.loads(self.data)
        model_vals = {
            'name': self.name,
            'model': self.model_name,
            'state': 'manual',
            'field_id': [(0, 0, _prepare_field(field))
                         for field in data
                         if 'join_node' not in field]
        }
        Model = self.env['ir.model'].sudo().with_context(bve=True)
        model = Model.create(model_vals)

        # give access rights
        self._build_access_rules(model)

        # create tree, graph and pivot views
        self._create_bve_view()

    @api.multi
    def open_view(self):
        """Open the window action created for this BI view."""
        self.ensure_one()
        [action] = self.action_id.read()
        action['display_name'] = _('BI View')
        return action

    @api.multi
    def copy(self, default=None):
        # ``name`` has a unique constraint: suffix the copy's name.
        self.ensure_one()
        default = dict(default or {}, name=_("%s (copy)") % self.name)
        return super(BveView, self).copy(default=default)

    @api.multi
    def action_reset(self):
        """Back to draft: remove menus, action, UI views, the manual model
        and the backing SQL view. Reloads the client if menus were removed."""
        self.ensure_one()
        has_menus = False
        if self.action_id:
            action = 'ir.actions.act_window,%d' % (self.action_id.id, )
            menus = self.env['ir.ui.menu'].sudo().search([('action', '=',
                                                           action)])
            has_menus = True if menus else False
            menus.unlink()

            if self.action_id.view_id:
                self.action_id.view_id.sudo().unlink()
            self.action_id.sudo().unlink()

        self.env['ir.ui.view'].sudo().search([('model', '=',
                                               self.model_name)]).unlink()
        ir_models = self.env['ir.model'].sudo().search([('model', '=',
                                                         self.model_name)])
        for model in ir_models:
            model.unlink()

        table_name = self.model_name.replace('.', '_')
        tools.drop_view_if_exists(self.env.cr, table_name)

        self.state = 'draft'
        if has_menus:
            return {'type': 'ir.actions.client', 'tag': 'reload'}

    @api.multi
    def unlink(self):
        # A created view must be reset to draft before deletion, so all
        # generated artifacts (model, views, SQL view) are cleaned first.
        for view in self:
            if view.state == 'created':
                raise UserError(
                    _('You cannot delete a created view! '
                      'Reset the view to draft first.'))
        return super(BveView, self).unlink()
class CommerceBackend(models.Model):
    """Configuration of one e-commerce shop backend (languages, pricelist,
    cart behavior, bindings of products/categories, notifications...)."""

    _name = "commerce.backend"
    _inherit = [
        "collection.base",
        "server.env.techname.mixin",
        "server.env.mixin",
    ]
    _description = "Commerce Backend"

    name = fields.Char(required=True)
    company_id = fields.Many2one(
        "res.company",
        "Company",
        required=True,
        default=lambda s: s._default_company_id(),
    )
    # Overridable per environment (see ``_server_env_fields``).
    location = fields.Char()
    notification_ids = fields.One2many(
        "commerce.notification",
        "backend_id",
        "Notification",
        default=lambda self: self.env["commerce.notification"].search([]),
    )
    nbr_product = fields.Integer(compute="_compute_nbr_content",
                                 string="Number of bound products")
    nbr_variant = fields.Integer(compute="_compute_nbr_content")
    nbr_category = fields.Integer(compute="_compute_nbr_content")
    allowed_country_ids = fields.Many2many(comodel_name="res.country",
                                           string="Allowed Country")
    anonymous_partner_id = fields.Many2one(
        "res.partner",
        "Anonymous Partner",
        help=("Provide partner settings for unlogged users "
              "(i.e. fiscal position)"),
        required=True,
        default=lambda self: self.env.ref("commerce.anonymous"),
    )
    sequence_id = fields.Many2one("ir.sequence",
                                  "Sequence",
                                  help="Naming policy for orders and carts")
    lang_ids = fields.Many2many("res.lang", string="Lang", required=True)
    pricelist_id = fields.Many2one(
        "product.pricelist",
        string="Pricelist",
        default=lambda self: self._default_pricelist_id(),
    )
    account_analytic_id = fields.Many2one(
        comodel_name="account.analytic.account",
        string="Analytic account",
        help="This analytic account will be used to fill the "
        "field on the sale order created.",
    )
    filter_ids = fields.Many2many(comodel_name="product.filter",
                                  string="Filter")
    use_commerce_product_name = fields.Boolean(
        string="Use Commerce product display name",
        help="If checked, use the specific commerce display name for "
        "products instead of the original product name.",
    )
    category_root_binding_level = fields.Integer(
        default=0,
        help="Define the starting level for root categories when auto-binding."
        "This is typically handy when you want to have some root categories "
        "for internal organization only (eg: All / Saleable) "
        "but you don't want them to appear in the shop."
        "Works for both 'Bind all products' and 'Bind all categories'",
    )
    category_binding_level = fields.Integer(
        default=0,
        help="Define if the product binding should also bind related "
        "categories and how many related parents.\n"
        "Set 0 (or less) to disable the category auto-binding.\n"
        "Set 1 to auto-bind the direct category.\n"
        "Set 2 to auto-bind the direct category and his parent.\n"
        "etc.",
    )
    website_public_name = fields.Char(
        help="Public name of your backend/website.\n"
        " Used for products name referencing.")
    clear_cart_options = fields.Selection(
        selection=[
            ("delete", "Delete"),
            ("clear", "Clear"),
            ("cancel", "Cancel"),
        ],
        required=True,
        string="Clear cart",
        default="clear",
        help="Action to execute on the cart when the front want to clear the "
        "current cart:\n"
        "- Delete: delete the cart (and items);\n"
        "- Clear: keep the cart but remove items;\n"
        "- Cancel: The cart is canceled but kept into the database.\n"
        "When a quotation is not validated, habitually it's not removed "
        "but cancelled. "
        "It could be also useful if you want to keep cart for "
        "statistics reasons. A new cart is created automatically when the "
        "customer will add a new item.",
    )
    cart_checkout_address_policy = fields.Selection(
        selection=[
            ("no_defaults", "No defaults"),
            (
                "invoice_defaults_to_shipping",
                "Invoice address defaults to shipping",
            ),
        ],
        default="no_defaults",
        required=True,
        string="Cart address behavior",
        help="Define how the cart address will be handled in the checkout step:\n"
        "- No defaults: client will pass shipping and invoicing address"
        " together or in separated calls."
        " No automatic value for non passed addresses will be set;\n"
        "- Invoice address defaults to shipping:"
        " if the client does not pass the invoice address explicitly "
        " the shipping one will be used as invoice address as well.\n",
    )
    partner_title_ids = fields.Many2many(
        "res.partner.title",
        string="Available partner titles",
        default=lambda self: self._default_partner_title_ids(),
    )
    partner_industry_ids = fields.Many2many(
        "res.partner.industry",
        string="Available partner industries",
        default=lambda self: self._default_partner_industry_ids(),
    )
    # Invoice settings
    invoice_settings = Serialized(
        # Default values on the sparse fields work only for create
        # and does not provide defaults for existing records.
        default={
            "invoice_linked_to_sale_only": True,
            "invoice_access_open": False,
        })
    invoice_linked_to_sale_only = fields.Boolean(
        default=True,
        string="Only sale invoices",
        help="Only serve invoices that are linked to a sale order.",
        sparse="invoice_settings",
    )
    invoice_access_open = fields.Boolean(
        default=False,
        string="Open invoices",
        help="Give customer access to open invoices as well as the paid ones.",
        sparse="invoice_settings",
    )
    invoice_report_id = fields.Many2one(
        comodel_name="ir.actions.report",
        domain=lambda self: self._get_invoice_report_id_domain(),
        string="Specific report",
        help="Select a specific report for invoice download, if none are selected "
        "default commerce implementation is used.",
    )
    customer_default_role = fields.Char(
        compute="_compute_customer_default_role", )
    salesman_notify_create = fields.Selection(
        selection=[
            ("", "None"),
            ("all", "Companies, simple users and addresses"),
            ("company", "Company users only"),
            ("user", "Simple users only"),
            ("company_and_user", "Companies and simple users"),
            ("address", "Addresses only"),
        ],
        default="company",
    )
    salesman_notify_update = fields.Selection(
        selection=[
            ("", "None"),
            ("all", "Companies, simple users and addresses"),
            ("company", "Company users only"),
            ("user", "Simple users only"),
            ("company_and_user", "Companies and simple users"),
            ("address", "Addresses only"),
        ],
        default="",
    )
    website_unique_key = fields.Char(
        required=True,
        help="This identifier should be provided by each REST request through "
        "a WEBSITE-UNIQUE-KEY http header to identify the target backend. "
        "If not provided by the request, you must pu in place a way to"
        "lookup the target request for a given request by overriding the"
        "method _get_backend into the service context provider component. "
        "The commerce_auth_api_key and commerce_auth_jwt addons "
        "provides a fallback mechanism in such a case.",
        default=lambda self: self._default_website_unique_key(),
    )
    currency_ids = fields.Many2many(comodel_name="res.currency",
                                    string="Currency")

    _sql_constraints = [(
        "unique_website_unique_key",
        "unique(website_unique_key)",
        _("This website unique key already exists in database"),
    )]

    @property
    def _server_env_fields(self):
        # ``location`` may be provided by server environment files.
        return {"location": {}}

    @api.model
    def _default_company_id(self):
        return self.env.company

    @api.model
    def _default_pricelist_id(self):
        return self.env.ref("product.list0")

    @api.model
    def _default_partner_title_ids(self):
        return self.env["res.partner.title"].search([])

    @api.model
    def _default_partner_industry_ids(self):
        return self.env["res.partner.industry"].search([])

    @api.model
    def _default_website_unique_key(self):
        # Non-guessable random key (PBKDF2 over random input and salt).
        return hashlib.pbkdf2_hmac("sha256", os.urandom(32), os.urandom(32),
                                   100000).hex()

    def _compute_customer_default_role(self):
        for rec in self:
            rec.customer_default_role = "default"

    def _get_invoice_report_id_domain(self):
        # Only reports bound to account.move are valid invoice reports.
        return [(
            "binding_model_id",
            "=",
            self.env.ref("account.model_account_move").id,
        )]

    def _to_compute_nbr_content(self):
        """ Get a dict to compute the number of content.
        The dict is build like this:
        Key = Odoo number fields string (should be Integer/Float)
        Value = The target model string
        :return: dict
        """
        values = {
            # key => Odoo field: value => related model
            "nbr_product": "commerce.product",
            "nbr_category": "commerce.category",
            "nbr_variant": "commerce.variant",
        }
        return values

    def _compute_nbr_content(self):
        # One read_group per binding model, mapped back per backend.
        to_count = self._to_compute_nbr_content()
        domain = [("backend_id", "in", self.ids)]
        for odoo_field, odoo_model in to_count.items():
            # NOTE(review): when ``odoo_model`` is missing or not an ordinary
            # table, the counter field is never assigned for these records —
            # confirm this cannot happen with the models listed above.
            if odoo_model in self.env and self.env[
                    odoo_model]._is_an_ordinary_table():
                target_model_obj = self.env[odoo_model]
                result = target_model_obj.read_group(domain, ["backend_id"],
                                                     ["backend_id"],
                                                     lazy=False)
                result = {
                    data["backend_id"][0]: data["__count"]
                    for data in result
                }
                for record in self:
                    record[odoo_field] = result.get(record.id, 0)

    def _bind_all_content(self, model, bind_model, domain):
        """Create (or reactivate) ``bind_model`` bindings of every ``model``
        record matching ``domain``, for each backend/lang pair in ``self``.

        :param model: source model name (e.g. "product.template")
        :param bind_model: binding model name (e.g. "commerce.product")
        :param domain: domain on the source model
        :return: True
        """
        # active_test=False: also fetch inactive bindings to reactivate them.
        bind_model_obj = self.env[bind_model].with_context(active_test=False)
        model_obj = self.env[model]
        records = model_obj.search(domain)
        binds = bind_model_obj.search([
            ("backend_id", "in", self.ids),
            ("record_id", "in", records.ids),
            ("lang_id", "in", self.mapped("lang_ids").ids),
        ])
        for backend in self:
            for lang in backend.lang_ids:
                for record in records:
                    bind = fields.first(
                        binds.filtered(
                            lambda b: b.backend_id == backend and b.record_id
                            == record and b.lang_id == lang))
                    if not bind:
                        bind_model_obj.with_context(map_children=True).create({
                            "backend_id": backend.id,
                            "record_id": record.id,
                            "lang_id": lang.id,
                        })
                    elif not bind.active:
                        bind.write({"active": True})
        return True

    def bind_all_product(self, domain=None):
        # Default: bind every saleable template, then their categories.
        domain = domain or [("sale_ok", "=", True)]
        result = self._bind_all_content("product.template", "commerce.product",
                                        domain)
        self.auto_bind_categories()
        return result

    def auto_bind_categories(self):
        """
        Auto bind product.category for bound commerce.product
        :return: bool
        """
        backends = self.filtered(lambda b: b.category_binding_level > 0)
        if not backends:
            return True
        all_products = self.env["commerce.variant"].search([
            ("backend_id", "in", backends.ids),
            # Force to have only active binding
            ("active", "=", True),
        ])
        for backend in backends:
            commerce_variants = all_products.filtered(
                lambda p: p.backend_id == backend)
            products = commerce_variants.mapped("record_id")
            categories = backend._get_related_categories(products)
            if categories:
                self._bind_all_content(
                    categories._name,
                    "commerce.category",
                    [("id", "in", categories.ids)],
                )
        return True

    def _get_related_categories(self, products):
        """Get related product.category to bind based on current backend.

        :param products: product recordset (product or template)
        :return: product.category recordset
        """
        self.ensure_one()
        # As we consume the first level (direct category), minus 1
        level = self.category_binding_level - 1
        # TODO: this will include categories out of the level of hierarchy
        # when they are assigned directly to the product.
        # We should have a flag to turn this on/off explicitely
        # and in case it should documented on the backend UI.
        categories = products.mapped("categ_id")
        # pull up until the correct level
        parent_categories = categories
        while level > 0:
            parent_categories = parent_categories.mapped("parent_id")
            categories |= parent_categories
            level -= 1
        to_exclude = self._get_categories_to_exclude()
        return categories - to_exclude

    def _get_categories_to_exclude(self):
        # Collect the first ``category_root_binding_level`` levels of the
        # category tree: those stay internal and are never bound.
        root_lvl = self.category_root_binding_level
        if not root_lvl:
            return self.env["product.category"].browse()
        categories = self.env["product.category"].search([("parent_id", "=",
                                                           False)])
        lvl = root_lvl - 1  # the limit is inclusive
        while lvl:
            categories += categories.mapped("child_id")
            lvl -= 1
        return categories

    def bind_all_category(self, domain=None):
        if domain is None:
            domain = []
        to_exclude = self._get_categories_to_exclude()
        if to_exclude:
            domain = [("id", "not in", to_exclude.ids)]
        # TODO: we should exclude levels from `category_binding_level` as well
        self._bind_all_content("product.category", "commerce.category",
                               domain)

    def bind_selected_products(self,
                               products,
                               langs=None,
                               run_immediately=False):
        """Bind given product variants.

        :param products: product.product recordset
        :param langs: res.lang recordset. If none, all langs from backend
        :param run_immediately: do not use jobs
        """
        for backend in self:
            langs = langs or backend.lang_ids
            grouped_by_template = defaultdict(
                self.env["product.product"].browse)
            for rec in products:
                grouped_by_template[rec.product_tmpl_id] |= rec
            # One job (or direct call) per template.
            method = backend.with_delay().bind_single_product
            if run_immediately:
                method = backend.bind_single_product
            for tmpl, variants in grouped_by_template.items():
                method(langs, tmpl, variants)

    def bind_single_product(self, langs, product_tmpl, variants):
        """Bind given product variants for given template and languages.

        :param langs: res.lang recordset
        :param product_tmpl: product.template browse record
        :param variants: product.product recordset
        """
        self.ensure_one()
        commerce_products = self._get_or_create_commerce_products(
            langs, product_tmpl)
        for commerce_product in commerce_products:
            self._get_or_create_commerce_variants(commerce_product, variants)
        self.auto_bind_categories()

    def _get_or_create_commerce_products(self, langs, product_tmpl):
        """Get template bindings for given languages or create if missing.

        :param langs: res.lang recordset
        :param product_tmpl: product.template browse record
        """
        binding_model = self.env["commerce.product"].with_context(
            active_test=False)
        bound_templates = binding_model.search([
            ("record_id", "=", product_tmpl.id),
            ("backend_id", "=", self.id),
            ("lang_id", "in", langs.ids),
        ])
        for lang in langs:
            commerce_product = bound_templates.filtered(
                lambda x: x.lang_id == lang)
            if not commerce_product:
                # fmt: off
                data = {
                    "record_id": product_tmpl.id,
                    "backend_id": self.id,
                    "lang_id": lang.id,
                }
                # fmt: on
                bound_templates |= binding_model.create(data)
            elif not commerce_product.active:
                commerce_product.write({"active": True})
        return bound_templates

    def _get_or_create_commerce_variants(self, commerce_product, variants):
        """Get variant bindings, create if missing.

        :param commerce_product: commerce.product binding record
        :param variants: product.product recordset
        """
        binding_model = self.env["commerce.variant"]
        bound_variants = commerce_product.commerce_variant_ids
        for variant in variants:
            commerce_variant = bound_variants.filtered(
                lambda p: p.record_id == variant)
            if not commerce_variant:
                # fmt: off
                data = {
                    "record_id": variant.id,
                    "backend_id": self.id,
                    "commerce_product_id": commerce_product.id,
                }
                # fmt: on
                bound_variants |= binding_model.create(data)
            elif not commerce_variant.active:
                commerce_variant.write({"active": True})
        return bound_variants

    def _send_notification(self, notification, record):
        """Queue every notification of the given type configured on this
        backend for ``record`` (one job per notification)."""
        self.ensure_one()
        record.ensure_one()
        notifs = self.env["commerce.notification"].search([
            ("backend_id", "=", self.id),
            ("notification_type", "=", notification),
        ])
        description = _("Notify %s for %s,%s") % (
            notification,
            record._name,
            record.id,
        )
        for notif in notifs:
            notif.with_delay(description=description).send(record.id)
        return True

    def _extract_configuration(self):
        # Hook: configuration exposed to the frontend; empty by default.
        return {}

    def _bind_langs(self, lang_ids):
        self.ensure_one()
        self.env["commerce.variant.binding.wizard"].bind_langs(self, lang_ids)
        self.env["commerce.category.binding.wizard"].bind_langs(self, lang_ids)

    def _unbind_langs(self, lang_ids):
        self.ensure_one()
        self.env["commerce.variant.unbinding.wizard"].unbind_langs(
            self, lang_ids)
        self.env["commerce.category.unbinding.wizard"].unbind_langs(
            self, lang_ids)

    @contextmanager
    def _keep_binding_sync_with_langs(self):
        """Context manager: after the wrapped operation, (un)bind records
        for the languages added to / removed from each backend."""
        lang_ids_by_record = {}
        for record in self:
            lang_ids_by_record[record.id] = record.lang_ids.ids
        yield
        for record in self:
            old_lang_ids = set(lang_ids_by_record[record.id])
            actual_lang_ids = set(record.lang_ids.ids)
            if old_lang_ids == actual_lang_ids:
                continue
            added_lang_ids = actual_lang_ids - old_lang_ids
            if added_lang_ids:
                record._bind_langs(list(added_lang_ids))
            removed_lang_ids = old_lang_ids - actual_lang_ids
            if removed_lang_ids:
                record._unbind_langs(list(removed_lang_ids))

    def _get_backend_pricelist(self):
        """The pricelist configure by this backend."""
        # There must be a pricelist somehow: safe fallback to default Odoo one
        return self.pricelist_id or self._default_pricelist_id()

    def _get_customer_default_pricelist(self):
        """Retrieve pricelist to be used for brand new customer record."""
        return self._get_backend_pricelist()

    def _get_partner_pricelist(self, partner):
        """Retrieve pricelist for given res.partner record."""
        # Normally we should return partner.property_product_pricelist
        # but by default the shop must use the same pricelist for all customers
        # because products' prices are computed only by backend pricelist.
        # Nevertheless, this is a good point to hook to
        # if a different behavior per partner is required.
        return None

    def _get_cart_pricelist(self, partner=None):
        """Retrieve pricelist to be used for the cart.

        NOTE: if you change this behavior be aware that
        the prices displayed on the cart might differ
        from the ones showed on product details.
        This is because product info comes from indexes
        which are completely agnostic in regard to specific partner info.
        """
        pricelist = self._get_backend_pricelist()
        if partner:
            pricelist = self._get_partner_pricelist(partner) or pricelist
        return pricelist

    def _validate_partner(self, commerce_partner):
        """Hook to validate partners when required."""
        return True

    @api.model
    @tools.ormcache("self._uid", "website_unique_key")
    def _get_id_from_website_unique_key(self, website_unique_key):
        # Cached: resolved on every REST request via the http header.
        return self.search([("website_unique_key", "=", website_unique_key)
                            ]).id

    @api.model
    def _get_from_website_unique_key(self, website_unique_key):
        return self.browse(
            self._get_id_from_website_unique_key(website_unique_key))

    def write(self, values):
        # Invalidate the ormcache when the key changes, and keep bindings
        # in sync with languages added to / removed from the backend.
        if "website_unique_key" in values:
            self._get_id_from_website_unique_key.clear_cache(
                self.env[self._name])
        with self._keep_binding_sync_with_langs():
            return super(CommerceBackend, self).write(values)
class EDIExchangeType(models.Model):
    """
    Define a kind of exchange: direction, filename policy, backend binding
    and advanced YAML-driven component settings.
    """

    _name = "edi.exchange.type"
    _description = "EDI Exchange Type"

    backend_id = fields.Many2one(
        string="EDI backend",
        comodel_name="edi.backend",
        ondelete="set null",
    )
    backend_type_id = fields.Many2one(
        string="EDI Backend type",
        comodel_name="edi.backend.type",
        required=True,
        ondelete="restrict",
    )
    name = fields.Char(required=True)
    code = fields.Char(required=True)
    direction = fields.Selection(selection=[("input", "Input"),
                                            ("output", "Output")],
                                 required=True)
    exchange_filename_pattern = fields.Char(
        default="{record_name}-{type.code}-{dt}")
    # TODO make required if exchange_filename_pattern is
    exchange_file_ext = fields.Char()
    exchange_file_auto_generate = fields.Boolean(
        help="Auto generate output for records missing their payload. "
        "If active, a cron will take care of generating the output when not set yet. "
    )
    ack_type_id = fields.Many2one(
        string="Ack exchange type",
        comodel_name="edi.exchange.type",
        ondelete="set null",
        # Fixed help typo: "an hack is expected" -> "an ack is expected".
        help="Identify the type of the ack. "
        "If this field is valued it means an ack is expected.",
    )
    advanced_settings_edit = fields.Text(
        string="Advanced YAML settings",
        help="""
        Advanced technical settings as YAML format.
        The YAML structure should reproduce a dictionary.
        The backend might use these settings for automated operations.

        Currently supported conf:

          components:
            generate:
              usage: $comp_usage
              work_ctx:
                opt1: True
            validate:
              usage: $comp_usage
            check:
              usage: $comp_usage
            send:
              usage: $comp_usage
            receive:
              usage: $comp_usage
            process:
              usage: $comp_usage

        In any case, you can use these settings
        to provide your own configuration for whatever need you might have.
        """,
    )
    # Parsed (dict) counterpart of ``advanced_settings_edit``.
    advanced_settings = Serialized(default={},
                                   compute="_compute_advanced_settings")
    model_ids = fields.Many2many(
        "ir.model",
        help="""Modules to be checked for manual EDI generation""",
    )
    enable_domain = fields.Char(
        string="Enable on domain",
        help="""Filter domain to be checked on Models""")
    enable_snippet = fields.Char(
        string="Enable on snippet",
        help="""Snippet of code to be checked on Models,
        You can use `record` and `exchange_type` here.
        It will be executed if variable result has been defined as True
        """,
    )

    _sql_constraints = [(
        "code_uniq",
        "unique(code, backend_id)",
        "The code must be unique per backend",
    )]

    @api.depends("advanced_settings_edit")
    def _compute_advanced_settings(self):
        for rec in self:
            rec.advanced_settings = rec._load_advanced_settings()

    def _load_advanced_settings(self):
        """Parse the YAML settings; always return a dict (never None)."""
        return yaml.safe_load(self.advanced_settings_edit or "") or {}

    @api.constrains("backend_id", "backend_type_id")
    def _check_backend(self):
        """A specific backend, when set, must match the selected type."""
        for rec in self:
            if not rec.backend_id:
                continue
            if rec.backend_id.backend_type_id != rec.backend_type_id:
                raise exceptions.UserError(
                    _("Backend should respect backend type!"))

    def _make_exchange_filename(self, exchange_record):
        """Generate filename for ``exchange_record`` from the configured
        pattern, slugged timestamp and (optional) file extension."""
        pattern = self.exchange_filename_pattern
        ext = self.exchange_file_ext
        # Only append the extension placeholder when an extension is
        # configured: an empty Char field reads as ``False`` and previously
        # leaked into the generated name as a literal ".False" suffix.
        if ext:
            pattern = pattern + ".{ext}"
        dt = slugify(fields.Datetime.to_string(fields.Datetime.now()))
        record_name = self._get_record_name(exchange_record)
        record = exchange_record
        if exchange_record.model and exchange_record.res_id:
            record = exchange_record.record
        return pattern.format(
            exchange_record=exchange_record,
            record=record,
            record_name=record_name,
            type=self,
            dt=dt,
            ext=ext,
        )

    def _get_record_name(self, exchange_record):
        """Return a slugged name for the record behind ``exchange_record``,
        honoring a ``_get_edi_exchange_record_name`` hook when the record's
        model defines one."""
        if not exchange_record.res_id or not exchange_record.model:
            return slugify(exchange_record.display_name)
        if hasattr(exchange_record.record, "_get_edi_exchange_record_name"):
            return exchange_record.record._get_edi_exchange_record_name(
                exchange_record)
        return slugify(exchange_record.record.display_name)
class ServerEnvMixin(models.AbstractModel):
    """Mixin to add server environment in existing models

    Usage
    -----

    ::

        class StorageBackend(models.Model):
            _name = "storage.backend"
            _inherit = ["storage.backend", "server.env.mixin"]

            @property
            def _server_env_fields(self):
                return {"directory_path": {}}

    With the snippet above, the "storage.backend" model now uses a server
    environment configuration for the field ``directory_path``.

    Under the hood, this mixin automatically replaces the original field
    by an env-computed field that reads from the configuration files.

    By default, it looks for the configuration in a section named
    ``[model_name.Record Name]`` where ``model_name`` is the ``_name`` of the
    model with ``.`` replaced by ``_``. Then in a global section which is only
    the name of the model. They can be customized by overriding the method
    :meth:`~_server_env_section_name` and
    :meth:`~_server_env_global_section_name`.

    For each field transformed to an env-computed field, a companion field
    ``<field>_env_default`` is automatically created. When its value is set
    and the configuration files do not contain a key for that field, the
    env-computed field uses the default value stored in database. If there is
    a key for this field but it is empty, the env-computed field has an empty
    value.

    Env-computed fields are conditionally editable, based on the absence
    of their key in environment configuration files. When edited, their
    value is stored in the database.

    Integration with keychain
    -------------------------

    The keychain addon is used account information, encrypting the password
    with a key per environment.

    The default behavior of server_environment is to store the default fields
    in a serialized field, so the password would lend there unencrypted.

    You can benefit from keychain by using custom compute/inverse methods to
    get/set the password field:

    ::

        class StorageBackend(models.Model):
            _name = 'storage.backend'
            _inherit = ['keychain.backend', 'collection.base']

            @property
            def _server_env_fields(self):
                base_fields = super()._server_env_fields
                sftp_fields = {
                    "sftp_server": {},
                    "sftp_port": {},
                    "sftp_login": {},
                    "sftp_password": {
                        "no_default_field": True,
                        "compute_default": "_compute_password",
                        "inverse_default": "_inverse_password",
                    },
                }
                sftp_fields.update(base_fields)
                return sftp_fields

    * ``no_default_field`` means that no new (sparse) field need to be
      created, it already is provided by keychain
    * ``compute_default`` is the name of the compute method to get the default
      value when no key is set in the configuration files.
      ``_compute_password`` is implemented by ``keychain.backend``.
    * ``inverse_default`` is the name of the compute method to set the default
      value when it is editable. ``_inverse_password`` is implemented by
      ``keychain.backend``.
    """

    _name = "server.env.mixin"
    _description = "Mixin to add server environment in existing models"

    # JSON container for the ``<field>_env_default`` sparse fields
    server_env_defaults = Serialized()

    # field type -> configparser getter used to read/convert the raw value
    _server_env_getter_mapping = {
        "integer": "getint",
        "float": "getfloat",
        "monetary": "getfloat",
        "boolean": "getboolean",
        "char": "get",
        "selection": "get",
        "text": "get",
    }

    @property
    def _server_env_fields(self):
        """Dict of fields to replace by fields computed from env

        To override in models. The dictionary is:
        {'name_of_the_field': options}

        Where ``options`` is a dictionary::

            options = {
                "getter": "getint",
                "no_default_field": True,
                "compute_default": "_compute_password",
                "inverse_default": "_inverse_password",
            }

        * ``getter``: The configparser getter can be one of: get, getboolean,
          getint, getfloat. The getter is automatically inferred from the
          type of the field, so it shouldn't generally be needed to set it.
        * ``no_default_field``: disable creation of a field for storing the
          default value, must be used with ``compute_default`` and
          ``inverse_default``
        * ``compute_default``: name of a compute method to get the default
          value when no key is present in configuration files
        * ``inverse_default``: name of an inverse method to set the default
          value when the value is editable

        Example::

            @property
            def _server_env_fields(self):
                base_fields = super()._server_env_fields
                sftp_fields = {
                    "sftp_server": {},
                    "sftp_port": {},
                    "sftp_login": {},
                    "sftp_password": {},
                }
                sftp_fields.update(base_fields)
                return sftp_fields
        """
        return {}

    @api.model
    def _server_env_global_section_name(self):
        """Name of the global section in the configuration files

        Can be customized in your model
        """
        return self._name.replace(".", "_")

    def _server_env_section_name(self):
        """Name of the section in the configuration files

        Can be customized in your model
        """
        self.ensure_one()
        base = self._server_env_global_section_name()
        # per-record section: "<model_section>.<record name>"
        return ".".join((base, self.name))

    def _server_env_read_from_config(self, field_name, config_getter):
        """Read one field value from the environment configuration.

        The record-specific section wins over the global one; ``False`` is
        returned (and the error logged) when reading fails.
        """
        self.ensure_one()
        global_section_name = self._server_env_global_section_name()
        section_name = self._server_env_section_name()
        try:
            # at this point we should have checked that we have a key with
            # _server_env_has_key_defined so we are sure that the value is
            # either in the global or the record config
            getter = getattr(serv_config, config_getter)
            if section_name in serv_config and field_name in serv_config[section_name]:
                value = getter(section_name, field_name)
            else:
                value = getter(global_section_name, field_name)
        except Exception:
            _logger.exception(
                "error trying to read field %s in section %s",
                field_name,
                section_name,
            )
            return False
        return value

    def _server_env_has_key_defined(self, field_name):
        """Return True if a key exists for ``field_name`` in any config section."""
        self.ensure_one()
        global_section_name = self._server_env_global_section_name()
        section_name = self._server_env_section_name()
        has_global_config = (
            global_section_name in serv_config
            and field_name in serv_config[global_section_name]
        )
        has_config = (
            section_name in serv_config and field_name in serv_config[section_name]
        )
        return has_global_config or has_config

    def _compute_server_env_from_config(self, field_name, options):
        """Set ``field_name`` from the configuration files."""
        getter_name = options.get("getter") if options else None
        if not getter_name:
            field_type = self._fields[field_name].type
            getter_name = self._server_env_getter_mapping.get(field_type)
        if not getter_name:
            # if you get this message and the field is working as expected,
            # you may want to add the type in _server_env_getter_mapping
            # NOTE: ``field_type`` is always bound here, because reaching this
            # branch implies the previous ``if not getter_name`` ran.
            _logger.warning(
                "server.env.mixin is used on a field of type %s, "
                "which may not be supported properly",
                field_type,
            )
            getter_name = "get"
        value = self._server_env_read_from_config(field_name, getter_name)
        self[field_name] = value

    def _compute_server_env_from_default(self, field_name, options):
        """Set ``field_name`` from its default (DB value or custom compute)."""
        if options and options.get("compute_default"):
            getattr(self, options["compute_default"])()
        else:
            default_field = self._server_env_default_fieldname(field_name)
            if default_field:
                self[field_name] = self[default_field]
            else:
                self[field_name] = False

    def _compute_server_env(self):
        """Read values from environment configuration files

        If an env-computed field has no key in configuration files,
        read from the ``<field>_env_default`` field from database.
        """
        for record in self:
            for field_name, options in self._server_env_fields.items():
                if record._server_env_has_key_defined(field_name):
                    record._compute_server_env_from_config(field_name, options)
                else:
                    record._compute_server_env_from_default(field_name, options)

    def _inverse_server_env(self, field_name):
        """Store the written value as default, but only when the field is editable."""
        options = self._server_env_fields[field_name]
        default_field = self._server_env_default_fieldname(field_name)
        is_editable_field = self._server_env_is_editable_fieldname(field_name)
        for record in self:
            # when we write in an env-computed field, if it is editable
            # we update the default value in database
            if record[is_editable_field]:
                if options and options.get("inverse_default"):
                    getattr(record, options["inverse_default"])()
                elif default_field:
                    record[default_field] = record[field_name]

    def _compute_server_env_is_editable(self):
        """Compute <field>_is_editable values

        We can edit an env-computed filed only if there is no key in any
        environment configuration file. If there is an empty key, it's an
        empty value so we can't edit the env-computed field.
        """
        # we can't group it with _compute_server_env otherwise when called
        # in ``_inverse_server_env`` it would reset the value of the field
        for record in self:
            for field_name in self._server_env_fields:
                is_editable_field = self._server_env_is_editable_fieldname(field_name)
                is_editable = not record._server_env_has_key_defined(field_name)
                record[is_editable_field] = is_editable

    def _server_env_view_set_readonly(self, view_arch):
        """Make env-computed fields readonly in views when their key is set."""
        field_xpath = './/field[@name="%s"]'
        for field in self._server_env_fields:
            is_editable_field = self._server_env_is_editable_fieldname(field)
            for elem in view_arch.findall(field_xpath % field):
                # set env-computed fields to readonly if the configuration
                # files have a key set for this field
                elem.set(
                    "attrs",
                    str({"readonly": [(is_editable_field, "=", False)]}),
                )
            if not view_arch.findall(field_xpath % is_editable_field):
                # add the _is_editable fields in the view for the 'attrs'
                # domain
                view_arch.append(
                    etree.Element("field", name=is_editable_field, invisible="1")
                )
        return view_arch

    def _fields_view_get(
        self, view_id=None, view_type="form", toolbar=False, submenu=False
    ):
        """Inject readonly attrs on env-computed fields in every view."""
        view_data = super()._fields_view_get(
            view_id=view_id,
            view_type=view_type,
            toolbar=toolbar,
            submenu=submenu,
        )
        view_arch = etree.fromstring(view_data["arch"].encode("utf-8"))
        view_arch = self._server_env_view_set_readonly(view_arch)
        view_data["arch"] = etree.tostring(view_arch, encoding="unicode")
        return view_data

    def _server_env_default_fieldname(self, base_field_name):
        """Return the name of the field with default value"""
        options = self._server_env_fields[base_field_name]
        if options and options.get("no_default_field"):
            return ""
        return "{}_env_default".format(base_field_name)

    def _server_env_is_editable_fieldname(self, base_field_name):
        """Return the name of the field for "is editable"

        This is the field used to tell if the env-computed field can
        be edited.
        """
        return "{}_env_is_editable".format(base_field_name)

    def _server_env_transform_field_to_read_from_env(self, field):
        """Transform the original field in a computed field"""
        field.compute = "_compute_server_env"

        inverse_method_name = "_inverse_server_env_%s" % field.name
        # bind a per-field inverse so Odoo knows which field to store
        inverse_method = partialmethod(type(self)._inverse_server_env, field.name)
        setattr(type(self), inverse_method_name, inverse_method)
        field.inverse = inverse_method_name
        field.store = False
        field.required = False
        field.copy = False
        field.sparse = None
        field.prefetch = False

    def _server_env_add_is_editable_field(self, base_field):
        """Add a field indicating if we can edit the env-computed fields

        It is used in the inverse function of the env-computed field
        and in the views to add 'readonly' on the fields.
        """
        fieldname = self._server_env_is_editable_fieldname(base_field.name)
        # if the field is inherited, it's a related to its delegated model
        # (inherits), we want to override it with a new one
        if fieldname not in self._fields or self._fields[fieldname].inherited:
            field = fields.Boolean(
                compute="_compute_server_env_is_editable",
                automatic=True,
                # this is required to be able to edit fields
                # on new records
                default=True,
            )
            self._add_field(fieldname, field)

    def _server_env_add_default_field(self, base_field):
        """Add a field storing the default value

        The default value is used when there is no key for an env-computed
        field in the configuration files.

        The field is a sparse field stored in the serialized (json) field
        ``server_env_defaults``.
        """
        fieldname = self._server_env_default_fieldname(base_field.name)
        if not fieldname:
            return
        # if the field is inherited, it's a related to its delegated model
        # (inherits), we want to override it with a new one
        if fieldname not in self._fields or self._fields[fieldname].inherited:
            base_field_cls = base_field.__class__
            field_args = base_field.args.copy()
            field_args.pop("_sequence", None)
            field_args.update({"sparse": "server_env_defaults", "automatic": True})
            if hasattr(base_field, "selection"):
                field_args["selection"] = base_field.selection
            field = base_field_cls(**field_args)
            self._add_field(fieldname, field)

    @api.model
    def _setup_base(self):
        super()._setup_base()
        for fieldname in self._server_env_fields:
            field = self._fields[fieldname]
            self._server_env_add_default_field(field)
            self._server_env_transform_field_to_read_from_env(field)
            self._server_env_add_is_editable_field(field)
class ShopinvaderImageMixin(models.AbstractModel):
    _name = "shopinvader.image.mixin"
    _description = "Shopinvader Image Mixin"

    # Name of the relational field holding the image relations; must be set
    # by concrete models inheriting this mixin.
    _image_field = None

    images = Serialized(
        compute="_compute_images",
        string="Shopinvader Image",
        compute_sudo=True,
    )
    # Tech field to store images data.
    # It cannot be computed because the computation
    # might required generating thumbs
    # which requires access to the storage files
    # which requires components registry to be available
    # which is not the case when Odoo starts.
    images_stored = Serialized()
    # Fingerprint of the inputs used to build ``images_stored``; when it no
    # longer matches, the stored payload is regenerated.
    images_store_hash = fields.Char()

    def _compute_images(self):
        """Expose the stored image data, refreshing it first when stale."""
        # Force computation if needed
        self.filtered(lambda x: x._images_must_recompute())._compute_images_stored()
        for record in self:
            record.images = record.images_stored

    def _compute_images_stored(self):
        """Regenerate the stored payload and its fingerprint."""
        for record in self:
            record.images_stored = record._get_image_data_for_record()
            record.images_store_hash = record._get_images_store_hash()

    def _images_must_recompute(self):
        """Return True when the stored fingerprint no longer matches."""
        return self.images_store_hash != self._get_images_store_hash()

    @property
    def _resize_scales_field(self):
        # eg: "shopinvader_variant_resize_ids" on the backend
        return "%s_resize_ids" % self._name.replace(".", "_")

    def _resize_scales(self):
        """Return the resize scales configured on the backend for this model."""
        return self.backend_id[self._resize_scales_field]

    def _get_images_store_hash(self):
        """Return a stable fingerprint of everything the images depend on.

        :return: hex digest string, or False when the record has no images
        """
        self.ensure_one()
        if not self[self._image_field]:
            return False
        # Do NOT use the builtin ``hash()`` here: since Python 3.3 string
        # hashing is salted per process (PYTHONHASHSEED), so a value stored
        # in ``images_store_hash`` would never match after a server restart
        # and the cache would be invalidated in every new worker. Use a
        # deterministic digest of the tuple's repr instead (the tuple holds
        # only strings, ints and a datetime, whose reprs are stable).
        import hashlib

        payload = repr(self._get_images_store_hash_tuple()).encode("utf-8")
        return hashlib.sha256(payload).hexdigest()

    def _get_images_store_hash_timestamp(self):
        """Get the timestamp of the last modification of the images

        This also includes the last modification of their relation
        or tags records

        :return: datetime
        """
        images_relation = self[self._image_field]
        timestamps = [
            *images_relation.mapped("write_date"),
            *images_relation.mapped("image_id.write_date"),
        ]
        if "tag_id" in images_relation._fields:
            timestamps += images_relation.mapped("tag_id.write_date")
        return max(timestamps) if timestamps else False

    def _get_images_store_hash_tuple(self):
        """Collect every input that should trigger a payload recomputation."""
        images = self[self._image_field].image_id
        # Get fresh URLs.
        # Normally we have only one backend
        # but potentially you can have different backends by image record.
        # If any base URL changes, images should be recomputed.
        # Eg: swap an image to another backend or change the CDN URL.
        # NOTE: this is not perfect in terms of perf because it will cause
        # calls to `get_or_create_thumbnail` when no image data has changed
        # but it's better than having broken URLs.
        public_urls = tuple(images.mapped("url"))
        resize_scales = tuple(
            self._resize_scales().mapped(lambda r: (r.key, r.size_x, r.size_y))
        )
        timestamp = self._get_images_store_hash_timestamp()
        # TODO: any other bit to consider here?
        return resize_scales + public_urls + (timestamp,)

    def _get_image_url_key(self, image_relation):
        # You can inherit this method to change the name of the image of
        # your website. By default we use the name of the product or category
        # linked to the image processed
        # Note the url will be slugify by the get_or_create_thumnail
        self.ensure_one()
        return self.display_name

    def _get_image_data_for_record(self):
        """Build the serialized payload stored in ``images_stored``.

        :return: list of dicts, one per image relation, keyed by resize key
        """
        self.ensure_one()
        res = []
        resizes = self._resize_scales()
        for image_relation in self[self._image_field]:
            url_key = self._get_image_url_key(image_relation)
            image_data = {}
            for resize in resizes:
                thumbnail = image_relation.image_id.get_or_create_thumbnail(
                    resize.size_x, resize.size_y, url_key=url_key
                )
                image_data[resize.key] = self._prepare_data_resize(
                    thumbnail, image_relation
                )
            res.append(image_data)
        return res

    def _prepare_data_resize(self, thumbnail, image_relation):
        """Prepare data to fill images serialized field

        :param thumbnail: ``storage.thumbnail`` recordset
        :param image_relation: ``image.relation.abstract`` recordset
        :return: dict
        """
        self.ensure_one()
        res = {"src": thumbnail.url, "alt": self.name}
        if "tag_id" in image_relation._fields:
            res["tag"] = image_relation.tag_id.name or ""
        return res