class Sale:

    id = Integer(label="Identifier", primary_key=True)
    contents = Jsonb(label="Properties")

    @classmethod
    def create(cls, contents):
        """Create a Sale, returning it and the corresponding Reservation Request."""
        sale = cls.insert(contents=contents)
        Wms = cls.registry.Wms
        Reservation = Wms.Reservation
        RequestItem = Reservation.RequestItem
        GoodsType = Wms.PhysObj.Type
        req = Reservation.Request.insert(purpose=['sale', sale.id])
        for product, qty in contents.items():
            gt = GoodsType.query().filter(GoodsType.code == product).one()
            RequestItem.insert(goods_type=gt, quantity=qty, request=req)
        return sale, req

    @classmethod
    def create_random(cls):
        contents = {}
        for _ in range(randrange(4)):
            width = randrange(25, 45)
            height = randrange(20, 40)
            contents['JEANS/%d/%d' % (width, height)] = randrange(1, 3)
        return cls.create(contents)
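# Illustrative sketch (editor's addition, not part of the original module):
# how Sale.create() above might be used. It assumes an initialized AnyBlok
# registry is passed in, that this example Sale model is registered as
# Model.Sale, and that a PhysObj Type with code 'JEANS/32/34' exists.
def example_create_sale(registry):
    """Create a Sale of two pairs of jeans and its Reservation Request."""
    sale, request = registry.Sale.create({'JEANS/32/34': 2})
    # the Request purpose records the Sale id, so a planner claiming the
    # reservations later can find the originating Sale back
    assert request.purpose == ['sale', sale.id]
    return sale, request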
class Type:
    """Types of Goods.

    For a full functional discussion, see :ref:`goods_type`.
    """
    id = Integer(label="Identifier", primary_key=True)
    """Primary key"""

    code = String(label=u"Identifying code", index=True)
    """Uniquely identifying code.

    As a convenience, and for sharing with other applications.
    """

    label = String(label=u"Label")

    behaviours = Jsonb(label="Behaviours in operations")
    """
    Goods Types specify with this flexible field how various
    :class:`Operations
    <anyblok_wms_base.bloks.wms_core.operation.base.Operation>`
    will treat the represented Goods.

    .. seealso:: :class:`Unpack
                 <anyblok_wms_base.bloks.wms_core.operation.unpack.Unpack>`
                 for a complex example.

    The value is a key/value mapping.

    This field is also open for downstream libraries and applications to
    make use of it to define some of their specific logic, but care must be
    taken not to conflict with the keys used by ``wms-core`` and other
    bloks (TODO introduce namespacing, then ? at least make a list
    available by using constants from an autodocumented module)
    """

    def __str__(self):
        return "(id={self.id}, code={self.code!r})".format(self=self)

    def __repr__(self):
        return "Wms.Goods.Type" + str(self)

    def get_behaviour(self, key, default=None):
        """Get the value of the behaviour with the given key.

        This is a shortcut to avoid testing over and over whether
        :attr:`behaviours` is ``None``.
        """
        behaviours = self.behaviours
        if behaviours is None:
            return default
        return behaviours.get(key, default)
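# Illustrative sketch (editor's addition): defining a Goods Type carrying an
# ``unpack`` behaviour and reading it back through get_behaviour(). The
# 'PACK_6X_BOTTLE' and 'BOTTLE' codes are hypothetical, and the shape of the
# ``unpack`` behaviour is only shown schematically (its exact format is
# documented on the Unpack Operation).
def example_goods_type_behaviour(registry):
    Type = registry.Wms.Goods.Type
    pack_type = Type.insert(
        code='PACK_6X_BOTTLE',
        label="Pack of 6 bottles",
        behaviours={'unpack': {'outcomes': [{'type': 'BOTTLE',
                                             'quantity': 6}]}},
    )
    # get_behaviour() spares the caller the None check on ``behaviours``
    assert pack_type.get_behaviour('unpack') is not None
    assert pack_type.get_behaviour('assembly') is None
    return pack_type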
class Service(Mixin.UuidColumn, Mixin.TrackModel):
    """Carrier service.

    Namespace: Model.Delivery.Carrier.Service
    """
    CARRIER_CODE = None

    name = String(label="Name", nullable=False, size=128)
    product_code = String(label="Product code", unique=True, nullable=False)
    carrier = Many2One(label="Carrier",
                       model=Declarations.Model.Delivery.Carrier,
                       one2many='services',
                       nullable=False)
    credential = Many2One(label="Credential",
                          model=Declarations.Model.Delivery.Carrier.Credential,
                          one2many='services',
                          nullable=False)
    properties = Jsonb(label="Properties")
    carrier_code = Selection(selections='get_carriers')

    @classmethod
    def define_mapper_args(cls):
        mapper_args = super(Service, cls).define_mapper_args()
        if cls.__registry_name__ == 'Model.Delivery.Carrier.Service':
            mapper_args.update({'polymorphic_on': cls.carrier_code})
        mapper_args.update({'polymorphic_identity': cls.CARRIER_CODE})
        return mapper_args

    @classmethod
    def query(cls, *args, **kwargs):
        query = super(Service, cls).query(*args, **kwargs)
        if cls.__registry_name__.startswith('Model.Delivery.Carrier.Service.'):
            query = query.filter(cls.carrier_code == cls.CARRIER_CODE)
        return query

    @classmethod
    def get_carriers(cls):
        return dict()

    def create_label(self, *args, **kwargs):
        raise Exception("Creating a label directly from the Carrier.Service "
                        "class is forbidden. Please use a specialized one, "
                        "such as Colissimo, Dhl, etc.")

    def get_label_status(self, *args, **kwargs):
        raise Exception("Updating the status of a label directly from the "
                        "Carrier.Service class is forbidden. Please use "
                        "a specialized one, such as Colissimo, Dhl, etc.")
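# Illustrative sketch (editor's addition): how a concrete carrier could
# specialize the polymorphic Service model above. The registry paths, the
# 'COLISSIMO' code and the label-creation stub are hypothetical; only the
# CARRIER_CODE / get_carriers / create_label pattern comes from the base
# class, and the AnyBlok registration decorators are shown schematically.
@Declarations.register(Declarations.Model.Delivery.Carrier)
class Service:

    @classmethod
    def get_carriers(cls):
        # extend the Selection values with our hypothetical carrier code
        res = super(Service, cls).get_carriers()
        res.update(dict(COLISSIMO='Colissimo'))
        return res


@Declarations.register(Declarations.Model.Delivery.Carrier.Service)
class Colissimo(Declarations.Model.Delivery.Carrier.Service):
    CARRIER_CODE = "COLISSIMO"

    def create_label(self, *args, **kwargs):
        # a real implementation would call the carrier's web service here
        raise NotImplementedError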
class Template:
    """Template class."""

    id = Integer(label="Identifier", primary_key=True)
    create_date = DateTime(default=datetime.now, nullable=False)
    edit_date = DateTime(default=datetime.now, nullable=False,
                         auto_update=True)
    code = String(label="Template code", unique=True, nullable=False)
    name = String(label="Template name", size=256)
    description = Text(label="Template description")
    properties = Jsonb(label="Template properties")

    def __str__(self):
        return "%s : %s" % (self.code, self.name)

    def __repr__(self):
        return "<Template(code=%s, name=%s)>" % (self.code, self.name)
class Passenger(Mixin.UuidColumn):

    # Find out whether birthdate should be nullable or not.
    birthdate = Date(label="Birthdate", nullable=True)
    name = String(label="Passenger Name", nullable=True)
    reduction_card = Many2One(
        label="Reduction Card",
        model=Model.ReductionCard,
        one2many="passengers",
    )
    user = Many2One(label="User", model=Model.User, nullable=False,
                    one2many="passengers")
    properties = Jsonb(label="properties")
class Line:
    """Represent an assessment for a :class:`Node <Node>` instance.

    This is an inert model, meant to be filled through some user interface.

    If the corresponding :class:`Node` is a leaf, then :attr:`location`
    could be any container under the Node's :attr:`location
    <Node.location>`. But if the :class:`Node` is split, then
    :attr:`location` must be identical to the Node's :attr:`location
    <Node.location>`, otherwise the simplification of reconciliation
    :class:`Actions <.action.Action>` can't work properly.
    """
    id = Integer(label="Identifier", primary_key=True)
    """Primary key."""

    node = Many2One(model=Wms.Inventory.Node,
                    one2many='lines',
                    nullable=False)
    location = Many2One(model=Wms.PhysObj, nullable=False)
    type = Many2One(model=Wms.PhysObj.Type, nullable=False)
    code = Text()
    properties = Jsonb()
    quantity = Integer(nullable=False)
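# Illustrative sketch (editor's addition): recording an assessment Line for a
# leaf Node, using the fields defined above. The ``node``, ``shelf`` and
# ``jeans_type`` arguments are hypothetical records supplied by the caller.
def example_record_line(registry, node, shelf, jeans_type):
    """Record that 3 objects of ``jeans_type`` were counted in ``shelf``.

    ``shelf`` can be any container under ``node.location`` because ``node``
    is assumed to be a leaf (non split) Node.
    """
    return registry.Wms.Inventory.Line.insert(
        node=node, location=shelf, type=jeans_type, quantity=3)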
class Arrival(Operation):
    """Operation to describe the physical arrival of goods in some location.

    Arrivals store data about the expected or arrived Goods: properties,
    code… These are copied over to the corresponding Goods records in all
    cases and stay inert after the fact.

    In case the Arrival state is ``planned``, these are obviously only
    unchecked values, but in case it is ``done``, the actual meaning can
    depend on the application:

    - maybe the application won't use the ``planned`` state at all, and
      will only create Arrivals after checking them,
    - maybe the application will inspect the Arrival properties, compare
      them to reality, update them on the created Goods and cancel
      downstream operations if needed, before calling :meth:`execute`.

    TODO maybe provide higher level facilities for validation scenarios.
    """
    TYPE = 'wms_arrival'

    id = Integer(label="Identifier",
                 primary_key=True,
                 autoincrement=False,
                 foreign_key=Operation.use('id').options(ondelete='cascade'))
    """Primary key."""

    goods_type = Many2One(model='Model.Wms.Goods.Type')
    """Expected :class:`Goods Type
    <anyblok_wms_base.bloks.wms_core.goods.Type>`.
    """

    goods_properties = Jsonb(label="Properties of arrived Goods")
    """Expected :class:`Properties
    <anyblok_wms_base.bloks.wms_core.goods.Properties>`.

    They are copied over to the newly created :class:`Goods
    <anyblok_wms_base.bloks.wms_core.goods.Goods>` as soon as the Arrival
    is planned, and aren't updated by :meth:`execute`. Matching them with
    reality is the concern of separate validation processes, and this field
    can serve for later assessments after the fact.
    """

    goods_code = String(label="Code to set on arrived Goods")
    """Expected :attr:`Goods code
    <anyblok_wms_base.bloks.wms_core.goods.Goods.code>`.

    Can be ``None`` in case the arrival process issues the code only at the
    time of actual arrival.
    """

    location = Many2One(model='Model.Wms.Location')
    """Will be the location of the initial Avatar."""

    inputs_number = 0
    """This Operation is a purely creative one."""

    def specific_repr(self):
        return ("goods_type={self.goods_type!r}, "
                "location={self.location!r}").format(self=self)

    def after_insert(self):
        Goods = self.registry.Wms.Goods
        self_props = self.goods_properties
        if self_props is None:
            props = None
        else:
            props = Goods.Properties.create(**self_props)

        goods = Goods.insert(type=self.goods_type,
                             properties=props,
                             code=self.goods_code)
        Goods.Avatar.insert(
            goods=goods,
            location=self.location,
            reason=self,
            state='present' if self.state == 'done' else 'future',
            dt_from=self.dt_execution,
        )

    def execute_planned(self):
        Avatar = self.registry.Wms.Goods.Avatar
        Avatar.query().filter(Avatar.reason == self).one().update(
            state='present', dt_from=self.dt_execution)
class Inventory:
    """This model represents the decision of making an Inventory.

    It expresses a global specification for the inventory process to be
    made, as well as human level additional information.

    Applicative code is welcome to, and actually supposed to, override this
    to add more columns as needed (dates, creator, reason, comments...)

    Instances of :class:`Wms.Inventory <Inventory>` are linked to a tree of
    processing :class:`Nodes <anyblok_wms_base.inventory.node.Node>`, which
    is reachable with the convenience :attr:`root` attribute.

    TODO structural Properties to use throughout the whole hierarchy for
    Physical Object identification

    This tree is designed for distribution of the assessment and
    reconciliation work, but it's possible to compute all reconciliations
    and apply them on an Inventory for testing purposes as follows
    (assuming that all related :class:`Nodes <.node.Node>` are in the
    ``full`` state)::

        inventory.root.recurse_compute_push_actions()
        inventory.reconcile_all()
    """
    id = Integer(label="Identifier", primary_key=True)
    """Primary key."""

    excluded_types = Jsonb()
    """List of PhysObj.Type codes to be excluded.

    This is not the smartest way of excluding stuff, but it's good enough
    for the time being. The primary use-case is to exclude some/most of the
    container types from inventories, which could also be done by excluding
    all container types with a recursive query involving behaviours, but
    that's a performance hit for something that can be done by simply
    excluding a few types.
    """

    considered_types = Jsonb()
    """List of ``PhysObj.Type`` codes to be considered.

    Similarly to :attr:`excluded_types`, this is good enough and can later
    be improved by adding a flag to make it recursive.
    """

    @property
    def root(self):
        """Root Node of the Inventory."""
        return (self.registry.Wms.Inventory.Node.query()
                .filter_by(inventory=self, parent=None)
                .one())

    @classmethod
    def create(cls, location, **fields):
        """Insert a new Inventory together with its root Node.

        :return: the new Inventory
        """
        Node = cls.registry.Wms.Inventory.Node
        inventory = cls.insert(**fields)
        Node.insert(inventory=inventory, location=location)
        return inventory

    def reconcile_all(self):
        """Convenience method to apply all Actions linked to this Inventory.

        This is a straightforward yet non scalable implementation of the
        final reconciliation (see below). Don't use it on large
        installations.

        To run it, the :attr:`root` Node is required to have reached the
        ``pushed`` state.

        :raises: NodeStateError if the :attr:`root` Node is not ready.

        This method does everything in one shot, therefore leading to huge
        database transactions on full inventories of large installations.
        For large inventories, a more progressive way of doing this is
        required, perhaps Node per Node plus batching for each Node.
        Nodes wouldn't have to be taken in order, but care must be taken
        while updating their state to 'reconciled' in out of order
        executions with several batches per Node.
        """
        root = self.root
        if root.state != 'pushed':
            raise NodeStateError(root, "This root {node} has not "
                                 "reached the 'pushed' state "
                                 "(currently at {state!r})")
        Node = self.Node
        Action = self.Action
        for action in (Action.query()
                       .join(Node, Node.id == Action.node_id)
                       .filter(Node.inventory == self)
                       .all()):
            action.apply()
        (Node.query()
         .filter_by(inventory=self)
         .update(dict(state='reconciled'), synchronize_session='fetch'))
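# Illustrative sketch (editor's addition): starting an Inventory of a whole
# warehouse while excluding container Types from the count, as suggested in
# the ``excluded_types`` docstring. The 'LOC' and 'PALLET' codes and the
# ``warehouse`` argument are hypothetical.
def example_start_inventory(registry, warehouse):
    inventory = registry.Wms.Inventory.create(
        warehouse, excluded_types=['LOC', 'PALLET'])
    # the root Node is created alongside and covers ``warehouse``
    return inventory.root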
class Arrival(Mixin.WmsSingleOutcomeOperation, Operation):
    """Operation to describe the physical arrival of goods in some location.

    Arrivals store data about the expected or arrived physical objects:
    properties, code… These are copied over to the corresponding PhysObj
    records in all cases and stay inert after the fact.

    In case the Arrival state is ``planned``, these are obviously only
    unchecked values, but in case it is ``done``, the actual meaning can
    depend on the application:

    - maybe the application won't use the ``planned`` state at all, and
      will only create Arrivals after checking them,
    - maybe the application will inspect the Arrival properties, compare
      them to reality, update them on the created PhysObj and cancel
      downstream operations if needed, before calling :meth:`execute`.

    TODO maybe provide higher level facilities for validation scenarios.
    """
    TYPE = 'wms_arrival'

    id = Integer(label="Identifier",
                 primary_key=True,
                 autoincrement=False,
                 foreign_key=Operation.use('id').options(ondelete='cascade'))
    """Primary key."""

    physobj_type = Many2One(model='Model.Wms.PhysObj.Type')
    """Expected :class:`PhysObj Type
    <anyblok_wms_base.core.physobj.Type>`.
    """

    physobj_properties = Jsonb(label="Properties of arrived PhysObj")
    """Expected :class:`Properties
    <anyblok_wms_base.core.physobj.Properties>`.

    They are copied over to the newly created :class:`PhysObj
    <anyblok_wms_base.core.physobj.PhysObj>` as soon as the Arrival is
    planned, and aren't updated by :meth:`execute`. Matching them with
    reality is the concern of separate validation processes, and this field
    can serve for later assessments after the fact.
    """

    physobj_code = Text(label="Code to set on arrived PhysObj")
    """Expected :attr:`PhysObj code
    <anyblok_wms_base.core.physobj.PhysObj.code>`.

    Can be ``None`` in case the arrival process issues the code only at the
    time of actual arrival.
    """

    location = Many2One(model='Model.Wms.PhysObj')
    """Will be the location of the initial Avatar."""

    goods_type = Function(fget='_goods_type_get',
                          fset='_goods_type_set',
                          fexpr='_goods_type_expr')
    """Compatibility wrapper.

    Before version 0.9.0, :attr:`physobj_type` was ``goods_type``.

    This does not extend to compatibility of the former low level
    ``goods_type_id`` column.
    """

    goods_properties = Function(fget='_goods_properties_get',
                                fset='_goods_properties_set',
                                fexpr='_goods_properties_expr')
    """Compatibility wrapper.

    Before version 0.9.0, :attr:`physobj_properties` was
    ``goods_properties``.
    """

    goods_code = Function(fget='_goods_code_get',
                          fset='_goods_code_set',
                          fexpr='_goods_code_expr')
    """Compatibility wrapper.

    Before version 0.9.0, :attr:`physobj_code` was ``goods_code``.
""" inputs_number = 0 """This Operation is a purely creative one.""" destination_field = 'location' def specific_repr(self): return ("physobj_type={self.physobj_type!r}, " "location={self.location!r}").format(self=self) def _goods_col_get(self, suffix): deprecation_warn_goods_col(self, suffix) return getattr(self, 'physobj_' + suffix) def _goods_col_set(self, suffix, value): deprecation_warn_goods_col(self, suffix) setattr(self, 'physobj_' + suffix, value) @classmethod def _goods_col_expr(cls, suffix): deprecation_warn_goods_col(cls, suffix) return getattr(cls, 'physobj_' + suffix) def _goods_type_get(self): return self._goods_col_get('type') def _goods_type_set(self, value): self._goods_col_set('type', value) @classmethod def _goods_type_expr(cls): return cls._goods_col_expr('type') def _goods_properties_get(self): return self._goods_col_get('properties') def _goods_properties_set(self, value): self._goods_col_set('properties', value) @classmethod def _goods_properties_expr(cls): return cls._goods_col_expr('properties') def _goods_code_get(self): return self._goods_col_get('code') def _goods_code_set(self, value): self._goods_col_set('code', value) @classmethod def _goods_code_expr(cls): return cls._goods_col_expr('code') @classmethod def check_create_conditions(cls, state, dt_execution, location=None, **kwargs): """Ensure that ``location`` is indeed a container.""" super(Arrival, cls).check_create_conditions(state, dt_execution, **kwargs) if location is None or not location.is_container(): raise OperationContainerExpected( cls, "location field value {offender}", offender=location) def after_insert(self): PhysObj = self.registry.Wms.PhysObj self_props = self.physobj_properties if self_props is None: props = None else: props = PhysObj.Properties.create(**self_props) goods = PhysObj.insert(type=self.physobj_type, properties=props, code=self.physobj_code) PhysObj.Avatar.insert( obj=goods, location=self.location, outcome_of=self, state='present' if self.state == 'done' else 'future', dt_from=self.dt_execution, ) def execute_planned(self): self.outcome.update(state='present', dt_from=self.dt_execution) @classmethod def refine_with_trailing_unpack(cls, arrivals, pack_type, dt_pack_arrival=None, dt_unpack=None, pack_properties=None, pack_code=None): """Replace some Arrivals by the Arrival of a pack followed by an Unpack. This is useful in cases where it is impossible to predict ahead how incoming goods will actually be packed: the arrivals of individual items can first be planned, and once more is known about the form of delivery, this classmethod can replace some of them with the Arrival of a parcel and the subsequent Unpack. Together with :meth:`refine_with_trailing_move <anyblok_wms_base.core.operation.base.Operation.refine_with_trailing_move>`, this can handle the use case detailed in :ref:`improvement_operation_superseding`. :param arrivals: the Arrivals considered to be superseded by the Unpack. It is possible that only a subset of them are superseded, and conversely that the Unpack has more outcomes than the superseded Arrivals. For more details about the matching, see :meth:`Unpack.plan_for_outcomes <anyblok_wms_base.core.operation.unpack.Unpack.plan_for_outcomes>` :param pack_type: :attr:`anyblok_wms_base.core.PhysObj.main.PhysObj.type` of the expected pack. :param pack_properties: optional properties of the expected Pack. This optional parameter is of great importance in the case of parcels with variable contents, since it allows to set the ``contents`` Property. 
        :param str pack_code: optional code of the expected Pack.
        :param datetime dt_pack_arrival: expected date/time for the Arrival
                                         of the pack. If not specified, a
                                         default one will be computed.
        :param datetime dt_unpack: expected date/time for the Unpack
                                   Operation. If not specified, a default
                                   one will be computed.
        """  # noqa (unbreakable meth crosslink)
        for arr in arrivals:
            arr.check_alterable()
        if not arrivals:
            raise OperationError(cls,
                                 "got empty collection of arrivals "
                                 "to refine: {arrivals!r}",
                                 arrivals=arrivals)

        # check that the arrivals happen in the same location
        arr_iter = iter(arrivals)
        location = next(arr_iter).location
        if not all(arr.location == location for arr in arr_iter):
            raise OperationError(
                cls,
                "can't rewrite arrivals to different locations, "
                "got {nb_locs} different ones in {arrivals}",
                nb_locs=len(set(arr.location for arr in arrivals)),
                arrivals=arrivals)

        Wms = cls.registry.Wms
        Unpack = Wms.Operation.Unpack

        if dt_pack_arrival is None:
            # max minimizes the number of date/time shifts to perform
            # upon later execution, min is more optimistic
            dt_pack_arrival = min(arr.dt_execution for arr in arrivals)
        pack_arr = cls.create(location=location,
                              dt_execution=dt_pack_arrival,
                              physobj_type=pack_type,
                              physobj_properties=pack_properties,
                              physobj_code=pack_code,
                              state='planned')

        arrivals_outcomes = {arr.outcome: arr for arr in arrivals}
        unpack, attached_avatars = Unpack.plan_for_outcomes(
            pack_arr.outcomes,
            arrivals_outcomes.keys(),
            dt_execution=dt_unpack)
        for att in attached_avatars:
            arrivals_outcomes[att].delete()

        return unpack
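# Illustrative sketch (editor's addition): planning individual Arrivals, then
# replacing them with the Arrival of a parcel followed by an Unpack once the
# actual packing is known, using refine_with_trailing_unpack() above. The
# Type arguments, the ``incoming`` location, the ``dt_expected`` date and the
# shape of the ``contents`` Property are hypothetical / schematic.
def example_refine_arrivals(registry, incoming, bottle_type, parcel_type,
                            dt_expected):
    Arrival = registry.Wms.Operation.Arrival
    arrivals = [Arrival.create(location=incoming,
                               physobj_type=bottle_type,
                               dt_execution=dt_expected,
                               state='planned')
                for _ in range(6)]
    # later, once we learn the six bottles will come as a single parcel:
    unpack = Arrival.refine_with_trailing_unpack(
        arrivals, parcel_type,
        pack_properties=dict(contents=[dict(type=bottle_type.code,
                                            quantity=6)]))
    return unpack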
class Properties:
    """Properties of Goods.

    This is kept in a separate Model (and SQL table) to provide sharing
    among several :class:`Goods` instances, as they can turn out to be
    identical for a large number of them.

    Use-case: receive a truckload of milk bottles that all have the same
    expiration date, and unpack everything down to the bottles. The
    expiration date would be stored in a single Properties instance,
    assuming there aren't also non-uniform properties to store, of course.

    Applications are welcome to overload this model to add new fields
    rather than storing their meaningful information in the
    :attr:`flexible` field, if it has added value for performance or
    programming tightness reasons. This has the obvious drawback of
    defining some properties for all Goods, regardless of their Types, so
    it should not be abused.

    On :class:`Goods`, the :meth:`get_property <Goods.get_property>` /
    :meth:`set_property <Goods.set_property>` API will treat direct fields
    and top-level keys of :attr:`flexible` uniformly, so that, as long as
    all pieces of code use only this API to handle properties, flexible
    keys can be replaced with proper fields transparently at any time in
    the development of downstream applications and libraries (assuming of
    course that any existing data is properly migrated to the new schema).
    """
    id = Integer(label="Identifier", primary_key=True)
    """Primary key."""

    flexible = Jsonb(label="Flexible properties")
    """Flexible properties.

    The value is expected to be a mapping, and all property handling
    operations defined in ``wms-core`` will handle the properties by key,
    while being indifferent to the values.

    .. note:: the core also makes use of a few special properties, such as
              ``unpack_outcomes``. TODO make a list, in the form of
              constants in a module
    """

    def get(self, k, default=None):
        if k in self.loaded_columns:
            return getattr(self, k)
        return self.flexible.get(k, default)

    def set(self, k, v):
        if k in ('id', 'flexible'):
            raise ValueError("The key %r is reserved, and can't be used "
                             "for properties" % k)
        if k in self.fields_description():
            setattr(self, k, v)
        else:
            self.flexible[k] = v
            flag_modified(self, '__anyblok_field_flexible')

    def duplicate(self):
        """Insert a copy of ``self`` and return its id."""
        fields = {k: getattr(self, k)
                  for k in self.fields_description().keys()}
        fields.pop('id')
        return self.insert(**fields)

    @classmethod
    def create(cls, **props):
        """Direct creation.

        The caller doesn't have to care about which properties get stored
        as direct fields or in the :attr:`flexible` field.

        This method is a better alternative than insertion followed by
        calls to :meth:`set`, because it guarantees that only one SQL
        INSERT will be issued.
        """
        fields = cls.fields_description()
        columns = {}
        flexible = {}
        forbidden = ('id', 'flexible')
        for k, v in props.items():
            if k in forbidden:
                raise ValueError(
                    "The key %r is reserved, and can't be used as "
                    "a property key" % k)
            if k in fields:
                columns[k] = v
            else:
                flexible[k] = v
        return cls.insert(flexible=flexible, **columns)
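# Illustrative sketch (editor's addition): the create()/get()/set() API above
# treats direct columns and flexible keys uniformly. ``expiration_date`` and
# ``batch`` are hypothetical property names (no dedicated column is assumed
# to exist for them, so they end up in ``flexible``).
def example_properties(registry):
    Properties = registry.Wms.Goods.Properties
    props = Properties.create(expiration_date='2024-06-01', batch='A13')
    assert props.get('batch') == 'A13'
    # set() mutates ``flexible`` in place and marks it as modified
    props.set('batch', 'A14')
    return props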
class Request:

    id = Integer(label="Identifier", primary_key=True)
    """Primary key.

    In this model, the ordering of ``id`` is actually important (whereas on
    many others, having a serial id is just a matter of habit): the smaller
    it is, the older the Request.

    Requests have to be reserved in order.

    Note that ``serial`` columns in PostgreSQL don't induce conflicts, as
    the sequence is evaluated out of transaction.
    """

    purpose = Jsonb()
    """Flexible field to describe what the reservations will be for.

    This is typically used by a planner, to produce an appropriate chain of
    Operations to fulfill that purpose.

    Example: in a simple sales system, we would record a sale order
    reference here, and the planner would then take the related PhysObj and
    issue (planned) Moves and Departures for their 'present' or 'future'
    Avatars.
    """

    reserved = Boolean(nullable=False, default=False)
    """Indicates that all reservations are taken.

    TODO: find a way to represent if the Request is partially done ? Some
    use-cases would require planning partial deliveries and the like in
    that case.
    """

    planned = Boolean(nullable=False, default=False)
    """Indicates that the planner has finished with that Request.

    It's better than deleting, because it allows cancelling all Operations,
    setting this back to ``False``, and planning again.
    """

    txn_owned_reservations = set()
    """The set of Request ids whose reservations are owned by the current
    transaction."""

    @classmethod
    @contextmanager
    def claim_reservations(cls, query=None, **filter_by):
        """Context manager to claim ownership over this Request's reservations.

        This is meant for planners, and works on fully reserved Requests.
        Example::

            Request = registry.Wms.Reservation.Request
            with Request.claim_reservations() as req_id:
                request = Request.query().get(req_id)
                (...) read request.purpose, plan Operations (...)

        By calling this, the current transaction becomes responsible for
        all the Request's reservations, meaning that it has the liberty to
        issue any Operation affecting its PhysObj or their Avatars.

        :return: id of the claimed Request
        :param dict filter_by: direct filtering criteria to add to the
                               query, e.g., a planner looking for planning
                               to be done would pass ``planned=False``.
        :param query: if specified, it is used to form the final SQL query,
                      instead of creating a new one. The passed query must
                      have the present model class in its ``FROM`` clause
                      and return only the ``id`` column of the present
                      model. The criteria of ``filter_by`` are still
                      applied if also provided.

        This is safe with respect to concurrency: no other transaction can
        claim the same Request (guaranteed by a PostgreSQL lock).

        The session will forget about this Request as soon as one exits the
        ``with`` statement, and the underlying PG lock is released at the
        end of the transaction.

        TODO for now it's a context manager. I'd find it more elegant to
        tie it to the transaction, to get automatic release without a
        ``with`` syntax, but that requires more digging into SQLAlchemy and
        Anyblok internals.

        TODO I think FOR UPDATE actually creates a new internal PG row
        (table bloat). Shall we switch to advisory locks (see PG doc) with
        a hardcoded mapping to an integer ?
        If that's true, then performance-wise it's equivalent for us to set
        the txn id in some service column (but that would require
        unconditional cleanup, a complication).
        """
        if query is None:
            query = cls.query('id')
        query = query.filter_by(reserved=True, **filter_by)
        # issues a SELECT FOR UPDATE SKIP LOCKED (search
        # 'with_for_update' within
        # http://docs.sqlalchemy.org/en/latest/core/selectable.html
        # also, noteworthy, SKIP LOCKED appeared with PostgreSQL 9.5
        # (https://www.postgresql.org/docs/current/static/release-9-5.html)
        cols = query.with_for_update(skip_locked=True,
                                     of=cls).order_by(cls.id).first()
        request_id = None if cols is None else cols[0]
        if request_id is not None:
            cls.txn_owned_reservations.add(request_id)

        yield request_id

        if request_id is not None:
            cls.txn_owned_reservations.discard(request_id)

    def is_txn_reservations_owner(self):
        """Tell whether the current transaction owns this Request's
        reservations.

        :return: ``True`` if the current transaction has claimed ownership,
                 using the :meth:`claim_reservations` method.
        """
        return self.id in self.txn_owned_reservations

    def reserve(self):
        """Try and perform the reservation of all RequestItems.

        :return: ``True`` if all reservations are now taken
        :rtype: bool

        Should not fail if reservations are already done.
        """
        Item = self.registry.Wms.Reservation.RequestItem
        # could use map() and all(), but it's not recommended style
        # if there are strong side effects.
        all_reserved = True
        for item in Item.query().filter(Item.request == self).all():
            all_reserved = all_reserved and item.reserve()
        self.reserved = all_reserved
        return all_reserved

    @classmethod
    def lock_unreserved(cls, batch_size, query_filter=None, offset=0):
        """Take exclusivity over not yet reserved Requests.

        This is used in :ref:`Reservers <arch_reserver>` implementations.

        :param int batch_size: maximum number of Requests to lock at once.

        Since reservations have to be taken in order, this produces a hard
        error in case there's a conflicting database lock, instead of
        skipping the conflicting rows like :meth:`claim_reservations` does.

        This conflicts in particular with locks taken by
        :meth:`claim_reservations`, but in principle, only
        :ref:`reservers <arch_reserver>` should take locks over reservation
        Requests that are not reserved yet, and these should not run
        concurrently (or only in a very controlled way, using
        ``query_filter``).
        """
        query = cls.query().filter(cls.reserved.is_(False))
        if query_filter is not None:
            query = query_filter(query)
        query = query.with_for_update(nowait=True).order_by(cls.id)
        try:
            return query.limit(batch_size).offset(offset).all()
        except sqlalchemy.exc.OperationalError as op_err:
            cls.registry.rollback()
            raise cls.ReservationsLocked(op_err)

    class ReservationsLocked(RuntimeError):
        """Used to rewrap concurrency errors while taking locks."""

        def __init__(self, db_exc):
            self.db_exc = db_exc

    @classmethod
    def reserve_all(cls, batch_size=10, nb_attempts=5, retry_delay=1,
                    query_filter=None):
        """Try and perform all reservations for pending Requests.

        This walks all pending (:attr:`reserved` equal to ``False``)
        Requests that haven't been reserved, from the oldest, and locks
        them by batches of ``batch_size``.

        Reservation is attempted for each Request, in order, meaning that
        each Request will grab as much PhysObj as it can before the next
        one gets processed.
        :param int batch_size: number of pending Requests to grab at each
                               iteration
        :param nb_attempts: number of attempts (in the face of conflicts)
                            for each batch
        :param retry_delay: time to wait before retrying to grab a batch
                            (hoping other transactions holding locks would
                            have released them)
        :param query_filter: optional function to add filtering to the
                             query used to grab the reservations. The
                             caller can use this to implement controlled
                             concurrency in the reservation process:
                             several processes can focus on different
                             Requests, as long as they don't compete for
                             PhysObj to reserve.

        The transaction is committed for each batch, and that's essential
        for proper operation under concurrency.
        """
        skip = 0
        while True:
            # TODO log.info
            count = 1
            while True:
                try:
                    requests = cls.lock_unreserved(batch_size,
                                                   offset=skip,
                                                   query_filter=query_filter)
                except cls.ReservationsLocked:
                    # TODO log.warning
                    if count == nb_attempts:
                        raise
                    time.sleep(retry_delay)
                    count += 1
                else:
                    break
            if not requests:
                break
            for request in requests:
                if not request.reserve():
                    skip += 1
            cls.registry.commit()
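# Illustrative sketch (editor's addition): the skeleton of a planner step
# built on claim_reservations(), as outlined in its docstring. What is done
# with ``request.purpose`` is application dependent and is represented here
# by a hypothetical ``plan_operations`` callable.
def example_planner_step(registry, plan_operations):
    Request = registry.Wms.Reservation.Request
    with Request.claim_reservations(planned=False) as req_id:
        if req_id is None:
            return False  # nothing to plan right now
        request = Request.query().get(req_id)
        plan_operations(request.purpose)
        request.planned = True
    # committing releases the FOR UPDATE lock taken by claim_reservations()
    registry.commit()
    return True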
class Family:
    """Product.Family class."""

    FAMILY_CODE = None
    family_schema = None
    template_schema = None
    item_schema = None

    id = Integer(label="Identifier", primary_key=True)
    create_date = DateTime(default=datetime.now, nullable=False)
    edit_date = DateTime(default=datetime.now, nullable=False,
                         auto_update=True)
    code = String(label="Family code", unique=True, nullable=False)
    name = String(label="Family name", size=256)
    description = Text(label="Family description")
    properties = Jsonb(label="Family properties")
    family_code = Selection(selections='get_family_codes')
    items = Function(fget="fget_items")

    @classmethod
    def get_family_codes(cls):
        return dict()

    def fget_items(self):
        """Return the list of product item instances from this Family."""
        return self.registry.InstrumentedList(
            set([i for t in self.templates for i in t.items]))

    @classmethod
    def create(cls, **kwargs):
        data = kwargs.copy()
        if cls.family_schema:
            sch = cls.family_schema(registry=cls.registry)
            data = sch.load(kwargs)
        return cls.insert(**data)

    def amend(self, **kwargs):
        data = kwargs.copy()
        properties = data.pop('properties', dict())
        if properties:
            for k, v in properties.items():
                self.properties[k] = v
        if self.family_schema:
            sch = self.family_schema(registry=self.registry)
            data.update(dict(properties=self.properties))
            data = sch.load(data)
        self.update(**data)
        return self

    @classmethod
    def query(cls, *args, **kwargs):
        query = super(Family, cls).query(*args, **kwargs)
        if cls.__registry_name__ != 'Model.Product.Family':
            query = query.filter(cls.family_code == cls.FAMILY_CODE)
        return query

    @classmethod
    def define_mapper_args(cls):
        mapper_args = super(Family, cls).define_mapper_args()
        if cls.__registry_name__ == 'Model.Product.Family':
            mapper_args.update({'polymorphic_on': cls.family_code})
        mapper_args.update({'polymorphic_identity': cls.FAMILY_CODE})
        return mapper_args

    def __str__(self):
        return "%s : %s" % (self.code, self.name)

    def __repr__(self):
        return "<Product.Family(code=%s, name=%s)>" % (self.code, self.name)
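# Illustrative sketch (editor's addition): a polymorphic Family subclass
# following the FAMILY_CODE / family_schema pattern of the base class above.
# The registry paths, the 'SHOES' code and the registration decorators are
# hypothetical / schematic.
@Declarations.register(Declarations.Model.Product)
class Family:

    @classmethod
    def get_family_codes(cls):
        # extend the Selection values with our hypothetical family code
        res = super(Family, cls).get_family_codes()
        res.update(dict(SHOES='Shoes'))
        return res


@Declarations.register(Declarations.Model.Product.Family)
class Shoes(Declarations.Model.Product.Family):
    FAMILY_CODE = "SHOES"
    # replace None with a marshmallow-style schema class to validate
    # ``properties`` on create() / amend()
    family_schema = None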
class Assembly(Mixin.WmsSingleOutcomeOperation, Operation):
    """Assembly/Pack Operation.

    This Operation covers simple packing and assembly needs: those for
    which a single outcome is produced from the inputs, which must also be
    in the same Location.

    The behaviour is specified on the :attr:`outcome's PhysObj Type
    <outcome_type>` (see :attr:`Assembly specification <specification>`);
    it amounts to describing the expected inputs, and how to build the
    Properties of the outcome (see :meth:`outcome_properties`). All
    Property related parameters in the specification are bound to the state
    to be reached or passed through.

    A given Type can be assembled in different ways: the :attr:`Assembly
    specification <specification>` is chosen within the ``assembly`` Type
    behaviour according to the value of the :attr:`name` field.

    :meth:`Specific hooks <specific_outcome_properties>` are available for
    use-cases that aren't covered by the specification format (example: to
    forward Properties with non uniform values from the inputs to the
    outcome). The :attr:`name` is the main dispatch key for these hooks,
    which don't depend on the :attr:`outcome's PhysObj Type <outcome_type>`.
    """
    TYPE = 'wms_assembly'

    id = Integer(label="Identifier",
                 primary_key=True,
                 autoincrement=False,
                 foreign_key=Operation.use('id').options(ondelete='cascade'))

    outcome_type = Many2One(model='Model.Wms.PhysObj.Type', nullable=False)
    """The :class:`PhysObj Type <anyblok_wms_base.core.physobj.Type>` to
    produce.
    """

    name = Text(nullable=False, default=DEFAULT_ASSEMBLY_NAME)
    """The name of the assembly, to be looked up in the behaviour.

    This field has a default value to accommodate the common case where
    there's only one assembly for the given :attr:`outcome_type`.

    .. note:: the default value is not enforced before flush, which can
              prove to be really inconvenient for downstream code.
              TODO apply the default value in
              :meth:`check_create_conditions` for convenience ?
    """

    parameters = Jsonb()
    """Extra parameters specific to this instance.

    This :class:`dict` is merged with the parameters from the
    :attr:`outcome_type` behaviour to build the final :attr:`specification`.
    """

    match = Jsonb()
    """Field used to store the result of input matching.

    Assembly Operations match their actual inputs (set at creation) with
    the ``inputs`` part of the :attr:`specification`. This field is used to
    store the result, so that it's available for further logic (for
    instance in the :meth:`property setting hooks
    <specific_outcome_properties>`).

    This field's value is either ``None`` (before matching) or a list of
    lists: for each of the input specifications, respecting ordering, the
    list of ids of the matching Avatars.
""" @property def extra_inputs(self): matched = set(av_id for m in self.match for av_id in m) return (av for av in self.inputs if av.id not in matched) def specific_repr(self): return ("outcome_type={self.outcome_type!r}, " "name={self.name!r}").format(self=self) @classmethod def check_create_conditions(cls, state, dt_execution, inputs=None, outcome_type=None, name=None, **kwargs): super(Assembly, cls).check_create_conditions(state, dt_execution, inputs=inputs, **kwargs) behaviour = outcome_type.behaviours.get('assembly') if behaviour is None: raise OperationError( cls, "No assembly specified for type {outcome_type!r}", outcome_type=outcome_type) spec = behaviour.get(name) if spec is None: raise OperationError( cls, "No such assembly: {name!r} for type {outcome_type!r}", name=name, outcome_type=outcome_type) cls.check_inputs_locations(inputs, outcome_type=outcome_type, name=name) @classmethod def check_inputs_locations(cls, inputs, **kwargs): """Check consistency of inputs locations. This method is singled out for easy override by applicative code. Indeed applicative code can consider that the inputs may be in a bunch of related locations, with a well defined output location. In particular, it receives keyword arguments ``kwargs`` that we don't need in this default implementation. """ loc = inputs[0].location if any(inp.location != loc for inp in inputs[1:]): raise OperationInputsError( cls, "Inputs {inputs} are in different Locations: {locations!r}", inputs=inputs, # in the passing case, building a set would have been # useless overhead locations=set(inp.location for inp in inputs)) def extract_property(self, extracted, goods, prop, exc_details=None): """Extract the wished property from goods, forbidding conflicts. :param str prop: Property name :param dict extracted: the specified property value is read from `goods` and stored there, if not already present with a different value :param exc_details: If specified the index and value of the input specifification this comes from, for exception raising (the exception will assume that the conflict arises in the global forward_properties directive). :raises: AssemblyPropertyConflict """ candidate_value = goods.get_property(prop, default=_missing) if candidate_value is _missing: return try: existing = extracted[prop] except KeyError: extracted[prop] = candidate_value else: if existing != candidate_value: raise AssemblyPropertyConflict(self, exc_details, prop, existing, candidate_value) def forward_properties(self, state, for_creation=False): """Forward properties from the inputs to the outcome This is done according to the global specification :param state: the Assembly state that we are reaching. :param bool for_creation: if ``True``, means that this is part of the creation process, i.e, there's no previous state. :raises: AssemblyPropertyConflict if forwarding properties changes an already set value. 
""" spec = self.specification Avatar = self.registry.Wms.PhysObj.Avatar from_state = None if for_creation else self.state glob_fwd = merge_state_sub_parameters(spec.get('inputs_properties'), from_state, state, ('forward', 'set')) inputs_spec = spec.get('inputs', ()) forwarded = {} for i, (match_item, input_spec) in enumerate(zip(self.match, inputs_spec)): input_fwd = merge_state_sub_parameters( input_spec.get('properties'), from_state, state, ('forward', 'set')) for av_id in match_item: goods = Avatar.query().get(av_id).obj for fp in itertools.chain(input_fwd, glob_fwd): self.extract_property(forwarded, goods, fp, exc_details=(i, input_spec)) for extra in self.extra_inputs: for fp in glob_fwd: self.extract_property(forwarded, extra.obj, fp) return forwarded def check_inputs_properties(self, state, for_creation=False): """Apply global and per input Property requirements according to state. All property requirements between the current state (or None if we are at creation) and the wished state are checked. :param state: the state that the Assembly is about to reach :param for_creation: if True, the current value of the :attr:`state` field is ignored, and all states up to the wished state are considered. :raises: :class:`AssemblyWrongInputProperties` """ spec = self.specification global_props_spec = spec.get('inputs_properties') if global_props_spec is None: return req_props, req_prop_values = merge_state_sub_parameters( global_props_spec, None if for_creation else self.state, state, ('required', 'set'), ('required_values', 'dict'), ) for avatar in self.inputs: goods = avatar.obj if (not goods.has_properties(req_props) or not goods.has_property_values(req_prop_values)): raise AssemblyWrongInputProperties(self, avatar, req_props, req_prop_values) Avatar = self.registry.Wms.PhysObj.Avatar for i, (match_item, input_spec) in enumerate( zip(self.match, spec.get('inputs', ()))): req_props, req_prop_values = merge_state_sub_parameters( input_spec.get('properties'), None if for_creation else self.state, state, ('required', 'set'), ('required_values', 'dict'), ) for av_id in match_item: goods = Avatar.query().get(av_id).obj if (not goods.has_properties(req_props) or not goods.has_property_values(req_prop_values)): raise AssemblyWrongInputProperties(self, avatar, req_props, req_prop_values, spec_item=(i, input_spec)) def match_inputs(self, state, for_creation=False): """Compare input Avatars to specification and apply Properties rules. :param state: the state for which to perform the matching :return: extra_inputs, an iterable of inputs that are left once all input specifications are met. :raises: :class:`anyblok_wms_base.exceptions.AssemblyInputNotMatched`, :class:`anyblok_wms_base.exceptions.AssemblyForbiddenExtraInputs` """ # let' stress that the incoming ordering shouldn't matter # from this method's point of view. And indeed, only in tests can # it come from the will of a caller. In reality, it'll be due to # factors that are random wrt the specification. 
        inputs = set(self.inputs)
        spec = self.specification
        PhysObjType = self.registry.Wms.PhysObj.Type
        types_by_code = dict()

        from_state = None if for_creation else self.state
        match = self.match = []
        for i, expected in enumerate(spec['inputs']):
            match_item = []
            match.append(match_item)
            req_props, req_prop_values = merge_state_sub_parameters(
                expected.get('properties'),
                from_state,
                state,
                ('required', 'set'),
                ('required_values', 'dict'),
            )
            type_code = expected['type']
            expected_id = expected.get('id')
            expected_code = expected.get('code')
            gtype = types_by_code.get(type_code)
            if gtype is None:
                gtype = PhysObjType.query().filter_by(code=type_code).one()
                types_by_code[type_code] = gtype
            for _ in range(expected['quantity']):
                for candidate in inputs:
                    goods = candidate.obj
                    if (not goods.has_type(gtype) or
                            not goods.has_properties(req_props) or
                            not goods.has_property_values(req_prop_values)):
                        continue
                    if expected_id is not None and goods.id != expected_id:
                        continue
                    if (expected_code is not None and
                            goods.code != expected_code):
                        continue
                    inputs.discard(candidate)
                    match_item.append(candidate.id)
                    break
                else:
                    raise AssemblyInputNotMatched(self, (expected, i),
                                                  from_state=from_state,
                                                  to_state=state)

        if inputs and not spec.get('allow_extra_inputs'):
            raise AssemblyExtraInputs(self, inputs)
        return inputs

    # TODO PERF cache ?
    @property
    def specification(self):
        """The Assembly specification.

        The Assembly specification is merged from two sources:

        - within the ``assembly`` part of the behaviour field of
          :attr:`outcome_type`, the subdict associated with :attr:`name`;
        - optionally, the instance specific :attr:`parameters`.

        Here's an example, for an Assembly whose :attr:`name` is
        ``'soldering'``, also displaying most standard parameters.
        Individual aspects of these parameters are discussed in detail
        afterwards, as well as the merging logic.

        On the :attr:`outcome_type`::

            behaviours = {
               …
               'assembly': {
                   'soldering': {
                       'outcome_properties': {
                           'planned': {'built_here': ['const', True]},
                           'started': {'spam': ['const', 'eggs']},
                           'done': {'serial': ['sequence', 'SOLDERINGS']},
                       },
                       'inputs': [
                           {'type': 'GT1',
                            'quantity': 1,
                            'properties': {
                                'planned': {
                                    'required': ['x'],
                                },
                                'started': {
                                    'required': ['foo'],
                                    'required_values': {'x': True},
                                    'requirements': 'match',  # default is 'check'
                                },
                                'done': {
                                    'forward': ['foo', 'bar'],
                                    'requirements': 'check',
                                },
                            },
                            },
                           {'type': 'GT2',
                            'quantity': 2,
                            },
                           {'type': 'GT3',
                            'quantity': 1,
                            },
                       ],
                       'inputs_spec_type': {
                           'planned': 'check',  # default is 'match'
                           'started': 'match',  # default is 'check' for
                                                # 'started' and 'done' states
                       },
                       'for_contents': ['all', 'descriptions'],
                       'allow_extra_inputs': True,
                       'inputs_properties': {
                           'planned': {
                               'required': …,
                               'required_values': …,
                               'forward': …,
                           },
                           'started': …,
                           'done': …,
                       },
                   },
                   …
               },
            }

        On the Assembly instance::

            parameters = {
                'outcome_properties': {
                    'started': {'life': ['const', 'brian']},
                },
                'inputs': [
                    {},
                    {'code': 'ABC'},
                    {'id': 1234},
                ],
                'inputs_properties': {
                    'planned': {
                        'forward': ['foo', 'bar'],
                    },
                },
            }

        .. note:: Non standard parameters can be specified, for use in
                  :meth:`Specific hooks <specific_outcome_properties>`.

        **Inputs**

        The ``inputs`` part of the specification is primarily a list of
        expected inputs, with various criteria (PhysObj Type, quantity,
        PhysObj code and Properties).

        Besides requiring them in the first place, these criteria are also
        used to :meth:`qualify (match) the inputs <match_inputs>` (note
        that Operation inputs are unordered in general, while this
        ``inputs`` parameter is).
        This spares the calling code the need to keep track of that
        qualification after selecting the goods in the first place.
        The result of that matching is stored in the :attr:`match` field,
        is kept for later Assembly state changes, and can be used by
        application code, e.g., for operator display purposes.

        Assemblies can also have extra inputs, according to the value of
        the ``allow_extra_inputs`` boolean parameter. This is especially
        useful for generic packing scenarios.

        Having both specified and extra inputs is supported (imagine
        packing client parcels with specified wrapping, a greetings card
        plus variable contents).

        The ``type`` criterion takes the PhysObj Type hierarchy into
        account, hence it's possible to create a generic packing Assembly
        for a whole family of PhysObj Types (e.g., adult trekking shoes).
        Similarly, all Property requirements take the Properties inherited
        from the PhysObj Types into account.

        **Global Property specifications**

        The Assembly :attr:`specification` can have the following key/value
        pairs:

        * ``outcome_properties``:
            a dict whose keys are Assembly states, and values are dicts of
            Properties to set on the outcome; the values are pairs
            ``(TYPE, EXPRESSION)``, evaluated by passing them as positional
            arguments to :meth:`eval_typed_expr`.
        * ``inputs_properties``:
            a dict whose keys are Assembly states, and values are
            themselves dicts with the following key/values:

            + ``required``: list of Properties that must be present on all
              inputs while reaching or passing through the given Assembly
              state, whatever their values
            + ``required_values``: dict of Property key/value pairs that
              all inputs must bear while reaching or passing through the
              given Assembly state.
            + ``forward``: list of Properties to forward to the outcome
              while reaching or passing through the given Assembly state.

        **Per input Property checking, matching and forwarding**

        The same parameters as in ``inputs_properties`` can also be
        specified inside each :class:`dict` that forms the ``inputs`` list
        of the :attr:`Assembly specification <specification>`, as the
        ``properties`` sub parameter.

        In that case, the Property requirements are used either as matching
        criteria on the inputs, or as a check on already matched PhysObj,
        according to the value of the ``inputs_spec_type`` parameter
        (default is ``'match'`` in the ``planned`` Assembly state, and
        ``'check'`` in the other states). Example::

            'inputs_spec_type': {
                'started': 'match',  # default is 'check' for
                                     # 'started' and 'done' states
            },
            'inputs': [
                {'type': 'GT1',
                 'quantity': 1,
                 'properties': {
                     'planned': {'required': ['x']},
                     'started': {
                         'required_values': {'x': True},
                     },
                     'done': {
                         'forward': ['foo', 'bar'],
                     },
                 },
                 },
                …
            ]

        During matching, per input specifications are applied in order, but
        remember that the ordering of ``self.inputs`` itself is to be
        considered random.

        In case ``inputs_spec_type`` is ``'check'``, the checking is done
        on the PhysObj matched by previous states, thus avoiding a
        potentially costly rematching.

        In the above example, matching will be performed in the
        ``'planned'`` and ``'started'`` states, but a simple check will be
        done if going from the ``started`` to the ``done`` state.

        It is therefore possible to plan an Assembly with partial
        information about its inputs (waiting for some Observation, or for
        a previous Assembly to be done), and to refine that information,
        which can be displayed to operators, or have consequences on the
        Properties of the outcome, at each state change. In many cases,
        rematching the inputs for all state changes is unnecessary.
        That's why, to avoid paying the computational cost three times, the
        default value is ``'check'`` for the ``done`` and ``started``
        states.

        The result of matching is stored in the :attr:`match` field.

        In all cases, if a given Property is to be forwarded from several
        inputs to the outcome and its values on these inputs aren't equal,
        :class:`AssemblyPropertyConflict` will be raised.

        **Passing through states**

        Following the general expectations about states of Operations, if
        an Assembly is created directly in the ``done`` state, it will
        apply the ``outcome_properties`` for the ``planned``, ``started``
        and ``done`` states. Also, the matching and checks of input
        Properties for the ``planned``, ``started`` and ``done`` states
        will be performed, in that order.

        In other words, it behaves exactly as if it had been first planned,
        then started, and finally executed.

        Similarly, if a planned Assembly is executed (without being started
        first), then the outcome Properties, matches and checks related to
        the ``started`` state are performed before those of the ``done``
        state.

        **for_contents: building the contents Property**

        The outcome of the Assembly bears the special :data:`contents
        property <anyblok_wms_base.constants.CONTENTS_PROPERTY>`, also used
        by :class:`Operation.Unpack
        <anyblok_wms_base.core.operation.unpack.Unpack>`.

        This makes the reversal of Assemblies by Unpacks possible (with
        care in the behaviour specifications), and can also be used by
        applicative code to access information about the inputs even after
        the Assembly is done.

        The building of the contents Property is controlled by the
        ``for_contents`` parameter, which is either ``None`` or a pair of
        strings, whose first element indicates which inputs to list, and
        the second how to list them. The default value of ``for_contents``
        is :attr:`DEFAULT_FOR_CONTENTS`.

        If ``for_contents`` is ``None``, no contents Property will be set
        on the outcome. Use this if it's unnecessary pollution, for
        instance if it is custom set by specific hooks anyway, or if no
        Unpack for disassembly is ever to be wished.

        *for_contents: possible values of the first element:*

        * ``'all'``: all inputs will be listed
        * ``'extra'``: only the actual inputs that aren't specified in the
          behaviour will be listed. This is useful in cases where the
          Unpack behaviour already takes the specified ones into account.
          Hence, the variable parts of Assembly and Unpack are consistent.

        *for_contents: possible values of the second element:*

        * ``'descriptions'``: include the PhysObj Types, and those
          Properties that aren't recoverable by an Unpack from the Assembly
          outcome, together with appropriate ``forward_properties`` for
          those that are (TODO except those that come from a global
          ``forward`` in the Assembly specification)
        * ``'records'``: same as ``'descriptions'``, but also includes the
          record ids, so that an Unpack following the Assembly would not
          give rise to new PhysObj records, but would reuse the existing
          ones, hence keeping the promise that the PhysObj records are
          meant to track the "sameness" of the physical objects.

        **Merging logic**

        All sub parameters are merged according to the expected type. For
        instance, ``required`` and ``forward`` in the various Property
        parameters are merged as a :class:`set`.

        As displayed in the example above, if there's an ``inputs`` part in
        :attr:`parameters`, it must be made of exactly the same number of
        ``dicts`` as within the :attr:`outcome_type` behaviour. More
        precisely, these lists are merged using the :func:`zip` Python
        builtin, which results in a truncation to the shortest.
        Of course, not having an ``inputs`` part in :attr:`parameters` does
        *not* result in empty ``inputs``.

        .. seealso:: :attr:`SPEC_LIST_MERGE` and :func:`dict_merge
                     <anyblok_wms_base.utils.dict_merge>`.

        **Specific hooks**

        While already powerful, the Property manipulations described above
        are not expected to fit all situations. This is obviously true for
        the rule forbidding the forwarding of values that aren't equal for
        all relevant inputs: in some use cases, one would want to take the
        minimum of these values, sum them, keep them as a list, or all of
        these at once…

        On the other hand, the specification is already complicated enough
        as it is. Therefore, the core will stick to these still relatively
        simple primitives, but will also provide the means to perform
        custom logic, through :meth:`assembly-specific hooks
        <specific_outcome_properties>`.
        """
        type_spec = self.outcome_type.get_behaviour('assembly')[self.name]
        if self.parameters is None:
            return type_spec
        return dict_merge(self.parameters, type_spec,
                          list_merge=self.SPEC_LIST_MERGE)

    SPEC_LIST_MERGE = dict(
        inputs_properties={
            '*': dict(
                required=('set', None),
                forward=('set', None),
            ),
        },
        inputs=('zip', {
            '*': dict(
                properties={
                    '*': dict(
                        required=('set', None),
                        forward=('set', None),
                    ),
                },
            ),
        }),
    )

    DEFAULT_FOR_CONTENTS = ('extra', 'records')
    """Default value of the ``for_contents`` part of the specification.

    See :meth:`outcome_properties` for the meaning of the values.
    """

    def outcome_properties(self, state, for_creation=False):
        """Method responsible for Properties on the outcome.

        For the given state that is being reached, this method returns a
        dict of Properties to apply on the outcome.

        :param state: the Assembly state that we are reaching.
        :param bool for_creation: if ``True``, means that this is part of
                                  the creation process, i.e, there's no
                                  previous state.
        :rtype: :class:`Model.Wms.PhysObj.Properties
                <anyblok_wms_base.core.physobj.Properties>`
        :raises: :class:`AssemblyInputNotMatched` if one of the
                 :attr:`input specifications <specification>` is not
                 matched by ``self.inputs``,
                 :class:`AssemblyPropertyConflict` in case of conflicting
                 values for the outcome.

        The :meth:`specific hook <specific_outcome_properties>` gets called
        at the very end of the process, giving it higher precedence than
        any other source of Properties.
        """
        spec = self.specification
        assembled_props = self.forward_properties(state,
                                                  for_creation=for_creation)
        contents = self.build_contents(assembled_props)
        if contents:
            assembled_props[CONTENTS_PROPERTY] = contents

        prop_exprs = merge_state_parameter(
            spec.get('outcome_properties'),
            None if for_creation else self.state,
            state,
            'dict')
        assembled_props.update(
            (k, self.eval_typed_expr(*v)) for k, v in prop_exprs.items())

        assembled_props.update(
            self.specific_outcome_properties(assembled_props, state,
                                             for_creation=for_creation))
        return assembled_props

    props_hook_fmt = "outcome_properties_{name}"

    def specific_outcome_properties(self, assembled_props, state,
                                    for_creation=False):
        """Hook for per-name specific update of Properties on the outcome.

        At the time of Operation creation or execution, this calls a
        specific method whose name is derived from the :attr:`name` field,
        :attr:`by this format <props_hook_fmt>`, if that method exists.

        Applicative code is meant to override the present Model to provide
        the specific method. The signature to implement is identical to the
        present one:

        :param state: the Assembly state that we are reaching.
        :param dict assembled_props: a :class:`dict` of already prepared
                                     Properties for this state.
        :param bool for_creation: if ``True``, means that this is part of
                                  the creation process, i.e, there's no
                                  previous state.
        :return: the Properties to set or update
        :rtype: any iterable that can be passed to :meth:`dict.update`.
        """
        meth = getattr(self,
                       self.props_hook_fmt.format(name=self.name),
                       None)
        if meth is None:
            return ()
        return meth(assembled_props, state, for_creation=for_creation)

    def build_contents(self, forwarded_props):
        """Construction of the ``contents`` Property.

        This is part of :meth:`outcome_properties`.
        """
        contents_spec = self.specification.get('for_contents',
                                               self.DEFAULT_FOR_CONTENTS)
        if contents_spec is None:
            return
        what, how = contents_spec
        if what == 'extra':
            for_unpack = self.extra_inputs
        elif what == 'all':
            for_unpack = self.inputs
        contents = []
        # sorting here and later is for tests reproducibility
        for avatar in sorted(for_unpack, key=lambda av: av.id):
            goods = avatar.obj
            props = goods.properties
            unpack_outcome = dict(
                type=goods.type.code,
                quantity=1,  # TODO hook for wms_quantity
            )
            if props is not None:
                unpack_outcome_fwd = []
                for k, v in props.as_dict().items():
                    if k in forwarded_props:
                        unpack_outcome_fwd.append(k)
                    else:
                        unpack_outcome.setdefault('properties', {})[k] = v
                unpack_outcome_fwd.sort()
                if unpack_outcome_fwd:
                    unpack_outcome['forward_properties'] = unpack_outcome_fwd

            contents.append(unpack_outcome)
            if how == 'records':
                # Adding the physobj id so that a forthcoming Unpack
                # would produce the very same physical objects.
                # TODO this *must* be discarded in case of Departures with
                # EDI, and maybe some other ones. How to do that cleanly
                # and efficiently ?
                unpack_outcome['local_physobj_ids'] = [goods.id]
        return contents

    def check_match_inputs(self, to_state, for_creation=False):
        """Check or match inputs according to the specification.

        :rtype: bool
        :return: ``True`` iff a match has been performed
        """
        spec = self.specification.get('inputs_spec_type')
        if spec is None:
            spec = {}
        spec.setdefault('planned', 'match')
        cm = merge_state_parameter(spec,
                                   None if for_creation else self.state,
                                   to_state,
                                   'check_match')
        (self.match_inputs
         if cm.is_match
         else self.check_inputs_properties)(to_state,
                                            for_creation=for_creation)
        return cm.is_match

    def after_insert(self):
        state = self.state
        outcome_state = 'present' if state == 'done' else 'future'
        dt_exec = self.dt_execution
        input_upd = dict(dt_until=dt_exec)
        if state == 'done':
            input_upd.update(state='past')
        # TODO PERF bulk update ?
        for inp in self.inputs:
            inp.update(**input_upd)

        self.check_match_inputs(state, for_creation=True)
        PhysObj = self.registry.Wms.PhysObj
        PhysObj.Avatar.insert(
            obj=PhysObj.insert(
                type=self.outcome_type,
                properties=PhysObj.Properties.create(
                    **self.outcome_properties(state, for_creation=True))),
            location=self.outcome_location(),
            outcome_of=self,
            state=outcome_state,
            dt_from=dt_exec,
            dt_until=None)

    def outcome_location(self):
        """Find where the newly assembled physical object should appear.

        In this default implementation, we insist on the inputs being in a
        common location (see :meth:`check_inputs_locations`), and we decide
        that this is the location of the outcome.

        Applicative code is welcome to refine this by overriding this
        method.
        """
        return next(iter(self.inputs)).location

    def execute_planned(self):
        """Check or rematch inputs, update Properties and states.
""" self.check_match_inputs('done') # TODO PERF direct update query would probably be faster for inp in self.inputs: inp.state = 'past' outcome = self.outcome outcome.obj.update_properties(self.outcome_properties('done')) outcome.state = 'present' def eval_typed_expr(self, etype, expr): """Evaluate a typed expression. :param expr: the expression to evaluate :param etype: the type or ``expr``. *Possible values for etype* * ``'const'``: ``expr`` is considered to be a constant and gets returned directly. Any Python value that is JSON serializable is admissible. * ``'sequence'``: ``expr`` must be the code of a ``Model.System.Sequence`` instance. The return value is the formatted value of that sequence, after incrementation. """ if etype == 'const': return expr elif etype == 'sequence': return self.registry.System.Sequence.nextvalBy(code=expr.strip()) raise UnknownExpressionType(self, etype, expr) def is_reversible(self): """Assembly can be reverted by Unpack. """ return self.outcome_type.get_behaviour("unpack") is not None def plan_revert_single(self, dt_execution, follows=()): unpack_inputs = [out for op in follows for out in op.outcomes] # self.outcomes has actually only those outcomes that aren't inputs # of downstream operations # TODO maybe change that for API clarity unpack_inputs.extend(self.outcomes) return self.registry.Wms.Operation.Unpack.create( dt_execution=dt_execution, inputs=unpack_inputs) def input_location_altered(self): """Being in-place, an Assembly must propagate changes of locations. Also it should recheck that all inputs are in the same place. """ self.check_inputs_locations(self.inputs, name=self.name, outcome_type=self.outcome_type, parameters=self.parameters) outcome = self.outcome outcome.location = self.inputs[0].location for follower in self.followers: follower.input_location_altered()
class Apparition(Operation): """Inventory Operation to record unexpected physical objects. This is similar to Arrival, but has a distinct functional meaning. Apparitions can exist only in the ``done`` :ref:`state <op_states>`. Another difference with Arrivals is that Apparitions have a :attr:`quantity` field. """ TYPE = 'wms_apparition' id = Integer(label="Identifier", primary_key=True, autoincrement=False, foreign_key=Operation.use('id').options(ondelete='cascade')) """Primary key.""" goods_type = Many2One(model='Model.Wms.PhysObj.Type') """Observed :class:`PhysObj Type <anyblok_wms_base.core.physobj.Type>`. """ quantity = Integer() """The number of identical PhysObj that have appeared. Here, identical means "same type, code and properties" """ goods_properties = Jsonb() """Observed :class:`Properties <anyblok_wms_base.core.physobj.Properties>`. They are copied over to the newly created :class:`PhysObj <anyblok_wms_base.core.physobj.PhysObj>`. Then the Properties can evolve on the PhysObj, while this Apparition field will keep the exact values that were observed during inventory. """ goods_code = Text() """Observed :attr:`PhysObj code <anyblok_wms_base.core.physobj.PhysObj.code>`. """ location = Many2One(model='Model.Wms.PhysObj') """Location of appeared PhysObj. This will be the location of the initial Avatars. """ inputs_number = 0 """This Operation is a purely creative one.""" def specific_repr(self): return ("goods_type={self.goods_type!r}, " "location={self.location!r}").format(self=self) @classmethod def check_create_conditions(cls, state, dt_execution, location=None, **kwargs): """Forbid creation with wrong states, check location is a container. :raises: :class:`OperationForbiddenState <anyblok_wms_base.exceptions.OperationForbiddenState>` if state is not ``'done'`` :class:`OperationContainerExpected <anyblok_wms_base.exceptions.OperationContainerExpected>` if location is not a container. """ if state != 'done': raise OperationForbiddenState( cls, "Apparition can exist only in the 'done' state", forbidden=state) if location is None or not location.is_container(): raise OperationContainerExpected(cls, "location field value {offender}", offender=location) super(Apparition, cls).check_create_conditions(state, dt_execution, **kwargs) def after_insert(self): """Create the PhysObj and their Avatars. In the ``wms-core`` implementation, the :attr:`quantity` field gives rise to as many PhysObj records. """ PhysObj = self.registry.Wms.PhysObj self_props = self.goods_properties if self_props is None: props = None else: props = PhysObj.Properties.create(**self_props) for _ in range(self.quantity): PhysObj.Avatar.insert(obj=PhysObj.insert(type=self.goods_type, properties=props, code=self.goods_code), location=self.location, reason=self, state='present', dt_from=self.dt_execution)
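# Hedged usage sketch (illustrative only): recording two unexpected bottles
# found during a stock check. The registry argument, the 'BOTTLE' Type code,
# the 'STOCK/SHELF-3' location code and the properties are assumptions of
# this example; only the 'done' state is accepted by Apparition.
def record_found_bottles(registry):
    Wms = registry.Wms
    PhysObj = Wms.PhysObj
    bottle_type = PhysObj.Type.query().filter_by(code='BOTTLE').one()
    shelf = PhysObj.query().filter_by(code='STOCK/SHELF-3').one()
    return Wms.Operation.Apparition.create(
        state='done',                 # any other state would be refused
        location=shelf,               # must be a container PhysObj
        goods_type=bottle_type,
        goods_code=None,              # no observed code on the bottles
        goods_properties=dict(expiration_date='2024-06-01'),
        quantity=2,                   # creates 2 PhysObj, each with an Avatar
    )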
class Shipment(Mixin.UuidColumn, Mixin.TrackModel): """ Shipment """ statuses = dict(new="New", label="Label", transit="Transit", delivered="Delivered", exception="Exception", error="Error") service = Many2One(label="Shipping service", model=Declarations.Model.Delivery.Carrier.Service, one2many='shipments', nullable=False) sender_address = Many2One(label="Sender address", model=Declarations.Model.Address, column_names=["sender_address_uuid"], nullable=False) recipient_address = Many2One(label="Recipient address", model=Declarations.Model.Address, column_names=["recipient_address_uuid"], nullable=False) reason = String(label="Reason reference") pack = String(label="Pack reference") status = Selection(label="Shipping status", selections=statuses, default='new', nullable=False) properties = Jsonb(label="Properties") document_uuid = UUID(label="Carrier slip document reference") document = Function(fget='get_latest_document') cn23_document_uuid = UUID(label="Carrier CN23 document reference") cn23_document = Function(fget='get_latest_cn23_document') tracking_number = String(label="Carrier tracking number") def _get_latest_document(self, document_uuid): Document = self.registry.Attachment.Document.Latest query = Document.query().filter_by(uuid=document_uuid) return query.one_or_none() def get_latest_document(self): return self._get_latest_document(self.document_uuid) def get_latest_cn23_document(self): return self._get_latest_document(self.cn23_document_uuid) def create_label(self): """Retrieve a shipping label from the shipping service. """ if self.status != 'new': return return self.service.create_label(shipment=self) def get_label_status(self): """Retrieve the shipping label status from the shipping service. """ if self.status in ('new', 'delivered', 'error'): return return self.service.get_label_status(shipment=self) @classmethod def get_labels_status(cls): status = ['label', 'transit', 'exception'] shipments = cls.query().filter(cls.status.in_(status)).all() for shipment in shipments: shipment.get_label_status() def _save_document(self, document, binary_file, content_type): document.set_file(binary_file) document.filesize = len(binary_file) document.contenttype = content_type hash = hashlib.sha256() hash.update(binary_file) document.hash = hash.digest() self.registry.flush() # flush to update version in document def save_document(self, binary_file, content_type): document = self.document if document is None: document = self.registry.Attachment.Document.insert( data={'shipment': str(self.uuid)}) self.document_uuid = document.uuid self._save_document(document, binary_file, content_type) def save_cn23_document(self, binary_file, content_type): document = self.cn23_document if document is None: document = self.registry.Attachment.Document.insert( data={'shipment': str(self.uuid)}) self.cn23_document_uuid = document.uuid self._save_document(document, binary_file, content_type)
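# Hedged sketch (illustrative only) of the label workflow suggested by the
# methods above. The carrier-specific Service subclass is expected to fetch
# the actual label; the PDF payload handed in here and the
# registry.Delivery.Shipment access path are assumptions of this example.
def refresh_and_store_label(registry, shipment, label_pdf_bytes):
    # Poll every shipment whose label was emitted but is not yet delivered
    # (statuses 'label', 'transit' and 'exception').
    registry.Delivery.Shipment.get_labels_status()

    # Store the label attachment: save_document() sets the file, size,
    # content type and sha256 hash, and links it through document_uuid.
    shipment.save_document(label_pdf_bytes, 'application/pdf')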
class Properties: """Properties of PhysObj. This is kept in a separate Model (and SQL table) to provide sharing among several :class:`PhysObj` instances, as they can turn out to be identical for a large number of them. Use-case: receive a truckload of milk bottles that all have the same expiration date, and unpack everything down to the bottles. The expiration date would be stored in a single Properties instance, assuming there aren't also non-uniform properties to store, of course. Applications are welcome to overload this model to add new fields rather than storing their meaningful information in the :attr:`flexible` field, if it has added value for performance or programming tightness reasons. This has the obvious drawback of defining some properties for all PhysObj, regardless of their Types, so it should not be abused. This model implements a subset of the :class:`dict` API, treating direct fields and top-level keys of :attr:`flexible` uniformly, so that, as long as all pieces of code use only this API to handle properties, flexible keys can be replaced with proper fields transparently at any time in the development of downstream applications and libraries (assuming of course that any existing data is properly migrated to the new schema). """ id = Integer(label="Identifier", primary_key=True) """Primary key.""" flexible = Jsonb(label="Flexible properties") """Flexible properties. The value is expected to be a mapping, and all property handling operations defined in the ``wms-core`` will handle the properties by key, while being indifferent to the values. .. note:: the core also makes use of a few special properties, such as ``contents``. TODO make a list, in the form of constants in a module """ @classmethod def _field_property_names(cls): """Iterable over the names of properties that are fields.""" return (f for f in cls._fields_description() if f not in ('id', 'flexible')) def as_dict(self): """Return the properties as a ``dict``. This is not to be confused with the generic :meth:`to_dict` method of all Models. The present method abstracts over the :attr:`flexible` field and the regular ones. It also strips :attr:`id` and doesn't attempt to follow relationships. """ res = {k: getattr(self, k) for k in self._field_property_names()} flex = self.flexible if flex is not None: res.update((k, deepcopy(v)) for k, v in flex.items()) return res def __getitem__(self, k): """Support for reading with the [] syntax. :raises: KeyError """ if k in self._field_property_names(): return getattr(self, k) if self.flexible is None: raise KeyError(k) return self.flexible[k] def get(self, k, *default): """Similar to :meth:`dict.get`.""" if len(default) > 1: return _empty_dict.get(k, *default) try: return self[k] except KeyError: if default: return default[0] return None def __setitem__(self, k, v): """Support for writing with the [] notation.""" if k in ('id', 'flexible'): raise ValueError("The key %r is reserved, and can't be used " "as a property name" % k) if k in self.fields_description(): setattr(self, k, v) else: if self.flexible is None: self.flexible = {k: v} else: self.flexible[k] = v flag_modified(self, '__anyblok_field_flexible') set = __setitem__ # backwards compatibility def __delitem__(self, k): """Support for deleting with the [] notation.
:raises: KeyError if ``k`` is missing """ if k in ('id', 'flexible'): raise ValueError("The key %r is reserved, can't be used " "as a property name and hence can't " "be deleted " % k) if k in self._field_property_names(): raise ValueError("Can't delete field backed property %r" % k) if self.flexible is None: raise KeyError(k) del self.flexible[k] flag_modified(self, '__anyblok_field_flexible') def pop(self, k, *default): """Similar to :meth:`dict.pop`.""" if k in ('id', 'flexible'): raise ValueError("The key %r is reserved, can't be used " "as a property name and hence can't " "be deleted " % k) if k in self._field_property_names(): raise ValueError("Can't delete field backed property %r" % k) if self.flexible is None: return _empty_dict.pop(k, *default) res = self.flexible.pop(k, *default) flag_modified(self, '__anyblok_field_flexible') return res def duplicate(self): """Insert a copy of ``self`` and return its id.""" fields = {k: getattr(self, k) for k in self._field_property_names() } return self.insert(flexible=deepcopy(self.flexible), **fields) @classmethod def create(cls, **props): """Direct creation. The caller doesn't have to care about which properties get stored as direct fields or in the :attr:`flexible` field. This method is a better alternative than insertion followed by calls to :meth:`set`, because it guarantees that only one SQL INSERT will be issued. If no ``props`` are given, then nothing is created and ``None`` gets returned, thus avoiding a needless row in the database. This may seem trivial, but it spares a test for callers that would pass a ``dict``, using the ``**`` syntax, which could turn out to be empty. """ if not props: return fields = set(cls._field_property_names()) columns = {} flexible = {} forbidden = ('id', 'flexible') for k, v in props.items(): if k in forbidden: raise ValueError( "The key %r is reserved, and can't be used as " "a property key" % k) if k in fields: columns[k] = v else: flexible[k] = v return cls.insert(flexible=flexible, **columns) def update(self, *args, **kwargs): """Similar to :meth:`dict.update` This current implementation doesn't attempt to be smarter that setting the values one after the other, which means in particular going through all the checks for each key. A future implementation might try and be more efficient. """ if len(args) > 1: raise TypeError("update expected at most 1 arguments, got %d" % ( len(args))) iters = [kwargs.items()] if args: positional = args[0] if isinstance(positional, dict): iters.append(positional.items()) else: iters.append(positional) for it in iters: for k, v in it: self[k] = v def __contains__(self, k): """Support for the 'in' operator. Field properties are always present. Since one could say that the database uses ``None`` to mark absence, it could be relevant to return False if the value is ``None`` (TODO STABILIZATION). """ if k in self._field_property_names(): return True flex = self.flexible if flex is None: return False return k in flex
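# Hedged usage sketch of the dict-like Properties API documented above.
# The property names are invented; unless a downstream blok promotes them
# to real columns, they all end up in the 'flexible' field.
def demo_properties(registry):
    Properties = registry.Wms.PhysObj.Properties
    props = Properties.create(batch='A23', expiration_date='2024-06-01')

    props['inspected'] = True                 # __setitem__ (field or flexible)
    assert 'batch' in props                   # __contains__
    assert props.get('missing', 'n/a') == 'n/a'
    assert props.pop('inspected') is True     # removes the flexible key

    as_plain_dict = props.as_dict()           # strips 'id', merges flexible
    copy = props.duplicate()                  # independent row, same values
    return as_plain_dict, copy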
class RequestItem: id = Integer(label="Identifier", primary_key=True) """Primary key. Note that ``serial`` columns in PostgreSQL don't induce conflicts, as the sequence is evaluated out of transaction. """ request = Many2One(model=Wms.Reservation.Request) goods_type = Many2One(model='Model.Wms.PhysObj.Type') quantity = Integer(nullable=False) properties = Jsonb() @classmethod def define_table_args(cls): return super(RequestItem, cls).define_table_args() + (CheckConstraint( 'quantity > 0', name='positive_qty'), ) def lookup(self, quantity): """Try and find PhysObj matching the specified conditions. :return: the matching PhysObj that were found and the quantity each accounts for. The PhysObj may not be of the requested type. What matters is how much of the requested quantity each one represents. :rtype: list(int, :class:`PhysObj <anyblok_wms_base.bloks.wms_core.goods.PhysObj>`) This method is where most business logic should lie. This default implementation does only equal matching on PhysObj Type and each property, and therefore is not able to return PhysObj of another Type that would account for more than one of the wished quantity. Downstream libraries and applications are welcome to override it. """ Wms = self.registry.Wms PhysObj = Wms.PhysObj Reservation = Wms.Reservation Avatar = PhysObj.Avatar Props = PhysObj.Properties # TODO PERF this returns from the DB one PhysObj line per # Avatar, but SQLA reassembles them as exactly one (seen while # tracing the test_reserve_avatars_once() under pdb) # SELECT DISTINCT ON would be better # TODO provide ordering by Avatar state and/or dt_from query = (PhysObj.query().join(Avatar.obj).outerjoin( Reservation, Reservation.physobj_id == PhysObj.id).filter( Reservation.physobj_id.is_(None), PhysObj.type == self.goods_type, Avatar.state.in_(('present', 'future')))) if self.properties: props = self.properties.copy() query = query.join(PhysObj.properties) pfields = Props.fields_description() for p in set(props).intersection(pfields): query = query.filter(getattr(Props, p) == props.pop(p)) if props: query = query.filter(Props.flexible.contains(props)) return [(1, g) for g in query.limit(quantity).all()] def reserve(self): """Perform the wished reservations. :return bool: whether the RequestItem is completely reserved. TODO: shall we store it directly in DB ? """ Reservation = self.registry.Wms.Reservation already = (Reservation.query(func.sum(Reservation.quantity)).filter( Reservation.request_item_id == self.id).one())[0] if already is None: already = 0 if already >= self.quantity: # it's legitimate to be greater: think of reserving 2 packs of 10 # to use 17. Maybe later, we'll unpack just one of them and update # the reservation to add just 7 of the Unpack outcomes. return True added = 0 for quantity, goods in self.lookup(self.quantity - already): # TODO use a o2m ? Reservation.insert(physobj=goods, quantity=quantity, request_item=self) added += quantity return already + added >= self.quantity
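# Hedged usage sketch: walking the items of a Reservation Request and
# reporting which ones could not be fully reserved. Accessing the items
# through a query (rather than a relationship attribute) is an assumption
# of this example.
def reserve_request(registry, request):
    RequestItem = registry.Wms.Reservation.RequestItem
    unsatisfied = []
    for item in RequestItem.query().filter_by(request=request).all():
        # reserve() inserts Reservation rows for the PhysObj returned by
        # lookup() and tells whether the wished quantity is fully covered.
        if not item.reserve():
            unsatisfied.append((item.goods_type.code, item.quantity))
    return unsatisfied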
class Purchase: id = Integer(label="Identifier", primary_key=True) properties = Jsonb(label="Properties")
class Line(Mixin.UuidColumn, Mixin.TrackModel): """Sale.Order.Line Model """ SCHEMA = OrderLineBaseSchema @classmethod def get_schema_definition(cls, **kwargs): return cls.SCHEMA(**kwargs) order = Many2One(label="Order", model=Declarations.Model.Sale.Order, nullable=False, one2many="lines") item = Many2One(label="Product Item", model=Declarations.Model.Product.Item, nullable=False) properties = Jsonb(label="Item properties", default=dict()) unit_price_untaxed = Decimal(label="Price untaxed", default=D(0)) unit_price = Decimal(label="Price", default=D(0)) unit_tax = Decimal(label="Tax", default=D(0)) quantity = Integer(label="Quantity", default=1, nullable=False) amount_untaxed = Decimal(label="Amount untaxed", default=D(0)) amount_tax = Decimal(label="Tax amount", default=D(0)) amount_total = Decimal(label="Total", default=D(0)) amount_discount_percentage_untaxed = Decimal( label="Amount discount percentage untaxed", default=D(0)) amount_discount_percentage = Decimal(label="Amount discount percentage", default=D(0)) amount_discount_untaxed = Decimal(label="Amount discount untaxed", default=D(0)) amount_discount = Decimal(label="Amount discount", default=D(0)) def __str__(self): return "{self.uuid} : {self.amount_total}".format(self=self) def __repr__(self): return "<Sale.Order.Line(uuid={self.uuid},"\ " amount_untaxed={self.amount_untaxed},"\ " amount_tax={self.amount_tax},"\ " amount_total={self.amount_total})>".format(self=self) def check_unit_price(self): """Ensure consistency between unit_price_untaxed, unit_price and unit_tax TODO: Move this to a specialized marshmallow validation method """ if (self.unit_price_untaxed < D(0) or self.unit_price < D(0) or self.unit_tax < D(0)): raise LineException( """Negative Value forbidden on unit_price_untaxed, unit_price or unit_tax""") if (self.unit_price_untaxed != self.unit_price and self.unit_tax == D(0)): raise LineException( """Inconsistency between unit_price_untaxed, unit_price and unit_tax""") if self.unit_tax != D(0): if (self.unit_price_untaxed >= self.unit_price and self.unit_price != D(0)): raise LineException( """unit_price_untaxed can not be greater than unit_price""" ) def compute(self): """Compute order line total amount * check unit_price consistency * compute tax if any * compute line total amount TODO: maybe add configuration options for computation behaviours, for example computation based on unit_price or unit_price_untaxed """ if not self.order.price_list: self.check_unit_price() if self.unit_price != D(0) and self.unit_price_untaxed == D(0): # compute unit_price_untaxed based on unit_price price = compute_price(net=self.unit_price, gross=self.unit_price, tax=compute_tax(self.unit_tax), keep_gross=True) elif self.unit_price_untaxed != D(0) and self.unit_price == D(0): # compute unit_price based on unit_price_untaxed price = compute_price(net=self.unit_price_untaxed, gross=self.unit_price_untaxed, tax=compute_tax(self.unit_tax), keep_gross=False) elif self.unit_price_untaxed != D(0) and self.unit_price != D(0): # compute unit_price_untaxed based on unit_price price = compute_price(net=self.unit_price, gross=self.unit_price, tax=compute_tax(self.unit_tax), keep_gross=True) else: raise LineException( """Can not find a strategy to compute price""") self.unit_price_untaxed = price.net.amount self.unit_price = price.gross.amount self.unit_tax = compute_tax(self.unit_tax) else: # compute unit price based on price list price_list_item = self.registry.Sale.PriceList.Item.query( ).filter_by(price_list=self.order.price_list).filter_by( 
item=self.item).one_or_none() if price_list_item: self.unit_price = price_list_item.unit_price self.unit_price_untaxed = price_list_item.unit_price_untaxed self.unit_tax = price_list_item.unit_tax else: raise LineException("""Can not find a price for %r on %r""" % (self.item, self.order.price_list)) # compute total amount self.amount_total = D(self.unit_price * self.quantity) self.amount_untaxed = D(self.unit_price_untaxed * self.quantity) self.amount_tax = self.amount_total - self.amount_untaxed # compute total amount after discount if self.amount_discount_untaxed != D('0'): price = compute_price(net=self.amount_untaxed, tax=self.unit_tax, keep_gross=False) discount = compute_discount( price=price, tax=self.unit_tax, discount_amount=self.amount_discount_untaxed, from_gross=False) self.amount_total = discount.gross.amount self.amount_untaxed = discount.net.amount self.amount_tax = discount.tax.amount return if self.amount_discount_percentage_untaxed != D('0'): price = compute_price(net=self.amount_untaxed, tax=self.unit_tax, keep_gross=False) discount = compute_discount( price=price, tax=self.unit_tax, discount_percent=self.amount_discount_percentage_untaxed, from_gross=False) self.amount_total = discount.gross.amount self.amount_untaxed = discount.net.amount self.amount_tax = discount.tax.amount return if self.amount_discount != D('0'): price = compute_price(gross=self.amount_total, tax=self.unit_tax, keep_gross=True) discount = compute_discount(price=price, tax=self.unit_tax, discount_amount=self.amount_discount, from_gross=True) self.amount_total = discount.gross.amount self.amount_untaxed = discount.net.amount self.amount_tax = discount.tax.amount return if self.amount_discount_percentage != D('0'): price = compute_price(gross=self.amount_total, tax=self.unit_tax, keep_gross=True) discount = compute_discount( price=price, tax=self.unit_tax, discount_percent=self.amount_discount_percentage, from_gross=True) self.amount_total = discount.gross.amount self.amount_untaxed = discount.net.amount self.amount_tax = discount.tax.amount return @classmethod def create(cls, order=None, item=None, **kwargs): data = kwargs.copy() if order is None: raise TypeError if item is None: raise TypeError if cls.get_schema_definition: sch = cls.get_schema_definition( registry=cls.registry, required_fields=["order", "item", "quantity"]) data['item'] = item.to_primary_keys() data['order'] = order.to_primary_keys() data = sch.load(data) data['item'] = item data['order'] = order line = cls.insert(**data) line.compute() return line @classmethod def before_update_orm_event(cls, mapper, connection, target): if cls.get_schema_definition: sch = cls.get_schema_definition( registry=cls.registry, required_fields=["order", "item", "quantity"]) sch.load(sch.dump(target)) if (target.properties and cls.registry.System.Blok.is_installed('product_family') and target.item.template.family.custom_schemas): props = target.item.template.family.custom_schemas.get( target.item.code.lower()).get('schema') props_sch = props(context={"registry": cls.registry}) props_sch.load(target.properties) target.compute()
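# Hedged arithmetic sketch (plain Python, independent of the compute_price /
# compute_discount helpers used above) of the invariants compute() maintains
# on a line: made-up figures, a 20% tax rate and a 10% discount expressed on
# the untaxed amount.
from decimal import Decimal as D

unit_price_untaxed = D('100.00')
tax_rate = D('0.20')
quantity = 3

unit_price = unit_price_untaxed * (1 + tax_rate)      # 120.00 (gross)
amount_untaxed = unit_price_untaxed * quantity        # 300.00
amount_total = unit_price * quantity                  # 360.00
amount_tax = amount_total - amount_untaxed            # 60.00

# A 10% discount expressed on the untaxed amount:
discount = D('0.10')
amount_untaxed -= amount_untaxed * discount           # 270.00
amount_total = amount_untaxed * (1 + tax_rate)        # 324.00
amount_tax = amount_total - amount_untaxed            # 54.00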
class Type: """Types of PhysObj. For a full functional discussion, see :ref:`physobj_type`. """ id = Integer(label="Identifier", primary_key=True) """Primary key""" code = Text(label=u"Identifying code", index=True, unique=True, nullable=False) """Uniquely identifying code. As a convenience, and for sharing with other applications. """ label = Text(label=u"Label") behaviours = Jsonb() """ Flexible field to encode how represented objects interact with the system. Notably, PhysObj Types specify with this flexible field how various :class:`Operations <anyblok_wms_base.core.operation.base.Operation>` will treat the represented physical object. .. seealso:: :class:`Unpack <anyblok_wms_base.core.operation.unpack.Unpack>` for a complex example. But behaviours are in no means in one to one correspondence with Operation classes, nor do they need to be related to Operations. Any useful information that depends on the Type only is admissible to encode as a behaviour. The value is a key/value mapping (behaviour name/value). .. warning:: direct read access to a behaviour is to be avoided in favour of :meth:`get_behaviour` (see :ref:`improvement_goods_type_hierarchy`). This field is also open for downstream libraries and applications to make use of it to define some of their specific logic, but care must be taken not to conflict with the keys used by ``wms-core`` and other bloks (TODO introduce namespacing, then ? at least make a list available by using constants from an autodocumented module) """ properties = Jsonb(label="Properties") """PhysObj Types also have flexible properties. These are usually read from the PhysObj themselves (where they act as default values if not defined on the PhysObj), and are useful with generic Types, i.e., those that have children. Operations that handle Properties can do interesting things by using properties that actually come from Type information. """ parent = Many2One(model='Model.Wms.PhysObj.Type') """This field expresses the hierarchy of PhysObj Types.""" def __str__(self): return "(id={self.id}, code={self.code!r})".format(self=self) def __repr__(self): return "Wms.PhysObj.Type" + str(self) # TODO PERF cache ? def get_behaviour(self, name, default=None): """Get the value of the behaviour with given name. This method is the preferred way to access a given behaviour. It resolves the wished behaviour by looking it up within the :attr:`behaviours` :class:`dict`, and recursively on its parent. It also takes care of corner cases, such as when :attr:`behaviours` is ``None`` as a whole. """ behaviours = self.behaviours parent = self.parent if parent is None: parent_beh = _missing else: parent_beh = self.parent.get_behaviour(name, default=_missing) if behaviours is None: beh = _missing else: beh = behaviours.get(name, _missing) if beh is _missing: if parent_beh is _missing: return default return parent_beh if parent_beh is _missing: return beh return dict_merge(beh, parent_beh) def is_sub_type(self, gt): """True if ``self`` is a sub type of ``gt``, inclusively. TODO PERF the current implementation recurses over ancestors. A subsequent implementation could add caching and/or recursive SQL queries. """ if self == gt: return True parent = self.parent if parent is None: return False return parent.is_sub_type(gt) def is_container(self): return self.get_behaviour('container') is not None def get_property(self, k, default=None): """Read a property value recursively. 
If the current Type does not have the wished property key, but has a parent, then the lookup continues on the parent. """ props = self.properties val = _missing if props is None else props.get(k, _missing) if val is _missing: parent = self.parent if parent is None: return default return parent.get_property(k, default=default) return val def merged_properties(self): """Return this Type properties, merged with its parent.""" parent = self.parent properties = self.properties if parent is None: return properties if properties is not None else {} return dict_merge(properties, parent.merged_properties()) def has_property_values(self, mapping): return all(self.get_property(k, default=_missing) == v for k, v in mapping.items()) def has_property(self, name): if self.properties is not None and name in self.properties: return True parent = self.parent if parent is not None: return parent.has_property(name) return False def has_properties(self, wanted_props): if not wanted_props: return True properties = self.properties if properties is None: missing = wanted_props else: missing = (p for p in wanted_props if p not in properties) parent = self.parent if parent is None: for x in missing: # could be a generator, a list etc. return False return True return parent.has_properties(missing)
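# Hedged sketch: how the behaviour and Property lookups documented above
# combine with the Type hierarchy. The codes, the empty 'container'
# behaviour and the 'reusable' Property are invented for the example.
def demo_type_hierarchy(registry):
    Type = registry.Wms.PhysObj.Type
    base = Type.insert(code='CONTAINER-BASE',
                       behaviours=dict(container={}),
                       properties=dict(reusable=True))
    pallet = Type.insert(code='PALLET-EU', parent=base,
                         properties=dict(reusable=False))

    assert pallet.is_container()                     # behaviour inherited
    assert pallet.is_sub_type(base)
    assert pallet.get_property('reusable') is False  # child wins over parent
    assert pallet.merged_properties() == dict(reusable=False)
    return pallet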
class Action: """Represent a reconciliation Action for a :class:`Node <Node>` instance. """ id = Integer(label="Identifier", primary_key=True) """Primary key.""" node = Many2One(model=Wms.Inventory.Node, one2many='actions', nullable=False) OPERATIONS = ( ('app', 'wms_inventory_action_app'), ('disp', 'wms_inventory_action_disp'), ('telep', 'wms_inventory_action_telep'), ) type = Selection(selections=OPERATIONS, nullable=False) location = Many2One(model=Wms.PhysObj, nullable=False) destination = Many2One(model=Wms.PhysObj) """Optional destination container. This is useful if :attr:`type` is ``telep`` only. """ physobj_type = Many2One(model=Wms.PhysObj.Type, nullable=False) physobj_code = Text() physobj_properties = Jsonb() quantity = Integer(nullable=False) def __repr__(self): fmt = ("Wms.Inventory.Action(type={self.type!r}, " "node={self.node!r}, location_code={self.location.code!r}, ") if self.type == 'telep': fmt += "destination_code={self.destination.code!r}, " fmt += ("quantity={self.quantity}, " "physobj_type_code={self.physobj_type.code!r}, " "physobj_code={self.physobj_code!r}, " "physobj_properties={self.physobj_properties!r})") return fmt.format(self=self) __str__ = __repr__ @classmethod def simplify(cls, node): App = orm.aliased(cls, name='app') Disp = orm.aliased(cls, name='disp') # TODO, compare properties matching = (cls.registry.query(App, Disp).filter( App.node == node, App.type == 'app', Disp.node == node, Disp.type == 'disp', Disp.physobj_type_id == App.physobj_type_id, or_(Disp.physobj_code == App.physobj_code, and_(Disp.physobj_code.is_(None), App.physobj_code.is_(None)))).all()) for app, disp in matching: if app.type == 'telep' or disp.type == 'telep': # one is already rewritten continue diff_qty = app.quantity - disp.quantity dest = app.location if diff_qty >= 0: disp.update(type='telep', destination=dest) if diff_qty: app.quantity = diff_qty else: app.delete() else: app.update(type='telep', location=disp.location, destination=dest) disp.quantity = -diff_qty def customize_operation_fields(self, operation_fields): """Hook to modify fields of Operations spawned by :meth:`apply` This is meant for easy override by applications. :param dict operation_fields: prefilled by :meth:`apply` with the minimal required values in the generic case. This methods mutates it in place :returns: None The typical customization would consist of putting additional fields that make sense for the local business logic, but this method isn't limited to that. """ return def apply(self): """Perform Inventory Operations for the current Action. :return: tuple of the newly created Operations The new Operations will all point to the related Inventory. """ Operation = self.registry.Wms.Operation op_fields = dict(state='done', inventory=self.node.inventory) if self.type == 'app': Op = Operation.Apparition op_fields.update(physobj_type=self.physobj_type, physobj_code=self.physobj_code, physobj_properties=self.physobj_properties, quantity=self.quantity, location=self.location) elif self.type == 'disp': Op = Operation.Disparition else: Op = Operation.Teleportation op_fields['new_location'] = self.destination self.customize_operation_fields(op_fields) if self.type == 'app': return (Op.create(**op_fields), ) return tuple( Op.create(input=av, **op_fields) for av in self.choose_affected()) def choose_affected(self): """Choose Physical Objects to be taken for Disparition/Teleportation. if :attr:`physobj_code` is ``None``, we match only Physical Objects whose ``code`` is also ``None``. 
That's because the code should come directly from existing PhysObj records (that weren't reflected in Inventory Lines). Same remark would go for Properties, but: TODO implement Properties TODO adapt to wms-quantity """ PhysObj = self.registry.Wms.PhysObj Avatar = PhysObj.Avatar avatars_q = (Avatar.query().filter_by( location=self.location, state='present').join(PhysObj, Avatar.obj_id == PhysObj.id).filter( PhysObj.type == self.physobj_type, PhysObj.code == self.physobj_code)) Reservation = getattr(self.registry.Wms, 'Reservation', None) if Reservation is not None: avatars_q = (avatars_q.outerjoin( Reservation, Reservation.physobj_id == Avatar.obj_id).outerjoin( Reservation.request_item).order_by( Reservation.RequestItem.request_id.desc())) avatars = avatars_q.limit(self.quantity).all() if len(avatars) != self.quantity: raise ActionInputsMissing( self, len(avatars), "Couldn't find enough Avatars " "(only {nb_found} over {nb_expected}) " "to choose from in application of {action}") return avatars
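# Hedged sketch (not part of the code above) of the customize_operation_fields
# hook: a downstream blok stamping the Operations spawned by apply() with an
# extra 'comment' field that this blok is assumed to have added on
# Model.Wms.Operation.
from anyblok import Declarations

Model = Declarations.Model


@Declarations.register(Model.Wms.Inventory)
class Action:

    def customize_operation_fields(self, operation_fields):
        # Mutate the dict in place, as documented; no return value expected.
        operation_fields['comment'] = "created by inventory reconciliation"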
class Observation(Mixin.WmsSingleInputOperation, Mixin.WmsSingleOutcomeOperation, Mixin.WmsInPlaceOperation, Operation): """Operation to change PhysObj Properties. Besides being commonly associated with some measurement or assessment being done in reality, this Operation is the preferred way to alter the Properties of a physical object (PhysObj), in a traceable, reversible way. For now, only whole Property values are supported, i.e., for :class:`dict`-valued Properties, we can't observe the value of just a subkey. Observations support oblivion in the standard way, by reverting the Properties of the physical object to their prior values. This is consistent with the general rule that oblivion is to be used in cases where the database values themselves are irrelevant (for instance if the Observation was for the wrong physical object). On the other hand, reverting an Observation is semantically more complicated. See :meth:`plan_revert_single` for more details. """ TYPE = 'wms_observation' id = Integer(label="Identifier", primary_key=True, autoincrement=False, foreign_key=Operation.use('id').options(ondelete='cascade')) """Primary key.""" name = Text(nullable=True) """The name of the Observation, to identify it quickly. This field is optional and depends on the developer's needs. """ observed_properties = Jsonb() """Result of the Observation. It is forbidden to fill this field for a planned Observation: this is considered contradictory to the idea of actually observing something. In the case of planned Observations, this field should be updated right before execution. TODO: rethink this, wouldn't it make sense actually to record some expected results, so that dependent Operations could be themselves planned ? This doesn't seem to be that useful though, since e.g., Assemblies can check different Properties during their different states. On the other hand, it could make sense in cases where the result is very often the same to prefill it. Another case would be for reversals: prefill the result. """ previous_properties = Jsonb() """Used in particular during oblivion. This records key/value pairs of *direct* properties before execution of the Observation TODO and maybe reversal """ required_properties = Jsonb() """List of Properties that must be present in :attr:`observed_properties` In other words, these are Properties the Observation must update. At execution time, the contents of :attr:`observed_properties` is examined and an error is raised if one of these properties is missing. """ def after_insert(self): inp_av = self.input physobj = inp_av.obj state = self.state if state != 'done' and self.observed_properties is not None: raise ObservationError( self, "Forbidden to create a planned or just started " "Observation together with its results (this " "would mean one knows the result in advance).") dt_exec = self.dt_execution inp_av.update(dt_until=dt_exec, state='past') physobj.Avatar.insert( obj=physobj, state='future' if state == 'planned' else 'present', outcome_of=self, location=self.input.location, dt_from=dt_exec, dt_until=None) if self.state == 'done': self.apply_properties() def apply_properties(self): """Save previous properties, then apply :attr:`observed_properties`. The previous *direct* properties of the physical object get saved in :attr:`previous_properties`, then the key/value pairs of :attr:`observed_properties` are applied.
If an observed value is a new one, i.e., there wasn't any *direct* key of that name before, it simply ends up absent from the :attr:`previous_properties` dict (even if there was an inherited one). This allows for easy restoration of previous values in :meth:`obliviate_single`. """ observed = self.observed_properties if observed is None: raise ObservationError( self, "Can't execute with no observed properties") required = self.required_properties if required: if not set(required).issubset(observed): raise ObservationError( self, "observed_properties {observed!r} is missing " "some of the required {required!r} ", observed=set(observed), required=required) phobj = self.input.obj prev = {} existing = phobj.properties if existing: for k, v in observed.items(): prev_val = existing.get(k, _missing) if prev_val is _missing: continue prev[k] = prev_val self.previous_properties = prev phobj.update_properties(observed) def execute_planned(self): self.apply_properties() dt_exec = self.dt_execution self.input.update(dt_until=dt_exec, state='past') self.outcome.update(dt_from=dt_exec, state='present') def obliviate_single(self): """Restore the Properties as they were before execution. """ phobj = self.input.obj for k in self.observed_properties: old_val = self.previous_properties.get(k, _missing) if old_val is _missing: del phobj.properties[k] else: phobj.properties[k] = old_val super(Observation, self).obliviate_single() def is_reversible(self): """Observations are always reversible. See :meth:`plan_revert_single` for a full discussion of this. """ return True def plan_revert_single(self, dt_execution, follows=()): """Reverting an Observation is a no-op. For the time being, we find it sufficient to consider that Observations are really meant to find some information about the physical object (e.g., a weight, a working condition). Therefore, reverting them doesn't make sense, while we don't want to consider them fully irreversible, so that a chain of Operations involving an Observation can still be reversed. The solution to this dilemma for the time being is that reverting an Observation does nothing. For instance, if an Observation follows some other Operation and itself has a follower, the outcome of the reversal of the follower is fed directly to the reversal of the previous operation. We may add more variants (reversal via a prefilled Observation etc.) in the future. """ if not follows: # of course the Observation is not its own reversal, but # this tells reversals of upstream Operations to follow the # Observation return self # An Observation has at most a single follower; to make its # reversal trivial, it's enough to return the reversal of that # single follower return next(iter(follows))
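# Hedged usage sketch: planning an Observation ahead of time, then feeding in
# the measured value right before execution, as required by the docstrings
# above. The 'weight' Property, the avatar argument and the single-input
# ``input=`` keyword of Operation.create() are assumptions of this example.
def plan_then_observe(registry, avatar, dt_planned, measured_weight):
    Observation = registry.Wms.Operation.Observation
    obs = Observation.create(input=avatar,
                             state='planned',
                             dt_execution=dt_planned,
                             name='weighing',
                             required_properties=['weight'])
    # ... later, once the measurement has actually been performed:
    obs.observed_properties = dict(weight=measured_weight)
    obs.execute()   # apply_properties() records previous values, then updates
    return obs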