class Person:
    # Simple person fixture: integer surrogate key plus two links to the
    # same Address model (e.g. billing / shipping style use-case).
    id = Integer(primary_key=True)
    address_1 = Many2One(model=Model.Address)
    address_2 = Many2One(model=Model.Address)
class Test2:
    # Fixture with an explicit foreign key column towards Model.Test.id.
    id = Integer(primary_key=True)
    test_id = Integer(foreign_key=Model.Test.use('id'))
class Test:
    # Fixture exercising auto_update on a DateTime column.
    id = Integer(primary_key=True)
    update_at = DateTime(auto_update=True)
    val = String()
class Test:
    # Fixture with a String foreign key to Model.TestFk.name.
    id = Integer(primary_key=True)
    name = String(foreign_key=Model.TestFk.use('name'))
class Split(SingleInput, InPlace, Operation):
    """A split of a PhysObj record in two.

    Splits replace their input's PhysObj record with two of them, one
    having the wished :attr:`quantity`, along with Avatars at the same
    location, keeping the same properties and the same total quantity.
    This is therefore destructive for the input's PhysObj.

    While non trivial in the database, Splits may have no physical
    counterpart in the real world, in which case we call them *formal*.
    Formal Splits are internal details of ``wms-core`` and can always be
    reverted with Aggregate Operations; physical Splits are reversible
    only for some PhysObj Types (see ``Model.Wms.PhysObj.Type`` in
    ``wms-quantity`` for the full discussion).

    Representing this as an Operation keeps concrete Operations free of
    special cases and makes Splits explicit in the history, which helps
    the history-manipulating methods a lot. The drawback is a
    proliferation of PhysObj records, some with a zero-second lifespan
    (typically when created and executed from Splitter Operations).
    """

    TYPE = 'wms_split'
    """Polymorphic key"""

    id = Integer(label="Identifier",
                 primary_key=True,
                 autoincrement=False,
                 foreign_key=Operation.use('id').options(ondelete='cascade'))
    quantity = Decimal()
    """The quantity to split."""

    def specific_repr(self):
        """Repr fragment showing the input and the wished quantity."""
        return ("input={self.input!r}, "
                "quantity={self.quantity}").format(self=self)

    def after_insert(self):
        """Create the two outcome PhysObj and their Avatars.

        The input Avatar gets closed (``dt_until``) at execution time,
        and the two new PhysObj share the input's type, code and
        properties, their quantities summing up to the original one.
        """
        self.registry.flush()
        avatar = self.input
        phobj = avatar.obj
        wished = self.quantity
        # fields shared by both outcome PhysObj records
        phobj_fields = {
            'type': phobj.type,
            'code': phobj.code,
            'properties': phobj.properties,
        }
        # fields shared by both outcome Avatars
        avatar_fields = {
            'location': avatar.location,
            'outcome_of': self,
            'dt_from': self.dt_execution,
            'dt_until': avatar.dt_until,
        }
        avatar.dt_until = self.dt_execution
        if self.state == 'done':
            avatar.update(state='past')
            avatar_fields['state'] = 'present'
        else:
            avatar_fields['state'] = 'future'
        return tuple(
            avatar.insert(obj=phobj.insert(quantity=qty, **phobj_fields),
                          **avatar_fields)
            for qty in (wished, phobj.quantity - wished))

    @property
    def wished_outcome(self):
        """Return the PhysObj record with the wished quantity.

        This is only one of the Operation's outcomes.

        :rtype: ``Wms.PhysObj``
        :raises OperationError: if no outcome with the wished quantity
                                can be found any more.
        """
        PhysObj = self.registry.Wms.PhysObj
        Avatar = PhysObj.Avatar
        # in case the split is exactly in half, there's no difference
        # between the two records we created, let's pick any.
        found = (Avatar.query().join(Avatar.obj)
                 .filter(Avatar.outcome_of == self,
                         PhysObj.quantity == self.quantity)
                 .first())
        if found is None:
            raise OperationError(self, "The split outcomes have disappeared")
        return found

    def check_execute_conditions(self):
        """Call the base class's version and check that quantity fits.

        :raises OperationQuantityError: if the wished quantity exceeds
                                        the input's quantity.
        """
        super(Split, self).check_execute_conditions()
        if self.quantity > self.input.obj.quantity:
            raise OperationQuantityError(
                self,
                "Can't execute {op}, whose quantity {op.quantity} is greater "
                "than on its input {phobj}, "
                "although it's been successfully planned.",
                op=self, phobj=self.input)

    def execute_planned(self):
        """Promote outcomes to 'present' and archive the input Avatar."""
        for outcome in self.outcomes:
            outcome.update(state='present', dt_from=self.dt_execution)
        self.registry.flush()
        self.input.update(state='past', dt_until=self.dt_execution)
        self.registry.flush()

    def is_reversible(self):
        """Reversibility depends on the relevant PhysObj Type.

        See ``is_split_reversible`` on ``Model.PhysObj.Type``.
        """
        return self.input.obj.type.is_split_reversible()

    def plan_revert_single(self, dt_execution, follows=()):
        """Plan an Aggregate undoing this Split.

        :param dt_execution: planned execution time of the Aggregate
        :param follows: Operations the reversal follows; their outcome
                        Avatars are gathered as Aggregate inputs.
        """
        Wms = self.registry.Wms
        Avatars = Wms.PhysObj.Avatar
        # here in that case, that's for multiple operations
        # in_ is not implemented for Many2Ones
        reason_ids = {op.id for op in follows}
        to_aggregate = (Avatars.query()
                        .filter(Avatars.outcome_of_id.in_(reason_ids))
                        .all())
        to_aggregate.extend(self.leaf_outcomes())
        return Wms.Operation.Aggregate.create(inputs=to_aggregate,
                                              dt_execution=dt_execution,
                                              state='planned')

    def obliviate_single(self):
        """Remove the created PhysObj in addition to base class operation.

        The base class would only take care of the created Avatars.
        """
        created_objs = [av.obj for av in self.outcomes]
        super(Split, self).obliviate_single()
        for obj in created_objs:
            obj.delete()
class Purchase:
    # Minimal purchase model: surrogate key and a JSONB properties bag.
    id = Integer(label="Identifier", primary_key=True)
    properties = Jsonb(label="Properties")
class Test:
    # Fixture with a NOT NULL integer column.
    id = Integer(primary_key=True)
    val = Integer(nullable=False)
class Test2:
    # Fixture: Many2One with explicit (composite-style) column names.
    id = Integer(primary_key=True)
    test = Many2One(model=Model.Test,
                    column_names=('other_test_id', 'other_test_id2'))
class Test2(Mixin.MTest):
    # Fixture inheriting a project mixin; only adds the primary key.
    id = Integer(primary_key=True)
class Test:
    # Self-referential fixture: parent/children adjacency via one2many.
    id = Integer(primary_key=True)
    parent = Many2One(model='Model.Test', one2many='children')
class Test:
    # Fixture combining primary_key and unique on two columns
    # (composite primary key with per-column unique constraints).
    id = Integer(primary_key=True, unique=True)
    id2 = Integer(primary_key=True, unique=True)
class Test:
    # Fixture with a formatted Sequence column as part of the key.
    id = Integer(primary_key=True)
    seq = Sequence(primary_key=True, formater="V-{seq}")
class Test2:
    # Plain Many2One fixture towards Model.Test.
    id = Integer(primary_key=True)
    test = Many2One(model=Model.Test)
class Space:
    # A FuretUI "space": a navigation entry restricted by role, with an
    # entry-point path computed from its menu tree.
    code = String(primary_key=True)
    label = String(nullable=False)
    role = String()
    order = Integer(default=100, nullable=False)
    description = String()
    icon_code = String()
    icon_type = String()

    def get_path(self):
        """Return the entry path of this space.

        Walks the menu tree (recursive SQL over the furetui_menu tables)
        to find the first resource menu, preferring one flagged as
        default, and builds ``/space/<code>/menu/<id>/resource/<id>?...``
        with optional ``orders``/``tags``/``filters`` query parameters.

        :return: the path as a string; menu and resource ids are 0 when
                 no resource menu exists for this space.
        """
        query = text("""
            with recursive menu_tree as (
                select fm.id,
                       fm.order,
                       0 as parent_order,
                       fm.menu_type,
                       fm.parent_id,
                       fmr.label,
                       false as default
                from furetui_menu fm
                join furetui_menu_root fmr on fmr.id = fm.id
                where fmr.space_code=:space_code
                union all
                select child.id,
                       child.order,
                       (parent.parent_order + parent.order) as parent_order,
                       child.menu_type,
                       child.parent_id,
                       coalesce(node.label, resource.label) as label,
                       coalesce(resource.default, false) as default
                from furetui_menu as child
                left outer join furetui_menu_node as node
                    on node.id = child.id
                left outer join furetui_menu_resource resource
                    on resource.id = child.id
                join menu_tree as parent on parent.id = child.parent_id
            )
            select id
            from menu_tree
            where menu_type = 'Model.FuretUI.Menu.Resource'
            and "default" is :default
            order by parent_order asc, "order" asc, id asc limit 1;
        """)
        # take the first default found
        res = self.anyblok.execute(
            query.bindparams(space_code=self.code, default=True)).fetchone()
        if res is None:
            res = self.anyblok.execute(
                query.bindparams(space_code=self.code,
                                 default=False)).fetchone()
        query = []
        # BUGFIX: mre must be bound even when no resource menu is found,
        # otherwise the final return raised NameError instead of
        # producing the 0/0 fallback path.
        mre = None
        if res:
            mre = self.anyblok.FuretUI.Menu.query().get(res[0])
            if mre.order_by:
                query.append('orders=%s' % mre.order_by)
            if mre.tags:
                query.append('tags=%s' % mre.tags)
            if mre.filters:
                query.append('filters=%s' % json.dumps(mre.filters))
        return '/space/%s/menu/%d/resource/%d?%s' % (
            self.code,
            mre.id if mre else 0,
            mre.resource.id if mre else 0,
            '&'.join(query))

    @classmethod
    def get_for_user(cls, authenticated_userid):
        """Return the query of spaces visible to the given user.

        A space is visible when its role is empty/NULL or among the
        user's Pyramid roles; ordered by :attr:`order` ascending.
        """
        roles = cls.anyblok.Pyramid.get_roles(authenticated_userid)
        query = cls.query().order_by(cls.order.asc())
        query = query.filter(
            or_(cls.role.in_(roles), cls.role.is_(None), cls.role == ''))
        return query

    def get_menus(self):
        """Return the menus attached to this space."""
        return self.anyblok.FuretUI.Menu.get_menus_from(space=self)

    def get_i18n_to_export(self, external_id):
        """Return (key, value) pairs of translatable fields for export."""
        return [(
            f'space:{external_id}:{field}',
            getattr(self, field),
        ) for field in ('label', 'description')]
class Unpack(Mixin.WmsSingleInputOperation, Operation):
    """Unpacking some goods, creating new Goods and Avatar records.

    This is a destructive Operation in the usual mild sense: once done,
    the input Goods Avatar is in the ``past`` state and its underlying
    Goods has no new Avatars. It is meant to be reversible through
    appropriate Pack / Assembly Operations (not implemented yet).

    What happens during unpacking is specified as behaviours of the
    Goods Type of the Goods being unpacked. For the time being, Unpacks
    create the new Avatar records in the same location; downstream code
    can prepend moves to unpacking areas and/or append moves to final
    destinations.
    """
    TYPE = 'wms_unpack'

    id = Integer(label="Identifier",
                 primary_key=True,
                 autoincrement=False,
                 foreign_key=Operation.use('id').options(ondelete='cascade'))

    @classmethod
    def check_create_conditions(cls, state, dt_execution,
                                inputs=None, quantity=None, **kwargs):
        """Check base conditions and that the input Type can be unpacked.

        :raises OperationInputsError: if the input's Goods Type doesn't
                                      have the ``unpack`` behaviour.
        """
        super(Unpack, cls).check_create_conditions(state, dt_execution,
                                                   inputs=inputs,
                                                   quantity=quantity,
                                                   **kwargs)
        goods_type = inputs[0].type
        if 'unpack' not in goods_type.behaviours:
            raise OperationInputsError(
                cls,
                "Can't create an Unpack for {inputs} "
                "because their type {type} doesn't have the 'unpack' "
                "behaviour", inputs=inputs, type=goods_type)

    def execute_planned(self):
        """Promote outcomes to 'present' and archive the packs Avatar."""
        packs = self.input
        # TODO PERF direct update query would probably be faster
        for outcome in self.outcomes:
            outcome.state = 'present'
        packs.update(state='past', reason=self)

    def create_unpacked_goods(self, fields, spec):
        """Create Goods record according to given specification.

        This singled out method is meant for easy subclassing (see,
        e.g, in ``wms-quantity`` Blok).

        :param fields: pre-baked fields, prepared by the base class. In
                       the current implementation, they are fully
                       derived from ``spec``, hence one may think of
                       them as redundant, but the point is that they are
                       outside the responsibility of this method.
        :param spec: specification for these Goods, should be used
                     minimally in subclasses, typically for quantity
                     related adjustments
        :return: the list of created Goods records. In ``wms-core``,
                 there will be as many as the wished quantity, but in
                 ``wms-quantity``, this maybe a single record bearing
                 the total quantity.
        """
        Goods = self.registry.Wms.Goods
        return [Goods.insert(**fields) for _ in range(spec['quantity'])]

    def after_insert(self):
        """Create the outcome Goods/Avatars from the outcome specs."""
        Goods = self.registry.Wms.Goods
        GoodsType = Goods.Type
        packs = self.input
        dt_execution = self.dt_execution
        spec = self.get_outcome_specs()
        type_ids = set(outcome['type'] for outcome in spec)
        outcome_types = {
            gt.id: gt
            for gt in GoodsType.query().filter(GoodsType.id.in_(
                type_ids)).all()
        }
        outcome_state = 'present' if self.state == 'done' else 'future'
        if self.state == 'done':
            packs.update(state='past', reason=self)
        for outcome_spec in spec:
            # TODO what would be *really* neat would be to be able
            # to recognize the goods after a chain of pack/unpack
            goods_fields = dict(type=outcome_types[outcome_spec['type']])
            clone = outcome_spec.get('forward_properties') == 'clone'
            if clone:
                goods_fields['properties'] = packs.goods.properties
            for goods in self.create_unpacked_goods(goods_fields,
                                                    outcome_spec):
                Goods.Avatar.insert(goods=goods,
                                    location=packs.location,
                                    reason=self,
                                    dt_from=dt_execution,
                                    dt_until=packs.dt_until,
                                    state=outcome_state)
                if not clone:
                    self.forward_props(outcome_spec, goods)
        packs.dt_until = dt_execution

    def forward_props(self, spec, outcome):
        """Handle the properties for a given outcome (Goods record)

        :param spec: the relevant part of behaviour for this outcome
        :param outcome: just-created Goods instance
        :raises OperationInputsError: if the packs lack properties that
                                      the spec requires.
        """
        packs = self.input.goods
        fwd_props = spec.get('forward_properties', ())
        req_props = spec.get('required_properties')
        if req_props and not packs.properties:
            raise OperationInputsError(
                self,
                "Packs {inputs[0]} have no properties, yet their type {type} "
                "requires these for Unpack operation: {req_props}",
                type=packs.type, req_props=req_props)
        if not fwd_props:
            return
        for pname in fwd_props:
            pvalue = packs.get_property(pname)
            if pvalue is None:
                # BUGFIX: req_props may be None (spec.get default), in
                # which case ``pname not in req_props`` raised TypeError;
                # a missing, non-required property is simply skipped.
                if not req_props or pname not in req_props:
                    continue
                raise OperationInputsError(
                    self,
                    "Packs {inputs[0]} lacks the property {prop} "
                    "required by their type for Unpack operation",
                    prop=pname)
            outcome.set_property(pname, pvalue)

    def get_outcome_specs(self):
        """Produce a complete behaviour for outcomes and their properties.

        Unless ``uniform_outcomes`` is set to ``True``, the outcomes of
        the Unpack are obtained by merging those defined in the Goods
        Types behaviour and in the packs (``self.input``) properties.

        This accomodates various use cases:

        - fixed outcomes: a 6-pack of orange juice bottles gets
          unpacked as 6 bottles
        - fully variable outcomes: a parcel with described contents
        - variable outcomes: a packaging with parts always present and
          some varying.

        The properties on outcomes are set from those of ``self.input``
        according to the ``forward_properties`` and
        ``required_properties`` of the outcomes, unless
        ``uniform_outcomes`` is ``True``, in which case the properties
        of the packs aren't even read, but simply cloned (referenced
        again) in the outcomes — better for performance in high volume
        operation. The same can be achieved on a given outcome with the
        special ``'clone'`` value for ``forward_properties``.

        Otherwise the ``forward_properties`` and ``required_properties``
        unpack behaviour from the Goods Type of the packs are merged
        with those of the outcomes, so ``forward_properties`` have three
        key/value sources:

        - top-level at the Goods Type ``unpack`` behaviour
        - in each outcome of the Goods Type
        - in each outcome of the Goods record
          (``unpack_outcomes`` property)

        Use-case: a purchase order reference tracked as property
        ``po_ref`` can be forwarded upon Unpack for all outcomes of an
        incoming package Type, without each content spec repeating it.

        TODO DOC move a lot to global doc

        :raises OperationInputsError: when the merged specs yield no
                                      outcomes at all.
        """
        # TODO PERF playing safe by performing a copy, in order not
        # to propagate mutability to the DB. Not sure how much of it
        # is necessary.
        packs = self.input
        behaviour = packs.type.behaviours['unpack']
        specs = behaviour.get('outcomes', [])[:]
        if behaviour.get('uniform_outcomes', False):
            for outcome in specs:
                outcome['forward_properties'] = 'clone'
            return specs
        specific_outcomes = packs.get_property('unpack_outcomes', ())
        specs.extend(specific_outcomes)
        if not specs:
            raise OperationInputsError(
                self,
                "unpacking {inputs[0]} yields no outcomes. "
                "Type {type} 'unpack' behaviour: {behaviour}, "
                "specific outcomes from Goods properties: "
                "{specific}", type=packs.type, behaviour=behaviour,
                specific=specific_outcomes)
        global_fwd = behaviour.get('forward_properties', ())
        global_req = behaviour.get('required_properties', ())
        for outcome in specs:
            if outcome.get('forward_properties') == 'clone':
                continue
            outcome.setdefault('forward_properties', []).extend(global_fwd)
            outcome.setdefault('required_properties', []).extend(global_req)
        return specs

    def cancel_single(self):
        """Remove the newly created Goods, not only their Avatars."""
        self.reset_inputs_original_values()
        self.registry.flush()
        all_goods = set()
        # TODO PERF in two queries using RETURNING, or be braver and
        # make the avatars cascade
        for avatar in self.outcomes:
            all_goods.add(avatar.goods)
            avatar.delete()
        for goods in all_goods:
            goods.delete()
class Address:
    # Basic postal address fixture.
    id = Integer(primary_key=True)
    street = String()
    zip = String()
    city = String()
class Authorization:
    """Store the authorization rules.

    Each row grants/denies permissions on a resource or model, either
    for a specific login or for a role, optionally restricted by a
    condition filter evaluated against the user.
    """
    id = Integer(primary_key=True)
    order = Integer(default=100, nullable=False)
    resource = String()
    model = String(
        foreign_key=Declarations.Model.System.Model.use('name').options(
            ondelete="cascade"))
    primary_keys = Json(default={})
    filter = Json(default={})  # next step
    role = Many2One(model=User.Role,
                    foreign_key_options={'ondelete': 'cascade'})
    login = String(foreign_key=User.use('login').options(ondelete="cascade"))
    user = Many2One(model=User)
    perms = Json(default={})
    # convenience accessors into the ``perms`` JSON column
    perm_create = JsonRelated(json_column='perms', keys=['create'])
    perm_read = JsonRelated(json_column='perms', keys=['read'])
    perm_update = JsonRelated(json_column='perms', keys=['update'])
    perm_delete = JsonRelated(json_column='perms', keys=['delete'])

    @classmethod
    def get_acl_filter_model(cls):
        """Return the Model to use to check the permission"""
        return {
            'User': cls.registry.User,
            'Role': cls.registry.User.Role,
        }

    @classmethod
    def get_acl(cls, login, resource, params=None):
        """Return the Pyramid ACL in function of the resource and user

        :param login: str, login of the user
        :param resource: str, name of the resource
        """
        # cache the method
        User = cls.registry.User
        Role = cls.registry.User.Role
        query = cls.query()
        query = query.filter(
            or_(cls.resource == resource, cls.model == resource))
        query = query.order_by(cls.order)
        # rules targeting the login directly, then rules via roles
        Q1 = query.filter(cls.login == login)
        Q2 = query.join(cls.role).filter(Role.name.in_(User.get_roles(login)))
        res = []
        for query in (Q1, Q2):
            for self in query.all():
                allow_perms = []
                deny_perms = []
                for perm in sorted((self.perms or {}).keys()):
                    p = self.perms[perm]
                    # (re)build a user query and apply the rule's
                    # condition filters to decide matched/unmatched
                    query = User.query()
                    query = query.filter(User.login == login)
                    query = query.join(User.roles)
                    if self.filter:
                        query = query.condition_filter(
                            self.filter, cls.get_acl_filter_model())
                    if 'condition' in p:
                        query = query.condition_filter(
                            p['condition'], cls.get_acl_filter_model())
                    ismatched = True if query.count() else False
                    verdict = p.get('matched' if ismatched else 'unmatched')
                    # only an explicit True/False counts; anything else
                    # leaves the permission undecided by this rule
                    if verdict is True:
                        allow_perms.append(perm)
                    elif verdict is False:
                        deny_perms.append(perm)
                if len(allow_perms):
                    res.append((Allow, login, allow_perms))
                if len(deny_perms):
                    res.append((Deny, login, deny_perms))
        # final catch-all: deny everything not explicitly allowed above
        res.append((Deny, login, ALL_PERMISSIONS))
        return res

    @classmethod
    def before_insert_orm_event(cls, mapper, connection, target):
        target.check_validity()

    @classmethod
    def before_update_orm_event(cls, mapper, connection, target):
        target.check_validity()

    def check_validity(self):
        """Check at the insert or update that all rule match

        :exception: AuthorizationValidationException
        """
        # NOTE(review): ``self.role_name`` is not a declared column in
        # this class — presumably provided elsewhere; verify.
        if not (self.role or self.login or self.user or self.role_name):
            raise AuthorizationValidationException(
                "No role and login to apply in the authorization (%s)" % self)
        if not (self.resource or self.model):
            raise AuthorizationValidationException(
                "No resource and model to apply in the authorization "
                "(%s)" % self)
        if not self.model and self.primary_keys:
            raise AuthorizationValidationException(
                "Primary keys without model to apply in the authorization "
                "(%s)" % self)
class TestM2O:
    # Nullable Many2One fixture.
    id = Integer(primary_key=True)
    test = Many2One(model=Model.Test, nullable=True)
class Sequence:
    """Database sequences.

    This Model lets applications define and use database sequences
    easily. It rewraps `SQLAlchemy sequences
    <http://docs.sqlalchemy.org/en/latest/core/defaults.html
    #sqlalchemy.schema.Sequence>`_ with formatting capabilities, for
    use e.g. in fields of applicative Models.

    Sample usage::

        sequence = registry.System.Sequence.insert(
            code="string code",
            formater="One prefix {seq} One suffix")

    To get the next formatted value of the sequence::

        sequence.nextval()

    Full example in a Python shell::

        >>> seq = Sequence.insert(code='SO', formater="{code}-{seq:06d}")
        >>> seq.nextval()
        'SO-000001'
        >>> seq.nextval()
        'SO-000002'

    A Sequence created with ``no_gap=True`` guarantees gapless values by
    locking its row until the transaction ends; a concurrent transaction
    then fails with ``sqlalchemy.exc.OperationalError``
    (``psycopg2.errors.LockNotAvailable``).
    """
    # name of the internal DB sequence used to generate seq_name values
    _cls_seq_name = 'system_sequence_seq_name'

    id = Integer(primary_key=True)
    code = String(nullable=False)
    start = Integer(default=1)
    current = Integer(default=None)
    seq_name = String(nullable=False)
    """Name of the sequence in the database.

    Most databases identify sequences by names which must be globally
    unique. If not passed at insertion, the value of this field is
    automatically generated.
    """
    formater = String(nullable=False, default="{seq}")
    """Python format string to render the sequence values.

    Usable variables: ``seq`` (current value of the underlying database
    sequence), ``code`` and ``id`` (the corresponding fields).
    """
    no_gap = Boolean(default=False, nullable=False)
    """When ``True``, use the locked-row counter instead of a DB sequence."""

    @classmethod
    def initialize_model(cls):
        """Create the sequence to determine name"""
        super(Sequence, cls).initialize_model()
        seq = SQLASequence(cls._cls_seq_name)
        seq.create(cls.registry.bind)

        to_create = getattr(cls.registry,
                            '_need_sequence_to_create_if_not_exist', ())
        if to_create is None:
            return
        for vals in to_create:
            # skip codes that already exist
            if cls.query().filter(cls.code == vals['code']).count():
                continue
            formatter = vals.get('formater')
            if formatter is None:
                del vals['formater']
            cls.insert(**vals)

    @classmethod
    def create_sequence(cls, values):
        """Create the database sequence for an instance of Sequence Model.

        :return: suitable field values for insertion of the Model instance
        :rtype: dict
        """
        seq_name = values.get('seq_name')
        start = values.setdefault('start', 1)
        if values.get("no_gap"):
            # no DB sequence needed: the row itself is the counter
            values.setdefault('seq_name', values.get("code", "no_gap"))
        else:
            if seq_name is None:
                seq_id = cls.registry.execute(SQLASequence(cls._cls_seq_name))
                seq_name = '%s_%d' % (cls.__tablename__, seq_id)
                values['seq_name'] = seq_name
            seq = SQLASequence(seq_name, start=start)
            seq.create(cls.registry.bind)
        return values

    @classmethod
    def insert(cls, **kwargs):
        """Overwrite to call :meth:`create_sequence` on the fly."""
        return super(Sequence, cls).insert(**cls.create_sequence(kwargs))

    @classmethod
    def multi_insert(cls, *args):
        """Overwrite to call :meth:`create_sequence` on the fly."""
        prepared = [cls.create_sequence(vals) for vals in args]
        return super(Sequence, cls).multi_insert(*prepared)

    def nextval(self):
        """Format and return the next value of the sequence.

        :rtype: str
        """
        if self.no_gap:
            # lock this row (fails immediately if already locked)
            self.refresh(with_for_update={"nowait": True})
            nextval = self.start if self.current is None else self.current + 1
        else:
            nextval = self.registry.execute(SQLASequence(self.seq_name))
        self.update(current=nextval)
        return self.formater.format(code=self.code, seq=nextval, id=self.id)

    @classmethod
    def nextvalBy(cls, **crit):
        """Return next value of the first Sequence matching given criteria.

        :param crit: criteria to match, e.g., ``code=SO``
        :return: :meth:`nextval` result for the first matching Sequence,
                 or ``None`` if there's no match.
        """
        filters = [getattr(cls, field) == value
                   for field, value in crit.items()]
        seq = cls.query().filter(*filters).first()
        return None if seq is None else seq.nextval()
class TestM2O:
    # Nullable Many2One fixture with cascading delete on the FK.
    id = Integer(primary_key=True)
    test = Many2One(model=Model.Test,
                    nullable=True,
                    foreign_key_options={'ondelete': 'cascade'})
class Test2:
    # Minimal fixture: key plus a free-form name.
    id = Integer(primary_key=True)
    name = String()
class Address:
    # Bare address fixture with only a primary key.
    id = Integer(primary_key=True)
class Test:
    # Fixture with a ``type`` discriminator column.
    id = Integer(primary_key=True)
    type = String()
class RequestItem:
    id = Integer(label="Identifier", primary_key=True)
    """Primary key.

    Note that ``serial`` columns in PostgreSQL don't induce conflicts,
    as the sequence is evaluated out of transaction.
    """
    request = Many2One(model=Wms.Reservation.Request)
    goods_type = Many2One(model='Model.Wms.Goods.Type')
    quantity = Integer(nullable=False)
    properties = Jsonb()

    @classmethod
    def define_table_args(cls):
        # enforce strictly positive quantities at the DB level
        return super(RequestItem, cls).define_table_args() + (
            CheckConstraint('quantity > 0', name='positive_qty'),)

    def lookup(self, quantity):
        """Try and find Goods matching the specified conditions.

        :return: the matching Goods that were found and the quantity
                 each accounts for. The Goods may not be of the
                 requested type; what matters is how much of the
                 requested quantity each one represents.
        :rtype: list(int, Goods)

        This method is where most business logic should lie. This
        default implementation does only equal matching on Goods Type
        and each property, and therefore cannot return other Goods
        Types accounting for more than one of the wished. Downstream
        libraries and applications are welcome to override it.
        """
        Wms = self.registry.Wms
        Goods = Wms.Goods
        Reservation = Wms.Reservation
        Avatar = Goods.Avatar
        Props = Goods.Properties
        # TODO PERF this returns from the DB one Goods line per
        # Avatar, but SQLA reassembles them as exactly one (seen while
        # tracing the test_reserve_avatars_once() under pdb)
        # SELECT DISTINCT ON would be better
        # TODO provide ordering by Avatar state and/or dt_from
        query = Goods.query().join(Avatar.goods).outerjoin(
            Reservation, Reservation.goods_id == Goods.id).filter(
                Reservation.goods_id.is_(None),
                Goods.type == self.goods_type,
                Avatar.state.in_(('present', 'future')))
        props = self.properties.copy()
        if props:
            query = query.join(Goods.properties)
            pfields = Props.fields_description()
            # direct columns first, then the remainder against the
            # flexible JSONB part
            for pname in set(props).intersection(pfields):
                query = query.filter(
                    getattr(Props, pname) == props.pop(pname))
            if props:
                query = query.filter(Props.flexible.contains(props))
        return [(1, goods) for goods in query.limit(quantity).all()]

    def reserve(self):
        """Perform the wished reservations.

        :return bool: if the RequestItem is completely reserved.

        TODO: shall we store it directly in DB ?
        """
        Reservation = self.registry.Wms.Reservation
        already = Reservation.query(func.sum(Reservation.quantity)).filter(
            Reservation.request_item_id == self.id).one()[0]
        if already is None:
            already = 0
        if already >= self.quantity:
            # its legit to be greater, think of reserving 2 packs of 10
            # to use 17. Maybe later, we'll unpack just one of them and
            # update the reservation to add just 7 of the Unpack outcomes.
            return True
        added = 0
        for quantity, goods in self.lookup(self.quantity - already):
            # TODO use a o2m ?
            Reservation.insert(goods=goods, quantity=quantity,
                               request_item=self)
            added += quantity
        return already + added >= self.quantity
class Test:
    # Bare fixture with only a primary key.
    id = Integer(primary_key=True)
class Request:
    id = Integer(label="Identifier", primary_key=True)
    """Primary key.

    In this model, the ordering of ``id`` is actually important (on many
    others a serial id is a matter of habit): the smaller it is, the
    older the Request. Requests have to be reserved in order.

    Note that ``serial`` columns in PostgreSQL don't induce conflicts,
    as the sequence is evaluated out of transaction.
    """
    purpose = Jsonb()
    """Flexible field to describe what the reservations will be for.

    Typically read by a planner to produce an appropriate chain of
    Operations to fulfill that purpose, e.g. a sale order reference
    from which Moves and Departures get issued.
    """
    reserved = Boolean(nullable=False, default=False)
    """Indicates that all reservations are taken.

    TODO: find a way to represent if the Request is partially done ?
    Some use-cases would require planning partial deliveries and the
    like in that case.
    """
    planned = Boolean(nullable=False, default=False)
    """Indicates that the planner has finished with that Request.

    It's better than deleting, because it allows to cancel all
    Operations, set this back to ``True``, and plan again.
    """
    # class-level (per-process) set of claimed Request ids
    txn_owned_reservations = set()
    """The set of Request ids whose current transaction owns reservations."""

    @classmethod
    @contextmanager
    def claim_reservations(cls, request_id=None):
        """Context manager to claim ownership over this Request's reservations.

        Meant for planners; works on fully reserved Requests. Example::

            Request = registry.Wms.Reservation.Request
            with Request.claim_reservations() as req_id:
                request = Request.query().get(req_id)
                (...) read request.purpose, plan Operations (...)

        By calling this, the current transaction becomes responsible for
        all the Request's reservations: it may issue any Operation
        affecting its Goods or their Avatars.

        :return: id of claimed Request, or ``None`` if none matched
        :param int request_id: if not specified, the oldest not already
           claimed Request is claimed, otherwise the one with the given
           id is (numeric so the caller needn't fetch all fields).

        Safe wrt concurrency: no other transaction can claim the same
        Request (PostgreSQL row lock via SELECT FOR UPDATE SKIP LOCKED;
        SKIP LOCKED appeared in PostgreSQL 9.5). The session forgets the
        Request on exiting the ``with`` block; the PG lock is released
        at transaction end.

        TODO it'd be more elegant tied to the transaction (automatic
        release without ``with``), but that requires more digging into
        SQLAlchemy and Anyblok internals.
        TODO FOR UPDATE may create a new internal PG row (table bloat);
        advisory locks with a hardcoded integer mapping might be
        preferable, at the cost of unconditional cleanup.
        """
        query = cls.query('id').filter(cls.reserved.is_(True))
        if request_id is not None:
            query = query.filter(cls.id == request_id)
        # SELECT FOR UPDATE SKIP LOCKED — see
        # http://docs.sqlalchemy.org/en/latest/core/selectable.html
        row = query.with_for_update(skip_locked=True).order_by(
            cls.id).first()
        request_id = None if row is None else row[0]
        if request_id is not None:
            cls.txn_owned_reservations.add(request_id)
        yield request_id
        if request_id is not None:
            cls.txn_owned_reservations.discard(request_id)

    def is_txn_reservations_owner(self):
        """Tell if transaction is the owner of this Request's reservations.

        :return: ``True`` if the current transaction has claimed
                 ownership, using :meth:`claim_reservations`.
        """
        return self.id in self.txn_owned_reservations

    def reserve(self):
        """Try and perform reservation for all RequestItems.

        :return: ``True`` if all reservations are now taken
        :rtype: bool

        Should not fail if reservations are already done.
        """
        Item = self.registry.Wms.Reservation.RequestItem
        # could use map() and all(), but it's not recommended style
        # if there are strong side effects.
        all_reserved = True
        for item in Item.query().filter(Item.request == self).all():
            all_reserved = all_reserved and item.reserve()
        self.reserved = all_reserved
        return all_reserved

    @classmethod
    def lock_unreserved(cls, batch_size, query_filter=None, offset=0):
        """Take exclusivity over not yet reserved Requests

        Used in reserver implementations.

        :param int batch_size: maximum of reservations to lock at once.
        :raises Request.ReservationsLocked: on a conflicting database
           lock — since reservations must be taken in order, conflicts
           are hard errors here rather than skipped as in
           :meth:`claim_reservations`. Only reservers should lock
           unreserved Requests, and they shouldn't run concurrently
           except in a controlled way via ``query_filter``.
        """
        query = cls.query().filter(cls.reserved.is_(False))
        if query_filter is not None:
            query = query_filter(query)
        query = query.with_for_update(nowait=True).order_by(cls.id)
        try:
            return query.limit(batch_size).offset(offset).all()
        except sqlalchemy.exc.OperationalError as op_err:
            cls.registry.rollback()
            raise cls.ReservationsLocked(op_err)

    class ReservationsLocked(RuntimeError):
        """Used to rewrap concurrency errors while taking locks."""
        def __init__(self, db_exc):
            self.db_exc = db_exc

    @classmethod
    def reserve_all(cls, batch_size=10, nb_attempts=5, retry_delay=1,
                    query_filter=None):
        """Try and perform all reservations for pending Requests.

        Walks all pending (:attr:`reserved` is ``False``) Requests from
        the oldest, locking them by batches of ``batch_size``, and
        attempts reservation for each in order — each Request grabs as
        much Goods as it can before the next is processed.

        :param int batch_size: number of pending Requests to grab at
           each iteration
        :param nb_attempts: number of attempts (in the face of
           conflicts) for each batch
        :param retry_delay: time to wait before retrying to grab a batch
        :param query_filter: optional function to filter the query used
           to grab the reservations; callers can use it for controlled
           concurrency (several processes on disjoint Requests).

        The transaction is committed for each batch.
        """
        skip = 0
        while True:
            # TODO log.info
            attempt = 1
            while True:
                try:
                    requests = cls.lock_unreserved(batch_size,
                                                   offset=skip,
                                                   query_filter=query_filter)
                except cls.ReservationsLocked:
                    # TODO log.warning
                    if attempt == nb_attempts:
                        raise
                    time.sleep(retry_delay)
                    attempt += 1
                else:
                    break
            if not requests:
                break
            for request in requests:
                # partially reserved Requests stay pending: skip past
                # them on the next batch
                if not request.reserve():
                    skip += 1
            cls.registry.commit()
class Unpack(Mixin.WmsSingleInputOperation, Mixin.WmsInPlaceOperation,
             Operation):
    """Unpacking some goods, creating new PhysObj and Avatar records.

    This is a destructive Operation, in the usual mild sense: once it's
    done, the input PhysObj Avatars are in the ``past`` state, and their
    underlying PhysObj have no new Avatars. It is conditionally reversible
    through appropriate Assembly Operations.

    Which PhysObj will get created and which Properties they will bear is
    specified in the ``unpack`` behaviour of the Type of the PhysObj being
    unpacked, together with their ``contents`` optional Properties. See
    :meth:`get_outcome_specs` and :meth:`outcome_props_update` for details
    about these and how to achieve the wished functionality.

    Unpacks happen in place: the newly created Avatars appear in the
    location where the input was. It is thus the caller's responsibility to
    prepend moves to unpacking areas, and/or append moves to final
    destinations.
    """
    TYPE = 'wms_unpack'

    id = Integer(label="Identifier",
                 primary_key=True,
                 autoincrement=False,
                 foreign_key=Operation.use('id').options(ondelete='cascade'))

    @classmethod
    def check_create_conditions(cls, state, dt_execution,
                                inputs=None, quantity=None, **kwargs):
        """Refuse creation if the input Type has no ``unpack`` behaviour.

        :raises OperationInputsError: if the behaviour is missing.
        """
        # TODO quantity is now irrelevant in wms-core
        super(Unpack, cls).check_create_conditions(
            state, dt_execution,
            inputs=inputs, quantity=quantity,
            **kwargs)
        goods_type = inputs[0].obj.type
        if 'unpack' not in goods_type.behaviours:
            raise OperationInputsError(
                cls,
                "Can't create an Unpack for {inputs} "
                "because their type {type} doesn't have the 'unpack' "
                "behaviour", inputs=inputs, type=goods_type)

    def execute_planned(self):
        """Make planned outcomes ``present`` and archive the input Avatar."""
        packs = self.input
        # TODO PERF direct update query would probably be faster
        for outcome in self.outcomes:
            outcome.state = 'present'
        packs.update(state='past')

    def create_unpacked_goods(self, fields, spec):
        """Create PhysObj records according to given specification.

        This singled out method is meant for easy subclassing (see, e.g,
        in :ref:`wms-quantity Blok <blok_wms_quantity>`).

        :param fields: pre-baked fields, prepared by the base class. In the
                       current implementation, they are fully derived from
                       ``spec``, hence one may think of them as redundant,
                       but the point is that they are outside the
                       responsibility of this method.
        :param spec: specification for these PhysObj, should be used
                     minimally in subclasses, typically for quantity related
                     adjustments. Also, if the special ``local_physobj_ids``
                     is provided, this method should attempt to reuse the
                     PhysObj record with that ``id`` (interplay with
                     quantity might depend on the implementation).
        :return: the list of created PhysObj records. In ``wms-core``,
                 there will be as many as the wished quantity, but in
                 ``wms-quantity``, this may be a single record bearing the
                 total quantity.
        :raises OperationInputsError:
            if ``local_physobj_ids`` is given but does not cover the wished
            quantity.
        """
        PhysObj = self.registry.Wms.PhysObj
        existing_ids = spec.get('local_physobj_ids')
        target_qty = spec['quantity']
        if existing_ids is not None:
            if len(existing_ids) != target_qty:
                raise OperationInputsError(
                    self,
                    "final outcome specification {spec!r} has "
                    "'local_physobj_ids' parameter, but they don't provide "
                    "the wished total quantity {target_qty} "
                    "Detailed input: {inputs[0]!r}",
                    spec=spec, target_qty=target_qty)
            return [PhysObj.query().get(eid) for eid in existing_ids]
        return [PhysObj.insert(**fields) for _ in range(target_qty)]

    def after_insert(self):
        """Create the outcome PhysObj/Avatars right after insertion.

        If the Unpack is already ``done``, the outcomes are ``present`` and
        the input Avatar is archived immediately; otherwise outcomes are
        ``future``.
        """
        PhysObj = self.registry.Wms.PhysObj
        PhysObjType = PhysObj.Type
        packs = self.input
        dt_execution = self.dt_execution
        spec = self.get_outcome_specs()
        # fetch all involved Types in a single query, keyed by code
        type_codes = set(outcome['type'] for outcome in spec)
        outcome_types = {
            gt.code: gt
            for gt in (PhysObjType.query().filter(
                PhysObjType.code.in_(type_codes)).all())
        }
        outcome_state = 'present' if self.state == 'done' else 'future'
        if self.state == 'done':
            packs.update(state='past')
        for outcome_spec in spec:
            self.create_outcomes_for_spec(outcome_types, outcome_spec,
                                          outcome_state)
        packs.dt_until = dt_execution

    def create_outcomes_for_spec(self, types_cache, spec, outcome_state):
        """Create the PhysObj and Avatars for a single outcome spec.

        :param types_cache: mapping of type codes to PhysObj Type records
        :param dict spec: one outcome specification
                          (see :meth:`get_outcome_specs`)
        :param outcome_state: state for the created Avatars
        """
        PhysObj = self.registry.Wms.PhysObj
        # TODO what would be *really* neat would be to be able
        # to recognize the goods after a chain of pack/unpack
        goods_fields = dict(type=types_cache[spec['type']])
        packs = self.input
        clone = spec.get('forward_properties') == 'clone'
        if clone:
            # share (reference) the input's Properties record wholesale
            goods_fields['properties'] = packs.obj.properties
        for physobj in self.create_unpacked_goods(goods_fields, spec):
            PhysObj.Avatar.insert(obj=physobj,
                                  location=packs.location,
                                  outcome_of=self,
                                  dt_from=self.dt_execution,
                                  dt_until=packs.dt_until,
                                  state=outcome_state)
            if not clone:
                physobj.update_properties(self.outcome_props_update(spec))

    @classmethod
    def plan_for_outcomes(cls, inputs, outcomes, dt_execution=None):
        """Create a planned Unpack of which some outcomes are already given.

        This is useful for planning refinements, in cases the given
        ``future`` outcomes already exist in the database, typically because
        they are from Arrivals that are in the :meth:`process of being
        superseded
        <anyblok_wms_base.core.operation.arrival.Arrival.refine_with_trailing_unpack>`

        :param inputs: should be made of only one element, an Avatar of the
                       physical object to be unpacked, yet it's convenient
                       to get it as an iterable (also for the caller).
        :param outcomes: candidate Avatars to reinterpret as outcomes of the
                         newly created Unpack. It is possible that the
                         Unpack produces some extra ones, and conversely
                         that some of them are not produced by the Unpack.
        :returns: a pair made of

                  - the created Unpack
                  - the sublist of ``outcomes`` that have been attached.

        This method ensures that the newly created Unpack instance produces
        at least the same properties as already present on the given
        outcomes, and actually uses the properties as a match criteria to
        perform the attachments.

        It is on the other hand perfectly acceptable that the Unpack adds
        more properties, for instance because they were previously
        unplannable or irrelevant for the planning (use cases: serial and
        batch numbers, expiry dates...)
        """  # noqa (unbreakable Sphinx crossref)
        if dt_execution is None:
            # TODO improve using outcomes dt_from
            dt_execution = max(inp.dt_from for inp in inputs)
        cls.check_create_conditions('planned', dt_execution, inputs)
        unpack = cls.insert(state='planned', dt_execution=dt_execution)
        unpack.link_inputs(inputs)
        input_obj = next(iter(inputs)).obj

        to_match = set(outcomes)
        attached = []
        PhysObj = cls.registry.Wms.PhysObj
        POT = PhysObj.Type
        # TODO PERF this has quadratic complexity.
        # I suppose it's ok because outcomes should not be too big,
        # but it could be improved by presorting specs and outcomes
        # TODO it's quite possible that some of the outcome can't be matched
        # because the spec item that would match it has already been used
        # to match one with less properties
        code_to_type = {}
        for spec in unpack.get_outcome_specs():
            code = spec['type']
            stype = code_to_type.get(code)
            if stype is None:
                stype = POT.query().filter_by(code=code).one()
                code_to_type[code] = stype
            for i in range(spec['quantity']):
                # breaking out of this loops means we already match as much
                # as possible
                for candidate in to_match:
                    # breaking out of this loop signals a match
                    cand_obj = candidate.obj
                    if cand_obj.type != stype:
                        continue
                    sprops = spec['forward_properties']
                    if cand_obj.properties is None:
                        # easy case: no properties to match, only new ones
                        # to create
                        if sprops == 'clone':
                            cand_obj.properties = input_obj.properties
                        else:
                            cand_obj.update_properties(
                                unpack.outcome_props_update(spec))
                        break
                    # else, we check if candidate's properties are a subdict
                    # of what the Unpack would give rise to
                    props_from_spec = unpack.outcome_props_update(spec)
                    cand_props = cand_obj.properties.as_dict()
                    if all(props_from_spec.get(k) == v
                           for k, v in cand_props.items()):
                        cand_obj.update_properties(props_from_spec)
                        break
                else:
                    # no candidate matched this spec; stop consuming it
                    break
                to_match.remove(candidate)
                attached.append(candidate)
                candidate.update(outcome_of=unpack, dt_from=dt_execution)
            else:
                # the whole wished quantity was matched by candidates
                continue  # next spec
            # only ``i`` units were matched; create the remainder
            spec['quantity'] -= i
            unpack.create_outcomes_for_spec(code_to_type, spec, 'future')
        return unpack, attached

    def outcome_props_update(self, spec):
        """Handle the properties for a given outcome (PhysObj record)

        This is actually a bit more that just forwarding.

        :param dict spec: the relevant specification for this outcome, as
                          produced by :meth:`get_outcome_specs` (see below
                          for the contents).
        :return: the properties to update, as a :class:`dict`
        :raises OperationInputsError:
            if a required property is missing on the input.

        *Specification contents*

        * ``properties``:
            A direct mapping of properties to set on the outcome. These
            have the lowest precedence, meaning that they will be overridden
            by properties forwarded from ``self.input``.

            Also, if spec has the ``local_physobj_ids`` key, ``properties``
            is ignored. The rationale for this is that normally, there are
            no present or future Avatar for these PhysObj, and therefore the
            Properties of outcome should not have diverged from the contents
            of ``properties`` since the spec (which must itself not come
            from the behaviour, but instead from ``contents``) has been
            created (typically by an Assembly).
        * ``required_properties``:
            list (or iterable) of properties that are required on
            ``self.input``. If one is missing, then
            :class:`OperationInputsError` gets raised.
        * ``forward_properties``:
            list (or iterable) of properties to copy if present from
            ``self.input`` to ``outcome``.

        Required properties aren't automatically forwarded, so that it's
        possible to require one for checking purposes without polluting the
        Properties of ``outcome``. To forward and require a property, it has
        thus to be in both lists.
        """
        props_upd = {}
        direct_props = spec.get('properties')
        if direct_props is not None and 'local_physobj_ids' not in spec:
            props_upd.update(direct_props)
        packs = self.input.obj
        fwd_props = spec.get('forward_properties', ())
        req_props = spec.get('required_properties')
        if req_props and not packs.properties:
            raise OperationInputsError(
                self,
                "Packs {inputs[0]} have no properties, yet their type {type} "
                "requires these for Unpack operation: {req_props}",
                type=packs.type, req_props=req_props)
        if not fwd_props:
            return props_upd
        for pname in fwd_props:
            pvalue = packs.get_property(pname)
            if pvalue is None:
                # guard against req_props being None: the original
                # membership test would raise TypeError in that case
                if not req_props or pname not in req_props:
                    continue
                raise OperationInputsError(
                    self,
                    "Packs {inputs[0]} lacks the property {prop} "
                    "required by their type for Unpack operation",
                    prop=pname)
            props_upd[pname] = pvalue
        return props_upd

    def get_outcome_specs(self):
        """Produce a complete specification for outcomes and their properties.

        In what follows "the behaviour" means the value associated with the
        ``unpack`` key in the PhysObj Type :attr:`behaviours
        <anyblok_wms_base.core.physobj.Type.behaviours>`.

        Unless ``uniform_outcomes`` is set to ``True`` in the behaviour, the
        outcomes of the Unpack are obtained by merging those defined in the
        behaviour (under the ``outcomes`` key) and in the packs
        (``self.input``) ``contents`` Property.

        This accommodates various use cases:

        - fixed outcomes: a 6-pack of orange juice bottles gets unpacked as
          6 bottles
        - fully variable outcomes: a parcel with described contents
        - variable outcomes: a packaging with parts always present and some
          varying.

        The properties on outcomes are set from those of ``self.input``
        according to the ``forward_properties`` and ``required_properties``
        of the outcomes, unless again if ``uniform_outcomes`` is set to
        ``True``, in which case the properties of the packs (``self.input``)
        aren't even read, but simply cloned (referenced again) in the
        outcomes. This should be better for performance in high volume
        operation.

        The same can be achieved on a given outcome by specifying the
        special ``'clone'`` value for ``forward_properties``.

        Otherwise, the ``forward_properties`` and ``required_properties``
        unpack behaviour from the PhysObj Type of the packs (``self.input``)
        are merged with those of the outcomes, so that, for instance
        ``forward_properties`` have three key/value sources:

        - at toplevel of the behaviour (``uniform_outcomes=True``)
        - in each outcome of the behaviour (``outcomes`` key)
        - in each outcome of the PhysObj record (``contents`` property)

        Here's a use-case: imagine some purchase order reference is tracked
        as property ``po_ref`` (could be important for accounting).

        A PhysObj Type representing an incoming package holding various
        PhysObj could specify that ``po_ref`` must be forwarded upon Unpack
        in all cases. For instance, a PhysObj record with that type could
        then specify that its outcomes are a phone with a given ``color``
        property (to be forwarded upon Unpack) and a power adapter (whose
        colour is not tracked).

        Both the phone and the power adapter would get the ``po_ref``
        forwarded, with no need to specify it on each in the incoming pack
        properties.

        TODO DOC move a lot to global doc
        """
        # TODO PERF playing safe by performing a copy, in order not
        # to propagate mutability to the DB. Not sure how much of it
        # is necessary.
        packs = self.input
        goods_type = packs.obj.type
        behaviour = goods_type.get_behaviour('unpack')
        specs = behaviour.get('outcomes', [])[:]
        if behaviour.get('uniform_outcomes', False):
            for outcome in specs:
                outcome['forward_properties'] = 'clone'
            return specs
        specific_outcomes = packs.get_property(CONTENTS_PROPERTY, ())
        specs.extend(specific_outcomes)
        if not specs:
            raise OperationInputsError(
                self,
                "unpacking {inputs[0]} yields no outcomes. "
                "Type {type} 'unpack' behaviour: {behaviour}, "
                "specific outcomes from PhysObj properties: "
                "{specific}",
                type=goods_type, behaviour=behaviour,
                specific=specific_outcomes)
        global_fwd = behaviour.get('forward_properties', ())
        global_req = behaviour.get('required_properties', ())
        for outcome in specs:
            if outcome.get('forward_properties') == 'clone':
                continue
            outcome.setdefault('forward_properties', []).extend(global_fwd)
            outcome.setdefault('required_properties', []).extend(global_req)
        return specs

    def cancel_single(self):
        """Remove the newly created PhysObj, not only their Avatars."""
        self.reset_inputs_original_values()
        self.registry.flush()
        all_goods = set()
        # TODO PERF in two queries using RETURNING, or be braver and
        # make the avatars cascade
        for avatar in self.outcomes:
            all_goods.add(avatar.obj)
            avatar.delete()
        for goods in all_goods:
            goods.delete()

    def reverse_assembly_name(self):
        """Return the name of Assembly that can revert this Unpack."""
        behaviour = self.input.obj.type.get_behaviour('unpack')
        default = 'pack'
        if behaviour is None:
            # probably not useful, but that's consistent
            return default
        return behaviour.get('reverse_assembly', default)

    def is_reversible(self):
        """Unpack can be reversed by an Assembly.

        The exact criterion is that Unpack can be reversed, if there exists
        an :class:`Assembly
        <anyblok_wms_base.bloks.core.operation.assembly>` whose name is
        given by the ``reverse_assembly`` key in the behaviour, with a
        default: ``'pack'``
        """
        gt = self.input.obj.type
        # TODO define a has_behaviour() API on goods_type
        ass_beh = gt.get_behaviour('assembly')
        if ass_beh is None:
            return False
        return self.reverse_assembly_name() in ass_beh

    def plan_revert_single(self, dt_execution, follows=()):
        """Plan reversal

        Currently, there is no way to specify extra inputs to be consumed
        by the reverse Assembly. As a consequence, Unpack reversal is only
        meaningful in the following cases:

        * wrapping material is not tracked in the system at all
        * wrapping material is tracked, and is not destroyed by the Unpack,
          so that it is both one of the Unpack outcomes, and one of the
          packing Assembly inputs.

        Also, currently the Assembly will have to take place exactly where
        the Unpack took place. This may not fit some concrete work
        organizations in warehouses.
        """
        # we need to pack the outcomes of reversals of downstream operations
        # together with our outcomes that aren't themselves inputs of a
        # downstream operation.
        pack_inputs = [out for op in follows for out in op.outcomes]
        # self.outcomes has actually only those outcomes that aren't inputs
        # of downstream operations
        # TODO maybe change that and create a new method instead
        # for API clarity
        pack_inputs.extend(self.leaf_outcomes())
        return self.registry.Wms.Operation.Assembly.create(
            outcome_type=self.input.obj.type,
            dt_execution=dt_execution,
            name=self.reverse_assembly_name(),
            inputs=pack_inputs)
class Family:
    """Base model for product families.

    Subclasses are polymorphic on :attr:`family_code` and may attach
    marshmallow-style schemas (``family_schema`` etc.) used to validate
    data in :meth:`create` and :meth:`amend`.
    """

    # Polymorphic identity of the concrete family subclass.
    FAMILY_CODE = None
    # Optional validation schemas (expected to expose ``load``).
    family_schema = None
    template_schema = None
    item_schema = None

    id = Integer(label="Identifier", primary_key=True)
    create_date = DateTime(default=datetime.now, nullable=False)
    edit_date = DateTime(default=datetime.now, nullable=False,
                         auto_update=True)
    code = String(label="Family code", unique=True, nullable=False)
    name = String(label="Family name", size=256)
    description = Text(label="Family description")
    properties = Jsonb(label="Family properties")
    family_code = Selection(selections='get_family_codes')
    items = Function(fget="fget_items")

    @classmethod
    def get_family_codes(cls):
        """Selection source for :attr:`family_code`; empty by default."""
        return {}

    def fget_items(self):
        """Returns a list of products instance from this family"""
        distinct_items = {item
                          for template in self.templates
                          for item in template.items}
        return self.registry.InstrumentedList(distinct_items)

    @classmethod
    def create(cls, **kwargs):
        """Insert a new family, validating through ``family_schema`` if set."""
        values = kwargs.copy()
        if cls.family_schema:
            schema = cls.family_schema(registry=cls.registry)
            # NOTE(review): the schema deliberately loads the raw kwargs,
            # not the copy — confirm this is intended.
            values = schema.load(kwargs)
        return cls.insert(**values)

    def amend(self, **kwargs):
        """Update this family in place; merge ``properties`` key by key.

        :return: ``self`` (fluent style)
        """
        values = kwargs.copy()
        new_props = values.pop('properties', {})
        if new_props:
            for key, value in new_props.items():
                self.properties[key] = value
        if self.family_schema:
            schema = self.family_schema(registry=self.registry)
            values['properties'] = self.properties
            values = schema.load(values)
        self.update(**values)
        return self

    @classmethod
    def query(cls, *args, **kwargs):
        """Restrict queries on subclasses to their own ``FAMILY_CODE``."""
        query = super(Family, cls).query(*args, **kwargs)
        if cls.__registry_name__ == 'Model.Product.Family':
            return query
        return query.filter(cls.family_code == cls.FAMILY_CODE)

    @classmethod
    def define_mapper_args(cls):
        """Declare the polymorphic mapping on :attr:`family_code`."""
        mapper_args = super(Family, cls).define_mapper_args()
        if cls.__registry_name__ == 'Model.Product.Family':
            mapper_args['polymorphic_on'] = cls.family_code
        mapper_args['polymorphic_identity'] = cls.FAMILY_CODE
        return mapper_args

    def __str__(self):
        return "%s : %s" % (self.code, self.name)

    def __repr__(self):
        return "<Product.Family(code=%s, name=%s)>" % (self.code, self.name)
class Test:
    """Minimal fixture model: a primary key and a defaulted string column."""

    # Surrogate primary key.
    id = Integer(primary_key=True)
    # String column with the literal default 'val'.
    val = String(default='val')
class Address:
    """Fixture model holding the 'one' side of a One2Many to Model.Person."""

    # Surrogate primary key.
    id = Integer(primary_key=True)
    # Reverse relation to Model.Person; presumably resolved through the
    # Many2One column(s) declared on Person — confirm against that model.
    persons = One2Many(model='Model.Person')