Example #1
    class Test2:
        __db_schema__ = 'test_db_fk_schema2'

        id = Integer(primary_key=True)
        test = String(foreign_key=Model.Test.use('name'))
Example #2
class Aggregate(Operation):
    """An aggregation of PhysObj records.

    Aggregate is the converse of Split.

    Like Splits, Aggregates are operations of a special kind that have to be
    considered internal details of ``wms-core`` and are not guaranteed to
    exist in the future.

    Aggregates replace several PhysObj records sharing equal properties and
    type, whose Avatars sit at the same location, with a single record
    bearing the total quantity, together with a new Avatar for it.

    While non-trivial in the database, they may have no physical counterpart in
    the real world. We call them *formal* in that case.

    Formal Aggregate Operations can always be reverted with Splits,
    but only some physical Aggregate Operations can be reverted, depending on
    the PhysObj Type. See :class:`Model.Wms.PhysObj.Type` for a full
    discussion of this, with use cases.

    In the formal case, we've decided to represent this as an Operation
    for the sake of consistency, and especially to avoid too many special
    cases in the implementation of various Operations.
    The benefit is that they appear explicitly in the history.

    TODO implement :meth:`plan_revert_single`
    """
    TYPE = 'wms_aggregate'
    UNIFORM_GOODS_FIELDS = ('type', 'properties', 'code')
    UNIFORM_AVATAR_FIELDS = ('location', )

    id = Integer(label="Identifier",
                 primary_key=True,
                 autoincrement=False,
                 foreign_key=Operation.use('id').options(ondelete='cascade'))

    def specific_repr(self):
        return "inputs={self.inputs!r}, ".format(self=self)

    @staticmethod
    def field_is_equal(field, goods_rec1, goods_rec2):
        """Return True if the given field is equal in the two goods records.

        This is singled out because of properties, for which we don't want
        to assert equality of the properties lines, but of their content.

        TODO: see if implementing __eq__ etc for properties would be a
        bad idea (would it confuse Anyblok or SQLAlchemy?)
        """
        val1, val2 = getattr(goods_rec1, field), getattr(goods_rec2, field)
        if field != 'properties' or val1 is None or val2 is None:
            return val1 == val2

        # TODO implement on Properties class and test separately
        # TODO PERF (minor): we could use fields_description() directly
        props1 = val1.to_dict()
        props2 = val2.to_dict()
        props1.pop('id')
        props2.pop('id')
        return props1 == props2

    @classmethod
    def check_create_conditions(cls,
                                state,
                                dt_execution,
                                inputs=None,
                                **kwargs):
        """Check that the inputs to aggregate are indeed indistinguishable.

        This performs the check from superclasses, and then compares all
        fields from :attr:`UNIFORM_AVATAR_FIELDS` on the inputs (Avatars) and
        :attr:`UNIFORM_GOODS_FIELDS` (on the underlying PhysObj).
        """
        super(Aggregate, cls).check_create_conditions(state,
                                                      dt_execution,
                                                      inputs=inputs,
                                                      **kwargs)
        first = inputs[0]
        first_goods = first.goods
        for avatar in inputs:
            goods = avatar.goods
            diff = {}  # field name -> (first value, second value)
            for field in cls.UNIFORM_GOODS_FIELDS:
                if not cls.field_is_equal(field, first_goods, goods):
                    diff[field] = (getattr(first_goods,
                                           field), getattr(goods, field))
            for field in cls.UNIFORM_AVATAR_FIELDS:
                first_value = getattr(first, field)
                second_value = getattr(avatar, field)
                if first_value != second_value:
                    diff[field] = (first_value, second_value)

            if diff:
                raise OperationInputsError(
                    cls, "Can't create Aggregate with inputs {inputs} "
                    "because of discrepancy in field {field!r}: "
                    "Here's a mapping giving, by field, the differing "
                    "values between the record with id {first.id} "
                    "and the one with id {second.id}: {diff!r}",
                    inputs=inputs,
                    field=field,
                    first=first,
                    second=avatar,
                    diff=diff)

    def after_insert(self):
        """Business logic after the inert insertion
        """
        # TODO find a way to pass the actual wished PhysObj up to here, then
        # use it (to maintain PhysObj record in case of reverts)
        self.registry.flush()
        inputs = self.inputs
        dt_exec = self.dt_execution
        PhysObj = self.registry.Wms.PhysObj

        outcome_dt_until = min_upper_bounds(g.dt_until for g in inputs)

        if self.state == 'done':
            update = dict(dt_until=dt_exec, state='past', reason=self)
        else:
            update = dict(dt_until=dt_exec)
        for record in inputs:
            record.update(**update)

        tpl_avatar = inputs[0]
        tpl_goods = tpl_avatar.goods
        uniform_goods_fields = {
            field: getattr(tpl_goods, field)
            for field in self.UNIFORM_GOODS_FIELDS
        }
        uniform_avatar_fields = {
            field: getattr(tpl_avatar, field)
            for field in self.UNIFORM_AVATAR_FIELDS
        }
        aggregated_goods = PhysObj.insert(quantity=sum(a.goods.quantity
                                                       for a in inputs),
                                          **uniform_goods_fields)

        return PhysObj.Avatar.insert(
            goods=aggregated_goods,
            reason=self,
            dt_from=dt_exec,
            # dt_until in states 'present' and 'future' is theoretical anyway
            dt_until=outcome_dt_until,
            state='present' if self.state == 'done' else 'future',
            **uniform_avatar_fields)

    def execute_planned(self):
        self.outcomes[0].update(state='present', dt_from=self.dt_execution)
        for record in self.inputs:
            record.update(state='past',
                          reason=self,
                          dt_until=self.dt_execution)

    def is_reversible(self):
        """Reversibility depends on the relevant PhysObj Type.

        See :class:`Operation` for what reversibility exactly means in that
        context.
        """
        # that all Good Types are equal is part of pre-creation checks
        return self.inputs[0].goods.type.is_aggregate_reversible()
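

# Usage sketch (an assumption, not part of the module above): with an Anyblok
# ``registry`` (wms-core installed) and ``avatars`` a list of 'present'
# Avatars of indistinguishable PhysObj at the same location, an Aggregate
# could be created like this.  The helper name and arguments are hypothetical.
def aggregate_avatars(registry, avatars, dt_exec):
    """Create an already executed Aggregate over ``avatars``.

    check_create_conditions() above refuses the creation if the inputs
    differ on any of UNIFORM_GOODS_FIELDS or UNIFORM_AVATAR_FIELDS.
    """
    return registry.Wms.Operation.Aggregate.create(
        inputs=avatars, state='done', dt_execution=dt_exec)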
Example #3
 class Test2:
     id = Integer(primary_key=True)
     name = String()
     test = Many2One(model=Model.Test, one2many="test2")
Example #4
 class Test:
     id = Integer(primary_key=True)
     name = String()
Example #5
class Unpack(Mixin.WmsSingleInputOperation, Operation):
    """Unpacking some goods, creating new PhysObj and Avatar records.

    This is a destructive Operation, in the usual mild sense: once it's done,
    the input Avatar is in the ``past`` state, and its underlying
    PhysObj has no new Avatars.

    It is conditionally reversible through appropriate Assembly Operations.

    Which PhysObj will get created and which Properties they will bear are
    specified in the ``unpack`` behaviour of the Type of the PhysObj being
    unpacked, together with their optional ``contents`` Property.
    See :meth:`get_outcome_specs` and :meth:`forward_props` for details
    about these and how to achieve the wished functionality.

    Unpacks happen in place: the newly created Avatars appear in the
    location where the input was. It is thus the caller's responsibility to
    prepend moves to unpacking areas, and/or append moves to final
    destinations.
    """
    TYPE = 'wms_unpack'

    id = Integer(label="Identifier",
                 primary_key=True,
                 autoincrement=False,
                 foreign_key=Operation.use('id').options(ondelete='cascade'))

    @classmethod
    def check_create_conditions(cls, state, dt_execution,
                                inputs=None, quantity=None, **kwargs):
        # TODO quantity is now irrelevant in wms-core
        super(Unpack, cls).check_create_conditions(
            state, dt_execution, inputs=inputs,
            quantity=quantity,
            **kwargs)

        goods_type = inputs[0].obj.type
        if 'unpack' not in goods_type.behaviours:
            raise OperationInputsError(
                cls,
                "Can't create an Unpack for {inputs} "
                "because their type {type} doesn't have the 'unpack' "
                "behaviour", inputs=inputs, type=goods_type)

    def execute_planned(self):
        packs = self.input
        # TODO PERF direct update query would probably be faster
        for outcome in self.outcomes:
            outcome.state = 'present'
        packs.update(state='past', reason=self)

    def create_unpacked_goods(self, fields, spec):
        """Create PhysObj record according to given specification.

        This singled out method is meant for easy subclassing (see, e.g.,
        the :ref:`wms-quantity Blok <blok_wms_quantity>`).

        :param fields: pre-baked fields, prepared by the base class. In the
                       current implementation, they are fully derived from
                       ``spec``, hence one may think of them as redundant,
                       but the point is that they are outside the
                       responsibility of this method.
        :param spec: specification for these PhysObj, should be used minimally
                     in subclasses, typically for quantity related adjustments.
                     Also, if the special ``local_goods_ids`` is provided,
                     this method should attempt to reuse the PhysObj record
                     with that ``id`` (interplay with quantity might depend
                     on the implementation).
        :return: the list of created PhysObj records. In ``wms-core``, there
                 will be as many as the wished quantity, but in
                 ``wms-quantity``, this may be a single record bearing the
                 total quantity.
        """
        PhysObj = self.registry.Wms.PhysObj
        existing_ids = spec.get('local_goods_ids')
        target_qty = spec['quantity']
        if existing_ids is not None:
            if len(existing_ids) != target_qty:
                raise OperationInputsError(
                    self,
                    "final outcome specification {spec!r} has "
                    "'local_goods_ids' parameter, but they don't provide "
                    "the wished total quantity {target_qty}. "
                    "Detailed input: {inputs[0]!r}",
                    spec=spec, target_qty=target_qty)
            return [PhysObj.query().get(eid) for eid in existing_ids]
        return [PhysObj.insert(**fields) for _ in range(spec['quantity'])]

    def after_insert(self):
        PhysObj = self.registry.Wms.PhysObj
        PhysObjType = PhysObj.Type
        packs = self.input
        dt_execution = self.dt_execution
        spec = self.get_outcome_specs()
        type_codes = set(outcome['type'] for outcome in spec)
        outcome_types = {gt.code: gt for gt in PhysObjType.query().filter(
            PhysObjType.code.in_(type_codes)).all()}

        outcome_state = 'present' if self.state == 'done' else 'future'
        if self.state == 'done':
            packs.update(state='past', reason=self)
        for outcome_spec in spec:
            # TODO what would be *really* neat would be to be able
            # to recognize the goods after a chain of pack/unpack
            goods_fields = dict(type=outcome_types[outcome_spec['type']])
            clone = outcome_spec.get('forward_properties') == 'clone'
            if clone:
                goods_fields['properties'] = packs.obj.properties
            for goods in self.create_unpacked_goods(goods_fields,
                                                    outcome_spec):
                PhysObj.Avatar.insert(obj=goods,
                                      location=packs.location,
                                      reason=self,
                                      dt_from=dt_execution,
                                      dt_until=packs.dt_until,
                                      state=outcome_state)
                if not clone:
                    self.forward_props(outcome_spec, goods)
        packs.dt_until = dt_execution

    def forward_props(self, spec, outcome):
        """Handle the properties for a given outcome (PhysObj record)

        This is actually a bit more than just forwarding.

        :param dict spec: the relevant specification for this outcome, as
                          produced by :meth:`get_outcome_specs` (see below
                          for the contents).
        :param outcome: the just created PhysObj instance

        *Specification contents*

        * ``properties``:
            A direct mapping of properties to set on the outcome. These have
            the lowest precedence, meaning that they will
            be overridden by properties forwarded from ``self.input``.

            Also, if the spec has the ``local_goods_ids`` key, ``properties``
            is ignored. The rationale for this is that normally, there are
            no present or future Avatars for these PhysObj, and therefore the
            Properties of the outcome should not have diverged from the
            contents of ``properties`` since the spec (which must itself not
            come from the behaviour, but instead from ``contents``) has been
            created (typically by an Assembly).
        * ``required_properties``:
            list (or iterable) of properties that are required on
            ``self.input``. If one is missing, then
            :class:`OperationInputsError` gets raised.
        * ``forward_properties``:
            list (or iterable) of properties to copy if present from
            ``self.input`` to ``outcome``.

        Required properties aren't automatically forwarded, so that it's
        possible to require one for checking purposes without polluting the
        Properties of ``outcome``. To forward and require a property, it has
        thus to be in both lists.
        """
        direct_props = spec.get('properties')
        if direct_props is not None and 'local_goods_ids' not in spec:
            outcome.update_properties(direct_props)
        packs = self.input.obj
        fwd_props = spec.get('forward_properties', ())
        req_props = spec.get('required_properties', ())

        if req_props and not packs.properties:
            raise OperationInputsError(
                self,
                "Packs {inputs[0]} have no properties, yet their type {type} "
                "requires these for Unpack operation: {req_props}",
                type=packs.type, req_props=req_props)
        if not fwd_props:
            return
        upd = []
        for pname in fwd_props:
            pvalue = packs.get_property(pname)
            if pvalue is None:
                if pname not in req_props:
                    continue
                raise OperationInputsError(
                    self,
                    "Packs {inputs[0]} lack the property {prop}, "
                    "required by their type for the Unpack operation",
                    prop=pname)
            upd.append((pname, pvalue))
        outcome.update_properties(upd)
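
    # Illustrative value for the ``spec`` argument of forward_props() above
    # (hypothetical, echoing the use case from the docstrings; not a spec
    # shipped with wms-core):
    #     {'type': 'PHONE',
    #      'quantity': 1,
    #      'properties': {'grade': 'B'},
    #      'required_properties': ['po_ref'],
    #      'forward_properties': ['po_ref', 'color']}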

    def get_outcome_specs(self):
        """Produce a complete specification for outcomes and their properties.

        In what follows "the behaviour" means the value associated with the
        ``unpack`` key in the PhysObj Type :attr:`behaviours
        <anyblok_wms_base.core.physobj.Type.behaviours>`.

        Unless ``uniform_outcomes`` is set to ``True`` in the behaviour,
        the outcomes of the Unpack are obtained by merging those defined in
        the behaviour (under the ``outcomes`` key) and in the
        packs (``self.input``) ``contents`` Property.

        This accommodates various use cases:

        - fixed outcomes:
            a 6-pack of orange juice bottles gets unpacked as 6 bottles
        - fully variable outcomes:
            a parcel with described contents
        - variable outcomes:
            a packaging with parts always present and some varying.

        The properties on outcomes are set from those of ``self.input``
        according to the ``forward_properties`` and ``required_properties``
        of the outcomes, unless, again, ``uniform_outcomes`` is set to
        ``True``, in which case the properties of the packs (``self.input``)
        aren't even read, but simply
        cloned (referenced again) in the outcomes. This should be better
        for performance in high-volume operations.
        The same can be achieved on a given outcome by specifying the
        special ``'clone'`` value for ``forward_properties``.

        Otherwise, the ``forward_properties`` and ``required_properties``
        unpack behaviour from the PhysObj Type of the packs (``self.input``)
        are merged with those of the outcomes, so that, for instance
        ``forward_properties`` have three key/value sources:

        - at the toplevel of the behaviour (``forward_properties`` key)
        - in each outcome of the behaviour (``outcomes`` key)
        - in each outcome of the PhysObj record (``contents`` property)

        Here's a use-case: imagine that some purchase order reference is
        tracked as property ``po_ref`` (could be important for accounting).

        A PhysObj Type representing an incoming package holding various PhysObj
        could specify that ``po_ref`` must be forwarded upon Unpack in all
        cases. For instance, a PhysObj record with that type could then
        specify that its outcomes are a phone with a given ``color``
        property (to be forwarded upon Unpack)
        and a power adapter (whose colour is not tracked).
        Both the phone and the power adapter would get the ``po_ref``
        forwarded, with no need to specify it on each in the incoming pack
        properties.

        TODO DOC move a lot to global doc
        """
        # TODO PERF playing safe by performing a copy, in order not
        # to propagate mutability to the DB. Not sure how much of it
        # is necessary.
        packs = self.input
        goods_type = packs.obj.type
        behaviour = goods_type.get_behaviour('unpack')
        specs = behaviour.get('outcomes', [])[:]
        if behaviour.get('uniform_outcomes', False):
            for outcome in specs:
                outcome['forward_properties'] = 'clone'
            return specs

        specific_outcomes = packs.get_property(CONTENTS_PROPERTY, ())
        specs.extend(specific_outcomes)
        if not specs:
            raise OperationInputsError(
                self,
                "unpacking {inputs[0]} yields no outcomes. "
                "Type {type} 'unpack' behaviour: {behaviour}, "
                "specific outcomes from PhysObj properties: "
                "{specific}",
                type=goods_type, behaviour=behaviour,
                specific=specific_outcomes)

        global_fwd = behaviour.get('forward_properties', ())
        global_req = behaviour.get('required_properties', ())
        for outcome in specs:
            if outcome.get('forward_properties') == 'clone':
                continue
            outcome.setdefault('forward_properties', []).extend(global_fwd)
            outcome.setdefault('required_properties', []).extend(global_req)
        return specs
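
    # Illustration of the merge performed by get_outcome_specs() above
    # (hypothetical values).  An 'unpack' behaviour such as
    #     {'forward_properties': ['po_ref'],
    #      'outcomes': [{'type': 'POWER_ADAPTER', 'quantity': 1}]}
    # combined with a ``contents`` Property on the packs such as
    #     [{'type': 'PHONE', 'quantity': 1, 'forward_properties': ['color']}]
    # yields two outcome specs, both forwarding 'po_ref', the phone also
    # forwarding 'color'.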

    def cancel_single(self):
        """Remove the newly created PhysObj, not only their Avatars."""
        self.reset_inputs_original_values()
        self.registry.flush()
        all_goods = set()
        # TODO PERF in two queries using RETURNING, or be braver and
        # make the avatars cascade
        for avatar in self.outcomes:
            all_goods.add(avatar.obj)
            avatar.delete()
        for goods in all_goods:
            goods.delete()

    def reverse_assembly_name(self):
        """Return the name of Assembly that can revert this Unpack."""
        behaviour = self.input.obj.type.get_behaviour('unpack')
        default = 'pack'
        if behaviour is None:
            return default  # probably not useful, but that's consistent
        return behaviour.get('reverse_assembly', default)

    def is_reversible(self):
        """Unpack can be reversed by an Assembly.

        The exact criterion is that Unpack can be reversed if there exists
        an :class:`Assembly <anyblok_wms_base.bloks.core.operation.assembly>`
        whose name is given by the ``reverse_assembly`` key in the behaviour,
        with ``'pack'`` as the default.
        """
        gt = self.input.obj.type
        # TODO define a has_behaviour() API on goods_type
        ass_beh = gt.get_behaviour('assembly')
        if ass_beh is None:
            return False
        return self.reverse_assembly_name() in ass_beh

    def plan_revert_single(self, dt_execution, follows=()):
        """Plan reversal

        Currently, there is no way to specify extra inputs to be consumed
        by the reverse Assembly. As a consequence, Unpack reversal is only
        meaningful in the following cases:

        * wrapping material is not tracked in the system at all
        * wrapping material is tracked, and is not destroyed by the Unpack,
          so that it is both one of the Unpack outcomes, and one of the
          packing Assembly inputs.

        Also, currently the Assembly will have to take place exactly where the
        Unpack took place. This may not fit some concrete work organizations
        in warehouses.
        """
        pack_inputs = [out for op in follows for out in op.outcomes]
        # self.outcomes has actually only those outcomes that aren't inputs
        # of downstream operations
        # TODO maybe change that and create a new method instead
        # for API clarity
        pack_inputs.extend(self.outcomes)
        return self.registry.Wms.Operation.Assembly.create(
            outcome_type=self.input.obj.type,
            dt_execution=dt_execution,
            name=self.reverse_assembly_name(),
            inputs=pack_inputs)
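

# Usage sketch (an assumption, not part of the module above): unpacking a
# single 'present' pack Avatar whose PhysObj Type defines an 'unpack'
# behaviour.  The helper name and arguments are hypothetical.
def unpack_now(registry, pack_avatar, dt_exec):
    """Create an already executed Unpack of ``pack_avatar``.

    The outcomes are derived from the Type's 'unpack' behaviour and the
    optional 'contents' Property, as described in get_outcome_specs().
    """
    return registry.Wms.Operation.Unpack.create(
        inputs=[pack_avatar], state='done', dt_execution=dt_exec)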
Example #6
            class Test:

                id = Integer(primary_key=True)
                properties = Json()
                name = JsonRelated(keys=['name'])
Example #7
class Request:
    id = Integer(label="Identifier", primary_key=True)
    """Primary key.

    In this model, the ordering of ``id`` is actually
    important (whereas on many others, it's a matter of habit to have
    a serial id): the smaller it is, the older the Request.

    Requests have to be reserved in order.

    Note that ``serial`` columns in PostgreSQL don't induce conflicts, as
    the sequence is evaluated out of transaction.
    """

    purpose = Jsonb()
    """Flexible field to describe what the reservations will be for.

    This is typically used by a planner, to produce an appropriate
    chain of Operations to fulfill that purpose.

    Example: in a simple sales system, we would record a sale order
    reference here, and the planner would then take the related PhysObj
    and issue (planned) Moves and Departures for their 'present' or
    'future' Avatars.
    """

    reserved = Boolean(nullable=False, default=False)
    """Indicates that all reservations are taken.

    TODO: find a way to represent if the Request is partially done?
    Some use-cases would require planning partial deliveries and the
    like in that case.
    """

    planned = Boolean(nullable=False, default=False)
    """Indicates that the planner has finished with that Request.

    It's better than deleting, because it allows one to cancel all
    Operations, set this back to ``False``, and plan again.
    """

    txn_owned_reservations = set()
    """Ids of the Requests whose reservations the current transaction owns."""

    @classmethod
    @contextmanager
    def claim_reservations(cls, query=None, **filter_by):
        """Context manager to claim ownership over this Request's reservations.

        This is meant for planners and works on fully reserved Requests.
        Example::

           Request = registry.Wms.Reservation.Request
           with Request.claim_reservations() as req_id:
               request = Request.query().get(req_id)
               (...) read request.purpose, plan Operations (...)

        By calling this, the current transaction becomes responsible
        for all Request's reservations, meaning that it has the
        liberty to issue any Operation affecting its PhysObj or their Avatars.

        :return: id of claimed Request
        :param dict filter_by: direct filtering criteria to add to the
                               query, e.g, a planner looking for planning to
                               be done would pass ``planned=False``.
        :param query: if specified, is used to form the final SQL query,
                      instead of creating a new one.
                      The passed query must have the present model class in
                      its ``FROM`` clause and return only the ``id`` column
                      of the present model. The criteria of
                      ``filter_by`` are still applied if also provided.

        This is safe with respect to concurrency: no other transaction
        can claim the same Request (guaranteed by a PostgreSQL lock).

        The session will forget about this Request as soon as one
        exits the ``with`` statement, and the underlying PG lock is
        released at the end of the transaction.

    TODO for now it's a context manager. I'd find it more
    elegant to tie it to the transaction, to get automatic
        release, without a ``with`` syntax, but that requires more
        digging into SQLAlchemy and Anyblok internals.

    TODO I think FOR UPDATE actually creates a new internal PG row
    (table bloat). Shall we switch to advisory locks (see PG doc) with a
    hardcoded mapping to an integer?
    If that's true, then performance-wise it's equivalent for us
    to set the txn id in some service column (but that would
    require unconditional cleanup, a complication)
        """
        if query is None:
            query = cls.query('id')
        query = query.filter_by(reserved=True, **filter_by)

        # issues a SELECT FOR UPDATE SKIP LOCKED (search
        #   'with_for_update' within
        #   http://docs.sqlalchemy.org/en/latest/core/selectable.html)
        # also, noteworthy, SKIP LOCKED appeared with PostgreSQL 9.5
        #   (https://www.postgresql.org/docs/current/static/release-9-5.html)
        cols = query.with_for_update(skip_locked=True, of=cls).order_by(
            cls.id).first()
        request_id = None if cols is None else cols[0]

        if request_id is not None:
            cls.txn_owned_reservations.add(request_id)
        yield request_id

        if request_id is not None:
            cls.txn_owned_reservations.discard(request_id)

    def is_txn_reservations_owner(self):
        """Tell if transaction is the owner of this Request's reservations.

        :return:
          ``True`` if the current transaction has claimed ownership,
          using the :meth:`claim_reservations` method.
        """
        return self.id in self.txn_owned_reservations

    def reserve(self):
        """Try and perform reservation for all RequestItems.

        :return: ``True`` if all reservations are now taken
        :rtype: bool

        Should not fail if reservations are already done.
        """
        Item = self.registry.Wms.Reservation.RequestItem
        # could use map() and all(), but it's not recommended style
        # if there are strong side effects.
        all_reserved = True
        for item in Item.query().filter(Item.request == self).all():
            all_reserved = all_reserved and item.reserve()
        self.reserved = all_reserved
        return all_reserved

    @classmethod
    def lock_unreserved(cls, batch_size, query_filter=None, offset=0):
        """Take exclusivity over not yet reserved Requests

        This is used in :ref:`Reservers <arch_reserver>` implementations.

        :param int batch_size: maximum number of Requests to lock at once.

        Since reservations have to be taken in order, this produces a hard
        error in case there's a conflicting database lock, instead of skipping
        them like :meth:`claim_reservations` does.

        This conflicts in particular with locks taken with
        :meth:`claim_reservations`, but in principle,
        only :ref:`reservers <arch_reserver>` should take locks
        over reservation Requests that are not reserved yet, and these should
        not run in concurrency (or in a very controlled way, using
        ``query_filter``).
        """
        query = cls.query().filter(cls.reserved.is_(False))
        if query_filter is not None:
            query = query_filter(query)
        query = query.with_for_update(nowait=True).order_by(cls.id)
        try:
            return query.limit(batch_size).offset(offset).all()
        except sqlalchemy.exc.OperationalError as op_err:
            cls.registry.rollback()
            raise cls.ReservationsLocked(op_err)

    class ReservationsLocked(RuntimeError):
        """Used to rewrap concurrency errors while taking locks."""
        def __init__(self, db_exc):
            self.db_exc = db_exc

    @classmethod
    def reserve_all(cls, batch_size=10, nb_attempts=5, retry_delay=1,
                    query_filter=None):
        """Try and perform all reservations for pending Requests.

        This walks all pending (:attr:`reserved` equal to ``False``)
        Requests that haven't been reserved from the oldest and locks
        them by batches of ``batch_size``.

        Reservation is attempted for each request, in order, meaning that
        each request will grab as many PhysObj as it can before the next one
        gets processed.

        :param int batch_size:
           number of pending Requests to grab at each iteration
        :param nb_attempts:
           number of attempts (in the face of conflicts) for each batch
        :param retry_delay:
           time to wait before retrying to grab a batch (hoping other
           transactions holding locks would have released them)
        :param query_filter:
           optional function to add filtering to the query used to grab the
           reservations. The caller can use this to implement controlled
           concurrency in the reservation process: several processes can
           focus on different Requests, as long as they don't compete for
           PhysObj to reserve.

        The transaction is committed for each batch, and that's essential
        for proper operation under concurrency.
        """
        skip = 0
        while True:
            # TODO log.info
            count = 1
            while True:
                try:
                    requests = cls.lock_unreserved(batch_size,
                                                   offset=skip,
                                                   query_filter=query_filter)
                except cls.ReservationsLocked:
                    # TODO log.warning
                    if count == nb_attempts:
                        raise
                    time.sleep(retry_delay)
                    count += 1
                else:
                    break
            if not requests:
                break

            for request in requests:
                if not request.reserve():
                    skip += 1
            cls.registry.commit()
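

# Usage sketch (an assumption, not part of the module above): a minimal
# reserver/planner cycle built on the methods defined above.  The helper
# name is hypothetical; ``registry`` is an Anyblok registry with the
# reservation Blok installed.
def reserve_then_plan_one(registry):
    Request = registry.Wms.Reservation.Request
    # reserver side: walk pending Requests, oldest first, and reserve them
    Request.reserve_all(batch_size=10)
    # planner side: claim one fully reserved Request that still needs planning
    with Request.claim_reservations(planned=False) as req_id:
        if req_id is None:
            return
        request = Request.query().get(req_id)
        # ... read request.purpose and issue planned Operations here ...
        request.planned = True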
Example #8
 class Exemple:
     id = Integer(primary_key=True)
     name = Text()
Example #9
 class Test:
     id = Integer(primary_key=True)
     val = Integer(nullable=False)
Example #10
class RequestItem:

    id = Integer(label="Identifier", primary_key=True)
    """Primary key.

    Note that ``serial`` columns in PostgreSQL don't induce conflicts, as
    the sequence is evaluated out of transaction.
    """

    request = Many2One(model=Wms.Reservation.Request)

    goods_type = Many2One(model='Model.Wms.PhysObj.Type')

    quantity = Integer(nullable=False)

    properties = Jsonb()

    @classmethod
    def define_table_args(cls):
        return super(RequestItem, cls).define_table_args() + (
            CheckConstraint('quantity > 0', name='positive_qty'),
        )

    def lookup(self, quantity):
        """Try and find PhysObj matching the specified conditions.

        :return: the matching PhysObj that were found and the quantity each
                 accounts for. The PhysObj may not be of the requested type.
                 What matters is how much of the requested quantity
                 each one represents.

        :rtype: list of (int, :class:`PhysObj
                <anyblok_wms_base.bloks.wms_core.goods.PhysObj>`) pairs

        This method is where most business logic should lie.

        This default
        implementation only does equality matching on the PhysObj Type and
        each property, and therefore is not able to return PhysObj of another
        Type accounting for more than one unit of the wished quantity.
        Downstream libraries and applications are welcome to override it.
        """
        Wms = self.registry.Wms
        PhysObj = Wms.PhysObj
        Reservation = Wms.Reservation
        Avatar = PhysObj.Avatar
        Props = PhysObj.Properties
        # TODO PERF this returns from the DB one PhysObj line per
        # Avatar, but SQLA reassembles them as exactly one (seen while
        # tracing the test_reserve_avatars_once() under pdb)
        # SELECT DISTINCT ON would be better
        # TODO provide ordering by Avatar state and/or dt_from
        query = PhysObj.query().join(Avatar.goods).outerjoin(
            Reservation, Reservation.physobj_id == PhysObj.id).filter(
                Reservation.physobj_id.is_(None),
                PhysObj.type == self.goods_type,
                Avatar.state.in_(('present', 'future')))
        if self.properties:
            props = self.properties.copy()
            query = query.join(PhysObj.properties)
            pfields = Props.fields_description()
            for p in set(props).intersection(pfields):
                query = query.filter(getattr(Props, p) == props.pop(p))
            if props:
                query = query.filter(Props.flexible.contains(props))
        return [(1, g) for g in query.limit(quantity).all()]

    def reserve(self):
        """Perform the wished reservations.

        :return bool: whether the RequestItem is completely reserved.
                      TODO: shall we store this directly in the DB?
        """
        Reservation = self.registry.Wms.Reservation
        already = Reservation.query(func.sum(Reservation.quantity)).filter(
            Reservation.request_item_id == self.id).one()[0]
        if already is None:
            already = 0
        if already >= self.quantity:
            # it's legit to be greater, think of reserving 2 packs of 10
            # to use 17. Maybe later, we'll unpack just one of them and update
            # the reservation to add just 7 of the Unpack outcomes.
            return True
        added = 0
        for quantity, goods in self.lookup(self.quantity - already):
            # TODO use a o2m ?
            Reservation.insert(goods=goods, quantity=quantity,
                               request_item=self)
            added += quantity
        return already + added >= self.quantity
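

# Usage sketch (an assumption, not part of the module above): expressing a
# need for two units of a given PhysObj Type and trying to reserve them at
# once.  The purpose payload and property values are made up for the example.
def request_two_red(registry, goods_type):
    Reservation = registry.Wms.Reservation
    req = Reservation.Request.insert(purpose={'sale_order': 'SO-123'})
    Reservation.RequestItem.insert(request=req,
                                   goods_type=goods_type,
                                   quantity=2,
                                   properties={'colour': 'red'})
    return req.reserve()  # True if both units could be reserved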
Example #11
            class Test:

                id = Integer(primary_key=True)
                val = String(default='val')
Example #12
 class Test:
     id = Integer(primary_key=True)
     update_at = DateTime(auto_update=True)
     val = String()
Example #13
    class Test2:

        id = Integer(primary_key=True)
        test = String(foreign_key=Model.Test.use('name'))
Example #14
class Split(SingleInput, Operation):
    """A split of a PhysObj record in two.

    Splits replace their input's :class:`PhysObj
    <anyblok_wms_base.quantity.goods.PhysObj>` record with
    two of them, one having the wished :attr:`quantity`, along with
    Avatars at the same location, while
    keeping the same properties and the same total quantity.

    This is therefore destructive for the input's PhysObj, which is not
    conceptually satisfactory, but will be good enough at this stage of
    development.

    While non-trivial in the database, they may have no physical counterpart in
    the real world. We call them *formal* in that case.

    Formal Splits are operations of a special kind, that have to be considered
    internal details of ``wms-core``, that are not guaranteed to exist in the
    future.

    Formal Splits can always be reverted with
    :class:`Aggregate <.aggregate.Aggregate>` Operations,
    but only some physical Splits can be reverted, depending on
    the PhysObj Type.

    .. seealso:: :class:`Model.Wms.PhysObj.Type
                 <anyblok_wms_base.quantity.goods.Type>`
                 for a full discussion including use-cases of formal and
                 physical splits and reversal of the latter.

    In the formal case, we've decided to represent this as an Operation for
    the sake of consistency, and especially to avoid too many special cases
    in the implementation of various concrete Operations.

    The benefit is that Splits appear explicitly in the history, and this
    helps implementing :ref:`history manipulating methods
    <op_cancel_revert_obliviate>` a lot.

    The drawback is that we get a proliferation of PhysObj records, some of
    them even with a zero-second lifespan, but even those could only be
    simplified away for executed Splits.

    Splits are typically created and executed from :class:`Splitter Operations
    <.splitter.WmsSplitterOperation>`, and that explains the
    above-mentioned zero lifespans.
    """
    TYPE = 'wms_split'
    """Polymorphic key"""

    id = Integer(label="Identifier",
                 primary_key=True,
                 autoincrement=False,
                 foreign_key=Operation.use('id').options(ondelete='cascade'))

    quantity = Decimal()
    """The quantity to split."""

    def specific_repr(self):
        return ("input={self.input!r}, "
                "quantity={self.quantity}").format(self=self)

    def after_insert(self):
        self.registry.flush()
        avatar = self.input
        goods = avatar.goods
        qty = self.quantity
        new_goods = dict(
            type=goods.type,
            code=goods.code,
            properties=goods.properties,
        )
        new_avatars = dict(
            location=avatar.location,
            reason=self,
            dt_from=self.dt_execution,
            dt_until=avatar.dt_until,
        )
        avatar.dt_until = self.dt_execution
        if self.state == 'done':
            avatar.update(state='past', reason=self)
            new_avatars['state'] = 'present'
        else:
            new_avatars['state'] = 'future'

        return tuple(
            avatar.insert(goods=goods.insert(quantity=new_qty, **new_goods),
                          **new_avatars)
            for new_qty in (qty, goods.quantity - qty))

    @property
    def wished_outcome(self):
        """Return the outcome Avatar whose PhysObj bears the wished quantity.

        This is only one of :attr:`outcomes
        <anyblok_wms_base.core.operation.base.Operation.outcomes>`

        :rtype: Avatar of :class:`Wms.PhysObj
                <anyblok_wms_base.core.physobj.PhysObj>`
        """
        PhysObj = self.registry.Wms.PhysObj
        Avatar = PhysObj.Avatar
        # in case the split is exactly in half, there's no difference
        # between the two records we created, let's pick any.
        outcome = Avatar.query().join(Avatar.goods).filter(
            Avatar.reason == self, Avatar.state != 'past',
            PhysObj.quantity == self.quantity).first()
        if outcome is None:
            raise OperationError(self, "The split outcomes have disappeared")
        return outcome

    def check_execute_conditions(self):
        """Call the base class's version and check that quantity is suitable.
        """
        super(Split, self).check_execute_conditions()
        goods = self.input.goods
        if self.quantity > goods.quantity:
            raise OperationQuantityError(
                self,
                "Can't execute {op}, whose quantity {op.quantity} is greater "
                "than that of its input {goods}, "
                "although it's been successfully planned.",
                op=self,
                goods=self.input)

    def execute_planned(self):
        for outcome in self.outcomes:
            outcome.update(state='present', dt_from=self.dt_execution)
        self.registry.flush()
        self.input.update(state='past',
                          dt_until=self.dt_execution,
                          reason=self)
        self.registry.flush()

    def is_reversible(self):
        """Reversibility depends on the relevant PhysObj Type.

        See :meth:`on Model.PhysObj.Type
        <anyblok_wms_base.core.physobj.Type.is_split_reversible>`
        """
        return self.input.goods.type.is_split_reversible()

    def plan_revert_single(self, dt_execution, follows=()):
        if not follows:
            # reversal of an end-of-chain split
            follows = [self]
        Wms = self.registry.Wms
        Avatars = Wms.PhysObj.Avatar
        # here in that case, that's for multiple operations
        # in_ is not implemented for Many2Ones
        reason_ids = set(f.id for f in follows)
        reason_ids.add(self.id)
        to_aggregate = Avatars.query().filter(
            Avatars.reason_id.in_(reason_ids), Avatars.state != 'past').all()
        return Wms.Operation.Aggregate.create(inputs=to_aggregate,
                                              dt_execution=dt_execution,
                                              state='planned')

    def obliviate_single(self):
        """Remove the created PhysObj in addition to base class operation.

        The base class would only take care of the created Avatars.
        """
        outcomes_goods = [o.goods for o in self.outcomes]
        super(Split, self).obliviate_single()
        for goods in outcomes_goods:
            goods.delete()
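

# Usage sketch (an assumption, not part of the module above): planning a
# Split of part of the quantity held by a bulk Avatar, then retrieving the
# outcome bearing the wished quantity.  The helper name is hypothetical.
def plan_split(registry, avatar, qty, dt_exec):
    split = registry.Wms.Operation.Split.create(
        inputs=[avatar], quantity=qty, state='planned', dt_execution=dt_exec)
    return split.wished_outcome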
Example #15
class Product:

    id = Integer(primary_key=True)
    code = String(nullable=False, index=True)
    label = String(nullable=False)
Example #16
class Apparition(Mixin.WmsInventoryOperation, Operation):
    """Inventory Operation to record unexpected physical objects.

    This is similar to Arrival, but has a distinct functional meaning.
    Apparitions can exist only in the ``done`` :ref:`state <op_states>`.

    Another difference with Arrivals is that Apparitions have a
    :attr:`quantity` field.
    """
    TYPE = 'wms_apparition'

    id = Integer(label="Identifier",
                 primary_key=True,
                 autoincrement=False,
                 foreign_key=Operation.use('id').options(ondelete='cascade'))
    """Primary key."""
    physobj_type = Many2One(model='Model.Wms.PhysObj.Type')
    """Observed :class:`PhysObj Type
    <anyblok_wms_base.core.physobj.Type>`.
    """
    quantity = Integer()
    """The number of identical PhysObj that have appeared.

    Here, identical means "same type, code and properties".
    """
    physobj_properties = Jsonb()
    """Observed :class:`Properties
    <anyblok_wms_base.core.physobj.Properties>`.

    They are copied over to the newly created :class:`PhysObj
    <anyblok_wms_base.core.physobj.PhysObj>`. Then the Properties can evolve on
    the PhysObj, while this Apparition field will keep the exact values
    that were observed during inventory.
    """
    physobj_code = Text()
    """Observed :attr:`PhysObj code
    <anyblok_wms_base.core.physobj.PhysObj.code>`.
    """
    location = Many2One(model='Model.Wms.PhysObj')
    """Location of appeared PhysObj.

    This will be the location of the initial Avatars.
    """

    goods_type = Function(fget='_goods_type_get',
                          fset='_goods_type_set',
                          fexpr='_goods_type_expr')
    """Compatibility wrapper.

    Before version 0.9.0, :attr:`physobj_type` was ``goods_type``.

    This does not extend to compatibility of the former low level
    ``goods_type_id`` column.
    """

    goods_properties = Function(fget='_goods_properties_get',
                                fset='_goods_properties_set',
                                fexpr='_goods_properties_expr')
    """Compatibility wrapper.

    Before version 0.9.0, :attr:`physobj_properties` was ``goods_properties``.
    """

    goods_code = Function(fget='_goods_code_get',
                          fset='_goods_code_set',
                          fexpr='_goods_code_expr')
    """Compatibility wrapper.

    Before version 0.9.0, :attr:`physobj_code` was ``goods_code``.
    """

    inputs_number = 0
    """This Operation is a purely creative one."""

    def specific_repr(self):
        return ("physobj_type={self.physobj_type!r}, "
                "location={self.location!r}").format(self=self)

    def _goods_col_get(self, suffix):
        deprecation_warn_goods_col(self, suffix)
        return getattr(self, 'physobj_' + suffix)

    def _goods_col_set(self, suffix, value):
        deprecation_warn_goods_col(self, suffix)
        setattr(self, 'physobj_' + suffix, value)

    @classmethod
    def _goods_col_expr(cls, suffix):
        deprecation_warn_goods_col(cls, suffix)
        return getattr(cls, 'physobj_' + suffix)

    def _goods_type_get(self):
        return self._goods_col_get('type')

    def _goods_type_set(self, value):
        self._goods_col_set('type', value)

    @classmethod
    def _goods_type_expr(cls):
        return cls._goods_col_expr('type')

    def _goods_properties_get(self):
        return self._goods_col_get('properties')

    def _goods_properties_set(self, value):
        self._goods_col_set('properties', value)

    @classmethod
    def _goods_properties_expr(cls):
        return cls._goods_col_expr('properties')

    def _goods_code_get(self):
        return self._goods_col_get('code')

    def _goods_code_set(self, value):
        self._goods_col_set('code', value)

    @classmethod
    def _goods_code_expr(cls):
        return cls._goods_col_expr('code')

    @classmethod
    def check_create_conditions(cls,
                                state,
                                dt_execution,
                                location=None,
                                **kwargs):
        """Forbid creation with wrong states, check location is a container.

        :raises: :class:`OperationForbiddenState
                 <anyblok_wms_base.exceptions.OperationForbiddenState>`
                 if state is not ``'done'``

                 :class:`OperationContainerExpected
                 <anyblok_wms_base.exceptions.OperationContainerExpected>`
                 if location is not a container.
        """
        if location is None or not location.is_container():
            raise OperationContainerExpected(cls,
                                             "location field value {offender}",
                                             offender=location)

        super().check_create_conditions(state, dt_execution, **kwargs)

    def after_insert(self):
        """Create the PhysObj and their Avatars.

        In the ``wms-core`` implementation, the :attr:`quantity` field
        gives rise to as many PhysObj records.
        """
        PhysObj = self.registry.Wms.PhysObj
        self_props = self.physobj_properties
        if self_props is None:
            props = None
        else:
            props = PhysObj.Properties.create(**self_props)

        for _ in range(self.quantity):
            PhysObj.Avatar.insert(obj=PhysObj.insert(type=self.physobj_type,
                                                     properties=props,
                                                     code=self.physobj_code),
                                  location=self.location,
                                  outcome_of=self,
                                  state='present',
                                  dt_from=self.dt_execution)
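

# Usage sketch (an assumption, not part of the module above): recording three
# identical objects found during an inventory in the container ``stock_loc``.
# The helper name, code and property values are made up for the example.
def record_found_objects(registry, stock_loc, obj_type, dt_exec):
    return registry.Wms.Operation.Apparition.create(
        state='done',  # Apparitions exist only in the 'done' state
        dt_execution=dt_exec,
        location=stock_loc,
        physobj_type=obj_type,
        quantity=3,
        physobj_code='FOUND-0001',
        physobj_properties=dict(batch='UNKNOWN'))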
Example #17
class Product:
    __db_schema__ = "Lens"

    id = Integer(primary_key=True, db_column_name="IDD")
    code = String(nullable=False, index=True, db_column_name="Attr1")
    label = String(nullable=False, db_column_name="Display")
Example #18
class Cache:

    last_cache_id = None
    lrus = {}

    id = Integer(primary_key=True)
    registry_name = String(nullable=False)
    method = String(nullable=False)

    @classmethod
    def get_last_id(cls):
        """ Return the last primary key ``id`` value
        """
        res = cls.query('id').order_by(cls.id.desc()).limit(1).first()
        if res:
            return res[0]

        return 0

    @classmethod
    def initialize_model(cls):
        """ Initialize the last_cache_id known
        """
        super(Cache, cls).initialize_model()
        cls.last_cache_id = cls.get_last_id()

    @classmethod
    def invalidate_all(cls):
        res = []
        for registry_name, methods in cls.registry.caches.items():
            for method in methods.keys():
                res.append(dict(registry_name=registry_name, method=method))

        if res:
            cls.multi_insert(*res)

        cls.clear_invalidate_cache()

    @classmethod
    def invalidate(cls, registry_name, method):
        """ Call the invalidation for a specific method cached on a model

        :param registry_name: namespace of the model
        :param method: name of the method on the model
        :exception: CacheException
        """
        caches = cls.registry.caches

        def insert(registry_name=None, method=None):
            if registry_name in caches:
                if method in caches[registry_name]:
                    cls.insert(registry_name=registry_name, method=method)
                else:
                    raise CacheException("Unknown cached method %r" % method)
            else:
                raise CacheException("Unknown cached model %r" % registry_name)

        if isinstance(registry_name, str):
            insert(registry_name=registry_name, method=method)
        elif hasattr(registry_name, '__registry_name__'):
            insert(registry_name=registry_name.__registry_name__,
                   method=method)

        cls.clear_invalidate_cache()

    @classmethod
    def detect_invalidation(cls):
        """ Return True if a new invalidation is found in the table

        :rtype: Boolean
        """
        return cls.last_cache_id < cls.get_last_id()

    @classmethod
    def get_invalidation(cls):
        """ Return the pointers of the methods to invalidate
        """
        res = []
        if cls.detect_invalidation():
            caches = cls.registry.caches
            for i in cls.query().filter(cls.id > cls.last_cache_id).all():
                res.extend(caches[i.registry_name][i.method])

            cls.last_cache_id = cls.get_last_id()

        return res

    @classmethod
    def clear_invalidate_cache(cls):
        """ Invalidate the cache that needs to be invalidated
        """
        for cache in cls.get_invalidation():
            cache.cache_clear()
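

# Usage sketch (an assumption, not part of the module above): invalidating a
# cached method, assuming this model is registered as Model.System.Cache (as
# in Anyblok's core).  'Model.Product' and 'expensive_lookup' are hypothetical
# and stand for any model/method pair present in ``registry.caches``.
def drop_product_cache(registry):
    registry.System.Cache.invalidate('Model.Product', 'expensive_lookup')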
Example #19
class Family:
    """Product.Family class
    """
    FAMILY_CODE = None
    family_schema = None
    template_schema = None
    item_schema = None

    id = Integer(label="Identifier", primary_key=True)
    create_date = DateTime(default=datetime.now, nullable=False)
    edit_date = DateTime(default=datetime.now,
                         nullable=False,
                         auto_update=True)
    code = String(label="Family code", unique=True, nullable=False)
    name = String(label="Family name", size=256)
    description = Text(label="Family description")
    properties = Jsonb(label="Family properties")

    family_code = Selection(selections='get_family_codes')
    items = Function(fget="fget_items")

    @classmethod
    def get_family_codes(cls):
        return dict()

    def fget_items(self):
        """Return a list of product instances from this family
        """
        return self.registry.InstrumentedList(
            set([i for t in self.templates for i in t.items]))

    @classmethod
    def create(cls, **kwargs):
        data = kwargs.copy()
        if cls.family_schema:
            sch = cls.family_schema(registry=cls.registry)
            data = sch.load(kwargs)
        return cls.insert(**data)

    @classmethod
    def query(cls, *args, **kwargs):
        query = super(Family, cls).query(*args, **kwargs)
        if cls.__registry_name__ != 'Model.Product.Family':
            query = query.filter(cls.family_code == cls.FAMILY_CODE)

        return query

    @classmethod
    def define_mapper_args(cls):
        mapper_args = super(Family, cls).define_mapper_args()
        if cls.__registry_name__ == 'Model.Product.Family':
            mapper_args.update({'polymorphic_on': cls.family_code})

        mapper_args.update({'polymorphic_identity': cls.FAMILY_CODE})
        return mapper_args

    def __str__(self):
        return "%s : %s" % (self.code, self.name)

    def __repr__(self):
        return "<Product.Family(code=%s, name=%s)>" % (self.code, self.name)
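

# Subclassing sketch (an assumption, not part of the module above): a concrete
# family would set FAMILY_CODE and expose it through get_family_codes() so the
# ``family_code`` Selection accepts it.  In a real Blok this class would also
# be registered against Model.Product.Family with the usual Anyblok decorator.
class Shoes(Family):
    FAMILY_CODE = 'SHOES'

    @classmethod
    def get_family_codes(cls):
        return {'SHOES': 'Shoes'}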
Example #20
    class Test:

        id = Integer(primary_key=True)
        field = OneField()
Example #21
 class Exemple:
     id = Integer(primary_key=True)
     number = Integer()
Example #22
    class Test:

        id = Integer(primary_key=True)
        properties = Json()
        name = JsonRelated(json_column='properties', keys=['name'])
Example #23
 class Test2:
     id = Integer(primary_key=True)
     test = Many2One(model=Declarations.Model.Test)
Example #24
 class Test:
     id = Integer(primary_key=True)
     id2 = Integer()
     select = Selection(selections=[('key', 'value'), ('key2', 'value2')],
                        default='key')
Example #25
class Menu:
    id = Integer(primary_key=True)
    parent_id = Integer(foreign_key='Model.FuretUI.Menu=>id')
    order = Integer(nullable=False, default=100)
    icon_code = String()
    icon_type = String()
    menu_type = Selection(
        selections={
            'Model.FuretUI.Menu.Root': 'Root',
            'Model.FuretUI.Menu.Node': 'Node',
            'Model.FuretUI.Menu.Resource': 'Resource',
            'Model.FuretUI.Menu.Url': 'Url',
            'Model.FuretUI.Menu.Call': 'Call',
        },
        nullable=False)

    @classmethod
    def define_mapper_args(cls):
        mapper_args = super(Menu, cls).define_mapper_args()
        if cls.__registry_name__ == 'Model.FuretUI.Menu':
            mapper_args.update({'polymorphic_on': cls.menu_type})
            mapper_args.update({'polymorphic_identity': None})
        else:
            mapper_args.update({'polymorphic_identity': cls.__registry_name__})

        return mapper_args

    def check_acl(self):
        return True

    def to_dict(self, *a, **kw):
        res = super().to_dict(*a, **kw)
        if 'label' in res and res['label']:
            mapping = self.anyblok.IO.Mapping.get_from_entry(self)
            if mapping:
                lang = self.context.get('lang', 'en')
                res['label'] = Translation.get(lang, f'menu:{mapping.key}',
                                               res['label'])

        return res

    @classmethod
    def rec_get_children_menus(cls, children, resource=None):
        res = []
        for child in children:
            if child.check_acl():
                sub_menus = []
                definition = child.to_dict('id', 'order', 'label', 'icon_code',
                                           'icon_type')

                if child.menu_type == 'Model.FuretUI.Menu.Node':
                    sub_menus = cls.rec_get_children_menus(child.children,
                                                           resource=resource)
                elif child.menu_type == 'Model.FuretUI.Menu.Resource':
                    definition['resource'] = child.resource.id
                    definition.update(
                        child.to_dict('tags', 'order_by', 'filters'))
                elif child.menu_type == 'Model.FuretUI.Menu.Url':
                    definition.update(child.to_dict('url'))
                elif child.menu_type == 'Model.FuretUI.Menu.Call':
                    definition['resource'] = resource.id if resource else None
                    definition.update(child.to_dict('model', 'method'))

                res.append({'children': sub_menus, **definition})

        return res

    @classmethod
    def get_menus_from(cls, space=None, resource=None):
        menus = []
        Menu = cls.anyblok.FuretUI.Menu
        MRo = cls.anyblok.FuretUI.Menu.Root
        mros = MRo.query()

        if space is not None:
            mros = mros.filter(MRo.space == space)
        elif resource is not None:
            mros = mros.filter(MRo.resource == resource)

        mros = mros.order_by(MRo.order.asc())
        for mro in mros:
            mres = Menu.query().filter(Menu.parent_id == mro.id)
            mres = mres.order_by(Menu.order.asc()).order_by(Menu.id.asc())
            mres = mres.all()
            if not mres:
                continue

            mres = cls.rec_get_children_menus(mro.children, resource=resource)

            if not mres:
                continue

            if mro.label:
                menus.append({
                    'children': mres,
                    **mro.to_dict('id', 'order', 'label',
                                  'icon_code', 'icon_type'),
                })
            else:
                menus.extend(mres)

        return menus

    def delete(self, *a, **kw):
        menu_id = self.id
        super().delete(*a, **kw)
        if self.__registry_name__ != 'Model.FuretUI.Menu':
            query = f"delete from furetui_menu where id={menu_id};"
            self.execute_sql_statement(text(query))
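

# Usage sketch (an assumption, not part of the module above): building the
# menu tree to send to a FuretUI client for a given space.
def space_menus(registry, space):
    # each element is a dict with a 'children' list plus the fields exposed
    # by to_dict() in get_menus_from() above
    return registry.FuretUI.Menu.get_menus_from(space=space)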
Example #26
 class Test2:
     id = Integer(primary_key=True)
     name = String()
     test = One2One(model=Model.Test, backref='test2')
Example #27
 class Item:
     id = Integer(primary_key=True,
                  db_column_name='ProductDetailId')
     template_code = String(db_column_name='ProductId',
                            foreign_key=Model.Template.use('code'))