Code example #1
0
 def formula_rules(self):
     """Run each non-pruned Formula rule for this row, in dependency (_exec_order) order."""
     self.log_engine("formula_rules")
     rules = sorted(rule_bank_withdraw.rules_of_class(self, Formula),
                    key=lambda rule: rule._exec_order)
     for each_rule in rules:
         if not self.is_formula_pruned(each_rule):
             each_rule.execute(self)
Code example #2
0
 def early_row_events(self):
     """Fire EarlyRowEvent rules: generic (all-class) events first, then class-specific ones."""
     self.log_engine("early_events")
     generic_events = rule_bank_withdraw.generic_rules_of_class(EarlyRowEvent)
     for each_event in generic_events:
         each_event.execute(self)
     class_events = rule_bank_withdraw.rules_of_class(self, EarlyRowEvent)
     for each_event in class_events:
         each_event.execute(self)
Code example #3
0
File: logic_row.py  Project: valhuber/LogicBank
    def check_parents_on_update(self):
        """ per ParentCheck rule, verify parents exist.

        If disabled, the check is skipped entirely.
        NOTE(review): docstring previously claimed "ignore (with warning)" when
        disabled, but no warning was (or is) possible - the whole scan is gated
        on _enable; confirm intended behavior.

        Fix: the prior version logged "Warning: Missing Parent" in the else
        branch even when the parent row EXISTED; warn/raise now happens only
        when the parent is actually absent (matches load_parents_on_insert).

        Returns self (fluent style).
        Raises ConstraintException when an enabled ParentCheck finds a missing parent.
        """

        list_ref_integ_rules = rule_bank_withdraw.rules_of_class(
            self, ParentCheck)
        if list_ref_integ_rules:
            ref_integ_rule = list_ref_integ_rules[0]
            if ref_integ_rule._enable:
                child_mapper = object_mapper(self.row)
                my_relationships = child_mapper.relationships
                for each_relationship in my_relationships:  # eg, order has parents cust & emp, child orderdetail
                    if each_relationship.direction == sqlalchemy.orm.interfaces.MANYTOONE:  # cust, emp
                        parent_role_name = each_relationship.key  # eg, OrderList
                        if not self.is_foreign_key_null(each_relationship):
                            reason = "Cascading PK change to: " + \
                                     each_relationship.key + "->" + \
                                     each_relationship.back_populates
                            if self.reason == reason:
                                # The parent doing the cascade obviously exists,
                                # and note: trying to getattr it will fail
                                # (FIXME design review - perhaps SQLAlchemy is not checking cache?)
                                pass
                            else:
                                self.get_parent_logic_row(
                                    parent_role_name)  # sets the accessor
                                does_parent_exist = getattr(
                                    self.row, parent_role_name)
                                if does_parent_exist is None:
                                    # _enable already verified above; prior redundant
                                    # `_enable == True` re-check removed
                                    msg = "Missing Parent: " + parent_role_name
                                    self.log(msg)
                                    ll = RuleBank()
                                    if ll.constraint_event:
                                        ll.constraint_event(message=msg,
                                                            logic_row=self,
                                                            constraint=None)
                                    raise ConstraintException(msg)
        return self
Code example #4
0
File: logic_row.py  Project: valhuber/LogicBank
    def load_parents_on_insert(self):
        """ sqlalchemy lazy does not work for inserts... do it here because...
        1. RI would require the sql anyway
        2. Provide a consistent model - your parents are always there for you
            - eg, see add_order event rule - references {sales_rep.Manager.FirstName}

        Returns self (fluent style).
        Raises ConstraintException when a non-null FK has no parent row and the
        ParentCheck rule is enabled (default when no rule is declared: enabled).
        """

        ref_integ_enabled = True  # default when no ParentCheck rule is declared
        list_ref_integ_rules = rule_bank_withdraw.rules_of_class(
            self, ParentCheck)
        if list_ref_integ_rules:
            # fix: honor the declared rule's _enable flag
            # (it was previously fetched into ref_integ_rule but never used)
            ref_integ_enabled = list_ref_integ_rules[0]._enable

        child_mapper = object_mapper(self.row)
        my_relationships = child_mapper.relationships
        for each_relationship in my_relationships:  # eg, order has parents cust & emp, child orderdetail
            if each_relationship.direction == sqlalchemy.orm.interfaces.MANYTOONE:  # cust, emp
                parent_role_name = each_relationship.key  # eg, OrderList
                if self.is_foreign_key_null(each_relationship) is False:
                    # foreign key not null - parent *should* exist; load it now
                    self.get_parent_logic_row(
                        parent_role_name)  # sets the accessor
                    does_parent_exist = getattr(self.row, parent_role_name)
                    if does_parent_exist:
                        pass  # yes, parent exists... it's all fine
                    elif ref_integ_enabled:
                        msg = "Missing Parent: " + parent_role_name
                        self.log(msg)
                        ll = RuleBank()
                        if ll.constraint_event:
                            ll.constraint_event(message=msg,
                                                logic_row=self,
                                                constraint=None)
                        raise ConstraintException(msg)
                    else:
                        self.log("Warning: Missing Parent: " +
                                 parent_role_name)
                        pass  # if you don't care, I don't care
        return self
Code example #5
0
File: logic_row.py  Project: valhuber/LogicBank
    def get_derived_attributes(self) -> List[InstrumentedAttribute]:
        """
            Return the InstrumentedAttributes targeted by this row's Derivation rules.

            Example (early row event that rejects client changes to derived columns):
                def handle_all(logic_row: LogicRow):
                    row = logic_row.row
                    if logic_row.ins_upd_dlt == "ins" and hasattr(row, "CreatedOn"):
                        row.CreatedOn = datetime.datetime.now()
                        logic_row.log("early_row_event_all_classes - handle_all sets 'CreatedOn'")

                    if logic_row.nest_level == 0:  # client updates should not alter derivations
                        derived_attributes = logic_row.get_derived_attributes()
                        if logic_row.are_attributes_changed(derived_attributes):
                            # NOTE: this does not trigger constraint_event registered in activate
                            raise ConstraintException("One or more derived attributes are changed")
        """
        return [each_rule._derive
                for each_rule in rule_bank_withdraw.rules_of_class(self, Derivation)]
Code example #6
0
File: logic_row.py  Project: valhuber/LogicBank
    def parent_cascade_pk_change(self):  # FIXME design review: is unconditional cascade the intent?
        """
        cascade pk change (if any) to children, unconditionally.

        Presumption: children ref the same pKey (vs. some other "candidate key")

        Returns self (fluent style).
        """
        if self.is_primary_key_changed():
            # collect declared ParentCascade rules, keyed by relationship name
            # NOTE(review): defined_relns is built but never consulted below - confirm intent
            list_parent_cascade_rules = rule_bank_withdraw.rules_of_class(
                self, ParentCascade)
            defined_relns = {}
            for each_parent_cascade_rule in list_parent_cascade_rules:
                defined_relns[each_parent_cascade_rule.
                              _relationship] = each_parent_cascade_rule
            parent_mapper = object_mapper(self.row)
            my_relationships = parent_mapper.relationships
            for each_relationship in my_relationships:  # eg, order has parents cust & emp, child orderdetail
                if each_relationship.direction == sqlalchemy.orm.interfaces.ONETOMANY:  # cust, emp
                    # NOTE(review): backref is None for back_populates-style
                    # relationships - this concat would then raise; confirm
                    reason = "Cascading PK change to: " +\
                             each_relationship.backref + "->" +\
                             each_relationship.key
                    child_rows = self.get_old_child_rows(
                        relationship=each_relationship)
                    for each_child_row in child_rows:
                        # wrap each child as a LogicRow so its own rules fire on update
                        old_child = self.make_copy(each_child_row)
                        each_child_logic_row = LogicRow(row=each_child_row,
                                                        old_row=old_child,
                                                        ins_upd_dlt="upd",
                                                        nest_level=1 +
                                                        self.nest_level,
                                                        a_session=self.session,
                                                        row_sets=self.row_sets)
                        # copy the parent's (new) pk values into the child's fk columns
                        for p, c in each_relationship.local_remote_pairs:
                            setattr(each_child_row, c.name,
                                    getattr(self.row, p.name))
                        each_child_logic_row.update(reason=reason)

        return self
Code example #7
0
def before_flush(a_session: session, a_flush_context, an_instances):
    """
    Logic Execution processes LogicRows: row and old_row

    Note old_row is critical for:
        * user logic (did the value change?  by how much?)
        * performance / pruning (skip rules iff no dependent values change)
        * performance / optimization (1 row adjustments, not expensive select sum/count)

    Fix: removed the unused `table_name` local computed in each of the three
    dispatch loops, and the commented-out debug print.
    """

    """
    Logic Phase
    """
    logic_bank.logic_logger.debug("Logic Phase:\t\tROW LOGIC (sqlalchemy before_flush)\t\t\t")

    row_sets = RowSets()  # type : RowSet
    for each_instance in a_session.dirty:
        row_sets.add_submitted(each_instance)

    bug_explore = None  # None to disable, [None, None] to enable
    if bug_explore is not None:  # temp hack - order rows to explore bug (upd_order_reuse)
        temp_debug(a_session, bug_explore, row_sets)
    else:
        for each_instance in a_session.dirty:
            old_row = get_old_row(each_instance)
            logic_row = LogicRow(row=each_instance, old_row=old_row, ins_upd_dlt="upd",
                                 nest_level=0, a_session=a_session, row_sets=row_sets)
            logic_row.update(reason="client")

    for each_instance in a_session.new:
        logic_row = LogicRow(row=each_instance, old_row=None, ins_upd_dlt="ins",
                             nest_level=0, a_session=a_session, row_sets=row_sets)
        logic_row.insert(reason="client")

    for each_instance in a_session.deleted:
        logic_row = LogicRow(row=each_instance, old_row=None, ins_upd_dlt="dlt",
                             nest_level=0, a_session=a_session, row_sets=row_sets)
        logic_row.delete(reason="client")


    """
    Commit Logic Phase
    """
    logic_bank.logic_logger.debug("Logic Phase:\t\tCOMMIT   \t\t\t\t\t\t\t\t\t")
    # copy: commit events may touch rows, which would mutate processed_rows mid-iteration
    processed_rows = dict.copy(row_sets.processed_rows)
    for each_logic_row_key in processed_rows:
        each_logic_row = processed_rows[each_logic_row_key]
        logic_bank.engine_logger.debug("visit: " + each_logic_row.__str__())
        commit_row_events = rule_bank_withdraw.rules_of_class(each_logic_row, CommitRowEvent)
        for each_row_event in commit_row_events:
            each_logic_row.log("Commit Event")
            each_row_event.execute(each_logic_row)

    """
    Proceed with sqlalchemy flush processing
    """
    logic_bank.logic_logger.debug("Logic Phase:\t\tFLUSH   (sqlalchemy flush processing       \t")
Code example #8
0
 def constraints(self):
     """Evaluate every Constraint rule for this row; a failing constraint raises."""
     for each_rule in rule_bank_withdraw.rules_of_class(self, Constraint):
         each_rule.execute(self)
Code example #9
0
File: logic_row.py  Project: valhuber/LogicBank
    def cascade_delete_children(self):
        """
        This recursive descent is required to adjust dependent sums/counts on passive_deletes; ie,

        when (and only when) the DBMS - and *not* SQLAlchemy - does the deletes.

        (When SQLAlchemy does deletes, these are queued through the normal delete logic.)
        @see nw/tests/test_dlt_order.py
        """

        parent_mapper = object_mapper(self.row)
        my_relationships = parent_mapper.relationships
        for each_relationship in my_relationships:  # eg, cust has child OrderDetail
            if each_relationship.direction == sqlalchemy.orm.interfaces.ONETOMANY:  # eg, OrderDetail
                child_role_name = each_relationship.key  # eg, OrderList
                # only when the DBMS does the delete (cascade + passive_deletes)
                if each_relationship.cascade.delete and each_relationship.passive_deletes:
                    child_rows = getattr(self.row, child_role_name)
                    for each_child_row in child_rows:
                        # wrap child as LogicRow so its delete rules fire (recursing further down)
                        old_child = self.make_copy(each_child_row)
                        each_child_logic_row = LogicRow(row=each_child_row,
                                                        old_row=old_child,
                                                        ins_upd_dlt="dlt",
                                                        nest_level=1 +
                                                        self.nest_level,
                                                        a_session=self.session,
                                                        row_sets=self.row_sets)
                        each_child_logic_row.delete(
                            reason="Cascade Delete to run rules on - " +
                            child_role_name)
                        self.session.delete(
                            each_child_row
                        )  # deletes in beforeFlush are not re-queued
        enforce_cascade = False
        # dead branch below is retained deliberately as reference for a
        # possible future engine-level enforcement of parent_cascade rules
        if enforce_cascade:  # disabled - SQLAlchemy DOES enforce cascade delete/nullify; prevent way less important
            """
            per parent_cascade rule(s), nullify (child FKs), delete (children), prevent (if children exist)

            Default is ParentCascadeAction.PREVENT.

            This recursive descent is required to adjust dependent sums/counts.
            """
            # map relationship name -> its declared ParentCascade rule (if any)
            list_parent_cascade_rules = rule_bank_withdraw.rules_of_class(
                self, ParentCascade)
            defined_relns = {}
            for each_parent_cascade_rule in list_parent_cascade_rules:
                defined_relns[each_parent_cascade_rule.
                              _relationship] = each_parent_cascade_rule
            for each_relationship in my_relationships:  # eg, Order has child OrderDetail
                if each_relationship.direction == sqlalchemy.orm.interfaces.ONETOMANY:  # eg, OrderDetail
                    each_child_role_name = each_relationship.key  # eg, OrderDetailList
                    refinteg_action = ParentCascadeAction.PREVENT
                    if each_child_role_name in defined_relns:
                        refinteg_action = defined_relns[
                            each_child_role_name]._action
                    child_rows = getattr(self.row, each_child_role_name)
                    for each_child_row in child_rows:
                        old_child = self.make_copy(each_child_row)
                        each_child_logic_row = LogicRow(row=each_child_row,
                                                        old_row=old_child,
                                                        ins_upd_dlt="dlt",
                                                        nest_level=1 +
                                                        self.nest_level,
                                                        a_session=self.session,
                                                        row_sets=self.row_sets)

                        if refinteg_action == ParentCascadeAction.DELETE:  # each_relationship.cascade.delete:
                            each_child_logic_row.delete(
                                reason="Cascade Delete - " +
                                each_child_role_name)

                        elif refinteg_action == ParentCascadeAction.NULLIFY:
                            # clear the child's fk columns, then run its update rules
                            for p, c in each_relationship.local_remote_pairs:
                                setattr(each_child_row, c.name, None)
                            each_child_logic_row.update(
                                reason="Cascade Nullify - " +
                                each_child_role_name)

                        elif refinteg_action == ParentCascadeAction.PREVENT:
                            msg = "Delete rejected - " + each_child_role_name + " has rows"
                            ll = RuleBank()
                            if ll.constraint_event:
                                ll.constraint_event(message=msg,
                                                    logic_row=self,
                                                    constraint=None)
                            raise ConstraintException(msg)
                        else:
                            raise Exception("Invalid parent_cascade action: " +
                                            refinteg_action)
Code example #10
0
File: logic_row.py  Project: valhuber/LogicBank
 def row_events(self):
     """Fire each class-specific RowEvent rule for this row."""
     self.log_engine("row_events")
     for each_event in rule_bank_withdraw.rules_of_class(self, RowEvent):
         each_event.execute(self)
Code example #11
0
def before_flush(a_session: session, a_flush_context, an_instances):
    """
    Logic Execution processes LogicRows: row and old_row

    Note old_row is critical for:
        * user logic (did the value change?  by how much?)
        * performance / pruning (skip rules iff no dependent values change)
        * performance / optimization (1 row adjustments, not expensive select sum/count)
    """

    """
    Logic Phase
    """
    logic_bank.logic_logger.info(f'Logic Phase:\t\tROW LOGIC(session={str(hex(id(a_session)))}) (sqlalchemy before_flush)\t\t\t')

    row_sets = RowSets()  # type : RowSet
    client_inserts = []  # a_session.new, snapshotted before logic runs

    # register all client-submitted rows before any rule fires
    for each_instance in a_session.dirty:
        row_sets.add_submitted(each_instance)

    for each_instance in a_session.new:
        row_sets.add_submitted(each_instance)
        """ inserts first...
            SQLAlchemy queues these on a_session.new (but *not* updates!)
            so, process the client changes, so that triggered inserts (eg. audit) aren't run twice
        """
        client_inserts.append(each_instance)

    bug_explore = None  # None to disable, [None, None] to enable
    if bug_explore is not None:  # temp hack - order rows to explore bug (upd_order_reuse)
        temp_debug(a_session, bug_explore, row_sets)
    else:
        # dispatch update logic for each client-changed row
        for each_instance in a_session.dirty:
            old_row = get_old_row(each_instance, a_session)
            logic_row = LogicRow(row=each_instance, old_row=old_row, ins_upd_dlt="upd",
                                 nest_level=0, a_session=a_session, row_sets=row_sets)
            logic_row.update(reason="client")

    # iterate the snapshot, not a_session.new, so logic-triggered
    # inserts (eg, audit rows) are not processed a second time
    for each_instance in client_inserts:  # a_session.new:
        logic_row = LogicRow(row=each_instance, old_row=None, ins_upd_dlt="ins",
                             nest_level=0, a_session=a_session, row_sets=row_sets)
        logic_row.insert(reason="client")

    for each_instance in a_session.deleted:
        logic_row = LogicRow(row=each_instance, old_row=None, ins_upd_dlt="dlt",
                             nest_level=0, a_session=a_session, row_sets=row_sets)
        logic_row.delete(reason="client")


    """
    Commit Logic Phase
    """
    logic_bank.logic_logger.info(f'Logic Phase:\t\tCOMMIT(session={str(hex(id(a_session)))})   \t\t\t\t\t\t\t\t\t\t')
    # copy: commit events may touch rows, which would mutate processed_rows mid-iteration
    processed_rows = dict.copy(row_sets.processed_rows)  # set in LogicRow ctor
    for each_logic_row_key in processed_rows:
        each_logic_row = processed_rows[each_logic_row_key]
        logic_bank.engine_logger.debug("visit: " + each_logic_row.__str__())
        commit_row_events = rule_bank_withdraw.rules_of_class(each_logic_row, CommitRowEvent)
        for each_row_event in commit_row_events:
            each_logic_row.log("Commit Event")
            each_row_event.execute(each_logic_row)

    """
    Proceed with sqlalchemy flush processing
    """
    logic_bank.logic_logger.info(f'Logic Phase:\t\tFLUSH(session={str(hex(id(a_session)))})   (sqlalchemy flush processing)       \t')