Example #1
    def init_class_attribute(self, mapper):
        self.is_class_level = True

        strategies._register_attribute(self,
            mapper,
            useobject=True,
            impl_class=DynamicAttributeImpl,
            target_mapper=self.parent_property.mapper,
            order_by=self.parent_property.order_by,
            query_class=self.parent_property.query_class
        )

    def create_row_processor(self, selectcontext, path, mapper, row, adapter):
        return (None, None)

log.class_logger(DynaLoader)

class DynamicAttributeImpl(attributes.AttributeImpl):
    uses_objects = True
    accepts_scalar_loader = False

    def __init__(self, class_, key, typecallable,
                     target_mapper, order_by, query_class=None, **kwargs):
        super(DynamicAttributeImpl, self).__init__(class_, key, typecallable, **kwargs)
        self.target_mapper = target_mapper
        self.order_by = order_by
        if not query_class:
            self.query_class = AppenderQuery
        elif AppenderMixin in query_class.mro():
            self.query_class = query_class
        else:
            self.query_class = mixin_user_query(query_class)
Example #2
                col = adapter.columns[col]
            if col is not None and col in row:

                def new_execute(state, dict_, row):
                    dict_[key] = row[col]

                return new_execute, None
        else:

            def new_execute(state, dict_, row):
                state.expire_attribute_pre_commit(dict_, key)

            return new_execute, None


log.class_logger(ColumnLoader)


class CompositeColumnLoader(ColumnLoader):
    """Strategize the loading of a composite column-based MapperProperty."""

    def init_class_attribute(self, mapper):
        self.is_class_level = True
        self.logger.info("%s register managed composite attribute", self)

        def copy(obj):
            if obj is None:
                return None
            return self.parent_property.composite_class(*obj.__composite_values__())

        def compare(a, b):
Example #3
        for item, cycles in nodes:
            task = self.get_task_by_mapper(item)
            if cycles:
                for t in task._sort_circular_dependencies(self, [self.get_task_by_mapper(i) for i in cycles]):
                    ret.append(t)
            else:
                ret.append(task)

        if self._should_log_debug:
            self.logger.debug("Dependent tuples:\n" + "\n".join(
                    "(%s->%s)" % (d[0].class_.__name__, d[1].class_.__name__)
                    for d in self.dependencies))
            self.logger.debug("Dependency sort:\n"+ str(ret))
        return ret

log.class_logger(UOWTransaction)

class UOWTask(object):
    """A collection of mapped states corresponding to a particular mapper."""

    def __init__(self, uowtransaction, mapper, base_task=None):
        self.uowtransaction = uowtransaction

        # base_task is the UOWTask which represents the "base mapper"
        # in our mapper's inheritance chain.  if the mapper does not
        # inherit from any other mapper, the base_task is self.
        # the _inheriting_tasks dictionary is a dictionary present only
        # on the "base_task"-holding UOWTask, which maps all mappers within
        # an inheritance hierarchy to their corresponding UOWTask instances.
        if base_task is None:
            self.base_task = self
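
The comment above is the clearest description of the base_task mechanism in these excerpts; the following is a minimal, self-contained sketch of that registry idea (class and attribute names mirror the comment, but this is illustrative, not the library's implementation):

# Illustrative only: every task in an inheritance chain shares one registry,
# owned by the task created for the base-most mapper.
class Task(object):
    def __init__(self, mapper, base_task=None):
        self.mapper = mapper
        if base_task is None:
            # this task is its own base and owns the hierarchy registry
            self.base_task = self
            self._inheriting_tasks = {mapper: self}
        else:
            # an inheriting task registers itself with the shared base task
            self.base_task = base_task
            base_task._inheriting_tasks[mapper] = self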
Example #4
            if cycles:
                for t in task._sort_circular_dependencies(
                        self, [self.get_task_by_mapper(i) for i in cycles]):
                    ret.append(t)
            else:
                ret.append(task)

        if self._should_log_debug:
            self.logger.debug("Dependent tuples:\n" + "\n".join(
                "(%s->%s)" % (d[0].class_.__name__, d[1].class_.__name__)
                for d in self.dependencies))
            self.logger.debug("Dependency sort:\n" + str(ret))
        return ret


log.class_logger(UOWTransaction)


class UOWTask(object):
    """A collection of mapped states corresponding to a particular mapper."""
    def __init__(self, uowtransaction, mapper, base_task=None):
        self.uowtransaction = uowtransaction

        # base_task is the UOWTask which represents the "base mapper"
        # in our mapper's inheritance chain.  if the mapper does not
        # inherit from any other mapper, the base_task is self.
        # the _inheriting_tasks dictionary is a dictionary present only
        # on the "base_task"-holding UOWTask, which maps all mappers within
        # an inheritance hierarchy to their corresponding UOWTask instances.
        if base_task is None:
            self.base_task = self
Example #5
    def init_class_attribute(self, mapper):
        self.is_class_level = True

        strategies._register_attribute(self,
            mapper,
            useobject=True,
            impl_class=DynamicAttributeImpl, 
            target_mapper=self.parent_property.mapper, 
            order_by=self.parent_property.order_by, 
            query_class=self.parent_property.query_class
        )

    def create_row_processor(self, selectcontext, path, mapper, row, adapter):
        return (None, None)

log.class_logger(DynaLoader)

class DynamicAttributeImpl(attributes.AttributeImpl):
    uses_objects = True
    accepts_scalar_loader = False

    def __init__(self, class_, key, typecallable, 
                     target_mapper, order_by, query_class=None, **kwargs):
        super(DynamicAttributeImpl, self).__init__(class_, key, typecallable, **kwargs)
        self.target_mapper = target_mapper
        self.order_by = order_by
        if not query_class:
            self.query_class = AppenderQuery
        else:
            self.query_class = mixin_user_query(query_class)
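
Both dynamic-loader excerpts above select a query_class from the user's input; here is a minimal, runnable sketch of that selection logic (AppenderMixin, AppenderQuery and mixin_user_query are stand-ins modeled on the names in the snippets, not the library's actual definitions):

class AppenderMixin(object):
    pass

class AppenderQuery(AppenderMixin):
    pass

def mixin_user_query(cls):
    # build a new subclass layering AppenderMixin over the user's query class
    return type("Appender" + cls.__name__, (AppenderMixin, cls), {})

def choose_query_class(query_class=None):
    if not query_class:
        return AppenderQuery
    elif AppenderMixin in query_class.mro():
        return query_class
    else:
        return mixin_user_query(query_class)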
Example #6
            result_list = util.UniqueAppender(collection,
                                              'append_without_event')
            context.attributes[(state, key)] = result_list
            for nested_row in row[our_col]:
                _instance(nested_row, result_list)

        def load_collection_from_nested_exec(state, dict_, row):
            for nested_row in row[our_col]:
                _instance(nested_row, None)

        return load_collection_from_nested_new_row, \
                None, \
                None, load_collection_from_nested_exec

    def _create_scalar_loader(self, context, key, our_col, _instance):
        def load_scalar_from_nested_new_row(state, dict_, row):
            nested_row = row[our_col].first()
            dict_[key] = _instance(nested_row, None)

        def load_scalar_from_nested_exec(state, dict_, row):
            nested_row = row[our_col].first()
            _instance(nested_row, None)

        return load_scalar_from_nested_new_row, \
                None, \
                None, load_scalar_from_nested_exec

log.class_logger(NestedLoader)

_factory["akiban_nested"] = NestedLoader
Example #7
        Cached per-connection. This value can not change without a server
        restart.
        """

        return 0

    def _detect_collations(self, connection):
        """Pull the active COLLATIONS list from the server.

        Cached per-connection.
        """

        collations = {}
        charset = self._connection_charset
        rs = connection.execute(
            'SELECT CHARACTER_SET_NAME, COLLATION_NAME FROM'
            ' data_dictionary.COLLATIONS')
        for row in self._compat_fetchall(rs, charset):
            collations[row[0]] = row[1]
        return collations

    def _detect_ansiquotes(self, connection):
        """Detect and adjust for the ANSI_QUOTES sql mode."""

        self._server_ansiquotes = False
        self._backslash_escapes = False


log.class_logger(DrizzleDialect)
Example #8
        Cached per-connection. This value can not change without a server
        restart.
        """

        return 0

    def _detect_collations(self, connection):
        """Pull the active COLLATIONS list from the server.

        Cached per-connection.
        """

        collations = {}
        charset = self._connection_charset
        rs = connection.execute(
            'SELECT CHARACTER_SET_NAME, COLLATION_NAME FROM'
            ' data_dictionary.COLLATIONS')
        for row in self._compat_fetchall(rs, charset):
            collations[row[0]] = row[1]
        return collations

    def _detect_ansiquotes(self, connection):
        """Detect and adjust for the ANSI_QUOTES sql mode."""

        self._server_ansiquotes = False
        self._backslash_escapes = False


log.class_logger(DrizzleDialect)
Example #9
                    "%s returning active column fetcher" % self,
                    lambda state, row, **flags: "%s populating %s" %
                    (self, mapperutil.state_attribute_str(state, key)))
            return (new_execute, None)
        else:

            def new_execute(state, row, isnew, **flags):
                if isnew:
                    state.expire_attributes([key])

            if self._should_log_debug:
                self.logger.debug("%s deferring load" % self)
            return (new_execute, None)


log.class_logger(ColumnLoader)


class CompositeColumnLoader(ColumnLoader):
    """Strategize the loading of a composite column-based MapperProperty."""
    def init_class_attribute(self, mapper):
        self.is_class_level = True
        self.logger.info("%s register managed composite attribute" % self)

        def copy(obj):
            if obj is None:
                return None
            return self.parent_property.composite_class(
                *obj.__composite_values__())

        def compare(a, b):
Example #10
                col = adapter.columns[col]
            if col is not None and col in row:

                def new_execute(state, dict_, row):
                    dict_[key] = row[col]

                return new_execute, None, None
        else:

            def new_execute(state, dict_, row):
                state.expire_attribute_pre_commit(dict_, key)

            return new_execute, None, None


log.class_logger(ColumnLoader)


class DeferredColumnLoader(LoaderStrategy):
    """Provide loading behavior for a deferred :class:`.ColumnProperty`."""
    def create_row_processor(self, context, path, reduced_path, mapper, row,
                             adapter):
        col = self.columns[0]
        if adapter:
            col = adapter.columns[col]

        key = self.key
        if col in row:
            return self.parent_property._get_strategy(ColumnLoader).\
                        create_row_processor(
                                context, path, reduced_path, mapper, row, adapter)
Example #11
                return self.prop.columns[0]._annotate({"parententity": self.mapper})
                
        def operate(self, op, *other, **kwargs):
            return op(self.__clause_element__(), *other, **kwargs)

        def reverse_operate(self, op, other, **kwargs):
            col = self.__clause_element__()
            return op(col._bind_param(other), col, **kwargs)
    
    # TODO: legacy..do we need this ? (0.5)
    ColumnComparator = Comparator
    
    def __str__(self):
        return str(self.parent.class_.__name__) + "." + self.key

log.class_logger(ColumnProperty)

class CompositeProperty(ColumnProperty):
    """subclasses ColumnProperty to provide composite type support."""
    
    def __init__(self, class_, *columns, **kwargs):
        if 'comparator' in kwargs:
            util.warn_deprecated("The 'comparator' argument to CompositeProperty is deprecated.  Use comparator_factory.")
            kwargs['comparator_factory'] = kwargs['comparator']
        super(CompositeProperty, self).__init__(*columns, **kwargs)
        self._col_position_map = util.column_dict((c, i) for i, c in enumerate(columns))
        self.composite_class = class_
        self.strategy_class = strategies.CompositeColumnLoader

    def copy(self):
        return CompositeProperty(deferred=self.deferred, group=self.group, composite_class=self.composite_class, *self.columns)
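
The operate/reverse_operate hooks near the top of this example are the generic path that comparisons take; below is a runnable sketch of that flow, with a fake column object standing in for the real clause element (all names here are illustrative):

import operator

class FakeColumn(object):
    def __init__(self, name):
        self.name = name

    def __eq__(self, other):
        # stand-in for SQL expression construction
        return "%s = :param_1" % self.name

class SimpleComparator(object):
    def __init__(self, col):
        self.col = col

    def __clause_element__(self):
        return self.col

    def operate(self, op, *other, **kwargs):
        # same shape as the Comparator above: resolve the clause element,
        # then apply the operator to it
        return op(self.__clause_element__(), *other, **kwargs)

print(SimpleComparator(FakeColumn("name")).operate(operator.eq, "ed"))
# prints: name = :param_1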
Example #12
                return self.prop.columns[0]._annotate({"parententity": self.mapper})

        def operate(self, op, *other, **kwargs):
            return op(self.__clause_element__(), *other, **kwargs)

        def reverse_operate(self, op, other, **kwargs):
            col = self.__clause_element__()
            return op(col._bind_param(other), col, **kwargs)

    ColumnComparator = Comparator

    def __str__(self):
        return str(self.parent.class_.__name__) + "." + self.key


log.class_logger(ColumnProperty)


class CompositeProperty(ColumnProperty):
    """subclasses ColumnProperty to provide composite type support."""

    def __init__(self, class_, *columns, **kwargs):
        if "comparator" in kwargs:
            util.warn_deprecated(
                "The 'comparator' argument to CompositeProperty is deprecated.  Use comparator_factory."
            )
            kwargs["comparator_factory"] = kwargs["comparator"]
        super(CompositeProperty, self).__init__(*columns, **kwargs)
        self._col_position_map = util.column_dict((c, i) for i, c in enumerate(columns))
        self.composite_class = class_
        self.strategy_class = strategies.CompositeColumnLoader
Example #13
                                              'append_without_event')
            context.attributes[(state, key)] = result_list
            for nested_row in row[our_col]:
                _instance(nested_row, result_list)

        def load_collection_from_nested_exec(state, dict_, row):
            for nested_row in row[our_col]:
                _instance(nested_row, None)

        return load_collection_from_nested_new_row, \
                None, \
                None, load_collection_from_nested_exec

    def _create_scalar_loader(self, context, key, our_col, _instance):
        def load_scalar_from_nested_new_row(state, dict_, row):
            nested_row = row[our_col].first()
            dict_[key] = _instance(nested_row, None)

        def load_scalar_from_nested_exec(state, dict_, row):
            nested_row = row[our_col].first()
            _instance(nested_row, None)

        return load_scalar_from_nested_new_row, \
                None, \
                None, load_scalar_from_nested_exec


log.class_logger(NestedLoader)

_factory["akiban_nested"] = NestedLoader
Example #14
        key = self.key
        # look through list of columns represented here
        # to see which, if any, is present in the row.
        for col in self.columns:
            if adapter:
                col = adapter.columns[col]
            if col is not None and col in row:
                def new_execute(state, dict_, row):
                    dict_[key] = row[col]
                return new_execute, None, None
        else:
            def new_execute(state, dict_, row):
                state.expire_attribute_pre_commit(dict_, key)
            return new_execute, None, None

log.class_logger(ColumnLoader)

class DeferredColumnLoader(LoaderStrategy):
    """Provide loading behavior for a deferred :class:`.ColumnProperty`."""

    def create_row_processor(self, context, path, reduced_path, mapper, row, adapter):
        col = self.columns[0]
        if adapter:
            col = adapter.columns[col]

        key = self.key
        if col in row:
            return self.parent_property._get_strategy(ColumnLoader).\
                        create_row_processor(
                                context, path, reduced_path, mapper, row, adapter)
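
Finally, the new_execute closures that recur throughout these column-loader excerpts all follow the same shape; a self-contained sketch of that pattern, with plain dicts standing in for ORM state and result rows (names are illustrative):

def make_column_processor(key, col):
    # returns a per-query callable that copies one column value
    # from a result row into the instance's attribute dictionary
    def new_execute(state, dict_, row):
        dict_[key] = row[col]
    return new_execute

proc = make_column_processor("name", "users_name")
instance_dict = {}
proc(None, instance_dict, {"users_name": "ed"})
assert instance_dict == {"name": "ed"}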