Exemplo n.º 1
0
    def test_load_only_path_specific(self):
        """load_only() scoped per relationship path: the User entity loads
        only "name", the joined-eager "addresses" loads id/email_address,
        and "orders" loads only its id."""
        User = self.classes.User
        Address = self.classes.Address
        Order = self.classes.Order

        users = self.tables.users
        addresses = self.tables.addresses
        orders = self.tables.orders

        # OrderedDict so the two joined-eager relationships render in a
        # deterministic order in the compiled SQL asserted below
        mapper(User, users, properties=util.OrderedDict([
                ("addresses", relationship(Address, lazy="joined")),
                ("orders", relationship(Order, lazy="joined"))
            ]))

        mapper(Address, addresses)
        mapper(Order, orders)

        sess = create_session()

        q = sess.query(User).options(
                load_only("name").defaultload("addresses").load_only("id", "email_address"),
                defaultload("orders").load_only("id")
            )

        # hmmmm joinedload seems to be forcing users.id into here...
        self.assert_compile(
            q,
            "SELECT users.id AS users_id, users.name AS users_name, "
            "addresses_1.id AS addresses_1_id, "
            "addresses_1.email_address AS addresses_1_email_address, "
            "orders_1.id AS orders_1_id FROM users "
            "LEFT OUTER JOIN addresses AS addresses_1 "
            "ON users.id = addresses_1.user_id "
            "LEFT OUTER JOIN orders AS orders_1 ON users.id = orders_1.user_id"
        )
Exemplo n.º 2
0
    def test_serialize_context_dict(self):
        """PathRegistry.set() stores values keyed on (key, path) tuples;
        serialize_context_dict() emits only the requested keys, with each
        path in serialized form."""
        reg = util.OrderedDict()
        umapper = inspect(self.classes.User)
        amapper = inspect(self.classes.Address)

        p1 = PathRegistry.coerce((umapper, umapper.attrs.addresses))
        p2 = PathRegistry.coerce((umapper, umapper.attrs.addresses, amapper))
        p3 = PathRegistry.coerce((amapper, amapper.attrs.email_address))

        p1.set(reg, "p1key", "p1value")
        p2.set(reg, "p2key", "p2value")
        p3.set(reg, "p3key", "p3value")
        eq_(
            reg,
            {
                ("p1key", p1.path): "p1value",
                ("p2key", p2.path): "p2value",
                ("p3key", p3.path): "p3value",
            },
        )

        # only p1key/p2key are requested, so the p3key entry is filtered out
        serialized = PathRegistry.serialize_context_dict(
            reg, ("p1key", "p2key"))
        eq_(
            serialized,
            [
                (("p1key", p1.serialize()), "p1value"),
                (("p2key", p2.serialize()), "p2value"),
            ],
        )
Exemplo n.º 3
0
    def test_update_ordereddict(self):
        """An OrderedDict of UPDATE values behaves like a plain dict: the
        SET clause is emitted in table-column order, not insertion order."""
        tbl = self.tables.mytable

        # insertion order here is name-then-myid; the compiled statement
        # below must still list myid first (table order)
        update_values = util.OrderedDict((
            (tbl.c.name, tbl.c.name + "lala"),
            (tbl.c.myid, func.do_stuff(tbl.c.myid, literal("hoho"))),
        ))

        whereclause = (tbl.c.myid == func.hoho(4)) & (
            tbl.c.name == literal("foo") + tbl.c.name + literal("lala")
        )
        stmt = update(tbl, whereclause, values=update_values)

        self.assert_compile(
            stmt,
            "UPDATE mytable "
            "SET "
            "myid=do_stuff(mytable.myid, :param_1), "
            "name=(mytable.name || :name_1) "
            "WHERE "
            "mytable.myid = hoho(:hoho_1) AND "
            "mytable.name = :param_2 || mytable.name || :param_3",
        )
Exemplo n.º 4
0
    def test_undefer_group_multi_pathed(self):
        """Chaining undefer_group() for two different groups on one Load
        object loads every deferred column in a single SELECT."""
        orders, Order = self.tables.orders, self.classes.Order

        # two deferral groups: user_id/description in 'primary',
        # isopen in 'secondary'
        mapper(Order,
               orders,
               properties=util.OrderedDict([
                   ('userident', deferred(orders.c.user_id, group='primary')),
                   ('description',
                    deferred(orders.c.description, group='primary')),
                   ('opened', deferred(orders.c.isopen, group='secondary'))
               ]))

        sess = create_session()
        q = sess.query(Order).order_by(Order.id)

        def go():
            result = q.options(
                Load(Order).undefer_group('primary').undefer_group(
                    'secondary')).all()
            o2 = result[2]
            eq_(o2.opened, 1)
            eq_(o2.userident, 7)
            eq_(o2.description, 'order 3')

        # exactly one statement: no secondary SELECTs for the deferred cols
        self.sql_eq_(go, [("SELECT orders.user_id AS orders_user_id, "
                           "orders.description AS orders_description, "
                           "orders.isopen AS orders_isopen, "
                           "orders.id AS orders_id, "
                           "orders.address_id AS orders_address_id "
                           "FROM orders ORDER BY orders.id", {})])
Exemplo n.º 5
0
    def test_custompk(self):
        """test that the primary_key attribute is propagated to the polymorphic mapper"""

        class T1(object):pass
        class T2(T1):pass

        # create a polymorphic union with the select against the base table first.
        # with the join being second, the alias of the union will
        # pick up two "primary key" columns.  technically the alias should have a
        # 2-col pk in any case but the leading select has a NULL for the "t2id" column
        d = util.OrderedDict()
        d['t1'] = t1.select(t1.c.type=='t1')
        d['t2'] = t1.join(t2)
        pjoin = polymorphic_union(d, None, 'pjoin')

        # primary_key=[pjoin.c.id] pins the polymorphic selectable to a
        # single-column primary key
        mapper(T1, t1, polymorphic_on=t1.c.type, polymorphic_identity='t1', with_polymorphic=('*', pjoin), primary_key=[pjoin.c.id])
        mapper(T2, t2, inherits=T1, polymorphic_identity='t2')
        # Python 2 print statement -- looks like leftover debug output
        print [str(c) for c in class_mapper(T1).primary_key]
        ot1 = T1()
        ot2 = T2()
        sess = create_session()
        # NOTE(review): save()/clear() are the legacy Session API
        # (later spelled add()/expunge_all())
        sess.save(ot1)
        sess.save(ot2)
        sess.flush()
        sess.clear()

        # query using get(), using only one value.  this requires the select_table mapper
        # has the same single-col primary key.
        assert sess.query(T1).get(ot1.id).id == ot1.id

        ot1 = sess.query(T1).get(ot1.id)
        ot1.data = 'hi'
        sess.flush()
Exemplo n.º 6
0
    def __init__(cls, classname, bases, dict_):
        """Declarative metaclass hook: collect Column/MapperProperty
        attributes from the class body, build the Table if one wasn't
        supplied, then map the class."""
        # the base class that introduces _decl_class_registry is not
        # itself mapped
        if '_decl_class_registry' in cls.__dict__:
            return type.__init__(cls, classname, bases, dict_)

        cls._decl_class_registry[classname] = cls
        our_stuff = util.OrderedDict()
        for k in dict_:
            value = dict_[k]
            # a trailing comma turns an attribute into a one-element tuple;
            # warn instead of silently ignoring it
            if (isinstance(value, tuple) and len(value) == 1
                    and isinstance(value[0], (Column, MapperProperty))):
                util.warn("Ignoring declarative-like tuple value of attribute "
                          "%s: possibly a copy-and-paste error with a comma "
                          "left at the end of the line?" % k)
                continue
            if not isinstance(value, (Column, MapperProperty)):
                continue
            prop = _deferred_relation(cls, value)
            our_stuff[k] = prop

        table = None
        if '__table__' not in cls.__dict__:
            if '__tablename__' in cls.__dict__:
                tablename = cls.__tablename__
                autoload = cls.__dict__.get('__autoload__')
                if autoload:
                    table_kw = {'autoload': True}
                else:
                    table_kw = {}
                # gather Column objects not yet bound to any table
                cols = []
                for key, c in our_stuff.iteritems():  # Python 2 dict API
                    if isinstance(c, ColumnProperty):
                        for col in c.columns:
                            if isinstance(col, Column) and col.table is None:
                                _undefer_column_name(key, col)
                                cols.append(col)
                    elif isinstance(c, Column):
                        _undefer_column_name(key, c)
                        cols.append(c)
                cls.__table__ = table = Table(tablename, cls.metadata, *cols,
                                              **table_kw)
        else:
            table = cls.__table__

        # NOTE(review): when no mapped superclass is found this sets
        # inherits=None explicitly -- confirm mapper() treats that the
        # same as omitting the argument
        mapper_args = getattr(cls, '__mapper_args__', {})
        if 'inherits' not in mapper_args:
            inherits = cls.__mro__[1]
            inherits = cls._decl_class_registry.get(inherits.__name__, None)
            mapper_args['inherits'] = inherits

        if hasattr(cls, '__mapper_cls__'):
            mapper_cls = util.unbound_method_to_callable(cls.__mapper_cls__)
        else:
            mapper_cls = mapper
        cls.__mapper__ = mapper_cls(cls,
                                    table,
                                    properties=our_stuff,
                                    **mapper_args)
        return type.__init__(cls, classname, bases, dict_)
Exemplo n.º 7
0
    def test_odict(self):
        """Exercise OrderedDict insertion-order behavior across pop,
        update and setdefault."""
        od = util.OrderedDict()
        od['a'] = 1
        od['b'] = 2
        od['snack'] = 'attack'
        od['c'] = 3

        eq_(od.keys(), ['a', 'b', 'snack', 'c'])
        eq_(od.values(), [1, 2, 'attack', 3])

        # pop removes the key without disturbing the remaining order
        od.pop('snack')
        eq_(od.keys(), ['a', 'b', 'c'])
        eq_(od.values(), [1, 2, 3])

        # popping a missing key raises KeyError...
        try:
            od.pop('eep')
            assert False
        except KeyError:
            pass

        # ...unless a default is supplied
        eq_(od.pop('eep', 'woot'), 'woot')

        # pop accepts at most a key plus one default
        try:
            od.pop('whiff', 'bang', 'pow')
            assert False
        except TypeError:
            pass

        eq_(od.keys(), ['a', 'b', 'c'])
        eq_(od.values(), [1, 2, 3])

        other = util.OrderedDict(d=4)
        other['e'] = 5

        eq_(other.keys(), ['d', 'e'])
        eq_(other.values(), [4, 5])

        # update appends previously-unseen keys in the other dict's order
        od.update(other)
        eq_(od.keys(), ['a', 'b', 'c', 'd', 'e'])
        eq_(od.values(), [1, 2, 3, 4, 5])

        # setdefault leaves existing keys alone and appends new ones
        od.setdefault('c', 'zzz')
        od.setdefault('f', 6)
        eq_(od.keys(), ['a', 'b', 'c', 'd', 'e', 'f'])
        eq_(od.values(), [1, 2, 3, 4, 5, 6])
Exemplo n.º 8
0
 def __init__(self, uowtransaction, mapper):
     """Create a task for *mapper*, registering it in the owning
     transaction's task map when a transaction is supplied."""
     if uowtransaction is not None:
         uowtransaction.tasks[mapper] = self
     self.uowtransaction = uowtransaction
     self.mapper = mapper
     # instances to be processed by this task, in insertion order
     self.objects = util.OrderedDict()
     self.dependencies = []
     self.cyclical_dependencies = []
     # circular-dependency bookkeeping; both start unset here
     self.circular = None
     self.postcircular = None
     self.childtasks = []
Exemplo n.º 9
0
    def test_odict_copy(self):
        """Both OrderedDict.copy() and copy.copy() preserve key order."""
        original = util.OrderedDict()
        original["zzz"] = 1
        original["aaa"] = 2
        eq_(original.keys(), ['zzz', 'aaa'])

        # the dict's own copy()
        dup = original.copy()
        eq_(dup.keys(), original.keys())

        # the copy-module protocol
        shallow = copy.copy(original)
        eq_(shallow.keys(), original.keys())
Exemplo n.º 10
0
    def process_dependencies(self, task, deplist, uowcommit, delete=False):
        """Register saves/deletes for the association objects of each
        parent in *deplist*.

        For the association mapper, the list of association objects is
        organized into a unique mapping based on the mapper-level
        "primary key".  A newly added association item that corresponds
        to an existing one is "merged" by moving the ``_instance_key``
        onto the added item, so the unit of work emits an UPDATE instead
        of a DELETE plus INSERT.
        """
        for obj in deplist:
            childlist = self.get_object_dependencies(obj,
                                                     uowcommit,
                                                     passive=True)
            if childlist is None:
                continue

            if not delete:
                # unique-ify added children on their instance key
                tosave = util.OrderedDict()
                for child in childlist:
                    self._synchronize(obj, child, None, False)
                    key = self.mapper.instance_key(child)
                    tosave[key] = child
                    uowcommit.unregister_object(child)

                # a deleted child whose key was re-added is really an
                # update; only truly-removed keys are deleted.
                # ("key not in tosave" replaces the removed-in-Python-3
                # dict.has_key() call; identical semantics.)
                todelete = {}
                for child in childlist.deleted_items():
                    self._synchronize(obj, child, None, False)
                    key = self.mapper.instance_key(child)
                    if key not in tosave:
                        todelete[key] = child
                    else:
                        tosave[key]._instance_key = key
                    uowcommit.unregister_object(child)

                for child in childlist.unchanged_items():
                    key = self.mapper.instance_key(child)
                    tosave[key]._instance_key = key

                # distinct loop variable: the original reused "obj" here,
                # shadowing the outer parent-object loop variable
                for item in tosave.values():
                    uowcommit.register_object(item)
                for item in todelete.values():
                    uowcommit.register_object(item, isdelete=True)
            else:
                # parent is being deleted: every child row goes away
                todelete = {}
                for child in childlist.unchanged_items(
                ) + childlist.deleted_items():
                    self._synchronize(obj, child, None, False)
                    key = self.mapper.instance_key(child)
                    todelete[key] = child
                for item in todelete.values():
                    uowcommit.register_object(item, isdelete=True)
Exemplo n.º 11
0
    def test_undefer_group_from_relationship_subqueryload(self):
        """undefer_group() applied to a subqueryload()'d relationship:
        the deferred Order columns come back inside the subquery-load
        statement itself."""
        users, Order, User, orders = \
            (self.tables.users,
             self.classes.Order,
             self.classes.User,
             self.tables.orders)

        mapper(
            User,
            users,
            properties=dict(orders=relationship(Order, order_by=orders.c.id)))
        # all three deferred columns share the 'primary' group
        mapper(Order,
               orders,
               properties=util.OrderedDict([
                   ('userident', deferred(orders.c.user_id, group='primary')),
                   ('description',
                    deferred(orders.c.description, group='primary')),
                   ('opened', deferred(orders.c.isopen, group='primary'))
               ]))

        sess = create_session()
        q = sess.query(User).filter(User.id == 7).options(
            subqueryload(User.orders).undefer_group('primary'))

        def go():
            result = q.all()
            o2 = result[0].orders[1]
            eq_(o2.opened, 1)
            eq_(o2.userident, 7)
            eq_(o2.description, 'order 3')

        # two statements total: the User query plus the subquery load,
        # with no extra SELECT for the deferred columns
        self.sql_eq_(go, [
            ("SELECT users.id AS users_id, users.name AS users_name "
             "FROM users WHERE users.id = :id_1", {
                 "id_1": 7
             }),
            ("SELECT orders.user_id AS orders_user_id, orders.description "
             "AS orders_description, orders.isopen AS orders_isopen, "
             "orders.id AS orders_id, orders.address_id AS orders_address_id, "
             "anon_1.users_id AS anon_1_users_id FROM (SELECT users.id AS "
             "users_id FROM users WHERE users.id = :id_1) AS anon_1 "
             "JOIN orders ON anon_1.users_id = orders.user_id ORDER BY "
             "anon_1.users_id, orders.id", [{
                 'id_1': 7
             }])
        ])
Exemplo n.º 12
0
    def test_group(self):
        """Deferred load with a group: touching one attribute of the
        group loads the whole group in a single second SELECT."""

        orders, Order = self.tables.orders, self.classes.Order

        # every deferred column belongs to the 'primary' group
        mapper(Order,
               orders,
               properties=util.OrderedDict([
                   ('userident', deferred(orders.c.user_id, group='primary')),
                   ('addrident', deferred(orders.c.address_id,
                                          group='primary')),
                   ('description',
                    deferred(orders.c.description, group='primary')),
                   ('opened', deferred(orders.c.isopen, group='primary'))
               ]))

        sess = create_session()
        q = sess.query(Order).order_by(Order.id)

        def go():
            result = q.all()
            o2 = result[2]
            eq_(o2.opened, 1)
            eq_(o2.userident, 7)
            eq_(o2.description, 'order 3')

        # first statement loads only the undeferred column; accessing
        # 'opened' then pulls the entire group in one more SELECT
        self.sql_eq_(go, [("SELECT orders.id AS orders_id "
                           "FROM orders ORDER BY orders.id", {}),
                          ("SELECT orders.user_id AS orders_user_id, "
                           "orders.address_id AS orders_address_id, "
                           "orders.description AS orders_description, "
                           "orders.isopen AS orders_isopen "
                           "FROM orders WHERE orders.id = :param_1", {
                               'param_1': 3
                           })])

        o2 = q.all()[2]
        eq_(o2.description, 'order 3')
        assert o2 not in sess.dirty
        # re-assigning the identical value must not dirty the object...
        o2.description = 'order 3'

        def go():
            sess.flush()

        # ...so the flush emits zero statements
        self.sql_count_(0, go)
Exemplo n.º 13
0
    def test_undefer_group_from_relationship_joinedload_colexpr(self):
        """undefer_group() across joinedload() where one deferred member
        is a SQL expression (lower(description)) rather than a plain
        column."""
        users, Order, User, orders = \
            (self.tables.users,
             self.classes.Order,
             self.classes.User,
             self.tables.orders)

        mapper(
            User,
            users,
            properties=dict(orders=relationship(Order, order_by=orders.c.id)))
        # 'lower_desc' defers a labeled column expression, not a table column
        mapper(Order,
               orders,
               properties=util.OrderedDict([
                   ('userident', deferred(orders.c.user_id, group='primary')),
                   ('lower_desc',
                    deferred(sa.func.lower(orders.c.description).label(None),
                             group='primary')),
                   ('opened', deferred(orders.c.isopen, group='primary'))
               ]))

        sess = create_session()
        q = sess.query(User).filter(User.id == 7).options(
            joinedload(User.orders).undefer_group('primary'))

        def go():
            result = q.all()
            o2 = result[0].orders[1]
            eq_(o2.opened, 1)
            eq_(o2.userident, 7)
            eq_(o2.lower_desc, 'order 3')

        # a single statement: the deferred group, including the lower()
        # expression, rides along in the joined query
        self.sql_eq_(go, [
            ("SELECT users.id AS users_id, users.name AS users_name, "
             "orders_1.user_id AS orders_1_user_id, "
             "lower(orders_1.description) AS lower_1, "
             "orders_1.isopen AS orders_1_isopen, orders_1.id AS orders_1_id, "
             "orders_1.address_id AS orders_1_address_id, "
             "orders_1.description AS orders_1_description FROM users "
             "LEFT OUTER JOIN orders AS orders_1 ON users.id = "
             "orders_1.user_id WHERE users.id = :id_1 "
             "ORDER BY orders_1.id", {
                 "id_1": 7
             })
        ])
Exemplo n.º 14
0
    def test_undefer_star(self):
        """undefer('*') un-defers every deferred column on the entity."""
        orders, Order = self.tables.orders, self.classes.Order

        mapper(Order, orders, properties=util.OrderedDict([
            ('userident', deferred(orders.c.user_id)),
            ('description', deferred(orders.c.description)),
            ('opened', deferred(orders.c.isopen))
        ]))

        session = create_session()
        query = session.query(Order).options(Load(Order).undefer('*'))

        # all three deferred columns appear, followed by the undeferred ones
        self.assert_compile(
            query,
            "SELECT orders.user_id AS orders_user_id, "
            "orders.description AS orders_description, "
            "orders.isopen AS orders_isopen, "
            "orders.id AS orders_id, "
            "orders.address_id AS orders_address_id "
            "FROM orders")
Exemplo n.º 15
0
    def _setup_stock_mapping(cls):
        """Map the standard fixture classes (User/Address/Order/Item/
        Keyword/Dingaling/Node/CompositePk) with their usual
        relationships, then configure all mappers."""
        Node, composite_pk_table, users, Keyword, items, Dingaling, \
            order_items, item_keywords, Item, User, dingalings, \
            Address, keywords, CompositePk, nodes, Order, orders, \
            addresses = cls.classes.Node, \
            cls.tables.composite_pk_table, cls.tables.users, \
            cls.classes.Keyword, cls.tables.items, \
            cls.classes.Dingaling, cls.tables.order_items, \
            cls.tables.item_keywords, cls.classes.Item, \
            cls.classes.User, cls.tables.dingalings, \
            cls.classes.Address, cls.tables.keywords, \
            cls.classes.CompositePk, cls.tables.nodes, \
            cls.classes.Order, cls.tables.orders, cls.tables.addresses

        # use OrderedDict on this one to support some tests that
        # assert the order of attributes (e.g. orm/test_inspect)
        mapper(User, users, properties=util.OrderedDict(
            [('addresses', relationship(Address, backref='user', order_by=addresses.c.id)),
            ('orders', relationship(Order, backref='user', order_by=orders.c.id)), # o2m, m2o
            ]
        ))
        mapper(Address, addresses, properties={
            'dingaling':relationship(Dingaling, uselist=False, backref="address")  #o2o
        })
        mapper(Dingaling, dingalings)
        mapper(Order, orders, properties={
            'items':relationship(Item, secondary=order_items, order_by=items.c.id),  #m2m
            'address':relationship(Address),  # m2o
        })
        mapper(Item, items, properties={
            'keywords':relationship(Keyword, secondary=item_keywords) #m2m
        })
        mapper(Keyword, keywords)

        # self-referential adjacency list
        mapper(Node, nodes, properties={
            'children':relationship(Node,
                backref=backref('parent', remote_side=[nodes.c.id])
            )
        })

        mapper(CompositePk, composite_pk_table)

        configure_mappers()
Exemplo n.º 16
0
    def test_update_ordereddict(self):
        """An OrderedDict of UPDATE values is treated like a plain dict:
        the SET clause comes out in table-column order, not in the
        dict's insertion order."""
        table1 = self.tables.mytable

        # Confirm that ordered dicts are treated as normal dicts,
        # columns sorted in table order
        values = util.OrderedDict(
            ((table1.c.name, table1.c.name + 'lala'),
             (table1.c.myid, func.do_stuff(table1.c.myid, literal('hoho')))))

        # note myid is emitted first even though name was inserted first
        self.assert_compile(
            update(table1, (table1.c.myid == func.hoho(4)) &
                   (table1.c.name
                    == literal('foo') + table1.c.name + literal('lala')),
                   values=values), 'UPDATE mytable '
            'SET '
            'myid=do_stuff(mytable.myid, :param_1), '
            'name=(mytable.name || :name_1) '
            'WHERE '
            'mytable.myid = hoho(:hoho_1) AND '
            'mytable.name = :param_2 || mytable.name || :param_3')
Exemplo n.º 17
0
    def _get_polymorphics(cls):
        """Build the with_polymorphic selectables used by these tests:
        a polymorphic union for Person, and a plain outerjoin for
        Manager."""
        people = cls.tables.people
        engineers = cls.tables.engineers
        managers = cls.tables.managers
        boss = cls.tables.boss

        # ordered mapping so the generated "pjoin" columns come out in a
        # deterministic order
        union_map = util.OrderedDict([
            ("engineer", people.join(engineers)),
            ("manager", people.join(managers)),
        ])
        person_join = polymorphic_union(union_map, None, "pjoin")

        manager_join = people.join(managers).outerjoin(boss)

        person_with_polymorphic = ([Person, Manager, Engineer], person_join)
        manager_with_polymorphic = ("*", manager_join)
        return person_with_polymorphic, manager_with_polymorphic
Exemplo n.º 18
0
    def setUpViewlet(self, domain_model):
        """Generate and register one viewlet class per descriptor group
        found among *domain_model*'s mapped properties.

        Skips properties rejected by ``checkProperty`` and groups whose
        viewlet already exists on ``content``.
        """
        model_schema = list(interface.implementedBy(domain_model))[0]
        mapper = orm.class_mapper(domain_model)
        domain_annotation = model.queryModelDescriptor(model_schema)

        # group property names by their descriptor group, preserving
        # mapper iteration order
        grouped = util.OrderedDict()
        for property in mapper.iterate_properties:
            if not self.checkProperty(property, model_schema,
                                      domain_annotation):
                continue

            property_name = property.key
            descriptor = domain_annotation.get(property_name)
            grouped.setdefault(descriptor.group, []).append(property_name)

        for group in grouped:
            viewlet_name = self.viewlet_name_template % (domain_model.__name__,
                                                         group)
            # BUG FIX: str.replace returns a new string; the original
            # code discarded the result, so underscores were never
            # actually stripped from the viewlet name.
            viewlet_name = viewlet_name.replace('_', '')

            # don't overwrite a viewlet that already exists
            if getattr(content, viewlet_name, None):
                continue

            inverse_model = mapper.get_property(
                grouped[group][0]).mapper.class_
            d = dict(group_name=group,
                     properties=grouped[group],
                     domain_model=inverse_model)
            viewlet_class = type(viewlet_name, (self.base_viewlet, ), d)

            # NOTE(review): zcml_snippet is built but never registered or
            # returned here -- presumably consumed elsewhere; confirm.
            zcml_snippet = self.zcml_template % (
                "%s.%s" % (domain_model.__name__, group), named(model_schema),
                viewlet_name)

            setattr(content, viewlet_name, viewlet_class)
Exemplo n.º 19
0
    def __init__(self, uowtransaction, mapper, circular_parent=None):
        """Create a UOWTask for *mapper*.

        A task created as part of the row-based circular sort passes its
        public-facing parent as *circular_parent* and is not registered
        in the transaction's task map.
        """
        if not circular_parent:
            uowtransaction.tasks[mapper] = self

        # the transaction owning this UOWTask
        self.uowtransaction = uowtransaction

        # the Mapper which this UOWTask corresponds to
        self.mapper = mapper

        # a dictionary mapping object instances to a corresponding UOWTaskElement.
        # Each UOWTaskElement represents one instance which is to be saved or
        # deleted by this UOWTask's Mapper.
        # in the case of the row-based "circular sort", the UOWTaskElement may
        # also reference further UOWTasks which are dependent on that UOWTaskElement.
        self.objects = util.OrderedDict()

        # a list of UOWDependencyProcessors which are executed after saves and
        # before deletes, to synchronize data to dependent objects
        self.dependencies = util.Set()

        # a list of UOWTasks that are dependent on this UOWTask, which
        # are to be executed after this UOWTask performs saves and post-save
        # dependency processing, and before pre-delete processing and deletes
        self.childtasks = []

        # whether this UOWTask is circular, meaning it holds a second
        # UOWTask that contains a special row-based dependency structure.
        self.circular = None

        # for a task thats part of that row-based dependency structure, points
        # back to the "public facing" task.
        self.circular_parent = circular_parent

        # a list of UOWDependencyProcessors are derived from the main
        # set of dependencies, referencing sub-UOWTasks attached to this
        # one which represent portions of the total list of objects.
        # this is used for the row-based "circular sort"
        self.cyclical_dependencies = util.Set()
Exemplo n.º 20
0
    def test_pk_collapses(self):
        """test that a composite primary key attribute formed by a join is "collapsed" into its
        minimal columns"""

        class T1(object):pass
        class T2(T1):pass

        # create a polymorphic union with the select against the base table first.
        # with the join being second, the alias of the union will
        # pick up two "primary key" columns.  technically the alias should have a
        # 2-col pk in any case but the leading select has a NULL for the "t2id" column
        d = util.OrderedDict()
        d['t1'] = t1.select(t1.c.type=='t1')
        d['t2'] = t1.join(t2)
        pjoin = polymorphic_union(d, None, 'pjoin')

        mapper(T1, t1, polymorphic_on=t1.c.type, polymorphic_identity='t1', with_polymorphic=('*', pjoin))
        mapper(T2, t2, inherits=T1, polymorphic_identity='t2')
        # the two union "pk" columns should collapse down to one
        assert len(class_mapper(T1).primary_key) == 1

        # Python 2 print statement -- looks like leftover debug output
        print [str(c) for c in class_mapper(T1).primary_key]
        ot1 = T1()
        ot2 = T2()
        sess = create_session()
        sess.add(ot1)
        sess.add(ot2)
        sess.flush()
        sess.expunge_all()

        # query using get(), using only one value.  this requires the select_table mapper
        # has the same single-col primary key.
        assert sess.query(T1).get(ot1.id).id == ot1.id

        ot1 = sess.query(T1).get(ot1.id)
        ot1.data = 'hi'
        sess.flush()
Exemplo n.º 21
0
 def test_odict_constructor(self):
     """Pairs passed to the constructor keep their insertion order."""
     od = util.OrderedDict([('name', 'jbe'), ('fullname', 'jonathan'),
                            ('password', '')])
     eq_(od.keys(), ['name', 'fullname', 'password'])
Exemplo n.º 22
0
def _as_declarative(cls, classname, dict_):
    """Map a declarative class: harvest Column/MapperProperty attributes
    from *dict_*, build the Table (unless ``__table__`` is given), then
    call mapper() with any ``__mapper_args__``."""
    cls._decl_class_registry[classname] = cls
    our_stuff = util.OrderedDict()
    for k in dict_:
        value = dict_[k]
        # a trailing comma turns an attribute into a one-element tuple;
        # warn instead of silently ignoring it
        if (isinstance(value, tuple) and len(value) == 1
                and isinstance(value[0], (Column, MapperProperty))):
            util.warn("Ignoring declarative-like tuple value of attribute "
                      "%s: possibly a copy-and-paste error with a comma "
                      "left at the end of the line?" % k)
            continue
        if not isinstance(value, (Column, MapperProperty)):
            continue
        prop = _deferred_relation(cls, value)
        our_stuff[k] = prop

    # set up attributes in the order they were created
    our_stuff.sort(key=lambda key: our_stuff[key]._creation_order)

    table = None
    if '__table__' not in cls.__dict__:
        if '__tablename__' in cls.__dict__:
            tablename = cls.__tablename__

            # __table_args__ may be a keyword dict, or a tuple whose last
            # element is the keyword dict
            table_args = cls.__dict__.get('__table_args__')
            if isinstance(table_args, dict):
                args, table_kw = (), table_args
            elif isinstance(table_args, tuple):
                args = table_args[0:-1]
                table_kw = table_args[-1]
            else:
                args, table_kw = (), {}

            autoload = cls.__dict__.get('__autoload__')
            if autoload:
                table_kw['autoload'] = True

            cols = []
            # NOTE(review): the loop body deletes from our_stuff while
            # iterating iteritems() -- this relies on util.OrderedDict
            # tolerating mutation during iteration; a plain dict would
            # not.  Confirm against util.OrderedDict's implementation.
            for key, c in our_stuff.iteritems():  # Python 2 dict API
                if isinstance(c, ColumnProperty):
                    for col in c.columns:
                        if isinstance(col, Column) and col.table is None:
                            _undefer_column_name(key, col)
                            cols.append(col)
                elif isinstance(c, Column):
                    _undefer_column_name(key, c)
                    cols.append(c)
                    # if the column is the same name as the key,
                    # remove it from the explicit properties dict.
                    # the normal rules for assigning column-based properties
                    # will take over, including precedence of columns
                    # in multi-column ColumnProperties.
                    if key == c.key:
                        del our_stuff[key]
            cls.__table__ = table = Table(tablename, cls.metadata,
                                          *(tuple(cols) + tuple(args)),
                                          **table_kw)
    else:
        table = cls.__table__

    mapper_args = getattr(cls, '__mapper_args__', {})
    if 'inherits' not in mapper_args:
        inherits = cls.__mro__[1]
        inherits = cls._decl_class_registry.get(inherits.__name__, None)
        if inherits:
            mapper_args['inherits'] = inherits
            if not mapper_args.get(
                    'concrete', False
            ) and table and 'inherit_condition' not in mapper_args:
                # figure out the inherit condition with relaxed rules
                # about nonexistent tables, to allow for ForeignKeys to
                # not-yet-defined tables (since we know for sure that our
                # parent table is defined within the same MetaData)
                mapper_args['inherit_condition'] = sql_util.join_condition(
                    inherits.__table__, table, ignore_nonexistent_tables=True)

    if hasattr(cls, '__mapper_cls__'):
        mapper_cls = util.unbound_method_to_callable(cls.__mapper_cls__)
    else:
        mapper_cls = mapper

    cls.__mapper__ = mapper_cls(cls,
                                table,
                                properties=our_stuff,
                                **mapper_args)
Exemplo n.º 23
0
def _history_mapper(local_mapper):  # noqa (C901 too complex)
    """Build and attach a history (version) mapper for the class mapped
    by ``local_mapper``.

    Creates a parallel ``<table>_history`` table containing copies of
    the versioned columns plus an integer ``version`` primary-key
    column, maps a dynamically-created ``<Class>History`` class to it,
    and stores the resulting mapper on the original class as
    ``__history_mapper__``.  For single-table inheritance, new columns
    are appended to the parent's existing history table instead.
    """
    cls = local_mapper.class_

    # set the "active_history" flag
    # on column-mapped attributes so that the old version
    # of the info is always loaded (currently sets it on all attributes)
    for prop in local_mapper.iterate_properties:
        getattr(local_mapper.class_, prop.key).impl.active_history = True

    super_mapper = local_mapper.inherits
    super_history_mapper = getattr(cls, '__history_mapper__', None)

    polymorphic_on = None
    super_fks = []

    def _col_copy(col):
        # Copy a column for the history table, and remember the copy on
        # the original column's ``info`` so re-mapped properties below
        # can translate their column lists.
        orig = col
        col = col.copy()
        orig.info['history_copy'] = col
        col.unique = False

        # if the column is nullable, we could end up overwriting an on-purpose null value with a default.
        # if it's not nullable, however, the default may be relied upon to correctly set values within the database,
        # so we should preserve it
        if col.nullable:
            col.default = col.server_default = None
        return col

    properties = util.OrderedDict()
    if not super_mapper or \
            local_mapper.local_table is not super_mapper.local_table:
        # joined-table inheritance (or no inheritance at all): build a
        # full history table mirroring this class's local table.
        cols = []
        version_meta = {"version_meta": True}
        for column in local_mapper.local_table.c:
            if _is_versioning_col(column):
                continue

            col = _col_copy(column)

            if super_mapper and \
                    col_references_table(column, super_mapper.local_table):
                # a FK into the parent's live table becomes a FK into
                # the parent's history table instead.
                super_fks.append(
                    (col.key,
                     list(super_history_mapper.local_table.primary_key)[0]))

            cols.append(col)

            if column is local_mapper.polymorphic_on:
                polymorphic_on = col

            orig_prop = local_mapper.get_property_by_column(column)
            # carry over column re-mappings
            if len(orig_prop.columns) > 1 or \
                    orig_prop.columns[0].key != orig_prop.key:
                properties[orig_prop.key] = tuple(col.info['history_copy']
                                                  for col in orig_prop.columns)

        if super_mapper:
            # child history rows also join to the parent history rows on
            # the shared "version" column.
            super_fks.append(
                ('version', super_history_mapper.local_table.c.version))

        # "version" stores the integer version id.  This column is
        # required.
        cols.append(
            Column('version',
                   Integer,
                   primary_key=True,
                   autoincrement=False,
                   info=version_meta))

        if super_fks:
            cols.append(ForeignKeyConstraint(*zip(*super_fks)))

        table = Table(local_mapper.local_table.name + '_history',
                      local_mapper.local_table.metadata,
                      *cols,
                      schema=local_mapper.local_table.schema)
    else:
        # single table inheritance.  take any additional columns that may have
        # been added and add them to the history table.
        for column in local_mapper.local_table.c:
            if column.key not in super_history_mapper.local_table.c:
                col = _col_copy(column)
                super_history_mapper.local_table.append_column(col)
        table = None

    if super_history_mapper:
        bases = (super_history_mapper.class_, )

        if table is not None:
            # NOTE(review): references ``table.c.changed`` -- presumably a
            # "changed" column is copied over from the live table; confirm
            # against the _is_versioning_col definition.
            properties['changed'] = (
                (table.c.changed, ) +
                tuple(super_history_mapper.attrs.changed.columns))

    else:
        bases = local_mapper.base_mapper.class_.__bases__
    # type.__new__ bypasses any declarative metaclass machinery on cls.
    versioned_cls = type.__new__(type, "%sHistory" % cls.__name__, bases, {})

    m = mapper(versioned_cls,
               table,
               inherits=super_history_mapper,
               polymorphic_on=polymorphic_on,
               polymorphic_identity=local_mapper.polymorphic_identity,
               properties=properties)
    cls.__history_mapper__ = m

    if not super_history_mapper:
        # also add the tracking "version" column to the live table/mapper.
        local_mapper.local_table.append_column(
            Column('version', Integer, default=1, nullable=False))
        local_mapper.add_property("version",
                                  local_mapper.local_table.c.version)
Exemplo n.º 24
0
def history_mapper(local_mapper):
    """Build and attach a Chrononaut history mapper for the class mapped
    by ``local_mapper``.

    Creates a ``<table>_history`` table (or, for single-table
    inheritance, appends new columns to the parent's history table),
    maps a generated ``<Class>History`` class to it, and stores the
    resulting mapper on the original class as ``__history_mapper__``.
    Columns named in ``__chrononaut_untracked__`` / ``__chrononaut_hidden__``
    are not copied to the history table; accessing them on a history
    object raises instead.
    """
    cls = local_mapper.class_

    # force "active_history" so the prior value is loaded before a change
    # is flushed (currently applied to all mapped attributes)
    for prop in local_mapper.iterate_properties:
        getattr(cls, prop.key).impl.active_history = True

    super_mapper = local_mapper.inherits
    super_history_mapper = getattr(cls, '__history_mapper__', None)

    polymorphic_on = None
    super_fks = []

    def _col_copy(col):
        # Copy a column for the history table; history rows must not
        # inherit uniqueness or defaults from the live table.  Remember
        # the copy on the original column so re-mapped properties below
        # can translate their column lists.
        copy = col.copy()
        col.info['history_copy'] = copy
        copy.unique = False
        copy.default = None
        copy.server_default = None
        return copy

    # we don't create copies of these columns on the version table b/c we don't save them anyways
    untracked_cols = set(getattr(cls, '__chrononaut_untracked__', []))
    hidden_cols = set(getattr(cls, '__chrononaut_hidden__', []))

    properties = util.OrderedDict()
    if not super_mapper or local_mapper.local_table is not super_mapper.local_table:
        cols = []
        # add column.info to identify columns specific to versioning
        version_meta = {"version_meta": True}

        for column in local_mapper.local_table.c:
            if ('version_meta' in column.info or column.key in hidden_cols
                    or column.key in untracked_cols):
                continue

            col = _col_copy(column)

            if super_mapper and col_references_table(column,
                                                     super_mapper.local_table):
                # a FK into the parent's live table becomes a FK into
                # the parent's history table instead.
                super_fks.append(
                    (col.key,
                     list(super_history_mapper.local_table.primary_key)[0]))

            cols.append(col)

            if column is local_mapper.polymorphic_on:
                polymorphic_on = col

            orig_prop = local_mapper.get_property_by_column(column)
            # carry over column re-mappings
            if len(orig_prop.columns
                   ) > 1 or orig_prop.columns[0].key != orig_prop.key:
                properties[orig_prop.key] = tuple(col.info['history_copy']
                                                  for col in orig_prop.columns)

        if super_mapper:
            # child history rows also join to the parent history rows on
            # the shared "version" column.
            super_fks.append(
                ('version', super_history_mapper.local_table.c.version))

        # "version" stores the integer version id.  This column is required.
        cols.append(
            Column('version',
                   Integer,
                   primary_key=True,
                   autoincrement=False,
                   info=version_meta))

        # "changed" column stores the UTC timestamp of when the history row was created.
        # This column is optional and can be omitted.
        cols.append(
            Column('changed',
                   DateTime(timezone=True),
                   default=lambda: datetime.now(pytz.utc),
                   info=version_meta))

        # Append some JSON metadata about the change too
        cols.append(
            Column('change_info',
                   postgresql.JSONB,
                   default=None,
                   info=version_meta))

        if super_fks:
            cols.append(ForeignKeyConstraint(*zip(*super_fks)))

        history_tablename = getattr(cls, '__chrononaut_tablename__',
                                    local_mapper.local_table.name + '_history')
        table = Table(history_tablename,
                      local_mapper.local_table.metadata,
                      *cols,
                      schema=local_mapper.local_table.schema)
    else:
        # single table inheritance.  take any additional columns that may have
        # been added and add them to the history table.
        for column in local_mapper.local_table.c:
            if column.key not in super_history_mapper.local_table.c:
                col = _col_copy(column)
                super_history_mapper.local_table.append_column(col)
        table = None

    if super_history_mapper:
        bases = (super_history_mapper.class_, )

        if table is not None:
            properties['changed'] = (
                (table.c.changed, ) +
                tuple(super_history_mapper.attrs.changed.columns))
    else:
        bases = local_mapper.base_mapper.class_.__bases__
    # type.__new__ bypasses any declarative metaclass machinery on cls.
    versioned_cls = type.__new__(type, "%sHistory" % cls.__name__, bases, {})

    # Finally add @property's raising OmittedAttributeErrors for missing cols.
    # BUGFIX: ``msg`` must be bound as a lambda default argument.  A plain
    # closure late-binds the loop variable, so every generated property
    # would raise with the *last* column's message.
    for col_name in untracked_cols:
        msg = '{} is explicitly untracked via __chrononaut_untracked__.'.format(
            col_name)
        setattr(versioned_cls, col_name,
                property(lambda _, msg=msg: raise_(
                    UntrackedAttributeError(msg))))

    for col_name in hidden_cols:
        msg = '{} is explicitly hidden via __chrononaut_hidden__'.format(
            col_name)
        setattr(versioned_cls, col_name,
                property(lambda _, msg=msg: raise_(
                    HiddenAttributeError(msg))))

    m = mapper(versioned_cls,
               table,
               inherits=super_history_mapper,
               polymorphic_on=polymorphic_on,
               polymorphic_identity=local_mapper.polymorphic_identity,
               properties=properties)
    cls.__history_mapper__ = m

    if not super_history_mapper:
        # also add the tracking "version" column to the live table/mapper.
        local_mapper.local_table.append_column(
            Column('version', Integer, default=0, nullable=True))
        local_mapper.add_property("version",
                                  local_mapper.local_table.c.version)
Exemplo n.º 25
0
def _as_declarative(cls, classname, dict_):
    """Set up a Table and Mapper for a declaratively-mapped class.

    Scans ``cls.__mro__`` for declarative directives (``__tablename__``,
    ``__table_args__``, ``__mapper_args__``) and mixin-provided columns
    / classproperties, collects Column/MapperProperty attributes,
    builds the ``__table__`` if needed, resolves inheritance, and
    finally constructs ``cls.__mapper__``.

    Python 2 era code (uses ``iteritems``).
    """

    # dict_ will be a dictproxy, which we can't write to, and we need to!
    dict_ = dict(dict_)

    # column_copies maps a mixin's original Column/classproperty object
    # to the per-class copy actually used for this mapping.
    column_copies = {}
    potential_columns = {}

    mapper_args = {}
    table_args = inherited_table_args = None
    tablename = None
    parent_columns = ()

    # Walk the MRO: mapped bases contribute their column names (so mixin
    # columns don't double-map), unmapped bases contribute directives
    # and mixin attributes.  The first (most-derived) occurrence of each
    # directive wins.
    for base in cls.__mro__:
        if _is_mapped_class(base):
            parent_columns = base.__table__.c.keys()
        else:
            for name,obj in vars(base).items():
                if name == '__mapper_args__':
                    if not mapper_args:
                        mapper_args = cls.__mapper_args__
                elif name == '__tablename__':
                    if not tablename:
                        tablename = cls.__tablename__
                elif name == '__table_args__':
                    if not table_args:
                        table_args = cls.__table_args__
                        if base is not cls:
                            inherited_table_args = True
                elif base is not cls:
                    # we're a mixin.

                    if isinstance(obj, Column):
                        if obj.foreign_keys:
                            raise exceptions.InvalidRequestError(
                            "Columns with foreign keys to other columns "
                            "must be declared as @classproperty callables "
                            "on declarative mixin classes. ")
                        if name not in dict_ and not (
                                '__table__' in dict_ and 
                                name in dict_['__table__'].c
                                ):
                            # copy the column so each subclass gets its
                            # own, preserving declaration order.
                            potential_columns[name] = \
                                    column_copies[obj] = \
                                    obj.copy()
                            column_copies[obj]._creation_order = \
                                    obj._creation_order
                    elif isinstance(obj, MapperProperty):
                        raise exceptions.InvalidRequestError(
                            "Mapper properties (i.e. deferred,"
                            "column_property(), relationship(), etc.) must "
                            "be declared as @classproperty callables "
                            "on declarative mixin classes.")
                    elif isinstance(obj, util.classproperty):
                        # evaluate the classproperty against cls and use
                        # the result as the declared attribute.
                        dict_[name] = ret = \
                                column_copies[obj] = getattr(cls, name)
                        if isinstance(ret, (Column, MapperProperty)) and \
                            ret.doc is None:
                            ret.doc = obj.__doc__

    # apply inherited columns as we should
    for k, v in potential_columns.items():
        if tablename or k not in parent_columns:
            dict_[k] = v

    if inherited_table_args and not tablename:
        table_args = None

    # make sure that column copies are used rather 
    # than the original columns from any mixins
    for k, v in mapper_args.iteritems():
        mapper_args[k] = column_copies.get(v,v)

    cls._decl_class_registry[classname] = cls
    our_stuff = util.OrderedDict()

    # Collect all Column / MapperProperty attributes into our_stuff.
    for k in dict_:
        value = dict_[k]
        if isinstance(value, util.classproperty):
            value = getattr(cls, k)

        if (isinstance(value, tuple) and len(value) == 1 and
            isinstance(value[0], (Column, MapperProperty))):
            util.warn("Ignoring declarative-like tuple value of attribute "
                      "%s: possibly a copy-and-paste error with a comma "
                      "left at the end of the line?" % k)
            continue
        if not isinstance(value, (Column, MapperProperty)):
            continue
        prop = _deferred_relationship(cls, value)
        our_stuff[k] = prop

    # set up attributes in the order they were created
    our_stuff.sort(key=lambda key: our_stuff[key]._creation_order)

    # extract columns from the class dict
    cols = []
    for key, c in our_stuff.iteritems():
        if isinstance(c, ColumnProperty):
            for col in c.columns:
                if isinstance(col, Column) and col.table is None:
                    _undefer_column_name(key, col)
                    cols.append(col)
        elif isinstance(c, Column):
            _undefer_column_name(key, c)
            cols.append(c)
            # if the column is the same name as the key, 
            # remove it from the explicit properties dict.
            # the normal rules for assigning column-based properties
            # will take over, including precedence of columns
            # in multi-column ColumnProperties.
            if key == c.key:
                del our_stuff[key]

    table = None
    if '__table__' not in dict_:
        if tablename is not None:
            # Build the Table from the collected columns plus any
            # positional / keyword args from __table_args__.
            if isinstance(table_args, dict):
                args, table_kw = (), table_args
            elif isinstance(table_args, tuple):
                args = table_args[0:-1]
                table_kw = table_args[-1]
                if len(table_args) < 2 or not isinstance(table_kw, dict):
                    raise exceptions.ArgumentError(
                        "Tuple form of __table_args__ is "
                        "(arg1, arg2, arg3, ..., {'kw1':val1, "
                        "'kw2':val2, ...})"
                    )
            else:
                args, table_kw = (), {}

            autoload = dict_.get('__autoload__')
            if autoload:
                table_kw['autoload'] = True

            cls.__table__ = table = Table(tablename, cls.metadata,
                                          *(tuple(cols) + tuple(args)),
                                           **table_kw)
    else:
        # explicit __table__: every declared column must already be on it.
        table = cls.__table__
        if cols:
            for c in cols:
                if not table.c.contains_column(c):
                    raise exceptions.ArgumentError(
                        "Can't add additional column %r when "
                        "specifying __table__" % c.key
                    )

    # infer 'inherits' from the first mapped base class, if not explicit.
    if 'inherits' not in mapper_args:
        for c in cls.__bases__:
            if _is_mapped_class(c):
                mapper_args['inherits'] = cls._decl_class_registry.get(
                                                            c.__name__, None)
                break

    if hasattr(cls, '__mapper_cls__'):
        mapper_cls = util.unbound_method_to_callable(cls.__mapper_cls__)
    else:
        mapper_cls = mapper

    if table is None and 'inherits' not in mapper_args:
        raise exceptions.InvalidRequestError(
            "Class %r does not have a __table__ or __tablename__ "
            "specified and does not inherit from an existing "
            "table-mapped class." % cls
            )

    elif 'inherits' in mapper_args and not mapper_args.get('concrete', False):
        inherited_mapper = class_mapper(mapper_args['inherits'],
                                            compile=False)
        inherited_table = inherited_mapper.local_table
        if 'inherit_condition' not in mapper_args and table is not None:
            # figure out the inherit condition with relaxed rules
            # about nonexistent tables, to allow for ForeignKeys to
            # not-yet-defined tables (since we know for sure that our
            # parent table is defined within the same MetaData)
            mapper_args['inherit_condition'] = sql_util.join_condition(
                mapper_args['inherits'].__table__, table,
                ignore_nonexistent_tables=True)

        if table is None:
            # single table inheritance.
            # ensure no table args
            if table_args:
                raise exceptions.ArgumentError(
                    "Can't place __table_args__ on an inherited class "
                    "with no table."
                    )

            # add any columns declared here to the inherited table.
            for c in cols:
                if c.primary_key:
                    raise exceptions.ArgumentError(
                        "Can't place primary key columns on an inherited "
                        "class with no table."
                        )
                if c.name in inherited_table.c:
                    raise exceptions.ArgumentError(
                        "Column '%s' on class %s conflicts with "
                        "existing column '%s'" % 
                        (c, cls, inherited_table.c[c.name])
                    )
                inherited_table.append_column(c)

        # single or joined inheritance
        # exclude any cols on the inherited table which are not mapped on the
        # parent class, to avoid
        # mapping columns specific to sibling/nephew classes
        inherited_mapper = class_mapper(mapper_args['inherits'],
                                            compile=False)
        inherited_table = inherited_mapper.local_table

        if 'exclude_properties' not in mapper_args:
            mapper_args['exclude_properties'] = exclude_properties = \
                set([c.key for c in inherited_table.c
                     if c not in inherited_mapper._columntoproperty])
            exclude_properties.difference_update([c.key for c in cols])

    cls.__mapper__ = mapper_cls(cls, 
                                table, 
                                properties=our_stuff, 
                                **mapper_args)
Exemplo n.º 26
0
def _as_declarative(cls, classname, dict_):
    """Set up a Table and Mapper for a declaratively-mapped class.

    Collects Column/MapperProperty attributes from ``dict_``, builds
    ``cls.__table__`` from ``__tablename__`` / ``__table_args__`` if no
    explicit ``__table__`` is given, resolves inheritance from mapped
    base classes, and constructs ``cls.__mapper__``.

    Python 2 era code (uses ``iteritems``).
    """
    cls._decl_class_registry[classname] = cls
    our_stuff = util.OrderedDict()
    # Collect all Column / MapperProperty attributes into our_stuff.
    for k in dict_:
        value = dict_[k]
        if (isinstance(value, tuple) and len(value) == 1
                and isinstance(value[0], (Column, MapperProperty))):
            util.warn("Ignoring declarative-like tuple value of attribute "
                      "%s: possibly a copy-and-paste error with a comma "
                      "left at the end of the line?" % k)
            continue
        if not isinstance(value, (Column, MapperProperty)):
            continue
        prop = _deferred_relation(cls, value)
        our_stuff[k] = prop

    # set up attributes in the order they were created
    our_stuff.sort(key=lambda key: our_stuff[key]._creation_order)

    # extract columns from the class dict
    cols = []
    for key, c in our_stuff.iteritems():
        if isinstance(c, ColumnProperty):
            for col in c.columns:
                if isinstance(col, Column) and col.table is None:
                    _undefer_column_name(key, col)
                    cols.append(col)
        elif isinstance(c, Column):
            _undefer_column_name(key, c)
            cols.append(c)
            # if the column is the same name as the key,
            # remove it from the explicit properties dict.
            # the normal rules for assigning column-based properties
            # will take over, including precedence of columns
            # in multi-column ColumnProperties.
            if key == c.key:
                del our_stuff[key]

    table = None
    if '__table__' not in cls.__dict__:
        if '__tablename__' in cls.__dict__:
            tablename = cls.__tablename__

            # __table_args__ may be a dict of keyword args, or a tuple
            # of positional args ending with a keyword-arg dict.
            table_args = cls.__dict__.get('__table_args__')
            if isinstance(table_args, dict):
                args, table_kw = (), table_args
            elif isinstance(table_args, tuple):
                args = table_args[0:-1]
                table_kw = table_args[-1]
                if len(table_args) < 2 or not isinstance(table_kw, dict):
                    raise exceptions.ArgumentError(
                        "Tuple form of __table_args__ is "
                        "(arg1, arg2, arg3, ..., {'kw1':val1, 'kw2':val2, ...})"
                    )
            else:
                args, table_kw = (), {}

            autoload = cls.__dict__.get('__autoload__')
            if autoload:
                table_kw['autoload'] = True

            cls.__table__ = table = Table(tablename, cls.metadata,
                                          *(tuple(cols) + tuple(args)),
                                          **table_kw)
    else:
        # explicit __table__: every declared column must already be on it.
        table = cls.__table__
        if cols:
            for c in cols:
                if not table.c.contains_column(c):
                    # BUGFIX: report the offending column c.key; the
                    # original formatted the stale loop variable ``key``
                    # left over from the extraction loop above.
                    raise exceptions.ArgumentError(
                        "Can't add additional column %r when specifying __table__"
                        % c.key)

    # infer 'inherits' from the first mapped base class, if not explicit.
    mapper_args = getattr(cls, '__mapper_args__', {})
    if 'inherits' not in mapper_args:
        for c in cls.__bases__:
            if _is_mapped_class(c):
                mapper_args['inherits'] = cls._decl_class_registry.get(
                    c.__name__, None)
                break

    if hasattr(cls, '__mapper_cls__'):
        mapper_cls = util.unbound_method_to_callable(cls.__mapper_cls__)
    else:
        mapper_cls = mapper

    if not table and 'inherits' not in mapper_args:
        raise exceptions.InvalidRequestError(
            "Class %r does not have a __table__ or __tablename__ "
            "specified and does not inherit from an existing table-mapped class."
            % cls)

    elif 'inherits' in mapper_args and not mapper_args.get('concrete', False):
        inherited_mapper = class_mapper(mapper_args['inherits'], compile=False)
        inherited_table = inherited_mapper.local_table
        if 'inherit_condition' not in mapper_args and table:
            # figure out the inherit condition with relaxed rules
            # about nonexistent tables, to allow for ForeignKeys to
            # not-yet-defined tables (since we know for sure that our
            # parent table is defined within the same MetaData)
            mapper_args['inherit_condition'] = sql_util.join_condition(
                mapper_args['inherits'].__table__,
                table,
                ignore_nonexistent_tables=True)

        if not table:
            # single table inheritance.
            # ensure no table args
            table_args = cls.__dict__.get('__table_args__')
            if table_args is not None:
                raise exceptions.ArgumentError(
                    "Can't place __table_args__ on an inherited class with no table."
                )

            # add any columns declared here to the inherited table.
            for c in cols:
                if c.primary_key:
                    raise exceptions.ArgumentError(
                        "Can't place primary key columns on an inherited class with no table."
                    )
                inherited_table.append_column(c)

        # single or joined inheritance
        # exclude any cols on the inherited table which are not mapped on the parent class, to avoid
        # mapping columns specific to sibling/nephew classes
        inherited_mapper = class_mapper(mapper_args['inherits'], compile=False)
        inherited_table = inherited_mapper.local_table

        if 'exclude_properties' not in mapper_args:
            mapper_args['exclude_properties'] = exclude_properties = \
                set([c.key for c in inherited_table.c if c not in inherited_mapper._columntoproperty])
            exclude_properties.difference_update([c.key for c in cols])

    cls.__mapper__ = mapper_cls(cls,
                                table,
                                properties=our_stuff,
                                **mapper_args)
Exemplo n.º 27
0
    def visit_select(self, select):
        """Compile a Select construct into a SQL string.

        Builds the column list, FROM clause, WHERE / GROUP BY / HAVING /
        ORDER BY sections, and stores the final text in
        ``self.strings[select]`` (parenthesized in ``self.froms[select]``
        for subquery use).

        Python 2 era code (``string.join``, ``has_key``).
        """
        # the actual list of columns to print in the SELECT column list.
        inner_columns = util.OrderedDict()

        self.select_stack.append(select)
        for c in select._raw_columns:
            # scalar subselects are rendered directly as a column expression
            if isinstance(c, sql.Select) and c.is_scalar:
                self.traverse(c)
                inner_columns[self.get_str(c)] = c
                continue
            if hasattr(c, '_selectable'):
                s = c._selectable()
            else:
                # plain column expression: render as-is
                self.traverse(c)
                inner_columns[self.get_str(c)] = c
                continue
            # expand a selectable into its individual columns
            for co in s.columns:
                if select.use_labels:
                    labelname = co._label
                    if labelname is not None:
                        l = co.label(labelname)
                        self.traverse(l)
                        inner_columns[labelname] = l
                    else:
                        self.traverse(co)
                        inner_columns[self.get_str(co)] = co
                # TODO: figure this out, a ColumnClause with a select as a parent
                # is different from any other kind of parent
                elif select.is_subquery and isinstance(
                        co, sql._ColumnClause
                ) and not co.is_literal and co.table is not None and not isinstance(
                        co.table, sql.Select):
                    # SQLite doesnt like selecting from a subquery where the column
                    # names look like table.colname, so add a label synonomous with
                    # the column name
                    l = co.label(co.name)
                    self.traverse(l)
                    inner_columns[self.get_str(l.obj)] = l
                else:
                    self.traverse(co)
                    inner_columns[self.get_str(co)] = co
        self.select_stack.pop(-1)

        collist = string.join(
            [self.get_str(v) for v in inner_columns.values()], ', ')

        text = "SELECT "
        text += self.visit_select_precolumns(select)
        text += collist

        whereclause = select.whereclause

        froms = []
        for f in select.froms:

            if self.parameters is not None:
                # look at our own parameters, see if they
                # are all present in the form of BindParamClauses.  if
                # not, then append to the above whereclause column conditions
                # matching those keys
                for c in f.columns:
                    if sql.is_column(c) and self.parameters.has_key(
                            c.key) and not self.binds.has_key(c.key):
                        value = self.parameters[c.key]
                    else:
                        continue
                    clause = c == value
                    self.traverse(clause)
                    whereclause = sql.and_(clause, whereclause)
                    self.visit_compound(whereclause)

            # special thingy used by oracle to redefine a join
            w = self.get_whereclause(f)
            if w is not None:
                # TODO: move this more into the oracle module
                whereclause = sql.and_(w, whereclause)
                self.visit_compound(whereclause)

            t = self.get_from_text(f)
            if t is not None:
                froms.append(t)

        if len(froms):
            text += " \nFROM "
            text += string.join(froms, ', ')
        else:
            # dialect hook: e.g. "FROM DUAL" on oracle-style databases
            text += self.default_from()

        if whereclause is not None:
            t = self.get_str(whereclause)
            if t:
                text += " \nWHERE " + t

        group_by = self.get_str(select.group_by_clause)
        if group_by:
            text += " GROUP BY " + group_by

        if select.having is not None:
            t = self.get_str(select.having)
            if t:
                text += " \nHAVING " + t

        order_by = self.get_str(select.order_by_clause)
        if order_by:
            text += " ORDER BY " + order_by

        # dialect hooks for LIMIT/OFFSET-style clauses and FOR UPDATE
        text += self.visit_select_postclauses(select)

        text += self.for_update_clause(select)

        if getattr(select, 'parens', False):
            self.strings[select] = "(" + text + ")"
        else:
            self.strings[select] = text
        self.froms[select] = "(" + text + ")"
Exemplo n.º 28
0
    def __init__(cls, classname, bases, dict_):
        """Declarative metaclass hook: map ``cls`` when it is defined.

        The declarative base class itself (the one that owns
        ``_decl_class_registry``) is skipped; every subclass gets its
        Table built from ``__tablename__`` (or taken from ``__table__``)
        and its Mapper constructed.

        Python 2 era code (``cmp``-based sort, ``iteritems``).
        """
        if '_decl_class_registry' in cls.__dict__:
            # this is the declarative base itself -- nothing to map.
            return type.__init__(cls, classname, bases, dict_)

        cls._decl_class_registry[classname] = cls
        our_stuff = util.OrderedDict()
        # Collect all Column / MapperProperty attributes into our_stuff.
        for k in dict_:
            value = dict_[k]
            if (isinstance(value, tuple) and len(value) == 1
                    and isinstance(value[0], (Column, MapperProperty))):
                util.warn("Ignoring declarative-like tuple value of attribute "
                          "%s: possibly a copy-and-paste error with a comma "
                          "left at the end of the line?" % k)
                continue
            if not isinstance(value, (Column, MapperProperty)):
                continue
            prop = _deferred_relation(cls, value)
            our_stuff[k] = prop

        # set up attributes in the order they were created
        our_stuff.sort(lambda x, y: cmp(our_stuff[x]._creation_order,
                                        our_stuff[y]._creation_order))

        table = None
        if '__table__' not in cls.__dict__:
            if '__tablename__' in cls.__dict__:
                tablename = cls.__tablename__
                autoload = cls.__dict__.get('__autoload__')
                if autoload:
                    table_kw = {'autoload': True}
                else:
                    table_kw = {}
                # extract columns from the collected properties
                cols = []
                for key, c in our_stuff.iteritems():
                    if isinstance(c, ColumnProperty):
                        for col in c.columns:
                            if isinstance(col, Column) and col.table is None:
                                _undefer_column_name(key, col)
                                cols.append(col)
                    elif isinstance(c, Column):
                        _undefer_column_name(key, c)
                        cols.append(c)
                cls.__table__ = table = Table(tablename, cls.metadata, *cols,
                                              **table_kw)
        else:
            table = cls.__table__

        # infer 'inherits' from the immediate base class, if mapped.
        mapper_args = getattr(cls, '__mapper_args__', {})
        if 'inherits' not in mapper_args:
            inherits = cls.__mro__[1]
            inherits = cls._decl_class_registry.get(inherits.__name__, None)
            if inherits:
                mapper_args['inherits'] = inherits
                if not mapper_args.get('concrete', False) and table:
                    # figure out the inherit condition with relaxed rules about nonexistent tables,
                    # to allow for ForeignKeys to not-yet-defined tables (since we know for sure that our parent
                    # table is defined within the same MetaData)
                    mapper_args['inherit_condition'] = sql_util.join_condition(
                        inherits.__table__,
                        table,
                        ignore_nonexistent_tables=True)

        if hasattr(cls, '__mapper_cls__'):
            mapper_cls = util.unbound_method_to_callable(cls.__mapper_cls__)
        else:
            mapper_cls = mapper
        cls.__mapper__ = mapper_cls(cls,
                                    table,
                                    properties=our_stuff,
                                    **mapper_args)
        return type.__init__(cls, classname, bases, dict_)
Exemplo n.º 29
0
def _history_mapper(local_mapper):
    """Create and attach a history (versioning) mapper for the class
    mapped by *local_mapper*.

    A companion ``<tablename>_history`` table and ``<ClassName>History``
    class are generated; each history row is keyed by the original
    primary key plus an integer ``version`` column.  The resulting
    mapper is stored on the mapped class as ``__history_mapper__``, and
    a ``version`` counter column/property is added to the original
    table and mapper.
    """
    cls = local_mapper.class_

    # set the "active_history" flag
    # on column-mapped attributes so that the old version
    # of the info is always loaded (currently sets it on all attributes)
    for prop in local_mapper.iterate_properties:
        getattr(local_mapper.class_, prop.key).impl.active_history = True

    # inheritance context: the parent mapper (if any) and the history
    # mapper already created for the parent class (if any)
    super_mapper = local_mapper.inherits
    super_history_mapper = getattr(cls, "__history_mapper__", None)

    polymorphic_on = None
    super_fks = []  # (local column key, referenced parent-history column) pairs

    def _col_copy(col):
        # Make a detached copy of *col* suitable for the history table:
        # no uniqueness, no defaults, no autoincrement.  The copy is
        # recorded in the original column's .info so re-mapped/composite
        # properties can be translated below via info["history_copy"].
        orig = col
        col = col.copy()
        orig.info["history_copy"] = col
        col.unique = False
        col.default = col.server_default = None
        col.autoincrement = False
        return col

    properties = util.OrderedDict()
    # joined-table (or single-class) case: build a dedicated history table.
    # The "else" branch below handles single-table inheritance instead.
    if (not super_mapper
            or local_mapper.local_table is not super_mapper.local_table):
        cols = []
        version_meta = {"version_meta": True}  # add column.info to identify
        # columns specific to versioning

        for column in local_mapper.local_table.c:
            # skip columns we added ourselves on a previous pass
            if _is_versioning_col(column):
                continue

            col = _col_copy(column)

            # FK columns that point at the parent table are redirected to
            # the parent *history* table's primary key instead
            if super_mapper and col_references_table(column,
                                                     super_mapper.local_table):
                super_fks.append((
                    col.key,
                    list(super_history_mapper.local_table.primary_key)[0],
                ))

            cols.append(col)

            # carry the polymorphic discriminator over to the history mapper
            if column is local_mapper.polymorphic_on:
                polymorphic_on = col

            orig_prop = local_mapper.get_property_by_column(column)
            # carry over column re-mappings
            if (len(orig_prop.columns) > 1
                    or orig_prop.columns[0].key != orig_prop.key):
                properties[orig_prop.key] = tuple(col.info["history_copy"]
                                                  for col in orig_prop.columns)

        if super_mapper:
            # history rows must also join to the parent history row's version
            super_fks.append(
                ("version", super_history_mapper.local_table.c.version))

        # "version" stores the integer version id.  This column is
        # required.
        cols.append(
            Column(
                "version",
                Integer,
                primary_key=True,
                autoincrement=False,
                info=version_meta,
            ))

        # "changed" column stores the UTC timestamp of when the
        # history row was created.
        # This column is optional and can be omitted.
        cols.append(
            Column(
                "changed",
                DateTime,
                default=datetime.datetime.utcnow,
                info=version_meta,
            ))

        if super_fks:
            cols.append(ForeignKeyConstraint(*zip(*super_fks)))

        table = Table(local_mapper.local_table.name + "_history",
                      local_mapper.local_table.metadata,
                      *cols,
                      schema=local_mapper.local_table.schema)
    else:
        # single table inheritance.  take any additional columns that may have
        # been added and add them to the history table.
        for column in local_mapper.local_table.c:
            if column.key not in super_history_mapper.local_table.c:
                col = _col_copy(column)
                super_history_mapper.local_table.append_column(col)
        table = None

    if super_history_mapper:
        bases = (super_history_mapper.class_, )

        if table is not None:
            # subclass history "changed" spans this table plus the parent's
            properties["changed"] = (table.c.changed, ) + tuple(
                super_history_mapper.attrs.changed.columns)

    else:
        bases = local_mapper.base_mapper.class_.__bases__
    # synthesize the history class without invoking any metaclass machinery
    versioned_cls = type.__new__(type, "%sHistory" % cls.__name__, bases, {})

    m = mapper(
        versioned_cls,
        table,
        inherits=super_history_mapper,
        polymorphic_on=polymorphic_on,
        polymorphic_identity=local_mapper.polymorphic_identity,
        properties=properties,
    )
    cls.__history_mapper__ = m

    if not super_history_mapper:
        # first (base-most) versioned class: give the *live* table its
        # version counter as well
        local_mapper.local_table.append_column(
            Column("version", Integer, default=1, nullable=False),
            replace_existing=True,
        )
        local_mapper.add_property("version",
                                  local_mapper.local_table.c.version)
        # NOTE(review): use_mapper_versioning appears to be a flag declared
        # on the versioned base class — confirm against the class definition
        if cls.use_mapper_versioning:
            local_mapper.version_id_col = local_mapper.local_table.c.version
Exemplo n.º 30
0
    def _setup_stock_mapping(cls):
        """Build the standard fixture mappings used across the ORM tests.

        Maps User/Address/Order/Item/Keyword/Dingaling/Node/CompositePk
        imperatively against their fixture tables and then runs
        ``configure_mappers()`` so relationships are resolved eagerly.
        """
        classes, tables = cls.classes, cls.tables

        User, Address, Order = classes.User, classes.Address, classes.Order
        Item, Keyword = classes.Item, classes.Keyword
        Dingaling, Node = classes.Dingaling, classes.Node
        CompositePk = classes.CompositePk

        users, addresses, orders = tables.users, tables.addresses, tables.orders
        items, keywords = tables.items, tables.keywords
        dingalings, nodes = tables.dingalings, tables.nodes
        order_items, item_keywords = tables.order_items, tables.item_keywords
        composite_pk_table = tables.composite_pk_table

        # use OrderedDict on this one to support some tests that
        # assert the order of attributes (e.g. orm/test_inspect)
        user_props = util.OrderedDict()
        user_props["addresses"] = relationship(
            Address, backref="user", order_by=addresses.c.id
        )
        # o2m, m2o
        user_props["orders"] = relationship(
            Order, backref="user", order_by=orders.c.id
        )
        cls.mapper_registry.map_imperatively(
            User, users, properties=user_props
        )

        cls.mapper_registry.map_imperatively(
            Address,
            addresses,
            properties={
                # o2o
                "dingaling": relationship(
                    Dingaling, uselist=False, backref="address"
                )
            },
        )
        cls.mapper_registry.map_imperatively(Dingaling, dingalings)

        cls.mapper_registry.map_imperatively(
            Order,
            orders,
            properties={
                # m2m
                "items": relationship(
                    Item, secondary=order_items, order_by=items.c.id
                ),
                # m2o
                "address": relationship(Address),
            },
        )
        cls.mapper_registry.map_imperatively(
            Item,
            items,
            properties={
                # m2m
                "keywords": relationship(Keyword, secondary=item_keywords)
            },
        )
        cls.mapper_registry.map_imperatively(Keyword, keywords)

        # self-referential adjacency list
        cls.mapper_registry.map_imperatively(
            Node,
            nodes,
            properties={
                "children": relationship(
                    Node,
                    backref=backref("parent", remote_side=[nodes.c.id]),
                )
            },
        )

        cls.mapper_registry.map_imperatively(CompositePk, composite_pk_table)

        configure_mappers()