Example #1
    def define_tables(cls, metadata):
        dt = Table(
            'dt', metadata, Column('id', Integer, primary_key=True),
            Column('col1', String(20)),
            Column('col2', String(20),
                   server_default=sa.schema.FetchedValue()),
            Column('col3', String(20),
                   sa.schema.FetchedValue(for_update=True)),
            Column('col4', String(20), sa.schema.FetchedValue(),
                   sa.schema.FetchedValue(for_update=True)))
        for ins in (
                sa.DDL(
                    "CREATE TRIGGER dt_ins AFTER INSERT ON dt "
                    "FOR EACH ROW BEGIN "
                    "UPDATE dt SET col2='ins', col4='ins' "
                    "WHERE dt.id = NEW.id; END",
                    on='sqlite'),
                sa.DDL(
                    "CREATE TRIGGER dt_ins ON dt AFTER INSERT AS "
                    "UPDATE dt SET col2='ins', col4='ins' "
                    "WHERE dt.id IN (SELECT id FROM inserted);",
                    on='mssql'),
        ):
            if testing.against(ins.on):
                break
        else:
            ins = sa.DDL("CREATE TRIGGER dt_ins BEFORE INSERT ON dt "
                         "FOR EACH ROW BEGIN "
                         "SET NEW.col2='ins'; SET NEW.col4='ins'; END")
        ins.execute_at('after-create', dt)
        sa.DDL("DROP TRIGGER dt_ins").execute_at('before-drop', dt)

        for up in (
                sa.DDL(
                    "CREATE TRIGGER dt_up AFTER UPDATE ON dt "
                    "FOR EACH ROW BEGIN "
                    "UPDATE dt SET col3='up', col4='up' "
                    "WHERE dt.id = OLD.id; END",
                    on='sqlite'),
                sa.DDL(
                    "CREATE TRIGGER dt_up ON dt AFTER UPDATE AS "
                    "UPDATE dt SET col3='up', col4='up' "
                    "WHERE dt.id IN (SELECT id FROM deleted);",
                    on='mssql'),
        ):
            if testing.against(up.on):
                break
        else:
            up = sa.DDL("CREATE TRIGGER dt_up BEFORE UPDATE ON dt "
                        "FOR EACH ROW BEGIN "
                        "SET NEW.col3='up'; SET NEW.col4='up'; END")
        up.execute_at('after-create', dt)
        sa.DDL("DROP TRIGGER dt_up").execute_at('before-drop', dt)
Example #2
    def define_tables(cls, metadata):
        dt = Table('dt', metadata,
                   Column('id', Integer, primary_key=True),
                   Column('col1', String(20)),
                   Column('col2', String(20),
                          server_default=sa.schema.FetchedValue()),
                   Column('col3', String(20),
                          sa.schema.FetchedValue(for_update=True)),
                   Column('col4', String(20),
                          sa.schema.FetchedValue(),
                          sa.schema.FetchedValue(for_update=True)))
        for ins in (
            sa.DDL("CREATE TRIGGER dt_ins AFTER INSERT ON dt "
                   "FOR EACH ROW BEGIN "
                   "UPDATE dt SET col2='ins', col4='ins' "
                   "WHERE dt.id = NEW.id; END",
                   on='sqlite'),
            sa.DDL("CREATE TRIGGER dt_ins ON dt AFTER INSERT AS "
                   "UPDATE dt SET col2='ins', col4='ins' "
                   "WHERE dt.id IN (SELECT id FROM inserted);",
                   on='mssql'),
            ):
            if testing.against(ins.on):
                break
        else:
            ins = sa.DDL("CREATE TRIGGER dt_ins BEFORE INSERT ON dt "
                         "FOR EACH ROW BEGIN "
                         "SET NEW.col2='ins'; SET NEW.col4='ins'; END")
        ins.execute_at('after-create', dt)
        sa.DDL("DROP TRIGGER dt_ins").execute_at('before-drop', dt)


        for up in (
            sa.DDL("CREATE TRIGGER dt_up AFTER UPDATE ON dt "
                   "FOR EACH ROW BEGIN "
                   "UPDATE dt SET col3='up', col4='up' "
                   "WHERE dt.id = OLD.id; END",
                   on='sqlite'),
            sa.DDL("CREATE TRIGGER dt_up ON dt AFTER UPDATE AS "
                   "UPDATE dt SET col3='up', col4='up' "
                   "WHERE dt.id IN (SELECT id FROM deleted);",
                   on='mssql'),
            ):
            if testing.against(up.on):
                break
        else:
            up = sa.DDL("CREATE TRIGGER dt_up BEFORE UPDATE ON dt "
                        "FOR EACH ROW BEGIN "
                        "SET NEW.col3='up'; SET NEW.col4='up'; END")
        up.execute_at('after-create', dt)
        sa.DDL("DROP TRIGGER dt_up").execute_at('before-drop', dt)
Example #3
def Table(*args, **kw):
    """A schema.Table wrapper/hook for dialect-specific tweaks."""

    test_opts = dict([(k,kw.pop(k)) for k in kw.keys()
                      if k.startswith('test_')])

    kw.update(table_options)

    if testing.against('mysql'):
        if 'mysql_engine' not in kw and 'mysql_type' not in kw:
            if 'test_needs_fk' in test_opts or 'test_needs_acid' in test_opts:
                kw['mysql_engine'] = 'InnoDB'

    # Apply some default cascading rules for self-referential foreign keys.
    # MySQL InnoDB has some issues around selecting self-refs too.
    if testing.against('firebird'):
        table_name = args[0]
        unpack = (testing.config.db.dialect.
                  identifier_preparer.unformat_identifiers)

        # Only going after ForeignKeys in Columns.  May need to
        # expand to ForeignKeyConstraint too.
        fks = [fk
               for col in args if isinstance(col, schema.Column)
               for fk in col.args if isinstance(fk, schema.ForeignKey)]

        for fk in fks:
            # root around in raw spec
            ref = fk._colspec
            if isinstance(ref, schema.Column):
                name = ref.table.name
            else:
                # take just the table name: on FB there cannot be
                # a schema, so the first element is always the
                # table name, possibly followed by the field name
                name = unpack(ref)[0]
            if name == table_name:
                if fk.ondelete is None:
                    fk.ondelete = 'CASCADE'
                if fk.onupdate is None:
                    fk.onupdate = 'CASCADE'

    if testing.against('firebird', 'oracle'):
        pk_seqs = [col for col in args
                   if (isinstance(col, schema.Column)
                       and col.primary_key
                       and getattr(col, '_needs_autoincrement', False))]
        for c in pk_seqs:
            c.args.append(schema.Sequence(args[0] + '_' + c.name + '_seq', optional=True))
    return schema.Table(*args, **kw)
Example #4
def Table(*args, **kw):
    """A schema.Table wrapper/hook for dialect-specific tweaks."""

    test_opts = dict([(k, kw.pop(k)) for k in kw.keys() if k.startswith("test_")])

    kw.update(table_options)

    if testing.against("mysql"):
        if "mysql_engine" not in kw and "mysql_type" not in kw:
            if "test_needs_fk" in test_opts or "test_needs_acid" in test_opts:
                kw["mysql_engine"] = "InnoDB"

    # Apply some default cascading rules for self-referential foreign keys.
    # MySQL InnoDB has some issues around selecting self-refs too.
    if testing.against("firebird"):
        table_name = args[0]
        unpack = testing.config.db.dialect.identifier_preparer.unformat_identifiers

        # Only going after ForeignKeys in Columns.  May need to
        # expand to ForeignKeyConstraint too.
        fks = [
            fk for col in args if isinstance(col, schema.Column) for fk in col.args if isinstance(fk, schema.ForeignKey)
        ]

        for fk in fks:
            # root around in raw spec
            ref = fk._colspec
            if isinstance(ref, schema.Column):
                name = ref.table.name
            else:
                # take just the table name: on FB there cannot be
                # a schema, so the first element is always the
                # table name, possibly followed by the field name
                name = unpack(ref)[0]
            if name == table_name:
                if fk.ondelete is None:
                    fk.ondelete = "CASCADE"
                if fk.onupdate is None:
                    fk.onupdate = "CASCADE"

    if testing.against("firebird", "oracle"):
        pk_seqs = [
            col
            for col in args
            if (isinstance(col, schema.Column) and col.primary_key and getattr(col, "_needs_autoincrement", False))
        ]
        for c in pk_seqs:
            c.args.append(schema.Sequence(args[0] + "_" + c.name + "_seq", optional=True))
    return schema.Table(*args, **kw)
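Every example on this page branches on testing.against(). A loose, hypothetical sketch of the matching it performs — config.db and the exact spec handling are assumptions here, not the real test-suite source — could look like:

def against(*specs):
    """Return True if the database under test matches any given spec.

    A spec may be a dialect name ('mysql'), a bare driver ('+pyodbc'),
    or a combined 'dialect+driver' string such as 'mysql+mysqldb'.
    """
    dialect = config.db.dialect  # engine currently under test (assumed name)
    for spec in specs:
        name, _, driver = spec.partition('+')
        if (not name or dialect.name == name) and \
                (not driver or dialect.driver == driver):
            return True
    return False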
Example #5
    def test_basic(self):
        try:
            # the 'convert_unicode' should not get in the way of the reflection
            # process.  reflecttable for oracle, postgres (others?) expect non-unicode
            # strings in result sets/bind params
            bind = engines.utf8_engine(options={'convert_unicode':True})
            metadata = MetaData(bind)

            if testing.against('sybase', 'maxdb', 'oracle', 'mssql'):
                names = set(['plain'])
            else:
                names = set([u'plain', u'Unit\u00e9ble', u'\u6e2c\u8a66'])

            for name in names:
                Table(name, metadata, Column('id', sa.Integer, sa.Sequence(name + "_id_seq"), primary_key=True))
            metadata.create_all()

            reflected = set(bind.table_names())
            if not names.issubset(reflected):
                # Python source files in the utf-8 coding seem to normalize
                # literals as NFC (and the above are explicitly NFC).  Maybe
                # this database normalizes NFD on reflection.
                nfc = set([unicodedata.normalize('NFC', n) for n in names])
                self.assert_(nfc == names)
                # Yep.  But still ensure that bulk reflection and create/drop
                # work with either normalization.

            r = MetaData(bind, reflect=True)
            r.drop_all()
            r.create_all()
        finally:
            metadata.drop_all()
            bind.dispose()
Example #6
    def test_limit(self):
        """test limit operations combined with lazy-load relationships."""

        mapper(Item, items)
        mapper(Order,
               orders,
               properties={
                   'items':
                   relationship(Item, secondary=order_items, lazy='select')
               })
        mapper(User,
               users,
               properties={
                   'addresses':
                   relationship(mapper(Address, addresses), lazy='select'),
                   'orders':
                   relationship(Order, lazy='select')
               })

        sess = create_session()
        q = sess.query(User)

        if testing.against('maxdb', 'mssql'):
            l = q.limit(2).all()
            assert self.static.user_all_result[:2] == l
        else:
            l = q.limit(2).offset(1).all()
            assert self.static.user_all_result[1:3] == l
Example #7
    def test_aggregate_1(self):
        if (testing.against('mysql') and
            testing.db.dialect.dbapi.version_info[:4] == (1, 2, 1, 'gamma')):
            return

        query = create_session().query(func.sum(foo.c.bar))
        assert query.filter(foo.c.bar < 30).one() == (435,)
Example #8
    def define_tables(cls, metadata):
        if testing.against('oracle'):
            fk_args = dict(deferrable=True, initially='deferred')
        else:
            fk_args = dict(onupdate='cascade')
            
        users = Table('users', metadata,
            Column('username', String(50), primary_key=True),
            Column('fullname', String(100)),
            test_needs_fk=True)

        addresses = Table('addresses', metadata,
            Column('email', String(50), primary_key=True),
            Column('username', String(50), 
                            ForeignKey('users.username', **fk_args)),
            test_needs_fk=True)

        items = Table('items', metadata,
            Column('itemname', String(50), primary_key=True),
            Column('description', String(100)), 
            test_needs_fk=True)

        users_to_items = Table('users_to_items', metadata,
            Column('username', String(50), 
                                ForeignKey('users.username', **fk_args),
                                primary_key=True),
            Column('itemname', String(50), 
                                ForeignKey('items.itemname', **fk_args),
                                primary_key=True),
            test_needs_fk=True)
Example #9
    def test_explicit_default_schema(self):
        engine = testing.db

        if testing.against('sqlite'):
            # Works for CREATE TABLE main.foo, SELECT FROM main.foo, etc.,
            # but fails on:
            #   FOREIGN KEY(col2) REFERENCES main.table1 (col1)
            schema = 'main'
        else:
            schema = engine.dialect.default_schema_name

        assert bool(schema)
        
        metadata = MetaData(engine)
        table1 = Table('table1', metadata,
                       Column('col1', sa.Integer, primary_key=True),
                       test_needs_fk=True,
                       schema=schema)
        table2 = Table('table2', metadata,
                       Column('col1', sa.Integer, primary_key=True),
                       Column('col2', sa.Integer,
                              sa.ForeignKey('%s.table1.col1' % schema)),
                       test_needs_fk=True,
                       schema=schema)
        try:
            metadata.create_all()
            metadata.create_all(checkfirst=True)
            assert len(metadata.tables) == 2
            metadata.clear()

            table1 = Table('table1', metadata, autoload=True, schema=schema)
            table2 = Table('table2', metadata, autoload=True, schema=schema)
            assert len(metadata.tables) == 2
        finally:
            metadata.drop_all()
Example #10
    def define_tables(cls, metadata):
        if testing.against('oracle'):
            fk_args = dict(deferrable=True, initially='deferred')
        else:
            fk_args = dict(onupdate='cascade')

        Table('person',
              metadata,
              Column('name', String(50), primary_key=True),
              Column('type', String(50), nullable=False),
              test_needs_fk=True)

        Table('engineer',
              metadata,
              Column('name',
                     String(50),
                     ForeignKey('person.name', **fk_args),
                     primary_key=True),
              Column('primary_language', String(50)),
              Column('boss_name', String(50),
                     ForeignKey('manager.name', **fk_args)),
              test_needs_fk=True)

        Table('manager',
              metadata,
              Column('name',
                     String(50),
                     ForeignKey('person.name', **fk_args),
                     primary_key=True),
              Column('paperwork', String(50)),
              test_needs_fk=True)
Example #11
    def define_tables(cls, metadata):
        if testing.against('oracle'):
            fk_args = dict(deferrable=True, initially='deferred')
        else:
            fk_args = dict(onupdate='cascade')

        Table('users',
              metadata,
              Column('id',
                     Integer,
                     primary_key=True,
                     test_needs_autoincrement=True),
              Column('username', String(50), unique=True),
              Column('fullname', String(100)),
              test_needs_fk=True)

        Table('addresses',
              metadata,
              Column('id',
                     Integer,
                     primary_key=True,
                     test_needs_autoincrement=True),
              Column('email', String(50)),
              Column('username', String(50),
                     ForeignKey('users.username', **fk_args)),
              test_needs_fk=True)
Example #12
    def test_updatemany(self):
        # MySQL-Python 1.2.2 breaks functions in execute_many :(
        if (testing.against('mysql') and
            testing.db.dialect.dbapi.version_info[:3] == (1, 2, 2)):
            return

        t.insert().execute({}, {}, {})

        t.update(t.c.col1==sa.bindparam('pkval')).execute(
            {'pkval':51,'col7':None, 'col8':None, 'boolcol1':False})

        t.update(t.c.col1==sa.bindparam('pkval')).execute(
            {'pkval':51,},
            {'pkval':52,},
            {'pkval':53,})

        l = t.select().execute()
        ctexec = currenttime.scalar()
        today = datetime.date.today()
        eq_(l.fetchall(),
            [(51, 'im the update', f2, ts, ts, ctexec, False, False,
              13, today, 'py'),
             (52, 'im the update', f2, ts, ts, ctexec, True, False,
              13, today, 'py'),
             (53, 'im the update', f2, ts, ts, ctexec, True, False,
              13, today, 'py')])
Example #13
    def define_tables(cls, metadata):
        if testing.against('oracle'):
            fk_args = dict(deferrable=True, initially='deferred')
        else:
            fk_args = dict(onupdate='cascade')

        Table('person', metadata,
            Column('name', String(50), primary_key=True),
            Column('type', String(50), nullable=False),
            test_needs_fk=True)
        
        Table('engineer', metadata,
            Column('name', String(50), ForeignKey('person.name', **fk_args),
                   primary_key=True),
            Column('primary_language', String(50)),
            Column('boss_name', String(50),
                   ForeignKey('manager.name', **fk_args)),
            test_needs_fk=True
        )

        Table('manager', metadata,
            Column('name', String(50), 
                                    ForeignKey('person.name', **fk_args),
                                    primary_key=True),
            Column('paperwork', String(50)),
            test_needs_fk=True
        )
Example #14
    def test_aggregate_1(self):
        if (testing.against('mysql')
                and testing.db.dialect.dbapi.version_info[:4]
                == (1, 2, 1, 'gamma')):
            return

        query = create_session().query(func.sum(foo.c.bar))
        assert query.filter(foo.c.bar < 30).one() == (435, )
Example #15
 def define_tables(cls, metadata):
     if testing.against('oracle'):
         fk_args = dict(deferrable=True, initially='deferred')
     else:
         fk_args = dict(onupdate='cascade')
     
     Table('nodes', metadata,
           Column('name', String(50), primary_key=True),
           Column('parent', String(50),
                  ForeignKey('nodes.name', **fk_args)))
Example #16
def Column(*args, **kw):
    """A schema.Column wrapper/hook for dialect-specific tweaks."""

    test_opts = dict([(k, kw.pop(k)) for k in kw.keys() if k.startswith("test_")])

    c = schema.Column(*args, **kw)
    if testing.against("firebird", "oracle"):
        if "test_needs_autoincrement" in test_opts:
            c._needs_autoincrement = True
    return c
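The _needs_autoincrement flag set here is consumed by the Table() wrapper shown in Example #3: on Firebird and Oracle it appends an explicit Sequence for such primary key columns (repeated here from Example #3 for cross-reference):

    if testing.against('firebird', 'oracle'):
        pk_seqs = [col for col in args
                   if (isinstance(col, schema.Column)
                       and col.primary_key
                       and getattr(col, '_needs_autoincrement', False))]
        for c in pk_seqs:
            c.args.append(schema.Sequence(args[0] + '_' + c.name + '_seq',
                                          optional=True))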
Example #17
def Table(*args, **kw):
    """A schema.Table wrapper/hook for dialect-specific tweaks."""

    test_opts = dict([(k,kw.pop(k)) for k in kw.keys()
                      if k.startswith('test_')])

    kw.update(table_options)

    if testing.against('mysql'):
        if 'mysql_engine' not in kw and 'mysql_type' not in kw:
            if 'test_needs_fk' in test_opts or 'test_needs_acid' in test_opts:
                kw['mysql_engine'] = 'InnoDB'

    # Apply some default cascading rules for self-referential foreign keys.
    # MySQL InnoDB has some issues around selecting self-refs too.
    if testing.against('firebird'):
        table_name = args[0]
        unpack = (testing.config.db.dialect.
                  identifier_preparer.unformat_identifiers)

        # Only going after ForeignKeys in Columns.  May need to
        # expand to ForeignKeyConstraint too.
        fks = [fk
               for col in args if isinstance(col, schema.Column)
               for fk in col.foreign_keys]

        for fk in fks:
            # root around in raw spec
            ref = fk._colspec
            if isinstance(ref, schema.Column):
                name = ref.table.name
            else:
                # take just the table name: on FB there cannot be
                # a schema, so the first element is always the
                # table name, possibly followed by the field name
                name = unpack(ref)[0]
            if name == table_name:
                if fk.ondelete is None:
                    fk.ondelete = 'CASCADE'
                if fk.onupdate is None:
                    fk.onupdate = 'CASCADE'

    return schema.Table(*args, **kw)
Example #18
def Column(*args, **kw):
    """A schema.Column wrapper/hook for dialect-specific tweaks."""

    test_opts = dict([(k,kw.pop(k)) for k in kw.keys()
                      if k.startswith('test_')])

    c = schema.Column(*args, **kw)
    if testing.against('firebird', 'oracle'):
        if 'test_needs_autoincrement' in test_opts:
            c._needs_autoincrement = True
    return c
Example #19
 def _test_get_table_oid(self, table_name, schema=None):
     if testing.against('postgresql'):
         meta = MetaData(testing.db)
         (users, addresses) = createTables(meta, schema)
         meta.create_all()
         try:
             insp = create_inspector(meta.bind)
             oid = insp.get_table_oid(table_name, schema)
             self.assert_(isinstance(oid, (int, long)))
         finally:
             addresses.drop()
             users.drop()
Example #20
    def define_tables(cls, metadata):
        if testing.against('oracle'):
            fk_args = dict(deferrable=True, initially='deferred')
        else:
            fk_args = dict(onupdate='cascade')

        Table('nodes',
              metadata,
              Column('name', String(50), primary_key=True),
              Column('parent', String(50), ForeignKey('nodes.name',
                                                      **fk_args)),
              test_needs_fk=True)
Example #21
    def test_explicit_default_schema(self):
        engine = testing.db

        if testing.against('mysql'):
            schema = testing.db.url.database
        elif testing.against('postgres'):
            schema = 'public'
        elif testing.against('sqlite'):
            # Works for CREATE TABLE main.foo, SELECT FROM main.foo, etc.,
            # but fails on:
            #   FOREIGN KEY(col2) REFERENCES main.table1 (col1)
            schema = 'main'
        else:
            schema = engine.dialect.get_default_schema_name(engine.connect())

        metadata = MetaData(engine)
        table1 = Table('table1',
                       metadata,
                       Column('col1', sa.Integer, primary_key=True),
                       test_needs_fk=True,
                       schema=schema)
        table2 = Table('table2',
                       metadata,
                       Column('col1', sa.Integer, primary_key=True),
                       Column('col2', sa.Integer,
                              sa.ForeignKey('%s.table1.col1' % schema)),
                       test_needs_fk=True,
                       schema=schema)
        try:
            metadata.create_all()
            metadata.create_all(checkfirst=True)
            assert len(metadata.tables) == 2
            metadata.clear()

            table1 = Table('table1', metadata, autoload=True, schema=schema)
            table2 = Table('table2', metadata, autoload=True, schema=schema)
            assert len(metadata.tables) == 2
        finally:
            metadata.drop_all()
Example #22
    def define_tables(cls, metadata):
        # determine a literal value for "false" based on the dialect
        # FIXME: this DefaultClause setup is bogus.

        dialect = testing.db.dialect
        bp = sa.Boolean().dialect_impl(dialect).bind_processor(dialect)

        if bp:
            false = str(bp(False))
        elif testing.against('maxdb'):
            false = text('FALSE')
        else:
            false = str(False)
        cls.other_artifacts['false'] = false

        Table('owners', metadata,
              Column('id', Integer, primary_key=True, nullable=False),
              Column('data', String(30)))

        Table('categories', metadata,
              Column('id', Integer, primary_key=True, nullable=False),
              Column('name', String(20)))

        Table(
            'tests', metadata,
            Column('id', Integer, primary_key=True, nullable=False),
            Column('owner_id',
                   Integer,
                   ForeignKey('owners.id'),
                   nullable=False),
            Column('category_id',
                   Integer,
                   ForeignKey('categories.id'),
                   nullable=False))

        Table(
            'options', metadata,
            Column('test_id',
                   Integer,
                   ForeignKey('tests.id'),
                   primary_key=True,
                   nullable=False),
            Column('owner_id',
                   Integer,
                   ForeignKey('owners.id'),
                   primary_key=True,
                   nullable=False),
            Column('someoption',
                   sa.Boolean,
                   server_default=false,
                   nullable=False))
Example #23
def Column(*args, **kw):
    """A schema.Column wrapper/hook for dialect-specific tweaks."""

    test_opts = dict([(k,kw.pop(k)) for k in kw.keys()
                      if k.startswith('test_')])

    col = schema.Column(*args, **kw)
    if 'test_needs_autoincrement' in test_opts and \
        kw.get('primary_key', False) and \
        testing.against('firebird', 'oracle'):
        def add_seq(tbl, c):
            c._init_items(
                schema.Sequence(_truncate_name(testing.db.dialect, tbl.name + '_' + c.name + '_seq'), optional=True)
            )
        col._on_table_attach(add_seq)
    return col
Example #24
    def test_insertmany(self):
        # MySQL-Python 1.2.2 breaks functions in execute_many :(
        if (testing.against('mysql+mysqldb')
                and testing.db.dialect.dbapi.version_info[:3] == (1, 2, 2)):
            return

        r = t.insert().execute({}, {}, {})

        ctexec = currenttime.scalar()
        l = t.select().execute()
        today = datetime.date.today()
        eq_(l.fetchall(), [(51, 'imthedefault', f, ts, ts, ctexec, True, False,
                            12, today, 'py'),
                           (52, 'imthedefault', f, ts, ts, ctexec, True, False,
                            12, today, 'py'),
                           (53, 'imthedefault', f, ts, ts, ctexec, True, False,
                            12, today, 'py')])
Example #25
 def _test_get_columns(self, schema=None, table_type='table'):
     meta = MetaData(testing.db)
     (users, addresses) = createTables(meta, schema)
     table_names = ['users', 'email_addresses']
     meta.create_all()
     if table_type == 'view':
         createViews(meta.bind, schema)
         table_names = ['users_v', 'email_addresses_v']
     try:
         insp = Inspector(meta.bind)
         for (table_name, table) in zip(table_names, (users, addresses)):
             schema_name = schema
             cols = insp.get_columns(table_name, schema=schema_name)
             self.assert_(len(cols) > 0, len(cols))
             # should be in order
             for (i, col) in enumerate(table.columns):
                 eq_(col.name, cols[i]['name'])
                 ctype = cols[i]['type'].__class__
                 ctype_def = col.type
                 if isinstance(ctype_def, sa.types.TypeEngine):
                     ctype_def = ctype_def.__class__
                     
                 # Oracle returns Date for DateTime.
                 if testing.against('oracle') \
                     and ctype_def in (sql_types.Date, sql_types.DateTime):
                         ctype_def = sql_types.Date
                 
                 # assert that the desired type and return type
                 # share a base within one of the generic types.
                 self.assert_(
                     len(
                         set(ctype.__mro__)
                         .intersection(ctype_def.__mro__)
                         .intersection([sql_types.Integer, sql_types.Numeric,
                                        sql_types.DateTime, sql_types.Date,
                                        sql_types.Time, sql_types.String,
                                        sql_types._Binary])
                     ) > 0,
                     "%s(%s), %s(%s)" % (col.name, col.type,
                                         cols[i]['name'], ctype))
     finally:
         if table_type == 'view':
             dropViews(meta.bind, schema)
         addresses.drop()
         users.drop()
Example #26
    def test_insertmany(self):
        # MySQL-Python 1.2.2 breaks functions in execute_many :(
        if (testing.against('mysql') and
            testing.db.dialect.dbapi.version_info[:3] == (1, 2, 2)):
            return

        r = t.insert().execute({}, {}, {})

        ctexec = currenttime.scalar()
        l = t.select().execute()
        today = datetime.date.today()
        eq_(l.fetchall(),
            [(51, 'imthedefault', f, ts, ts, ctexec, True, False,
              12, today, 'py'),
             (52, 'imthedefault', f, ts, ts, ctexec, True, False,
              12, today, 'py'),
             (53, 'imthedefault', f, ts, ts, ctexec, True, False,
              12, today, 'py')])
Example #27
    def define_tables(cls, metadata):
        if testing.against('oracle'):
            fk_args = dict(deferrable=True, initially='deferred')
        else:
            fk_args = dict(onupdate='cascade')

        Table('users', metadata,
            Column('username', String(50), primary_key=True),
            test_needs_fk=True)

        Table('addresses', metadata,
              Column('username', String(50),
                     ForeignKey('users.username', **fk_args),
                     primary_key=True),
              Column('email', String(50), primary_key=True),
              Column('etc', String(50)),
              test_needs_fk=True)
Example #28
def Column(*args, **kw):
    """A schema.Column wrapper/hook for dialect-specific tweaks."""

    test_opts = dict([(k, kw.pop(k)) for k in kw.keys() if k.startswith("test_")])

    col = schema.Column(*args, **kw)
    if (
        "test_needs_autoincrement" in test_opts
        and kw.get("primary_key", False)
        and testing.against("firebird", "oracle")
    ):

        def add_seq(tbl, c):
            c._init_items(
                schema.Sequence(_truncate_name(testing.db.dialect, tbl.name + "_" + c.name + "_seq"), optional=True)
            )

        col._on_table_attach(add_seq)
    return col
Example #29
def Column(*args, **kw):
    """A schema.Column wrapper/hook for dialect-specific tweaks."""

    test_opts = dict([(k, kw.pop(k)) for k in kw.keys()
                      if k.startswith('test_')])

    col = schema.Column(*args, **kw)
    if 'test_needs_autoincrement' in test_opts and \
        kw.get('primary_key', False) and \
        testing.against('firebird', 'oracle'):

        def add_seq(tbl, c):
            c._init_items(
                schema.Sequence(_truncate_name(
                    testing.db.dialect, tbl.name + '_' + c.name + '_seq'),
                                optional=True))

        col._on_table_attach(add_seq)
    return col
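Examples #23, #28 and #29 call a _truncate_name() helper that is not shown on this page. A plausible sketch, assuming only the standard dialect.max_identifier_length attribute (illustrative, not the library's actual source):

def _truncate_name(dialect, name):
    # keep generated sequence names within the dialect's identifier limit
    if len(name) > dialect.max_identifier_length:
        return (name[0:max(dialect.max_identifier_length - 6, 0)] +
                "_" + hex(hash(name) % 64)[2:])
    return name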
Example #30
    def test_reserved(self):
        # check that a table using an SQL reserved name doesn't cause an error
        meta = MetaData(testing.db)
        table_a = Table('select', meta,
                        Column('not', sa.Integer, primary_key=True),
                        Column('from', sa.String(12), nullable=False),
                        sa.UniqueConstraint('from', name='when'))
        sa.Index('where', table_a.c['from'])

        # There's currently no way to calculate identifier case normalization
        # in isolation, so...
        if testing.against('firebird', 'oracle', 'maxdb'):
            check_col = 'TRUE'
        else:
            check_col = 'true'
        quoter = meta.bind.dialect.identifier_preparer.quote_identifier

        table_b = Table(
            'false', meta, Column('create', sa.Integer, primary_key=True),
            Column('true', sa.Integer, sa.ForeignKey('select.not')),
            sa.CheckConstraint('%s <> 1' % quoter(check_col), name='limit'))

        table_c = Table(
            'is', meta,
            Column('or', sa.Integer, nullable=False, primary_key=True),
            Column('join', sa.Integer, nullable=False, primary_key=True),
            sa.PrimaryKeyConstraint('or', 'join', name='to'))

        index_c = sa.Index('else', table_c.c.join)

        meta.create_all()

        index_c.drop()

        meta2 = MetaData(testing.db)
        try:
            table_a2 = Table('select', meta2, autoload=True)
            table_b2 = Table('false', meta2, autoload=True)
            table_c2 = Table('is', meta2, autoload=True)
        finally:
            meta.drop_all()
Example #31
    def test_reserved(self):
        # check that a table using an SQL reserved name doesn't cause an error
        meta = MetaData(testing.db)
        table_a = Table('select', meta,
                       Column('not', sa.Integer, primary_key=True),
                       Column('from', sa.String(12), nullable=False),
                       sa.UniqueConstraint('from', name='when'))
        sa.Index('where', table_a.c['from'])

        # There's currently no way to calculate identifier case normalization
        # in isolation, so...
        if testing.against('firebird', 'oracle', 'maxdb'):
            check_col = 'TRUE'
        else:
            check_col = 'true'
        quoter = meta.bind.dialect.identifier_preparer.quote_identifier

        table_b = Table('false', meta,
                        Column('create', sa.Integer, primary_key=True),
                        Column('true', sa.Integer, sa.ForeignKey('select.not')),
                        sa.CheckConstraint('%s <> 1' % quoter(check_col),
                                        name='limit'))

        table_c = Table('is', meta,
                        Column('or', sa.Integer, nullable=False, primary_key=True),
                        Column('join', sa.Integer, nullable=False, primary_key=True),
                        sa.PrimaryKeyConstraint('or', 'join', name='to'))

        index_c = sa.Index('else', table_c.c.join)

        meta.create_all()

        index_c.drop()

        meta2 = MetaData(testing.db)
        try:
            table_a2 = Table('select', meta2, autoload=True)
            table_b2 = Table('false', meta2, autoload=True)
            table_c2 = Table('is', meta2, autoload=True)
        finally:
            meta.drop_all()
Example #32
    def test_limit(self):
        """test limit operations combined with lazy-load relationships."""

        mapper(Item, items)
        mapper(Order, orders, properties={
            'items':relationship(Item, secondary=order_items, lazy='select')
        })
        mapper(User, users, properties={
            'addresses':relationship(mapper(Address, addresses), lazy='select'),
            'orders':relationship(Order, lazy='select')
        })

        sess = create_session()
        q = sess.query(User)

        if testing.against('maxdb', 'mssql'):
            l = q.limit(2).all()
            assert self.static.user_all_result[:2] == l
        else:
            l = q.limit(2).offset(1).all()
            assert self.static.user_all_result[1:3] == l
Example #33
    def test_basic(self):
        try:
            # the 'convert_unicode' should not get in the way of the reflection
            # process.  reflecttable for oracle, postgres (others?) expect non-unicode
            # strings in result sets/bind params
            bind = engines.utf8_engine(options={'convert_unicode': True})
            metadata = MetaData(bind)

            if testing.against('sybase', 'maxdb', 'oracle', 'mssql'):
                names = set(['plain'])
            else:
                names = set([u'plain', u'Unit\u00e9ble', u'\u6e2c\u8a66'])

            for name in names:
                Table(
                    name, metadata,
                    Column('id',
                           sa.Integer,
                           sa.Sequence(name + "_id_seq"),
                           primary_key=True))
            metadata.create_all()

            reflected = set(bind.table_names())
            if not names.issubset(reflected):
                # Python source files in the utf-8 coding seem to normalize
                # literals as NFC (and the above are explicitly NFC).  Maybe
                # this database normalizes NFD on reflection.
                nfc = set([unicodedata.normalize('NFC', n) for n in names])
                self.assert_(nfc == names)
                # Yep.  But still ensure that bulk reflection and create/drop
                # work with either normalization.

            r = MetaData(bind, reflect=True)
            r.drop_all()
            r.create_all()
        finally:
            metadata.drop_all()
            bind.dispose()
Example #34
    def test_limit(self):
        """test limit operations combined with lazy-load relationships."""

        mapper(Item, items)
        mapper(Order, orders, properties={"items": relation(Item, secondary=order_items, lazy=True)})
        mapper(
            User,
            users,
            properties={
                "addresses": relation(mapper(Address, addresses), lazy=True),
                "orders": relation(Order, lazy=True),
            },
        )

        sess = create_session()
        q = sess.query(User)

        if testing.against("maxdb", "mssql"):
            l = q.limit(2).all()
            assert self.static.user_all_result[:2] == l
        else:
            l = q.limit(2).offset(1).all()
            assert self.static.user_all_result[1:3] == l
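Example #34 uses the older relation()/lazy=True spelling; Examples #6 and #32 map the same relationship with the later names, e.g.:

mapper(Order, orders, properties={
    'items': relationship(Item, secondary=order_items, lazy='select'),
})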
Example #35
    def define_tables(cls, metadata):
        # determine a literal value for "false" based on the dialect
        # FIXME: this DefaultClause setup is bogus.

        dialect = testing.db.dialect
        bp = sa.Boolean().dialect_impl(dialect).bind_processor(dialect)

        if bp:
            false = str(bp(False))
        elif testing.against('maxdb'):
            false = text('FALSE')
        else:
            false = str(False)
        cls.other_artifacts['false'] = false

        Table('owners', metadata ,
              Column('id', Integer, primary_key=True, nullable=False),
              Column('data', String(30)))

        Table('categories', metadata,
              Column('id', Integer, primary_key=True, nullable=False),
              Column('name', String(20)))

        Table('tests', metadata ,
              Column('id', Integer, primary_key=True, nullable=False ),
              Column('owner_id', Integer, ForeignKey('owners.id'),
                     nullable=False),
              Column('category_id', Integer, ForeignKey('categories.id'),
                     nullable=False))

        Table('options', metadata ,
              Column('test_id', Integer, ForeignKey('tests.id'),
                     primary_key=True, nullable=False),
              Column('owner_id', Integer, ForeignKey('owners.id'),
                     primary_key=True, nullable=False),
              Column('someoption', sa.Boolean, server_default=false,
                     nullable=False))
Example #36
class ExecuteTest(TestBase):
    @classmethod
    def setup_class(cls):
        global users, users_autoinc, metadata
        metadata = MetaData(testing.db)
        users = Table(
            'users',
            metadata,
            Column('user_id', INT, primary_key=True, autoincrement=False),
            Column('user_name', VARCHAR(20)),
        )
        users_autoinc = Table(
            'users_autoinc',
            metadata,
            Column('user_id',
                   INT,
                   primary_key=True,
                   test_needs_autoincrement=True),
            Column('user_name', VARCHAR(20)),
        )
        metadata.create_all()

    @engines.close_first
    def teardown(self):
        testing.db.connect().execute(users.delete())

    @classmethod
    def teardown_class(cls):
        metadata.drop_all()

    @testing.fails_on_everything_except('firebird', 'maxdb', 'sqlite',
                                        '+pyodbc', '+mxodbc', '+zxjdbc',
                                        'mysql+oursql', 'informix+informixdb')
    def test_raw_qmark(self):
        for conn in testing.db, testing.db.connect():
            conn.execute(
                'insert into users (user_id, user_name) '
                'values (?, ?)', (1, 'jack'))
            conn.execute(
                'insert into users (user_id, user_name) '
                'values (?, ?)', [2, 'fred'])
            conn.execute(
                'insert into users (user_id, user_name) '
                'values (?, ?)', [3, 'ed'], [4, 'horse'])
            conn.execute(
                'insert into users (user_id, user_name) '
                'values (?, ?)', (5, 'barney'), (6, 'donkey'))
            conn.execute(
                'insert into users (user_id, user_name) '
                'values (?, ?)', 7, 'sally')
            res = conn.execute('select * from users order by user_id')
            assert res.fetchall() == [
                (1, 'jack'),
                (2, 'fred'),
                (3, 'ed'),
                (4, 'horse'),
                (5, 'barney'),
                (6, 'donkey'),
                (7, 'sally'),
            ]
            conn.execute('delete from users')

    # some psycopg2 versions bomb this.
    @testing.fails_on_everything_except('mysql+mysqldb',
                                        'mysql+mysqlconnector', 'postgresql')
    @testing.fails_on('postgresql+zxjdbc', 'sprintf not supported')
    def test_raw_sprintf(self):
        for conn in testing.db, testing.db.connect():
            conn.execute(
                'insert into users (user_id, user_name) '
                'values (%s, %s)', [1, 'jack'])
            conn.execute(
                'insert into users (user_id, user_name) '
                'values (%s, %s)', [2, 'ed'], [3, 'horse'])
            conn.execute(
                'insert into users (user_id, user_name) '
                'values (%s, %s)', 4, 'sally')
            conn.execute('insert into users (user_id) values (%s)', 5)
            res = conn.execute('select * from users order by user_id')
            assert res.fetchall() == [(1, 'jack'), (2, 'ed'), (3, 'horse'),
                                      (4, 'sally'), (5, None)]
            conn.execute('delete from users')

    # pyformat is supported for mysql, but skipping because a few driver
    # versions have a bug that bombs out on this test. (1.2.2b3,
    # 1.2.2c1, 1.2.2)

    @testing.skip_if(lambda: testing.against('mysql+mysqldb'), 'db-api flaky')
    @testing.fails_on_everything_except('postgresql+psycopg2',
                                        'postgresql+pypostgresql',
                                        'mysql+mysqlconnector')
    def test_raw_python(self):
        for conn in testing.db, testing.db.connect():
            conn.execute(
                'insert into users (user_id, user_name) '
                'values (%(id)s, %(name)s)', {
                    'id': 1,
                    'name': 'jack'
                })
            conn.execute(
                'insert into users (user_id, user_name) '
                'values (%(id)s, %(name)s)', {
                    'id': 2,
                    'name': 'ed'
                }, {
                    'id': 3,
                    'name': 'horse'
                })
            conn.execute(
                'insert into users (user_id, user_name) '
                'values (%(id)s, %(name)s)',
                id=4,
                name='sally')
            res = conn.execute('select * from users order by user_id')
            assert res.fetchall() == [(1, 'jack'), (2, 'ed'), (3, 'horse'),
                                      (4, 'sally')]
            conn.execute('delete from users')

    @testing.fails_on_everything_except('sqlite', 'oracle+cx_oracle',
                                        'informix+informixdb')
    def test_raw_named(self):
        for conn in testing.db, testing.db.connect():
            conn.execute(
                'insert into users (user_id, user_name) '
                'values (:id, :name)', {
                    'id': 1,
                    'name': 'jack'
                })
            conn.execute(
                'insert into users (user_id, user_name) '
                'values (:id, :name)', {
                    'id': 2,
                    'name': 'ed'
                }, {
                    'id': 3,
                    'name': 'horse'
                })
            conn.execute(
                'insert into users (user_id, user_name) '
                'values (:id, :name)',
                id=4,
                name='sally')
            res = conn.execute('select * from users order by user_id')
            assert res.fetchall() == [(1, 'jack'), (2, 'ed'), (3, 'horse'),
                                      (4, 'sally')]
            conn.execute('delete from users')

    def test_exception_wrapping(self):
        for conn in testing.db, testing.db.connect():
            try:
                conn.execute('osdjafioajwoejoasfjdoifjowejfoawejqoijwef')
                assert False
            except tsa.exc.DBAPIError:
                assert True

    def test_empty_insert(self):
        """test that execute() interprets [] as a list with no params"""

        result = \
            testing.db.execute(users_autoinc.insert().
                        values(user_name=bindparam('name')), [])
        eq_(testing.db.execute(users_autoinc.select()).fetchall(), [(1, None)])

    def test_engine_level_options(self):
        eng = engines.testing_engine(
            options={'execution_options': {
                'foo': 'bar'
            }})
        conn = eng.contextual_connect()
        eq_(conn._execution_options['foo'], 'bar')
        eq_(
            conn.execution_options(bat='hoho')._execution_options['foo'],
            'bar')
        eq_(
            conn.execution_options(bat='hoho')._execution_options['bat'],
            'hoho')
        eq_(
            conn.execution_options(foo='hoho')._execution_options['foo'],
            'hoho')
        eng.update_execution_options(foo='hoho')
        conn = eng.contextual_connect()
        eq_(conn._execution_options['foo'], 'hoho')
Example #37
class NaturalPKTest(_base.MappedTest):
    # MySQL 5.5 on Windows crashes (the entire server, not the client)
    # if you screw around with ON UPDATE CASCADE type of stuff.
    __requires__ = 'skip_mysql_on_windows',

    @classmethod
    def define_tables(cls, metadata):
        if testing.against('oracle'):
            fk_args = dict(deferrable=True, initially='deferred')
        else:
            fk_args = dict(onupdate='cascade')

        users = Table('users',
                      metadata,
                      Column('username', String(50), primary_key=True),
                      Column('fullname', String(100)),
                      test_needs_fk=True)

        addresses = Table('addresses',
                          metadata,
                          Column('email', String(50), primary_key=True),
                          Column('username', String(50),
                                 ForeignKey('users.username', **fk_args)),
                          test_needs_fk=True)

        items = Table('items',
                      metadata,
                      Column('itemname', String(50), primary_key=True),
                      Column('description', String(100)),
                      test_needs_fk=True)

        users_to_items = Table('users_to_items',
                               metadata,
                               Column('username',
                                      String(50),
                                      ForeignKey('users.username', **fk_args),
                                      primary_key=True),
                               Column('itemname',
                                      String(50),
                                      ForeignKey('items.itemname', **fk_args),
                                      primary_key=True),
                               test_needs_fk=True)

    @classmethod
    def setup_classes(cls):
        class User(_base.ComparableEntity):
            pass

        class Address(_base.ComparableEntity):
            pass

        class Item(_base.ComparableEntity):
            pass

    @testing.resolve_artifact_names
    def test_entity(self):
        mapper(User, users)

        sess = create_session()
        u1 = User(username='******', fullname='jack')

        sess.add(u1)
        sess.flush()
        assert sess.query(User).get('jack') is u1

        u1.username = '******'
        sess.flush()

        def go():
            assert sess.query(User).get('ed') is u1

        self.assert_sql_count(testing.db, go, 0)

        assert sess.query(User).get('jack') is None

        sess.expunge_all()
        u1 = sess.query(User).get('ed')
        eq_(User(username='******', fullname='jack'), u1)

    @testing.resolve_artifact_names
    def test_load_after_expire(self):
        mapper(User, users)

        sess = create_session()
        u1 = User(username='******', fullname='jack')

        sess.add(u1)
        sess.flush()
        assert sess.query(User).get('jack') is u1

        users.update(values={User.username: '******'}).execute(username='******')

        # expire/refresh works off of primary key.  the PK is gone
        # in this case so there's no way to look it up.  criterion-based
        # session invalidation could solve this [ticket:911]
        sess.expire(u1)
        assert_raises(sa.orm.exc.ObjectDeletedError, getattr, u1, 'username')

        sess.expunge_all()
        assert sess.query(User).get('jack') is None
        assert sess.query(User).get('ed').fullname == 'jack'

    @testing.resolve_artifact_names
    def test_flush_new_pk_after_expire(self):
        mapper(User, users)
        sess = create_session()
        u1 = User(username='******', fullname='jack')

        sess.add(u1)
        sess.flush()
        assert sess.query(User).get('jack') is u1

        sess.expire(u1)
        u1.username = '******'
        sess.flush()
        sess.expunge_all()
        assert sess.query(User).get('ed').fullname == 'jack'

    @testing.fails_on('sqlite', "sqlite doesn't support ON UPDATE CASCADE")
    @testing.fails_on('oracle', "oracle doesn't support ON UPDATE CASCADE")
    def test_onetomany_passive(self):
        self._test_onetomany(True)

    def test_onetomany_nonpassive(self):
        self._test_onetomany(False)

    @testing.resolve_artifact_names
    def _test_onetomany(self, passive_updates):
        mapper(User,
               users,
               properties={
                   'addresses':
                   relationship(Address, passive_updates=passive_updates)
               })
        mapper(Address, addresses)

        sess = create_session()
        u1 = User(username='******', fullname='jack')
        u1.addresses.append(Address(email='jack1'))
        u1.addresses.append(Address(email='jack2'))
        sess.add(u1)
        sess.flush()

        assert sess.query(Address).get('jack1') is u1.addresses[0]

        u1.username = '******'
        sess.flush()
        assert u1.addresses[0].username == 'ed'

        sess.expunge_all()
        eq_([Address(username='******'),
             Address(username='******')],
            sess.query(Address).all())

        u1 = sess.query(User).get('ed')
        u1.username = '******'

        def go():
            sess.flush()

        if not passive_updates:
            # test passive_updates=False;
            # load addresses, update user, update 2 addresses
            self.assert_sql_count(testing.db, go, 4)
        else:
            # test passive_updates=True; update user
            self.assert_sql_count(testing.db, go, 1)
        sess.expunge_all()
        assert User(username='******', addresses=[
                                        Address(username='******'),
                                        Address(username='******')]) == \
                            sess.query(User).get('jack')

        u1 = sess.query(User).get('jack')
        u1.addresses = []
        u1.username = '******'
        sess.flush()
        sess.expunge_all()
        assert sess.query(Address).get('jack1').username is None
        u1 = sess.query(User).get('fred')
        eq_(User(username='******', fullname='jack'), u1)

    @testing.fails_on('sqlite', "sqlite doesn't support ON UPDATE CASCADE")
    @testing.fails_on('oracle', "oracle doesn't support ON UPDATE CASCADE")
    def test_manytoone_passive(self):
        self._test_manytoone(True)

    def test_manytoone_nonpassive(self):
        self._test_manytoone(False)

    @testing.resolve_artifact_names
    def _test_manytoone(self, passive_updates):
        mapper(User, users)
        mapper(Address,
               addresses,
               properties={
                   'user': relationship(User, passive_updates=passive_updates)
               })

        sess = create_session()
        a1 = Address(email='jack1')
        a2 = Address(email='jack2')

        u1 = User(username='******', fullname='jack')
        a1.user = u1
        a2.user = u1
        sess.add(a1)
        sess.add(a2)
        sess.flush()

        u1.username = '******'

        def go():
            sess.flush()

        if passive_updates:
            self.assert_sql_count(testing.db, go, 1)
        else:
            self.assert_sql_count(testing.db, go, 3)

        def go():
            sess.flush()

        self.assert_sql_count(testing.db, go, 0)

        assert a1.username == a2.username == 'ed'
        sess.expunge_all()
        eq_([Address(username='******'),
             Address(username='******')],
            sess.query(Address).all())

    @testing.fails_on('sqlite', "sqlite doesn't support ON UPDATE CASCADE")
    @testing.fails_on('oracle', "oracle doesn't support ON UPDATE CASCADE")
    def test_onetoone_passive(self):
        self._test_onetoone(True)

    def test_onetoone_nonpassive(self):
        self._test_onetoone(False)

    @testing.resolve_artifact_names
    def _test_onetoone(self, passive_updates):
        mapper(User,
               users,
               properties={
                   "address":
                   relationship(Address,
                                passive_updates=passive_updates,
                                uselist=False)
               })
        mapper(Address, addresses)

        sess = create_session()
        u1 = User(username='******', fullname='jack')
        sess.add(u1)
        sess.flush()

        a1 = Address(email='jack1')
        u1.address = a1
        sess.add(a1)
        sess.flush()

        u1.username = '******'

        def go():
            sess.flush()

        if passive_updates:
            sess.expire(u1, ['address'])
            self.assert_sql_count(testing.db, go, 1)
        else:
            self.assert_sql_count(testing.db, go, 2)

        def go():
            sess.flush()

        self.assert_sql_count(testing.db, go, 0)

        sess.expunge_all()
        eq_([Address(username='******')], sess.query(Address).all())

    @testing.fails_on('sqlite', "sqlite doesn't support ON UPDATE CASCADE")
    @testing.fails_on('oracle', "oracle doesn't support ON UPDATE CASCADE")
    def test_bidirectional_passive(self):
        self._test_bidirectional(True)

    def test_bidirectional_nonpassive(self):
        self._test_bidirectional(False)

    @testing.resolve_artifact_names
    def _test_bidirectional(self, passive_updates):
        mapper(User, users)
        mapper(Address,
               addresses,
               properties={
                   'user':
                   relationship(User,
                                passive_updates=passive_updates,
                                backref='addresses')
               })

        sess = create_session()
        a1 = Address(email='jack1')
        a2 = Address(email='jack2')

        u1 = User(username='******', fullname='jack')
        a1.user = u1
        a2.user = u1
        sess.add(a1)
        sess.add(a2)
        sess.flush()

        u1.username = '******'
        (ad1, ad2) = sess.query(Address).all()
        eq_([Address(username='******'), Address(username='******')], [ad1, ad2])

        def go():
            sess.flush()

        if passive_updates:
            self.assert_sql_count(testing.db, go, 1)
        else:
            self.assert_sql_count(testing.db, go, 3)
        eq_([Address(username='******'), Address(username='******')], [ad1, ad2])
        sess.expunge_all()
        eq_([Address(username='******'),
             Address(username='******')],
            sess.query(Address).all())

        u1 = sess.query(User).get('ed')
        assert len(u1.addresses) == 2  # load addresses
        u1.username = '******'

        def go():
            sess.flush()

        # check that passive_updates is honored on the other side of the backref as well
        if passive_updates:
            self.assert_sql_count(testing.db, go, 1)
        else:
            self.assert_sql_count(testing.db, go, 3)
        sess.expunge_all()
        eq_([Address(username='******'),
             Address(username='******')],
            sess.query(Address).all())

    @testing.fails_on('sqlite', 'sqlite doesnt support ON UPDATE CASCADE')
    @testing.fails_on('oracle', 'oracle doesnt support ON UPDATE CASCADE')
    def test_manytomany_passive(self):
        self._test_manytomany(True)

    # mysqldb executemany() of the association table fails to
    # report the correct row count
    @testing.fails_if(
        lambda: testing.against('mysql') and not testing.against('+zxjdbc'))
    def test_manytomany_nonpassive(self):
        self._test_manytomany(False)

    @testing.resolve_artifact_names
    def _test_manytomany(self, passive_updates):
        mapper(User,
               users,
               properties={
                   'items':
                   relationship(Item,
                                secondary=users_to_items,
                                backref='users',
                                passive_updates=passive_updates)
               })
        mapper(Item, items)

        sess = create_session()
        u1 = User(username='******')
        u2 = User(username='******')
        i1 = Item(itemname='item1')
        i2 = Item(itemname='item2')

        u1.items.append(i1)
        u1.items.append(i2)
        i2.users.append(u2)
        sess.add(u1)
        sess.add(u2)
        sess.flush()

        r = sess.query(Item).all()
        # ComparableEntity can't handle a comparison with the backrefs
        # involved....
        eq_(Item(itemname='item1'), r[0])
        eq_(['jack'], [u.username for u in r[0].users])
        eq_(Item(itemname='item2'), r[1])
        eq_(['jack', 'fred'], [u.username for u in r[1].users])

        u2.username = '******'

        def go():
            sess.flush()

        go()

        def go():
            sess.flush()

        self.assert_sql_count(testing.db, go, 0)

        sess.expunge_all()
        r = sess.query(Item).all()
        eq_(Item(itemname='item1'), r[0])
        eq_(['jack'], [u.username for u in r[0].users])
        eq_(Item(itemname='item2'), r[1])
        eq_(['ed', 'jack'], sorted([u.username for u in r[1].users]))

        sess.expunge_all()
        u2 = sess.query(User).get(u2.username)
        u2.username = '******'
        sess.flush()
        r = sess.query(Item).with_parent(u2).all()
        eq_(Item(itemname='item2'), r[0])
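The tests above all exercise the same idea: users.username is a mutable natural primary key that dependent rows reference, and passive_updates decides who repairs the foreign keys after it changes. With passive_updates=True the ORM emits a single UPDATE against users and relies on the database's ON UPDATE CASCADE; with passive_updates=False it issues the extra UPDATEs itself, which is why the non-passive branches assert higher statement counts. A minimal sketch of that schema/mapping shape, using the same 0.6-era mapper()/relationship() API as these examples (not taken from the test suite):

import sqlalchemy as sa
from sqlalchemy.orm import mapper, relationship

metadata = sa.MetaData()

users = sa.Table(
    'users', metadata,
    sa.Column('username', sa.String(50), primary_key=True),
    sa.Column('fullname', sa.String(100)))

addresses = sa.Table(
    'addresses', metadata,
    sa.Column('email', sa.String(50), primary_key=True),
    # ON UPDATE CASCADE lets the database rewrite this FK itself, which is
    # exactly what passive_updates=True assumes is available.
    sa.Column('username', sa.String(50),
              sa.ForeignKey('users.username', onupdate='CASCADE')))

class User(object):
    pass

class Address(object):
    pass

mapper(User, users)
mapper(Address, addresses, properties={
    # True: one UPDATE against users, the database cascades the new key.
    # False: SQLAlchemy also UPDATEs each dependent addresses row itself.
    'user': relationship(User, passive_updates=True),
})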
Example #38
0
    def setup_class(cls):
        global t, f, f2, ts, currenttime, metadata, default_generator

        db = testing.db
        metadata = MetaData(db)
        default_generator = {'x':50}

        def mydefault():
            default_generator['x'] += 1
            return default_generator['x']

        def myupdate_with_ctx(ctx):
            conn = ctx.connection
            return conn.execute(sa.select([sa.text('13')])).scalar()

        def mydefault_using_connection(ctx):
            conn = ctx.connection
            try:
                return conn.execute(sa.select([sa.text('12')])).scalar()
            finally:
                # ensure a "close()" on this connection does nothing,
                # since it's a "branched" connection
                conn.close()

        use_function_defaults = testing.against('postgres', 'mssql', 'maxdb')
        is_oracle = testing.against('oracle')

        # select "count(1)" returns different results on different DBs also
        # correct for "current_date" compatible as column default, value
        # differences
        currenttime = func.current_date(type_=sa.Date, bind=db)
        if is_oracle:
            ts = db.scalar(
                sa.select([func.trunc(func.sysdate(),
                                      sa.literal_column("'DAY'"),
                                      type_=sa.Date).label('today')]))
            assert isinstance(ts, datetime.date) and \
                not isinstance(ts, datetime.datetime)
            f = sa.select([func.length('abcdef')], bind=db).scalar()
            f2 = sa.select([func.length('abcdefghijk')], bind=db).scalar()
            # TODO: engine propagation across nested functions not working
            currenttime = func.trunc(currenttime, sa.literal_column("'DAY'"), bind=db, type_=sa.Date)
            def1 = currenttime
            def2 = func.trunc(sa.text("sysdate"), sa.literal_column("'DAY'"), type_=sa.Date)

            deftype = sa.Date
        elif use_function_defaults:
            f = sa.select([func.length('abcdef')], bind=db).scalar()
            f2 = sa.select([func.length('abcdefghijk')], bind=db).scalar()
            def1 = currenttime
            deftype = sa.Date
            if testing.against('maxdb'):
                def2 = sa.text("curdate")
            elif testing.against('mssql'):
                def2 = sa.text("getdate()")
            else:
                def2 = sa.text("current_date")
            ts = db.func.current_date().scalar()
        else:
            f = len('abcdef')
            f2 = len('abcdefghijk')
            def1 = def2 = "3"
            ts = 3
            deftype = Integer

        t = Table('default_test1', metadata,
            # python function
            Column('col1', Integer, primary_key=True,
                   default=mydefault),

            # python literal
            Column('col2', String(20),
                   default="imthedefault",
                   onupdate="im the update"),

            # preexecute expression
            Column('col3', Integer,
                   default=func.length('abcdef'),
                   onupdate=func.length('abcdefghijk')),

            # SQL-side default from sql expression
            Column('col4', deftype,
                   server_default=def1),

            # SQL-side default from literal expression
            Column('col5', deftype,
                   server_default=def2),

            # preexecute + update timestamp
            Column('col6', sa.Date,
                   default=currenttime,
                   onupdate=currenttime),

            Column('boolcol1', sa.Boolean, default=True),
            Column('boolcol2', sa.Boolean, default=False),

            # python function which uses ExecutionContext
            Column('col7', Integer,
                   default=mydefault_using_connection,
                   onupdate=myupdate_with_ctx),

            # python builtin
            Column('col8', sa.Date,
                   default=datetime.date.today,
                   onupdate=datetime.date.today),
            # combo
            Column('col9', String(20),
                   default='py',
                   server_default='ddl'))
        t.create()
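The default_test1 table above packs every default mechanism into one fixture. As a quick orientation, here is a much smaller, self-contained sketch (not part of the suite) showing the main flavors side by side, runnable against in-memory SQLite with the list-style select() used throughout these examples:

import sqlalchemy as sa

engine = sa.create_engine('sqlite://')
metadata = sa.MetaData()

counter = {'x': 0}

def next_value():
    # Python-side callable default, invoked once per INSERT.
    counter['x'] += 1
    return counter['x']

demo = sa.Table(
    'default_demo', metadata,
    sa.Column('id', sa.Integer, primary_key=True),
    # python function
    sa.Column('col1', sa.Integer, default=next_value),
    # python literal, plus a value applied on UPDATE
    sa.Column('col2', sa.String(20), default='imthedefault',
              onupdate='im the update'),
    # pre-executed SQL expression
    sa.Column('col3', sa.Integer, default=sa.func.length('abcdef')),
    # python-side default and a DDL-level server default together
    sa.Column('col9', sa.String(20), default='py', server_default='ddl'))

metadata.create_all(engine)
conn = engine.connect()
conn.execute(demo.insert())
print(conn.execute(sa.select([demo])).fetchall())
conn.close()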
Example #39
0
    def test_proxy(self):
        
        stmts = []
        cursor_stmts = []
        
        class MyProxy(ConnectionProxy):
            def execute(self, conn, execute, clauseelement, *multiparams, **params):
                stmts.append(
                    (str(clauseelement), params,multiparams)
                )
                return execute(clauseelement, *multiparams, **params)

            def cursor_execute(self, execute, cursor, statement, parameters, context, executemany):
                cursor_stmts.append(
                    (str(statement), parameters, None)
                )
                return execute(cursor, statement, parameters, context)
        
        def assert_stmts(expected, received):
            for stmt, params, posn in expected:
                if not received:
                    assert False
                while received:
                    teststmt, testparams, testmultiparams = received.pop(0)
                    teststmt = re.compile(r'[\n\t ]+', re.M).sub(' ', teststmt).strip()
                    if teststmt.startswith(stmt) and (testparams==params or testparams==posn):
                        break

        for engine in (
            engines.testing_engine(options=dict(implicit_returning=False, proxy=MyProxy())),
            engines.testing_engine(options=dict(
                                                    implicit_returning=False, 
                                                    proxy=MyProxy(), 
                                                    strategy='threadlocal'))
        ):
            m = MetaData(engine)

            t1 = Table('t1', m, 
                    Column('c1', Integer, primary_key=True), 
                    Column('c2', String(50), default=func.lower('Foo'), primary_key=True)
            )

            m.create_all()
            try:
                t1.insert().execute(c1=5, c2='some data')
                t1.insert().execute(c1=6)
                eq_(engine.execute("select * from t1").fetchall(),
                    [(5, 'some data'), (6, 'foo')]
                )
            finally:
                m.drop_all()
            
            engine.dispose()
            
            compiled = [
                ("CREATE TABLE t1", {}, None),
                ("INSERT INTO t1 (c1, c2)", {'c2': 'some data', 'c1': 5}, None),
                ("INSERT INTO t1 (c1, c2)", {'c1': 6}, None),
                ("select * from t1", {}, None),
                ("DROP TABLE t1", {}, None)
            ]

            if not testing.against('oracle+zxjdbc'): # or engine.dialect.preexecute_pk_sequences:
                cursor = [
                    ("CREATE TABLE t1", {}, ()),
                    ("INSERT INTO t1 (c1, c2)", {'c2': 'some data', 'c1': 5}, (5, 'some data')),
                    ("SELECT lower", {'lower_2':'Foo'}, ('Foo',)),
                    ("INSERT INTO t1 (c1, c2)", {'c2': 'foo', 'c1': 6}, (6, 'foo')),
                    ("select * from t1", {}, ()),
                    ("DROP TABLE t1", {}, ())
                ]
            else:
                insert2_params = (6, 'Foo')
                if testing.against('oracle+zxjdbc'):
                    from sqlalchemy.dialects.oracle.zxjdbc import ReturningParam
                    # tuples are immutable; build the expected row via concatenation
                    insert2_params += (ReturningParam(12),)
                cursor = [
                    ("CREATE TABLE t1", {}, ()),
                    ("INSERT INTO t1 (c1, c2)", {'c2': 'some data', 'c1': 5}, (5, 'some data')),
                    # bind param name 'lower_2' might be incorrect
                    ("INSERT INTO t1 (c1, c2)", {'c1': 6, "lower_2":"Foo"}, insert2_params),  
                    ("select * from t1", {}, ()),
                    ("DROP TABLE t1", {}, ())
                ]
                
            assert_stmts(compiled, stmts)
            assert_stmts(cursor, cursor_stmts)
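Stripped of the assertion machinery, the interception pattern this test wires up is small. A minimal sketch (not from the suite), assuming the same 0.6-era ConnectionProxy API the example itself uses:

import sqlalchemy as sa
from sqlalchemy.interfaces import ConnectionProxy

class LoggingProxy(ConnectionProxy):
    def __init__(self):
        self.statements = []

    def cursor_execute(self, execute, cursor, statement, parameters,
                       context, executemany):
        # Record the raw DBAPI statement, then let execution proceed unchanged.
        self.statements.append((statement, parameters))
        return execute(cursor, statement, parameters, context)

proxy = LoggingProxy()
engine = sa.create_engine('sqlite://', proxy=proxy)
engine.execute(sa.text('SELECT 1'))
print(proxy.statements)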
Example #40
0
    def test_proxy(self):

        stmts = []
        cursor_stmts = []

        class MyProxy(ConnectionProxy):
            def execute(self, conn, execute, clauseelement, *multiparams,
                        **params):
                stmts.append((str(clauseelement), params, multiparams))
                return execute(clauseelement, *multiparams, **params)

            def cursor_execute(
                self,
                execute,
                cursor,
                statement,
                parameters,
                context,
                executemany,
            ):
                cursor_stmts.append((str(statement), parameters, None))
                return execute(cursor, statement, parameters, context)

        def assert_stmts(expected, received):
            for stmt, params, posn in expected:
                if not received:
                    assert False
                while received:
                    teststmt, testparams, testmultiparams = \
                        received.pop(0)
                    teststmt = re.compile(r'[\n\t ]+',
                                          re.M).sub(' ', teststmt).strip()
                    if teststmt.startswith(stmt) and (testparams == params
                                                      or testparams == posn):
                        break

        for engine in \
            engines.testing_engine(options=dict(implicit_returning=False,
                                   proxy=MyProxy())), \
            engines.testing_engine(options=dict(implicit_returning=False,
                                   proxy=MyProxy(),
                                   strategy='threadlocal')):
            m = MetaData(engine)
            t1 = Table(
                't1', m, Column('c1', Integer, primary_key=True),
                Column('c2',
                       String(50),
                       default=func.lower('Foo'),
                       primary_key=True))
            m.create_all()
            try:
                t1.insert().execute(c1=5, c2='some data')
                t1.insert().execute(c1=6)
                eq_(
                    engine.execute('select * from t1').fetchall(),
                    [(5, 'some data'), (6, 'foo')])
            finally:
                m.drop_all()
            engine.dispose()
            compiled = [('CREATE TABLE t1', {}, None),
                        ('INSERT INTO t1 (c1, c2)', {
                            'c2': 'some data',
                            'c1': 5
                        }, None), ('INSERT INTO t1 (c1, c2)', {
                            'c1': 6
                        }, None), ('select * from t1', {}, None),
                        ('DROP TABLE t1', {}, None)]
            if not testing.against('oracle+zxjdbc'):
                # or engine.dialect.preexecute_pk_sequences:
                cursor = [
                    ('CREATE TABLE t1', {}, ()),
                    ('INSERT INTO t1 (c1, c2)', {
                        'c2': 'some data',
                        'c1': 5
                    }, (5, 'some data')),
                    ('SELECT lower', {
                        'lower_2': 'Foo'
                    }, ('Foo', )),
                    ('INSERT INTO t1 (c1, c2)', {
                        'c2': 'foo',
                        'c1': 6
                    }, (6, 'foo')),
                    ('select * from t1', {}, ()),
                    ('DROP TABLE t1', {}, ()),
                ]
            else:
                insert2_params = 6, 'Foo'
                if testing.against('oracle+zxjdbc'):
                    # ReturningParam is not imported at module level in this
                    # excerpt; pull it in locally as example #39 does
                    from sqlalchemy.dialects.oracle.zxjdbc import ReturningParam
                    insert2_params += (ReturningParam(12), )
                cursor = [('CREATE TABLE t1', {}, ()),
                          ('INSERT INTO t1 (c1, c2)', {
                              'c2': 'some data',
                              'c1': 5
                          }, (5, 'some data')),
                          ('INSERT INTO t1 (c1, c2)', {
                              'c1': 6,
                              'lower_2': 'Foo'
                          }, insert2_params), ('select * from t1', {}, ()),
                          ('DROP TABLE t1', {}, ())
                          ]  # bind param name 'lower_2' might be incorrect
            assert_stmts(compiled, stmts)
            assert_stmts(cursor, cursor_stmts)
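Example #40 is the same test as #39, only reformatted. Worth noting for readers on newer SQLAlchemy: ConnectionProxy was superseded by the engine event system in 0.7, which captures the same information. A sketch of the event-based equivalent (an assumption about the reader's SQLAlchemy version, not something these examples use):

import sqlalchemy as sa
from sqlalchemy import event

engine = sa.create_engine('sqlite://')
captured = []

@event.listens_for(engine, 'before_cursor_execute')
def capture(conn, cursor, statement, parameters, context, executemany):
    # Fired for every statement just before the DBAPI cursor executes it.
    captured.append((statement, parameters))

conn = engine.connect()
conn.execute(sa.text('SELECT 1'))
conn.close()
print(captured)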
Example #41
0
class GenerativeQueryTest(_base.MappedTest):
    run_inserts = 'once'
    run_deletes = None

    @classmethod
    def define_tables(cls, metadata):
        Table('foo', metadata,
              Column('id', Integer, sa.Sequence('foo_id_seq'), primary_key=True),
              Column('bar', Integer),
              Column('range', Integer))

    @classmethod
    def fixtures(cls):
        rows = tuple([(i, i % 10) for i in range(100)])
        foo_data = (('bar', 'range'),) + rows
        return dict(foo=foo_data)

    @classmethod
    @testing.resolve_artifact_names
    def setup_mappers(cls):
        class Foo(_base.BasicEntity):
            pass

        mapper(Foo, foo)

    @testing.resolve_artifact_names
    def test_selectby(self):
        res = create_session().query(Foo).filter_by(range=5)
        assert res.order_by(Foo.bar)[0].bar == 5
        assert res.order_by(sa.desc(Foo.bar))[0].bar == 95

    @testing.fails_on('maxdb', 'FIXME: unknown')
    @testing.resolve_artifact_names
    def test_slice(self):
        sess = create_session()
        query = sess.query(Foo).order_by(Foo.id)
        orig = query.all()

        assert query[1] == orig[1]
        assert query[-4] == orig[-4]
        assert query[-1] == orig[-1]

        assert list(query[10:20]) == orig[10:20]
        assert list(query[10:]) == orig[10:]
        assert list(query[:10]) == orig[:10]
        assert list(query[:10]) == orig[:10]
        assert list(query[5:5]) == orig[5:5]
        assert list(query[10:40:3]) == orig[10:40:3]
        assert list(query[-5:]) == orig[-5:]
        assert list(query[-2:-5]) == orig[-2:-5]
        assert list(query[-5:-2]) == orig[-5:-2]
        assert list(query[:-2]) == orig[:-2]

        assert query[10:20][5] == orig[10:20][5]

    @testing.uses_deprecated('Call to deprecated function apply_max')
    @testing.resolve_artifact_names
    def test_aggregate(self):
        sess = create_session()
        query = sess.query(Foo)
        assert query.count() == 100
        assert sess.query(func.min(foo.c.bar)).filter(foo.c.bar<30).one() == (0,)

        assert sess.query(func.max(foo.c.bar)).filter(foo.c.bar<30).one() == (29,)
        # Py3K
        #assert query.filter(foo.c.bar<30).values(sa.func.max(foo.c.bar)).__next__()[0] == 29
        #assert query.filter(foo.c.bar<30).values(sa.func.max(foo.c.bar)).__next__()[0] == 29
        # Py2K
        assert query.filter(foo.c.bar<30).values(sa.func.max(foo.c.bar)).next()[0] == 29
        assert query.filter(foo.c.bar<30).values(sa.func.max(foo.c.bar)).next()[0] == 29
        # end Py2K

    @testing.fails_if(lambda:testing.against('mysql+mysqldb') and
            testing.db.dialect.dbapi.version_info[:4] == (1, 2, 1, 'gamma'),
            "unknown incompatibility")
    @testing.resolve_artifact_names
    def test_aggregate_1(self):

        query = create_session().query(func.sum(foo.c.bar))
        assert query.filter(foo.c.bar<30).one() == (435,)

    @testing.fails_on('firebird', 'FIXME: unknown')
    @testing.fails_on('mssql', 'AVG produces an average as the original column type on mssql.')
    @testing.resolve_artifact_names
    def test_aggregate_2(self):
        query = create_session().query(func.avg(foo.c.bar))
        avg = query.filter(foo.c.bar < 30).one()[0]
        eq_(float(round(avg, 1)), 14.5)

    @testing.fails_on('mssql', 'AVG produces an average as the original column type on mssql.')
    @testing.resolve_artifact_names
    def test_aggregate_3(self):
        query = create_session().query(Foo)

        # Py3K
        #avg_f = query.filter(foo.c.bar<30).values(sa.func.avg(foo.c.bar)).__next__()[0]
        # Py2K
        avg_f = query.filter(foo.c.bar<30).values(sa.func.avg(foo.c.bar)).next()[0]
        # end Py2K
        assert float(round(avg_f, 1)) == 14.5

        # Py3K
        #avg_o = query.filter(foo.c.bar<30).values(sa.func.avg(foo.c.bar)).__next__()[0]
        # Py2K
        avg_o = query.filter(foo.c.bar<30).values(sa.func.avg(foo.c.bar)).next()[0]
        # end Py2K
        assert float(round(avg_o, 1)) == 14.5

    @testing.resolve_artifact_names
    def test_filter(self):
        query = create_session().query(Foo)
        assert query.count() == 100
        assert query.filter(Foo.bar < 30).count() == 30
        res2 = query.filter(Foo.bar < 30).filter(Foo.bar > 10)
        assert res2.count() == 19

    @testing.resolve_artifact_names
    def test_options(self):
        query = create_session().query(Foo)
        class ext1(sa.orm.MapperExtension):
            def populate_instance(self, mapper, selectcontext, row, instance, **flags):
                instance.TEST = "hello world"
                return sa.orm.EXT_CONTINUE
        assert query.options(sa.orm.extension(ext1()))[0].TEST == "hello world"

    @testing.resolve_artifact_names
    def test_order_by(self):
        query = create_session().query(Foo)
        assert query.order_by(Foo.bar)[0].bar == 0
        assert query.order_by(sa.desc(Foo.bar))[0].bar == 99

    @testing.resolve_artifact_names
    def test_offset(self):
        query = create_session().query(Foo)
        assert list(query.order_by(Foo.bar).offset(10))[0].bar == 10

    @testing.resolve_artifact_names
    def test_limit(self):
        query = create_session().query(Foo)
        assert len(list(query.limit(10))) == 10