Example No. 1
def isxrayDB(dbname):
    """
    return whether a file is a valid XrayDB database

    Parameters:
        dbname (string): name of XrayDB file

    Returns:
        bool: is file a valid XrayDB

    Notes:
        must be a sqlite db file, with tables named 'elements',
        'photoabsorption', 'scattering', 'xray_levels', 'Coster_Kronig',
        'Chantler', 'Waasmaier', and 'KeskiRahkonen_Krause'
    """
    _tables = ('Chantler', 'Waasmaier', 'Coster_Kronig',
               'KeskiRahkonen_Krause', 'xray_levels',
               'elements', 'photoabsorption', 'scattering')
    result = False
    try:
        engine = make_engine(dbname)
        meta = MetaData(engine)
        meta.reflect()
        result = all(t in meta.tables for t in _tables)
    except Exception:
        pass
    return result
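The make_engine helper used above is not shown in the snippet. A minimal sketch, assuming it simply wraps create_engine around a sqlite URL (the name and URL scheme are assumptions, not the original):

def make_engine(dbname):
    # Hypothetical stand-in: open the named file as a sqlite database.
    from sqlalchemy import create_engine
    return create_engine('sqlite:///%s' % dbname)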
Example No. 2
def upgrade(migrate_engine):
    # Upgrade operations go here. Don't create your own engine;
    # bind migrate_engine to your metadata
    meta = MetaData()
    meta.bind = migrate_engine
    #
    # New Tables
    #
    instance_faults = Table('instance_faults', meta,
            Column('created_at', DateTime(timezone=False)),
            Column('updated_at', DateTime(timezone=False)),
            Column('deleted_at', DateTime(timezone=False)),
            Column('deleted', Boolean(create_constraint=True, name=None),
                    default=False),
            Column('id', Integer(), primary_key=True, nullable=False),
            Column('instance_uuid', String(36), ForeignKey('instances.uuid')),
            Column('code', Integer(), nullable=False),
            Column('message',
                   String(length=255, convert_unicode=False,
                          assert_unicode=None,
                          unicode_error=None, _warn_on_bytestring=False)),
            Column('details',
                   Text(length=None, convert_unicode=False,
                        assert_unicode=None,
                        unicode_error=None, _warn_on_bytestring=False)),
            )
    try:
        instance_faults.create()
    except Exception:
        LOG.info(repr(instance_faults))
        LOG.exception('Exception while creating table.')
        raise
Example No. 3
def downgrade(migrate_engine):
    """Convert columns back to the larger String(255)."""
    meta = MetaData()
    meta.bind = migrate_engine
    for table, column in CIDR_TABLE_COLUMNS:
        t = Table(table, meta, autoload=True)
        getattr(t.c, column).alter(type=String(39))
Example No. 4
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    snapshots = Table('snapshots', meta, autoload=True)

    # New table
    snapshot_metadata = Table(
        'snapshot_metadata', meta,
        Column('created_at', DateTime),
        Column('updated_at', DateTime),
        Column('deleted_at', DateTime),
        Column('deleted', Boolean),
        Column('id', Integer, primary_key=True, nullable=False),
        Column('snapshot_id', String(length=36), ForeignKey('snapshots.id'),
               nullable=False),
        Column('key', String(length=255)),
        Column('value', String(length=255)),
        mysql_engine='InnoDB'
    )

    try:
        snapshot_metadata.create()
    except Exception:
        LOG.error(_("Table |%s| not created!"), repr(snapshot_metadata))
        raise
Example No. 5
File: utils.py Project: faye89/nova
def drop_unique_constraint(migrate_engine, table_name, uc_name, *columns,
                           **col_name_col_instance):
    """
    This method drops UC from table and works for mysql, postgresql and sqlite.
    In mysql and postgresql we are able to use "alter table" constuction. In
    sqlite is only one way to drop UC:
        1) Create new table with same columns, indexes and constraints
           (except one that we want to drop).
        2) Copy data from old table to new.
        3) Drop old table.
        4) Rename new table to the name of old table.

    :param migrate_engine: sqlalchemy engine
    :param table_name:     name of table that contains uniq constarint.
    :param uc_name:        name of uniq constraint that will be dropped.
    :param columns:        columns that are in uniq constarint.
    :param col_name_col_instance:   contains pair column_name=column_instance.
                            column_instance is instance of Column. These params
                            are required only for columns that have unsupported
                            types by sqlite. For example BigInteger.
    """
    if migrate_engine.name in ["mysql", "postgresql"]:
        meta = MetaData()
        meta.bind = migrate_engine
        t = Table(table_name, meta, autoload=True)
        uc = UniqueConstraint(*columns, table=t, name=uc_name)
        uc.drop()
    else:
        _drop_unique_constraint_in_sqlite(migrate_engine, table_name, uc_name,
                                          **col_name_col_instance)
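The sqlite branch delegates to a helper that is not shown here. A rough sketch of the four-step recipe from the docstring, assuming reflected columns can be copied as-is and ignoring index rebuilding (the real Nova helper does more):

def _drop_unique_constraint_in_sqlite(migrate_engine, table_name, uc_name,
                                      **col_name_col_instance):
    from sqlalchemy import MetaData, Table
    from sqlalchemy.types import NullType

    meta = MetaData(bind=migrate_engine)
    table = Table(table_name, meta, autoload=True)

    # 1) Create a new table with the same columns, minus the dropped UC
    #    (table-level constraints are not carried over by copying columns).
    columns = []
    for column in table.columns:
        if isinstance(column.type, NullType):
            # Types sqlite cannot reflect must be supplied by the caller.
            columns.append(col_name_col_instance[column.name].copy())
        else:
            columns.append(column.copy())
    new_table = Table(table_name + '__tmp__', meta, *columns)
    new_table.create()

    # 2) Copy data from the old table into the new one.
    migrate_engine.execute(
        new_table.insert().from_select(
            [c.name for c in table.columns], table.select()))

    # 3) Drop the old table; 4) rename the new table to the old name.
    table.drop()
    migrate_engine.execute('ALTER TABLE "%s__tmp__" RENAME TO "%s"'
                           % (table_name, table_name))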
Example No. 6
def _create_shadow_tables(migrate_engine):
    meta = MetaData(migrate_engine)
    meta.reflect(migrate_engine)
    table_names = meta.tables.keys()

    meta.bind = migrate_engine

    for table_name in table_names:
        table = Table(table_name, meta, autoload=True)

        columns = []
        for column in table.columns:
            # NOTE(boris-42): BigInteger is not supported by sqlite, so
            #                 after reflection it shows up as NullType; other
            #                 types that are used in Nova are supported by
            #                 sqlite.
            if isinstance(column.type, NullType):
                column_copy = Column(column.name, BigInteger(), default=0)
            else:
                column_copy = column.copy()
            columns.append(column_copy)

        shadow_table_name = 'shadow_' + table_name
        shadow_table = Table(shadow_table_name, meta, *columns,
                             mysql_engine='InnoDB')
        try:
            shadow_table.create(checkfirst=True)
        except Exception:
            LOG.info(repr(shadow_table))
            LOG.exception(_('Exception while creating table.'))
            raise
Example No. 7
def drop_unique_constraint(migrate_engine, table_name, uc_name, *columns,
                           **col_name_col_instance):
    """Drop unique constraint from table.

    This method drops UC from table and works for mysql, postgresql and sqlite.
    In mysql and postgresql we are able to use "alter table" construction.
    Sqlalchemy doesn't support some sqlite column types and replaces their
    type with NullType in metadata. We process these columns and replace
    NullType with the correct column type.

    :param migrate_engine: sqlalchemy engine
    :param table_name:     name of the table that contains the unique
                           constraint.
    :param uc_name:        name of the unique constraint that will be dropped.
    :param columns:        columns that are in the unique constraint.
    :param col_name_col_instance:   contains pairs column_name=column_instance.
                            column_instance is an instance of Column. These
                            params are required only for columns whose types
                            are unsupported by sqlite, e.g. BigInteger.
    """

    meta = MetaData()
    meta.bind = migrate_engine
    t = Table(table_name, meta, autoload=True)

    if migrate_engine.name == "sqlite":
        override_cols = [
            _get_not_supported_column(col_name_col_instance, col.name)
            for col in t.columns
            if isinstance(col.type, NullType)
        ]
        for col in override_cols:
            t.columns.replace(col)

    uc = UniqueConstraint(*columns, table=t, name=uc_name)
    uc.drop()
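The _get_not_supported_column helper referenced above is not shown. A plausible sketch, assuming it looks the replacement Column up by name and fails loudly when the caller did not supply one:

def _get_not_supported_column(col_name_col_instance, column_name):
    from sqlalchemy import Column

    # The caller must pass column_name=Column(...) for every column whose
    # type sqlite cannot reflect (e.g. BigInteger).
    column = col_name_col_instance.get(column_name)
    if not isinstance(column, Column):
        raise ValueError("Please specify column %s in col_name_col_instance "
                         "param; it is required because the column's type is "
                         "unsupported by sqlite." % column_name)
    return column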
Example No. 8
    def test_boolean(self):
        """Test that the boolean only treats 1 as True

        """

        meta = MetaData(testing.db)
        t = Table('bool_table', meta, Column('id', Integer,
                  primary_key=True), Column('boo',
                  Boolean(create_constraint=False)))
        try:
            meta.create_all()
            testing.db.execute("INSERT INTO bool_table (id, boo) "
                               "VALUES (1, 'false');")
            testing.db.execute("INSERT INTO bool_table (id, boo) "
                               "VALUES (2, 'true');")
            testing.db.execute("INSERT INTO bool_table (id, boo) "
                               "VALUES (3, '1');")
            testing.db.execute("INSERT INTO bool_table (id, boo) "
                               "VALUES (4, '0');")
            testing.db.execute('INSERT INTO bool_table (id, boo) '
                               'VALUES (5, 1);')
            testing.db.execute('INSERT INTO bool_table (id, boo) '
                               'VALUES (6, 0);')
            eq_(t.select(t.c.boo).order_by(t.c.id).execute().fetchall(),
                [(3, True), (5, True)])
        finally:
            meta.drop_all()
Example No. 9
    def setup_class(cls):
        global metadata, cattable, matchtable
        metadata = MetaData(testing.db)
        testing.db.execute("""
        CREATE VIRTUAL TABLE cattable USING FTS3 (
            id INTEGER NOT NULL,
            description VARCHAR(50),
            PRIMARY KEY (id)
        )
        """)
        cattable = Table('cattable', metadata, autoload=True)
        testing.db.execute("""
        CREATE VIRTUAL TABLE matchtable USING FTS3 (
            id INTEGER NOT NULL,
            title VARCHAR(200),
            category_id INTEGER NOT NULL,
            PRIMARY KEY (id)
        )
        """)
        matchtable = Table('matchtable', metadata, autoload=True)
        metadata.create_all()
        cattable.insert().execute([
            {'id': 1, 'description': 'Python'},
            {'id': 2, 'description': 'Ruby'},
        ])
        matchtable.insert().execute([
            {'id': 1, 'title': 'Agile Web Development with Rails',
             'category_id': 2},
            {'id': 2, 'title': 'Dive Into Python', 'category_id': 1},
            {'id': 3, 'title': "Programming Matz's Ruby", 'category_id': 2},
            {'id': 4, 'title': 'The Definitive Guide to Django',
             'category_id': 1},
            {'id': 5, 'title': 'Python in a Nutshell', 'category_id': 1},
        ])
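With the FTS3 tables in place, full-text queries go through SQLAlchemy's match() operator, which compiles to MATCH on sqlite; a hedged usage sketch in the same old bound-metadata style:

results = matchtable.select().where(
    matchtable.c.title.match('Python')).execute().fetchall()
# expected hits: 'Dive Into Python' and 'Python in a Nutshell'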
Example No. 10
class TestDatabaseHelpers(unittest2.TestCase):
    def setUp(self):
        self.uri = 'sqlite:///:memory:'
        self.metadata = MetaData()
        self.table = Table(
            'table', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('value', String(63)),
        )
        self.engine = create_engine(self.uri)
        self.metadata.create_all(self.engine)

    def test_get_database(self):
        user = mock.Mock(database_uri='sqlite:///:memory:')
        engine = get_database_engine(user)
        self.assertIn(self.uri, _ENGINES)
        self.assertIs(_ENGINES[self.uri], engine)
        engine2 = get_database_engine(user)
        self.assertIs(engine, engine2)

    def test_get_declarative_base(self):
        user = mock.Mock(database_uri='sqlite:///:memory:')
        base = get_declarative_base(user)
        self.assertIn(self.uri, _BASES)
        self.assertIs(_BASES[self.uri], base)
        base2 = get_declarative_base(user)
        self.assertIs(base, base2)
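The helpers under test are not shown. A minimal sketch consistent with these assertions, assuming module-level caches keyed by the database URI:

from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base

_ENGINES = {}
_BASES = {}


def get_database_engine(user):
    # One engine per database URI, created lazily and then reused.
    uri = user.database_uri
    if uri not in _ENGINES:
        _ENGINES[uri] = create_engine(uri)
    return _ENGINES[uri]


def get_declarative_base(user):
    # One declarative base per database URI, created lazily and then reused.
    uri = user.database_uri
    if uri not in _BASES:
        _BASES[uri] = declarative_base()
    return _BASES[uri]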
Example No. 11
class InvalidateDuringResultTest(fixtures.TestBase):
    __backend__ = True

    def setup(self):
        self.engine = engines.reconnecting_engine()
        self.meta = MetaData(self.engine)
        table = Table(
            'sometable', self.meta,
            Column('id', Integer, primary_key=True),
            Column('name', String(50)))
        self.meta.create_all()
        table.insert().execute(
            [{'id': i, 'name': 'row %d' % i} for i in range(1, 100)]
        )

    def teardown(self):
        self.meta.drop_all()
        self.engine.dispose()

    @testing.fails_if([
        '+mysqlconnector', '+mysqldb', '+cymysql', '+pymysql', '+pg8000'],
        "Buffers the result set and doesn't check for connection close")
    def test_invalidate_on_results(self):
        conn = self.engine.connect()
        result = conn.execute('select * from sometable')
        for x in range(20):
            result.fetchone()
        self.engine.test_shutdown()
        _assert_invalidated(result.fetchone)
        assert conn.invalidated
Example No. 12
def populate_main_sql_testdatabase(engine):
    meta = MetaData()

    table = Table('events', meta,
                  Column('id', Integer, primary_key=True, ),
                  Column('time', String(30)),
                  Column('source_ip', String(30)),
                  Column('source_port', String(30)),
                  Column('request_url', String(500)),
                  Column('request_raw', String(65536)),
                  Column('pattern', String(20)),
                  Column('filename', String(500)),
    )

    meta.create_all(engine)

    insert_dicts = []
    with open(os.path.join(file_dir, 'data/events_500.bson'), 'rb') as f:
        data = f.read()
    for item in bson.decode_all(data):
        new_item = {"source_ip": item["source_ip"],
                    "source_port": item["source_port"],
                    "request_url": item["request"]["url"],
                    "pattern": item["pattern"]}
        insert_dicts.append(new_item)

    conn = engine.connect()
    print("Inserted: {0}".format(len(insert_dicts)))
    conn.execute(table.insert(), insert_dicts)
Example No. 13
def check_shadow_table(migrate_engine, table_name):
    """This method checks that table with ``table_name`` and
    corresponding shadow table have same columns.
    """
    meta = MetaData()
    meta.bind = migrate_engine

    table = Table(table_name, meta, autoload=True)
    shadow_table = Table(db._SHADOW_TABLE_PREFIX + table_name, meta,
                         autoload=True)

    columns = {c.name: c for c in table.columns}
    shadow_columns = {c.name: c for c in shadow_table.columns}

    for name, column in columns.iteritems():
        if name not in shadow_columns:
            raise exception.NovaException(
                _("Missing column %(table)s.%(column)s in shadow table")
                        % {'column': name, 'table': shadow_table.name})
        shadow_column = shadow_columns[name]

        if not isinstance(shadow_column.type, type(column.type)):
            raise exception.NovaException(
                _("Different types in %(table)s.%(column)s and shadow table: "
                  "%(c_type)s %(shadow_c_type)s")
                        % {'column': name, 'table': table.name,
                           'c_type': column.type,
                           'shadow_c_type': shadow_column.type})

    for name, column in shadow_columns.iteritems():
        if name not in columns:
            raise exception.NovaException(
                _("Extra column %(table)s.%(column)s in shadow table")
                        % {'column': name, 'table': shadow_table.name})
    return True
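A hedged usage sketch, as it might appear in a migration test that verifies shadowed tables stayed in sync:

for table_name in ('instances', 'instance_faults'):
    assert check_shadow_table(migrate_engine, table_name)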
Example No. 14
    def test_unicode_warnings(self):
        metadata = MetaData(self.engine)
        table1 = Table(
            "mytable",
            metadata,
            Column(
                "col1",
                Integer,
                primary_key=True,
                test_needs_autoincrement=True,
            ),
            Column("col2", Unicode(30)),
        )
        metadata.create_all()
        i = [1]

        # the iteration count here is cranked way up so that we can see
        # pysqlite clearing out its internal buffer and allow
        # the test to pass
        @testing.emits_warning()
        @profile_memory()
        def go():

            # execute with a non-unicode object. a warning is emitted,
            # this warning shouldn't clog up memory.

            self.engine.execute(
                table1.select().where(table1.c.col2 == "foo%d" % i[0])
            )
            i[0] += 1

        try:
            go()
        finally:
            metadata.drop_all()
Example No. 15
def upgrade(migrate_engine):
    # Upgrade operations go here. Don't create your own engine;
    # bind migrate_engine to your metadata
    meta = MetaData()
    meta.bind = migrate_engine

    # load tables for fk
    instances = Table('instances', meta, autoload=True)

    #
    # New Tables
    #
    migrations = Table('migrations', meta,
                Column('created_at', DateTime(timezone=False)),
                Column('updated_at', DateTime(timezone=False)),
                Column('deleted_at', DateTime(timezone=False)),
                Column('deleted', Boolean(create_constraint=True, name=None)),
                Column('id', Integer(), primary_key=True, nullable=False),
                Column('source_compute', String(255)),
                Column('dest_compute', String(255)),
                Column('dest_host', String(255)),
                Column('instance_id', Integer, ForeignKey('instances.id'),
                    nullable=True),
                Column('status', String(255)),
          )

    for table in (migrations, ):
        try:
            table.create()
        except Exception:
            LOG.info(repr(table))
            LOG.exception('Exception while creating table')
            raise
Example No. 16
    def test_unless_sqla(self):
        from sqlalchemy import (MetaData, Table, Column, Integer, String)
        from sqlalchemy.orm import create_session, mapper

        metadata = MetaData('sqlite:///:memory:')
        testtable = Table('test1', metadata,
            Column('id', Integer, primary_key=True),
            Column('val', String(8)))
        metadata.create_all()

        class Test(object):
            pass
        mapper(Test, testtable)

        testtable.insert().execute({'id': 1, 'val': 'bob'})
        testtable.insert().execute({'id': 2, 'val': 'bobby'})
        testtable.insert().execute({'id': 3, 'val': 'alberto'})

        sess = create_session()
        getunless = unless(sess.query(Test).get)

        x = getunless(1)
        assert x.val == 'bob', x

        x = getunless(2)
        assert x.val == 'bobby', x

        assert_raises(ValueError, getunless, 5)
        assert_raises(TGValidationError, Convert(getunless).to_python, '5')

        x = Convert(getunless).to_python('1')
        assert x.val == 'bob', x
Example No. 17
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    shadow_table = Table('shadow_instances', meta, autoload=True)
    locked_by_column = shadow_table.c.locked_by
    if "SHADOW_INSTANCES0LOCKED_BY" in str(locked_by_column.type):
        LOG.info("The shadow_instances table needs to be converted.")
        shadow_table.drop()
        table = Table('instances', meta, autoload=True)
        columns = []
        for column in table.columns:
            if column.name == 'locked_by':
                enum = Enum('owner', 'admin',
                            name='instances0locked_by'.upper())
                column_copy = Column(column.name, enum)
            else:
                column_copy = column.copy()
            columns.append(column_copy)
        shadow_table_name = 'shadow_instances'
        shadow_table = Table(shadow_table_name, meta, *columns,
                             mysql_engine='InnoDB', extend_existing=True)
        shadow_table.create(checkfirst=True)
    else:
        LOG.info("The shadow_instances table does not need to be converted.")
Example No. 18
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    # create new table
    task_log = Table('task_log', meta,
            Column('created_at', DateTime(timezone=False)),
            Column('updated_at', DateTime(timezone=False)),
            Column('deleted_at', DateTime(timezone=False)),
            Column('deleted',
                    Boolean(create_constraint=True, name=None)),
            Column('id', Integer(),
                    primary_key=True,
                    nullable=False,
                    autoincrement=True),
            Column('task_name', String(255), nullable=False),
            Column('state', String(255), nullable=False),
            Column('host', String(255), index=True, nullable=False),
            Column('period_beginning', String(255),
                                       index=True, nullable=False),
            Column('period_ending', String(255), index=True, nullable=False),
            Column('message', String(255), nullable=False),
            Column('task_items', Integer()),
            Column('errors', Integer()),
            )
    try:
        task_log.create()
    except Exception:
        meta.drop_all(tables=[task_log])
        raise

    if migrate_engine.name == "mysql":
        migrate_engine.execute("ALTER TABLE task_log "
                "Engine=InnoDB")
Example No. 19
def get_table(name):
    if name not in cached_tables:
        meta = MetaData()
        meta.reflect(bind=model.meta.engine)
        table = meta.tables[name]
        cached_tables[name] = table
    return cached_tables[name]
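Because reflected tables are memoized in cached_tables, repeated lookups skip reflection and return the same object; a quick usage sketch (table name assumed):

events = get_table('events')
assert get_table('events') is events  # served from the cache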
Example No. 20
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    volumes = Table('volumes', meta, autoload=True)

    # New table
    transfers = Table(
        'transfers', meta,
        Column('created_at', DateTime(timezone=False)),
        Column('updated_at', DateTime(timezone=False)),
        Column('deleted_at', DateTime(timezone=False)),
        Column('deleted', Boolean),
        Column('id', String(36), primary_key=True, nullable=False),
        Column('volume_id', String(length=36), ForeignKey('volumes.id'),
               nullable=False),
        Column('display_name', String(length=255)),
        Column('salt', String(length=255)),
        Column('crypt_hash', String(length=255)),
        Column('expires_at', DateTime(timezone=False)),
        mysql_engine='InnoDB',
        mysql_charset='utf8'
    )

    try:
        transfers.create()
    except Exception:
        LOG.error(_("Table |%s| not created!"), repr(transfers))
        raise
Example No. 21
    def __init__(self):
        metadata = MetaData()
        self.engine = create_engine(
            'mysql://*****:*****@localhost:3006/games_online',
            encoding='utf-8', pool_recycle=7200, pool_size=15,
            max_overflow=30)
        # self._dbSession = scoped_session(
        #     sessionmaker(bind=self.engine)
        # )
        self.games_info = Table('games_info', metadata,
                                Column('id', INTEGER, primary_key=True),
                                Column('gamecode', VARCHAR(20)),
                                Column('language', VARCHAR(20)),
                                Column('sid_api', VARCHAR(255)),
                                Column('online_api', VARCHAR(255)))
        self.games_online = Table('games_online', metadata,
                                  Column('id', INTEGER, primary_key=True),
                                  Column('gamecode', VARCHAR(20)),
                                  Column('language', VARCHAR(20)),
                                  Column('region', VARCHAR(20)),
                                  Column('serverid', INTEGER),
                                  Column('online', INTEGER),
                                  Column('time', INTEGER))
        self.games_triggers = Table('games_triggers', metadata,
                                    Column('id', INTEGER, primary_key=True),
                                    Column('gamecode', VARCHAR(20)),
                                    Column('language', VARCHAR(20)),
                                    Column('region', VARCHAR(20)),
                                    Column('serverid', INTEGER),
                                    Column('time', INTEGER))
        metadata.create_all(self.engine)
Example No. 22
    def test_clauseelement(self):
        metadata = MetaData()
        table = Table('test_table', metadata,
            Column('foo', Integer))
        metadata.create_all(bind=testing.db)
        try:
            for elem in [
                table.select,
                lambda **kwargs: sa.func.current_timestamp(**kwargs).select(),
               # func.current_timestamp().select,
                lambda **kwargs:text("select * from test_table", **kwargs)
            ]:
                for bind in (
                    testing.db,
                    testing.db.connect()
                ):
                    try:
                        e = elem(bind=bind)
                        assert e.bind is bind
                        e.execute().close()
                    finally:
                        if isinstance(bind, engine.Connection):
                            bind.close()

                e = elem()
                assert e.bind is None
                assert_raises(
                    exc.UnboundExecutionError,
                    e.execute
                )
        finally:
            if isinstance(bind, engine.Connection):
                bind.close()
            metadata.drop_all(bind=testing.db)
Example No. 23
def upgrade(migrate_engine):
    """Add workers table."""
    meta = MetaData()
    meta.bind = migrate_engine

    workers = Table(
        'workers', meta,
        # Inherited fields from CinderBase
        Column('created_at', DateTime(timezone=False)),
        Column('updated_at', DateTime(timezone=False)),
        Column('deleted_at', DateTime(timezone=False)),
        Column('deleted', Boolean(), default=False),

        # Workers table specific fields
        Column('id', Integer, primary_key=True),
        Column('resource_type', String(40), nullable=False),
        Column('resource_id', String(36), nullable=False),
        Column('status', String(255), nullable=False),
        Column('service_id', Integer, nullable=True),
        UniqueConstraint('resource_type', 'resource_id'),

        mysql_engine='InnoDB',
        mysql_charset='utf8',
    )

    workers.create()

    services = Table('services', meta, autoload=True)

    ForeignKeyConstraint(
        columns=[workers.c.service_id],
        refcolumns=[services.c.id]).create()
Example No. 24
def init_db():
    # create a new metadata object
    metadata = MetaData()
    # build our deals table
    tweets = Table('deals', metadata,
              Column('deal_id', Integer(), primary_key=True),
              Column('price', String(15)),
              Column('url', String(255)),
              Column('description', String(255)),
              Column('tweet_id', BigInteger()),
              Column('analyzed', DateTime(), default=datetime.now),
              Column('updated', DateTime(), default=datetime.now, onupdate=datetime.now)
               )
    # build our price check history table
    price_check_history = Table('price_check_history', metadata,
              Column('match_id', Integer(), primary_key=True),
              Column('merchant', String(75)),
              Column('url', String(255)),
              Column('merchant_description', String(255)),
              Column('tweet_id', BigInteger()),
              Column('merchant_price', BigInteger()),
              Column('analyzed', DateTime(), default=datetime.now),
              Column('updated', DateTime(), default=datetime.now, onupdate=datetime.now)
               )   
    # now make a new file-based SQLite3 db
    engine = create_engine('sqlite:///tweet.db', encoding='latin-1')
    # and build it
    metadata.create_all(engine)
    # return the handle so we can talk to it
    return engine, tweets, price_check_history
Example No. 25
def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    virtual_interfaces = Table('virtual_interfaces', meta, autoload=True)

    virtual_interfaces.drop_column('uuid')
Example No. 26
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    columns = [
        (('created_at', DateTime), {}),
        (('updated_at', DateTime), {}),
        (('deleted_at', DateTime), {}),
        (('deleted', Integer), {}),
        (('id', Integer), dict(primary_key=True, nullable=False)),
        (('instance_uuid', String(length=36)), dict(nullable=False)),
        (('source_host', String(length=255)), dict(nullable=True)),
        (('dest_host', String(length=255)), dict(nullable=True)),
        (('dest_addr', String(length=255)), dict(nullable=True)),
        (('block_migration', Boolean), dict(nullable=True, default=False)),
        (('migrate_data', Text), dict(nullable=True)),
        ]

    for prefix in ('', 'shadow_'):
        basename = prefix + 'huawei_live_migrations'
        if migrate_engine.has_table(basename):
            continue
        _columns = tuple([Column(*args, **kwargs)
                          for args, kwargs in columns])
        table = Table(basename, meta, *_columns, mysql_engine='InnoDB',
                      mysql_charset='utf8')
        table.create()
Example No. 27
def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    storage_pools = Table('storage_pools', meta, autoload=True)

    storage_pools.drop_column('cache_mode')
Example No. 28
    def test_use_alter(self):
        m = MetaData()
        Table('t', m,
                  Column('a', Integer),
        )

        Table('t2', m,
                Column('a', Integer, ForeignKey('t.a', use_alter=True,
                                                        name='fk_ta')),
                Column('b', Integer, ForeignKey('t.a', name='fk_tb'))
        )

        e = engines.mock_engine(dialect_name='postgresql')
        m.create_all(e)
        m.drop_all(e)

        e.assert_sql([
            'CREATE TABLE t (a INTEGER)',
            'CREATE TABLE t2 (a INTEGER, b INTEGER, CONSTRAINT fk_tb '
                            'FOREIGN KEY(b) REFERENCES t (a))',
            'ALTER TABLE t2 '
                    'ADD CONSTRAINT fk_ta FOREIGN KEY(a) REFERENCES t (a)',
            'ALTER TABLE t2 DROP CONSTRAINT fk_ta',
            'DROP TABLE t2',
            'DROP TABLE t'
        ])
Example No. 29
def upgrade(migrate_engine):
    """Add backup_metadata table."""

    meta = MetaData()
    meta.bind = migrate_engine

    Table('backups', meta, autoload=True)

    backup_metadata = Table(
        'backup_metadata', meta,
        Column('created_at', DateTime(timezone=False)),
        Column('updated_at', DateTime(timezone=False)),
        Column('deleted_at', DateTime(timezone=False)),
        Column('deleted', Boolean(), default=False),
        Column('id', Integer, primary_key=True, nullable=False),
        Column('backup_id', String(36),
               ForeignKey('backups.id'),
               nullable=False),
        Column('key', String(255)),
        Column('value', String(255)),
        mysql_engine='InnoDB',
        mysql_charset='utf8'
    )

    backup_metadata.create()

    if not utils.index_exists_on_columns(migrate_engine,
                                         'backup_metadata',
                                         ['backup_id']):
        utils.add_index(migrate_engine,
                        'backup_metadata',
                        'backup_metadata_backup_id_idx',
                        ['backup_id'])
Example No. 30
    def create_base_tables(self):
        metadata = MetaData(bind=self.create_engine())

        Table('random_quote', metadata,
              Column('id', Integer, primary_key=True, nullable=False, autoincrement=True),
              Column('quote', Text, nullable=False)
              )
        Table('exploit_type', metadata,
              Column('id', Integer, primary_key=True, nullable=False, autoincrement=True),
              Column('name', String(128), nullable=False),
              Column('short_name', String(32), nullable=False)
              )
        Table('exploit', metadata,
              Column('id', Integer, primary_key=True, nullable=False, autoincrement=True),
              Column('type_id', Integer, ForeignKey(ExploitType.id), nullable=False),
              Column('validator_id', Integer, nullable=False),
              Column('name', String(128), nullable=False),
              Column('version', String(64), nullable=False),
              Column('url', String(128), nullable=False),
              Column('request_method', String(12), nullable=False, default='GET'),
              Column('exploit_url', String(128), nullable=False),
              Column('exploit_body', Text, nullable=True),
              Column('exploit_headers', Text, nullable=True),
              Column('is_url_encode', Boolean, nullable=False, default=False),
              Column('is_authenticated', Boolean, nullable=False, default=False)
              )

        metadata.create_all(checkfirst=True)
Example No. 31
    def _assert_data_autoincrement_returning(self, table):
        engine = engines.testing_engine(options={"implicit_returning": True})

        with self.sql_execution_asserter(engine) as asserter:
            with engine.connect() as conn:

                # execute with explicit id

                r = conn.execute(table.insert(), {"id": 30, "data": "d1"})
                eq_(r.inserted_primary_key, [30])

                # execute with prefetch id

                r = conn.execute(table.insert(), {"data": "d2"})
                eq_(r.inserted_primary_key, [1])

                # executemany with explicit ids

                conn.execute(
                    table.insert(),
                    {
                        "id": 31,
                        "data": "d3"
                    },
                    {
                        "id": 32,
                        "data": "d4"
                    },
                )

                # executemany, uses SERIAL

                conn.execute(table.insert(), {"data": "d5"}, {"data": "d6"})

                # single execute, explicit id, inline

                conn.execute(table.insert(inline=True), {
                    "id": 33,
                    "data": "d7"
                })

                # single execute, inline, uses SERIAL

                conn.execute(table.insert(inline=True), {"data": "d8"})

        asserter.assert_(
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                {
                    "id": 30,
                    "data": "d1"
                },
            ),
            DialectSQL(
                "INSERT INTO testtable (data) VALUES (:data) RETURNING "
                "testtable.id",
                {"data": "d2"},
            ),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                [{
                    "id": 31,
                    "data": "d3"
                }, {
                    "id": 32,
                    "data": "d4"
                }],
            ),
            DialectSQL(
                "INSERT INTO testtable (data) VALUES (:data)",
                [{
                    "data": "d5"
                }, {
                    "data": "d6"
                }],
            ),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                [{
                    "id": 33,
                    "data": "d7"
                }],
            ),
            DialectSQL("INSERT INTO testtable (data) VALUES (:data)",
                       [{
                           "data": "d8"
                       }]),
        )

        with engine.connect() as conn:
            eq_(
                conn.execute(table.select()).fetchall(),
                [
                    (30, "d1"),
                    (1, "d2"),
                    (31, "d3"),
                    (32, "d4"),
                    (2, "d5"),
                    (3, "d6"),
                    (33, "d7"),
                    (4, "d8"),
                ],
            )
            conn.execute(table.delete())

        # test the same series of events using a reflected version of
        # the table

        m2 = MetaData(engine)
        table = Table(table.name, m2, autoload=True)

        with self.sql_execution_asserter(engine) as asserter:
            with engine.connect() as conn:
                conn.execute(table.insert(), {"id": 30, "data": "d1"})
                r = conn.execute(table.insert(), {"data": "d2"})
                eq_(r.inserted_primary_key, [5])
                conn.execute(
                    table.insert(),
                    {
                        "id": 31,
                        "data": "d3"
                    },
                    {
                        "id": 32,
                        "data": "d4"
                    },
                )
                conn.execute(table.insert(), {"data": "d5"}, {"data": "d6"})
                conn.execute(table.insert(inline=True), {
                    "id": 33,
                    "data": "d7"
                })
                conn.execute(table.insert(inline=True), {"data": "d8"})

        asserter.assert_(
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                {
                    "id": 30,
                    "data": "d1"
                },
            ),
            DialectSQL(
                "INSERT INTO testtable (data) VALUES (:data) RETURNING "
                "testtable.id",
                {"data": "d2"},
            ),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                [{
                    "id": 31,
                    "data": "d3"
                }, {
                    "id": 32,
                    "data": "d4"
                }],
            ),
            DialectSQL(
                "INSERT INTO testtable (data) VALUES (:data)",
                [{
                    "data": "d5"
                }, {
                    "data": "d6"
                }],
            ),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                [{
                    "id": 33,
                    "data": "d7"
                }],
            ),
            DialectSQL("INSERT INTO testtable (data) VALUES (:data)",
                       [{
                           "data": "d8"
                       }]),
        )

        with engine.connect() as conn:
            eq_(
                conn.execute(table.select()).fetchall(),
                [
                    (30, "d1"),
                    (5, "d2"),
                    (31, "d3"),
                    (32, "d4"),
                    (6, "d5"),
                    (7, "d6"),
                    (33, "d7"),
                    (8, "d8"),
                ],
            )
            conn.execute(table.delete())
Example No. 32
import datetime
import sqlite3

from sqlalchemy import (event, engine, MetaData, Table, Column, Integer,
                        Boolean, String, Float, Date, DateTime, ForeignKey,
                        Index)

metadata = MetaData()


def table(name, *columns):
    return Table(
        name, metadata, Column('id', Integer, primary_key=True),
        Column('created', DateTime, default=datetime.datetime.utcnow),
        Column('updated',
               DateTime,
               default=datetime.datetime.utcnow,
               onupdate=datetime.datetime.utcnow),
        Column('deleted', Boolean, default=False),
        Column('external_id', String), *columns)


patients = table('patients', Column('first_name', String, nullable=False),
                 Column('last_name', String, nullable=False),
                 Column('middle_name', String), Column('date_of_birth', Date),
                 Index('idx_patients_external_id', 'external_id'))

payments = table(
    'payments', Column('amount', Float, nullable=False),
    Column('patient_id', Integer, ForeignKey('patients.id'), nullable=False),
    Index('idx_payments_external_id', 'external_id'))
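A hedged usage sketch: bind the shared metadata to an engine (the URL is an assumption) and emit the DDL for both tables, indexes included:

from sqlalchemy import create_engine

engine = create_engine('sqlite:///clinic.db')  # hypothetical database file
metadata.create_all(engine)  # creates patients and payments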
Example No. 33
# pylint: disable=W0232,R0903
"""Module for interacting with postgres database."""

from sqlalchemy import create_engine, MetaData
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, backref, scoped_session, sessionmaker

__engine__ = create_engine(
    'postgresql://*****:*****@localhost/labeler',
    isolation_level='READ UNCOMMITTED',
)
__metadata__ = MetaData(__engine__)
__Base__ = declarative_base(metadata=__metadata__)


class HITType(__Base__):
    """The HIT Type is what appears in the search engine on mturk."""

    __tablename__ = 'hittypes'
    __table_args__ = {'autoload': True}
    hits = relationship('HIT', backref='HITType')


class Assignment(__Base__):
    """A HIT can have multiple assignments to many users."""

    __tablename__ = 'assignments'
    __table_args__ = {'autoload': True}
    click_sessions = relationship('ClickSession', backref='assignment')
    approve_pair_sessions = relationship('ApprovePairSession',
                                         backref='assignment')
Example No. 34
#from sqlalchemy import create_engine
# Create an engine to the census database
#engine = create_engine('postgresql+psycopg2://'+'student:datacamp'+'@postgresql.csrrinzqubik.us-east-1.rds.amazonaws.com'+':5432/census')
# Use the .table_names() method on the engine to print the table names
#print(engine.table_names())

# Import create_engine function
#from sqlalchemy import create_engine
# Create an engine to the census database
#engine = create_engine('mysql+pymysql://'+'student:datacamp'+'@courses.csrrinzqubik.us-east-1.rds.amazonaws.com:3306/'+'census')
# Print the table names
#print(engine.table_names())

from sqlalchemy import MetaData, Table
# to store DB metadata such as table names, we initiate a MetaData object
metadata = MetaData()
# then we reflect the table definition; this requires an `engine` from one of
# the commented-out create_engine() calls above
album = Table('Album', metadata, autoload=True, autoload_with=engine)
#print(repr(album))

# Import create_engine, MetaData
#from sqlalchemy import create_engine, MetaData
# Define an engine to connect to chapter5.sqlite: engine
#engine = create_engine('sqlite:///chapter5.sqlite')
# Initialize MetaData: metadata
#metadata=MetaData()

# Import Table
#from sqlalchemy import Table
# Reflect census table from the engine: census
#census = Table('census',metadata,autoload=True,autoload_with=engine)
Example No. 35
class BaseTestCase(unittest.TestCase):
    """
    Initialise up all table/mappers.
    """
    def setUp(self):
        if not sqlalchemy:
            self.skipTest("'sqlalchemy' is not available")

        # Create DB and map objects
        self.metadata = MetaData()
        self.engine = create_engine('sqlite:///:memory:', echo=False)

        Session = sessionmaker(bind=self.engine)

        self.session = Session()
        self.tables = {}

        self.tables['users'] = Table('users', self.metadata,
                                     Column('id', Integer, primary_key=True),
                                     Column('name', String(64)))

        self.tables['addresses'] = Table(
            'addresses', self.metadata, Column('id', Integer,
                                               primary_key=True),
            Column('user_id', Integer, ForeignKey('users.id')),
            Column('email_address', String(128)))

        self.tables['lazy_loaded'] = Table(
            'lazy_loaded', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('user_id', Integer, ForeignKey('users.id')))

        self.tables['another_lazy_loaded'] = Table(
            'another_lazy_loaded', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('user_id', Integer, ForeignKey('users.id')))

        self.mappers = {}

        self.mappers['user'] = mapper(User,
                                      self.tables['users'],
                                      properties={
                                          'addresses':
                                          relation(Address,
                                                   backref='user',
                                                   lazy=False),
                                          'lazy_loaded':
                                          relation(LazyLoaded, lazy=True),
                                          'another_lazy_loaded':
                                          relation(AnotherLazyLoaded,
                                                   lazy=True)
                                      })

        self.mappers['addresses'] = mapper(Address, self.tables['addresses'])
        self.mappers['lazy_loaded'] = mapper(LazyLoaded,
                                             self.tables['lazy_loaded'])
        self.mappers['another_lazy_loaded'] = mapper(
            AnotherLazyLoaded, self.tables['another_lazy_loaded'])

        self.metadata.create_all(self.engine)

        pyamf.register_class(User, 'server.User')
        pyamf.register_class(Address, 'server.Address')
        pyamf.register_class(LazyLoaded, 'server.LazyLoaded')

    def tearDown(self):
        clear_mappers()

        pyamf.unregister_class(User)
        pyamf.unregister_class(Address)
        pyamf.unregister_class(LazyLoaded)

    def _build_obj(self):
        user = User()
        user.name = "test_user"
        user.addresses.append(Address(email_address="*****@*****.**"))

        return user

    def _save(self, obj):
        # this covers deprecation warnings etc.
        if hasattr(self.session, 'add'):
            self.session.add(obj)
        elif hasattr(self.session, 'save'):
            self.session.save(obj)
        else:
            raise AttributeError('Don\'t know how to save an object')

    def _clear(self):
        # this covers deprecation warnings etc.
        if hasattr(self.session, 'expunge_all'):
            self.session.expunge_all()
        elif hasattr(self.session, 'clear'):
            self.session.clear()
        else:
            raise AttributeError('Don\'t know how to clear session')
Example No. 36
    def _assert_data_noautoincrement(self, table):
        engine = engines.testing_engine(options={"implicit_returning": False})

        with engine.connect() as conn:
            conn.execute(table.insert(), {"id": 30, "data": "d1"})

            with expect_warnings(
                    ".*has no Python-side or server-side default.*"):
                assert_raises(
                    (exc.IntegrityError, exc.ProgrammingError),
                    conn.execute,
                    table.insert(),
                    {"data": "d2"},
                )
            with expect_warnings(
                    ".*has no Python-side or server-side default.*"):
                assert_raises(
                    (exc.IntegrityError, exc.ProgrammingError),
                    conn.execute,
                    table.insert(),
                    {"data": "d2"},
                    {"data": "d3"},
                )
            with expect_warnings(
                    ".*has no Python-side or server-side default.*"):
                assert_raises(
                    (exc.IntegrityError, exc.ProgrammingError),
                    conn.execute,
                    table.insert(),
                    {"data": "d2"},
                )
            with expect_warnings(
                    ".*has no Python-side or server-side default.*"):
                assert_raises(
                    (exc.IntegrityError, exc.ProgrammingError),
                    conn.execute,
                    table.insert(),
                    {"data": "d2"},
                    {"data": "d3"},
                )

            conn.execute(
                table.insert(),
                {
                    "id": 31,
                    "data": "d2"
                },
                {
                    "id": 32,
                    "data": "d3"
                },
            )
            conn.execute(table.insert(inline=True), {"id": 33, "data": "d4"})
            eq_(
                conn.execute(table.select()).fetchall(),
                [(30, "d1"), (31, "d2"), (32, "d3"), (33, "d4")],
            )
            conn.execute(table.delete())

        # test the same series of events using a reflected version of
        # the table

        m2 = MetaData(engine)
        table = Table(table.name, m2, autoload=True)
        with engine.connect() as conn:
            conn.execute(table.insert(), {"id": 30, "data": "d1"})

            with expect_warnings(
                    ".*has no Python-side or server-side default.*"):
                assert_raises(
                    (exc.IntegrityError, exc.ProgrammingError),
                    conn.execute,
                    table.insert(),
                    {"data": "d2"},
                )
            with expect_warnings(
                    ".*has no Python-side or server-side default.*"):
                assert_raises(
                    (exc.IntegrityError, exc.ProgrammingError),
                    conn.execute,
                    table.insert(),
                    {"data": "d2"},
                    {"data": "d3"},
                )
            conn.execute(
                table.insert(),
                {
                    "id": 31,
                    "data": "d2"
                },
                {
                    "id": 32,
                    "data": "d3"
                },
            )
            conn.execute(table.insert(inline=True), {"id": 33, "data": "d4"})
            eq_(
                conn.execute(table.select()).fetchall(),
                [(30, "d1"), (31, "d2"), (32, "d3"), (33, "d4")],
            )
Example No. 37
    def setup_class(cls):
        cls.metadata = MetaData(testing.db)
Example No. 38

from sqlalchemy import MetaData
from sqlalchemy.orm import class_mapper
from sqlalchemy.ext.declarative import declarative_base

convention = {
    "ix": 'ix_%(column_0_label)s',
    "uq": "uq_%(table_name)s_%(column_0_name)s",
    "ck": "ck_%(table_name)s_%(column_0_name)s",
    "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
    "pk": "pk_%(table_name)s"
}

metadata = MetaData(naming_convention=convention)

Base = declarative_base(metadata=metadata)


def getattr_func(x, y):
    if not x:
        return None
    if isinstance(x, list):
        return [getattr(a, y) for a in x]
    else:
        ret = getattr(x, y)
        if callable(ret):
            ret = ret()

        return ret
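A hedged demo of the convention above: constraints left unnamed get deterministic names filled in from the templates, so a unique column on a user table compiles to uq_user_name (table and column names here are illustrative):

from sqlalchemy import Column, Integer, String, Table, UniqueConstraint
from sqlalchemy.schema import CreateTable

user = Table('user', metadata,
             Column('id', Integer, primary_key=True),
             Column('name', String(64)),
             UniqueConstraint('name'))
print(CreateTable(user))
# ... CONSTRAINT pk_user PRIMARY KEY (id),
# ... CONSTRAINT uq_user_name UNIQUE (name)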
Example No. 39
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

from typing import Any

from sqlalchemy import MetaData
from sqlalchemy.ext.declarative import declarative_base

from airflow.configuration import conf

SQL_ALCHEMY_SCHEMA = conf.get("core", "SQL_ALCHEMY_SCHEMA")

metadata = (None if not SQL_ALCHEMY_SCHEMA or SQL_ALCHEMY_SCHEMA.isspace() else
            MetaData(schema=SQL_ALCHEMY_SCHEMA))
Base = declarative_base(metadata=metadata)  # type: Any

ID_LEN = 250


# used for typing
class Operator:
    pass


def get_id_collation_args():
    collation = conf.get('core', 'sql_engine_collation_for_ids', fallback=None)
    if collation:
        return {'collation': collation}
    else:
        return {}
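A hedged usage sketch: the returned kwargs feed straight into ID columns, so the collation is applied only when it is configured (the column name is illustrative):

from sqlalchemy import Column, String

dag_id_column = Column('dag_id', String(ID_LEN, **get_id_collation_args()))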
Example No. 40
def downgrade(migrate_engine):
    meta = MetaData(bind=migrate_engine)
    for table_name in pf9_compute_node_tables:
        table = Table(table_name, meta, autoload=True)
        table.c.deleted.alter(type=Boolean, default=False)
Example No. 41
import logging
from typing import TYPE_CHECKING

from sqlalchemy import MetaData
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.orm.session import Session

from crate_anon.anonymise.config_singleton import config
from crate_anon.anonymise.constants import (
    MAX_TRID,
    TABLE_KWARGS,
    TridType,
)

if TYPE_CHECKING:
    from crate_anon.anonymise.scrub import PersonalizedScrubber

log = logging.getLogger(__name__)
admin_meta = MetaData()
AdminBase = declarative_base(metadata=admin_meta)


class PatientInfoConstants(object):
    SECRET_MAP_TABLENAME = 'secret_map'
    PID_FIELDNAME = "pid"
    MPID_FIELDNAME = "mpid"
    RID_FIELDNAME = "rid"
    MRID_FIELDNAME = "mrid"
    TRID_FIELDNAME = "trid"


class PatientInfo(AdminBase):
    """
    Represent patient information in the secret admin database.
Example No. 42
    def _get_meta_data(engine):
        """Reflect db objects such as views and tables."""
        meta_data = MetaData(engine)
        meta_data.reflect(views=True)
        return meta_data
Example No. 43

import os

from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy import MetaData, Table
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session

app = Flask(__name__)


#################################################
# Database Setup
#################################################
DATABASE_URL = os.environ['DATABASE_URL']
app.config["SQLALCHEMY_DATABASE_URI"] = DATABASE_URL
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
# for local "postgres://*****:*****@@127.0.0.1/inc5000"
db = SQLAlchemy(app)

metadata = MetaData(bind=db.engine)

inc2018_data = Table('inc2018_data', metadata, autoload_with=db.engine)


#session = Session(db.engine)
# reflect an existing database into a new model
Base = automap_base()
# reflect the tables
Base.prepare(db.engine, reflect=True)

session = Session(db.engine)

Base.classes.keys()
# Save references to each table
Example No. 44
    def sqla_metadata(self):
        # pylint: disable=no-member
        metadata = MetaData(bind=self.get_sqla_engine())
        metadata.reflect()
        return metadata
Example No. 45
    "drop_db",
    # util
    "test_db_connection", "get_contest_list", "is_contest_id",
    "ask_for_contest", "get_submissions", "get_submission_results",
    "get_datasets_to_judge", "enumerate_files"
]


# Instantiate or import these objects.

version = 39

engine = create_engine(config.database, echo=config.database_debug,
                       pool_timeout=60, pool_recycle=120)

metadata = MetaData(engine)

from .session import Session, ScopedSession, SessionGen, \
    custom_psycopg2_connection

from .types import CastingArray, Codename, Filename, FilenameSchema, \
    FilenameSchemaArray, Digest
from .base import Base
from .fsobject import FSObject, LargeObject
from .contest import Contest, Announcement, Presentation
from .user import User, Team, Participation, Message, Question
from .admin import Admin
from .task import Task, Statement, Attachment, Dataset, Manager, Testcase, \
    TaskName, TaskTitle
from .submission import Submission, File, Token, SubmissionResult, \
    Executable, Evaluation
Example No. 46
from sqlalchemy import (MetaData, Table, Numeric)
from sqlalchemy.dialects.postgresql import ARRAY
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
import sqlahelper as sah
from geoalchemy2 import types as geotypes

# import WAM_APP_FRED.app_settings
# from geoalchemy2.types import Geometry
# from WAM_APP_FRED.cli.openFRED import mapped_classes, db_session

# ##########################################SQLAlchemy setup########################################
SCHEMA_1 = 'climate'
SCHEMA_2 = 'supply'
engine = sah.get_engine('oep_engine')
metadata_1 = MetaData(schema=SCHEMA_1, bind=engine)
metadata_2 = MetaData(schema=SCHEMA_2, bind=engine)
# ##########################################TABLE DEFINITION########################################

# included function from github: https://github.com/open-fred/cli/blob/master/openFRED.py
# copied function to avoid dependencies


def mapped_classes(metadata):
    """ Returns classes mapped to the openFRED database via SQLAlchemy.
    The classes are dynamically created and stored in a dictionary keyed by
    class names. The dictionary also contains the special entry `__Base__`,
    which is an SQLAlchemy `declarative_base` instance used as the base class
    from which all mapped classes inherit.
    """
Example No. 47
# define metadata for table

from sqlalchemy import MetaData
from sqlalchemy import Table, Column
from sqlalchemy import Integer, String

metadata = MetaData(
)  # metadata is collection of tables and can be traversed like XML DOM

user_table = Table("user", metadata, Column("id", Integer, primary_key=True),
                   Column("name", String))

# init engine over database
from sqlalchemy import create_engine

engine = create_engine("sqlite://", echo=True)

# create table from metadata
metadata.create_all(engine)

# now load metadata for the table from database
metadata2 = MetaData()
user_table_reflected = Table("user",
                             metadata2,
                             autoload=True,
                             autoload_with=engine)

print("-- TABLE SCHEMA LOADED FROM DB ---------------------")
print(repr(user_table_reflected))
Example No. 48
import datetime

from sqlalchemy import (create_engine, Table, Column, Integer, String, Text,
                        DateTime, MetaData)
from sqlalchemy.orm import mapper, sessionmaker


class ClientDatabase:
    # Class mapped to the table of known users.
    class KnownUsers:
        def __init__(self, user):
            self.id = None
            self.username = user

    # Class mapped to the message history table.
    class MessageHistory:
        def __init__(self, from_user, to_user, message):
            self.id = None
            self.from_user = from_user
            self.to_user = to_user
            self.message = message
            self.date = datetime.datetime.now()

    # Class mapped to the contact list.
    class Contacts:
        def __init__(self, contact):
            self.id = None
            self.name = contact

    # Class constructor:
    def __init__(self, name):
        # Create the database engine. Several clients may run at the same
        # time, so each one needs its own database. The client is also
        # multithreaded, so checks for connections from different threads
        # must be disabled, otherwise sqlite3.ProgrammingError is raised.
        self.database_engine = create_engine(f'sqlite:///client_{name}.db3', echo=False, pool_recycle=7200,
                                             connect_args={'check_same_thread': False})

        # Create the MetaData object
        self.metadata = MetaData()

        # Table of known users
        users = Table('known_users', self.metadata,
                      Column('id', Integer, primary_key=True),
                      Column('username', String)
                      )

        # Message history table
        history = Table('message_history', self.metadata,
                        Column('id', Integer, primary_key=True),
                        Column('from_user', String),
                        Column('to_user', String),
                        Column('message', Text),
                        Column('date', DateTime)
                        )

        # Contacts table
        contacts = Table('contacts', self.metadata,
                         Column('id', Integer, primary_key=True),
                         Column('name', String, unique=True)
                         )

        # Create all the tables
        self.metadata.create_all(self.database_engine)

        # Set up the classic mappings
        mapper(self.KnownUsers, users)
        mapper(self.MessageHistory, history)
        mapper(self.Contacts, contacts)

        # Create a session
        Session = sessionmaker(bind=self.database_engine)
        self.session = Session()

        # Clear the contacts table, since contacts are re-fetched from the server on startup.
        self.session.query(self.Contacts).delete()
        self.session.commit()

    # Add a contact.
    def add_contact(self, contact):
        if not self.session.query(self.Contacts).filter_by(name=contact).count():
            contact_row = self.Contacts(contact)
            self.session.add(contact_row)
            self.session.commit()

    # Remove a contact.
    def del_contact(self, contact):
        self.session.query(self.Contacts).filter_by(name=contact).delete()
        self.session.commit()  # persist the deletion

    # Refresh the known users. Users only ever come from the server,
    # so the table is cleared first.
    def add_users(self, users_list):
        self.session.query(self.KnownUsers).delete()
        for user in users_list:
            user_row = self.KnownUsers(user)
            self.session.add(user_row)
        self.session.commit()

    # Save a message.
    def save_message(self, from_user, to_user, message):
        message_row = self.MessageHistory(from_user, to_user, message)
        self.session.add(message_row)
        self.session.commit()

    # Return the list of contacts.
    def get_contacts(self):
        return [contact[0] for contact in self.session.query(self.Contacts.name).all()]

    # Return the list of known users.
    def get_users(self):
        return [user[0] for user in self.session.query(self.KnownUsers.username).all()]

    # Check whether a user is among the known users.
    def check_user(self, user):
        return bool(self.session.query(self.KnownUsers).filter_by(username=user).count())

    # Check whether a user is in the contacts.
    def check_contact(self, contact):
        return bool(self.session.query(self.Contacts).filter_by(name=contact).count())

    # Return the message history, optionally filtered by sender/recipient.
    def get_history(self, from_who=None, to_who=None):
        query = self.session.query(self.MessageHistory)
        if from_who:
            query = query.filter_by(from_user=from_who)
        if to_who:
            query = query.filter_by(to_user=to_who)
        return [(history_row.from_user, history_row.to_user, history_row.message, history_row.date)
                for history_row in query.all()]
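A quick smoke test of the class above (the user name 'test_user' is arbitrary; it only selects which SQLite file is created):

if __name__ == '__main__':
    db = ClientDatabase('test_user')
    db.add_users(['alice', 'bob'])
    db.add_contact('alice')
    db.save_message('alice', 'test_user', 'hello')
    print(db.get_contacts())                 # ['alice']
    print(db.check_user('bob'))              # True
    print(db.get_history(from_who='alice'))  # one (from, to, message, date) tuple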
Ejemplo n.º 49
0
    def test_nolength_string(self):
        metadata = MetaData()
        foo = Table('foo', metadata, Column('one', String))

        foo.create(config.db)
        foo.drop(config.db)
Ejemplo n.º 50
0
    CheckConstraint,
    Integer,
    String,
    DateTime,
    Boolean,
    Date,
    Text,
    SmallInteger,
    DDL,
    sql,
)
from sqlalchemy.dialects.postgresql import ARRAY, UUID, INET, JSONB

metadata = MetaData(
    naming_convention={
        'fk': '%(table_name)s_fk_%(column_0_name)s',
        'ix': '%(table_name)s_idx_%(column_0_name)s',
        'pk': '%(table_name)s_pkey',
    })

import mbdata.config  # noqa: E402
mbdata.config.configure(metadata=metadata, schema='musicbrainz')

sqlalchemy.event.listen(
    metadata,
    'before_create',
    DDL('CREATE SCHEMA IF NOT EXISTS musicbrainz'),
)

account = Table(
    'account',
    metadata,
Ejemplo n.º 51
0
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine
    table = define_hosts_table(meta)
    table.create()
Ejemplo n.º 52
0
from sqlalchemy import (create_engine, MetaData, Column, Table, Integer,
                        String, DateTime)
from datetime import datetime

engine = create_engine('sqlite:///teste.db', echo=True)
metadata = MetaData(bind=engine)

user_table = Table(
    'usuarios', metadata, Column('id', Integer, primary_key=True),
    Column('nome', String(40), index=True),
    Column('idade', Integer, nullable=False), Column('senha', String),
    Column('Criado_em', DateTime, default=datetime.now),
    Column('atualizado_em',
           DateTime,
           default=datetime.now,
           onupdate=datetime.now))

metadata.create_all(engine)
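A hedged follow-up showing the column defaults in action (assumes the engine and user_table defined above, and the SQLAlchemy 1.x implicit-autocommit style the snippet already uses):

with engine.connect() as conn:
    # 'Criado_em' and 'atualizado_em' are filled in by the datetime.now defaults.
    conn.execute(user_table.insert(),
                 {'nome': 'Ana', 'idade': 30, 'senha': 's3cret'})
    # onupdate=datetime.now refreshes 'atualizado_em' on every UPDATE.
    conn.execute(user_table.update()
                 .where(user_table.c.nome == 'Ana')
                 .values(idade=31))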
Ejemplo n.º 53
0
 def test_reflect_all(self, connection):
     m = MetaData()
     m.reflect(connection)
     eq_(set(t.name for t in m.tables.values()), set(["admin_docindex"]))
Ejemplo n.º 54
0
from sqlalchemy import create_engine, MetaData

from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import scoped_session, sessionmaker

import os

DATABASE_USER = os.getenv('DATABASE_USER', 'app')
DATABASE_PASSWORD = os.getenv('DATABASE_PASSWORD', 'app')
DATABASE_HOST = os.getenv('DATABASE_HOST', '172.16.0.2')
DATABASE_PORT = os.getenv('DATABASE_PORT', '5432')
DATABASE_NAME = os.getenv('DATABASE_NAME', 'whowantsabeer')

url = f'postgresql://{DATABASE_USER}:{DATABASE_PASSWORD}@{DATABASE_HOST}:{DATABASE_PORT}/{DATABASE_NAME}'
engine = create_engine(url)
meta_data = MetaData(bind=engine)

session_factory = sessionmaker(autocommit=False, autoflush=False, bind=engine)
db_session = scoped_session(session_factory)

Base = declarative_base(bind=engine, metadata=meta_data)
Base.query = db_session.query_property()
Ejemplo n.º 55
0
Archivo: api.py Proyecto: kvh/dcp
 def get_sqlalchemy_metadata(self):
     sa_engine = self.get_engine()
     meta = MetaData()
     meta.reflect(bind=sa_engine)
     return meta
Ejemplo n.º 56
0
    def test_basic(self, metadata, connection):

        s_table = Table(
            "sometable",
            metadata,
            Column("id_a", Unicode(255), primary_key=True),
            Column("id_b", Unicode(255), primary_key=True, unique=True),
            Column("group", Unicode(255), primary_key=True),
            Column("col", Unicode(255)),
            UniqueConstraint("col", "group"),
        )

        # "group" is a keyword, so lower case
        normalind = Index("tableind", s_table.c.id_b, s_table.c.group)
        Index(
            "compress1", s_table.c.id_a, s_table.c.id_b, oracle_compress=True
        )
        Index(
            "compress2",
            s_table.c.id_a,
            s_table.c.id_b,
            s_table.c.col,
            oracle_compress=1,
        )

        metadata.create_all(connection)

        mirror = MetaData()
        mirror.reflect(connection)

        metadata.drop_all(connection)
        mirror.create_all(connection)

        inspect = MetaData()
        inspect.reflect(connection)

        def obj_definition(obj):
            return (
                obj.__class__,
                tuple([c.name for c in obj.columns]),
                getattr(obj, "unique", None),
            )

        # find what the primary key constraint name should be
        primaryconsname = connection.scalar(
            text(
                """SELECT constraint_name
               FROM all_constraints
               WHERE table_name = :table_name
               AND owner = :owner
               AND constraint_type = 'P' """
            ),
            dict(
                table_name=s_table.name.upper(),
                owner=testing.db.dialect.default_schema_name.upper(),
            ),
        )

        reflectedtable = inspect.tables[s_table.name]

        # make a dictionary of the reflected objects:

        reflected = dict(
            [
                (obj_definition(i), i)
                for i in reflectedtable.indexes | reflectedtable.constraints
            ]
        )

        # assert we got the primary key constraint and its name;
        # raises KeyError if it is not in the dict

        assert (
            reflected[
                (PrimaryKeyConstraint, ("id_a", "id_b", "group"), None)
            ].name.upper()
            == primaryconsname.upper()
        )

        # raises KeyError if not in the dict

        eq_(reflected[(Index, ("id_b", "group"), False)].name, normalind.name)
        assert (Index, ("id_b",), True) in reflected
        assert (Index, ("col", "group"), True) in reflected

        idx = reflected[(Index, ("id_a", "id_b"), False)]
        assert idx.dialect_options["oracle"]["compress"] == 2

        idx = reflected[(Index, ("id_a", "id_b", "col"), False)]
        assert idx.dialect_options["oracle"]["compress"] == 1

        eq_(len(reflectedtable.constraints), 1)
        eq_(len(reflectedtable.indexes), 5)
Ejemplo n.º 57
0
def drop_tables(engine=user_engine):
    meta = MetaData()
    meta.drop_all(bind=engine, tables=tables)
    print('-   Tables dropped successfully:')
    print(tables)
Ejemplo n.º 58
0
def create_new_spine_database(db_url, upgrade=True, for_spine_model=False):
    """Create a new Spine database at the given url."""
    try:
        engine = create_engine(db_url)
    except DatabaseError as e:
        raise SpineDBAPIError("Could not connect to '{}': {}".format(
            db_url, e.orig.args))
    # Drop existing tables. This is a Spine db now...
    meta = MetaData(engine)
    meta.reflect()
    meta.drop_all(engine)
    # Create new tables
    meta = MetaData(naming_convention=naming_convention)
    Table(
        "commit",
        meta,
        Column("id", Integer, primary_key=True),
        Column("comment", String(255), nullable=False),
        Column("date", DateTime, nullable=False),
        Column("user", String(45)),
    )
    object_class_category = Table(
        "object_class_category",
        meta,
        Column("id", Integer, primary_key=True),
        Column("name", String(255), nullable=False, unique=True),
        Column("description", String(255), server_default=null()),
        Column("commit_id", Integer, ForeignKey("commit.id")),
    )
    object_class = Table(
        "object_class",
        meta,
        Column("id", Integer, primary_key=True),
        Column("name", String(255), nullable=False, unique=True),
        Column("description", String(255), server_default=null()),
        Column("category_id",
               Integer,
               ForeignKey("object_class_category.id"),
               server_default=null()),
        Column("display_order", Integer, server_default="99"),
        Column("display_icon", BigInteger, server_default=null()),
        Column("hidden", Integer, server_default="0"),
        Column("commit_id", Integer, ForeignKey("commit.id")),
    )
    Table(
        "object_category",
        meta,
        Column("id", Integer, primary_key=True),
        Column("object_class_id", Integer, ForeignKey("object_class.id")),
        Column("name", String(255), nullable=False, unique=True),
        Column("description", String(255), server_default=null()),
        Column("commit_id", Integer, ForeignKey("commit.id")),
    )
    Table(
        "object",
        meta,
        Column("id", Integer, primary_key=True),
        Column(
            "class_id", Integer,
            ForeignKey("object_class.id",
                       onupdate="CASCADE",
                       ondelete="CASCADE")),
        Column("name", String(255), nullable=False, unique=True),
        Column("description", String(255), server_default=null()),
        Column("category_id", Integer, ForeignKey("object_category.id")),
        Column("commit_id", Integer, ForeignKey("commit.id")),
    )
    Table(
        "relationship_class",
        meta,
        Column("id", Integer, primary_key=True),
        Column("dimension", Integer, primary_key=True),
        Column("object_class_id", Integer, ForeignKey("object_class.id")),
        Column("name", String(255), nullable=False),
        Column("hidden", Integer, server_default="0"),
        Column("commit_id", Integer, ForeignKey("commit.id")),
        UniqueConstraint("dimension", "name"),
    )
    Table(
        "relationship",
        meta,
        Column("id", Integer, primary_key=True),
        Column("dimension", Integer, primary_key=True),
        Column("object_id", Integer, ForeignKey("object.id")),
        Column("class_id", Integer, nullable=False),
        Column("name", String(255), nullable=False),
        Column("commit_id", Integer, ForeignKey("commit.id")),
        UniqueConstraint("dimension", "name"),
        ForeignKeyConstraint(
            ("class_id", "dimension"),
            ("relationship_class.id", "relationship_class.dimension"),
            onupdate="CASCADE",
            ondelete="CASCADE",
        ),
    )
    Table(
        "parameter",
        meta,
        Column("id", Integer, primary_key=True),
        Column("name", String(155), nullable=False, unique=True),
        Column("description", String(155), server_default=null()),
        Column("data_type", String(155), server_default="NUMERIC"),
        Column("relationship_class_id", Integer, default=null()),
        Column(
            "object_class_id",
            Integer,
            ForeignKey("object_class.id",
                       onupdate="CASCADE",
                       ondelete="CASCADE"),
            server_default=null(),
        ),
        Column("can_have_time_series", Integer, server_default="0"),
        Column("can_have_time_pattern", Integer, server_default="1"),
        Column("can_be_stochastic", Integer, server_default="0"),
        Column("default_value", String(155), server_default="0"),
        Column("is_mandatory", Integer, server_default="0"),
        Column("precision", Integer, server_default="2"),
        Column("unit", String(155), server_default=null()),
        Column("minimum_value", Float, server_default=null()),
        Column("maximum_value", Float, server_default=null()),
        Column("commit_id", Integer, ForeignKey("commit.id")),
        CheckConstraint(
            "`relationship_class_id` IS NOT NULL OR `object_class_id` IS NOT NULL"
        ),
    )
    Table(
        "parameter_value",
        meta,
        Column("id", Integer, primary_key=True),
        Column(
            "parameter_id", Integer,
            ForeignKey("parameter.id", onupdate="CASCADE",
                       ondelete="CASCADE")),
        Column("relationship_id", Integer, server_default=null()),
        Column("dummy_relationship_dimension", Integer, server_default="0"),
        Column("object_id",
               Integer,
               ForeignKey("object.id", onupdate="CASCADE", ondelete="CASCADE"),
               server_default=null()),
        Column("index", Integer, server_default="1"),
        Column("value", String(155), server_default=null()),
        Column("json", String(255), server_default=null()),
        Column("expression", String(155), server_default=null()),
        Column("time_pattern", String(155), server_default=null()),
        Column("time_series_id", String(155), server_default=null()),
        Column("stochastic_model_id", String(155), server_default=null()),
        Column("commit_id", Integer, ForeignKey("commit.id")),
        CheckConstraint(
            "`relationship_id` IS NOT NULL OR `object_id` IS NOT NULL"),
        UniqueConstraint("parameter_id", "object_id"),
        UniqueConstraint("parameter_id", "relationship_id"),
        ForeignKeyConstraint(
            ("relationship_id", "dummy_relationship_dimension"),
            ("relationship.id", "relationship.dimension"),
            onupdate="CASCADE",
            ondelete="CASCADE",
        ),
    )
    try:
        meta.create_all(engine)
    except DatabaseError as e:
        raise SpineDBAPIError("Unable to create Spine database: {}".format(
            e.orig.args))
    if not upgrade:
        return engine
    is_head(db_url, upgrade=True)
    if not for_spine_model:
        return engine
    # Add specific data structure for Spine Model
    meta = MetaData(engine)
    meta.reflect()
    object_class = meta.tables["object_class"]
    object_ = meta.tables["object"]
    relationship_class = meta.tables["relationship_class"]
    parameter_definition = meta.tables["parameter_definition"]
    parameter_tag = meta.tables["parameter_tag"]
    parameter_definition_tag = meta.tables["parameter_definition_tag"]
    # Small helpers that zip positional values with column names. Note the
    # calls below pass a trailing "hidden" value, so it is included here.
    obj_cls = lambda *x: dict(
        zip(("id", "name", "description", "display_order", "display_icon",
             "hidden"), x))
    obj = lambda *x: dict(zip(("class_id", "name", "description"), x))
    rel_cls = lambda *x: dict(
        zip(("id", "dimension", "object_class_id", "name"), x))
    obj_par_def = lambda *x: dict(
        zip(("id", "name", "object_class_id", "default_value"), x))
    rel_par_def = lambda *x: dict(
        zip(("id", "name", "relationship_class_id", "default_value"), x))
    par_tag = lambda *x: dict(zip(("id", "tag", "description"), x))
    par_def_tag = lambda *x: dict(
        zip(("parameter_definition_id", "parameter_tag_id"), x))
    try:
        engine.execute(
            object_class.insert(),
            [
                obj_cls(1, "direction", "A flow direction", 1, 281105626296654,
                        0),
                obj_cls(
                    2, "unit",
                    "An entity where an energy conversion process takes place",
                    2, 281470681805429, 0),
                obj_cls(3, "connection",
                        "An entity where an energy transfer takes place", 3,
                        280378317271233, 0),
                obj_cls(4, "storage", "A storage", 4, 280376899531934, 0),
                obj_cls(5, "commodity", "A commodity", 5, 281473533932880, 0),
                obj_cls(6, "node",
                        "An entity where an energy balance takes place", 6,
                        280740554077951, 0),
                obj_cls(7, "temporal_block", "A temporal block", 7,
                        280376891207703, 0),
            ],
        )
        engine.execute(
            object_.insert(),
            [
                obj(1, "from_node", "From a node, into something else"),
                obj(1, "to_node", "Into a node, from something else"),
            ],
        )
        engine.execute(
            relationship_class.insert(),
            [
                rel_cls(1, 0, 2, "unit__node__direction__temporal_block"),
                rel_cls(1, 1, 6, "unit__node__direction__temporal_block"),
                rel_cls(1, 2, 1, "unit__node__direction__temporal_block"),
                rel_cls(1, 3, 7, "unit__node__direction__temporal_block"),
                rel_cls(2, 0, 3,
                        "connection__node__direction__temporal_block"),
                rel_cls(2, 1, 6,
                        "connection__node__direction__temporal_block"),
                rel_cls(2, 2, 1,
                        "connection__node__direction__temporal_block"),
                rel_cls(2, 3, 7,
                        "connection__node__direction__temporal_block"),
                rel_cls(3, 0, 6, "node__commodity"),
                rel_cls(3, 1, 5, "node__commodity"),
                rel_cls(4, 0, 2, "unit_group__unit"),
                rel_cls(4, 1, 2, "unit_group__unit"),
                rel_cls(5, 0, 5, "commodity_group__commodity"),
                rel_cls(5, 1, 5, "commodity_group__commodity"),
                rel_cls(6, 0, 6, "node_group__node"),
                rel_cls(6, 1, 6, "node_group__node"),
                rel_cls(7, 0, 2, "unit_group__commodity_group"),
                rel_cls(7, 1, 5, "unit_group__commodity_group"),
                rel_cls(8, 0, 5, "commodity_group__node_group"),
                rel_cls(8, 1, 6, "commodity_group__node_group"),
                rel_cls(9, 0, 2, "unit__commodity"),
                rel_cls(9, 1, 5, "unit__commodity"),
                rel_cls(10, 0, 2, "unit__commodity__direction"),
                rel_cls(10, 1, 5, "unit__commodity__direction"),
                rel_cls(10, 2, 1, "unit__commodity__direction"),
                rel_cls(11, 0, 2, "unit__commodity__commodity"),
                rel_cls(11, 1, 5, "unit__commodity__commodity"),
                rel_cls(11, 2, 5, "unit__commodity__commodity"),
                rel_cls(12, 0, 3, "connection__node__direction"),
                rel_cls(12, 1, 6, "connection__node__direction"),
                rel_cls(12, 2, 1, "connection__node__direction"),
                rel_cls(13, 0, 3, "connection__node__node"),
                rel_cls(13, 1, 6, "connection__node__node"),
                rel_cls(13, 2, 6, "connection__node__node"),
                rel_cls(14, 0, 6, "node__temporal_block"),
                rel_cls(14, 1, 7, "node__temporal_block"),
                rel_cls(15, 0, 4, "storage__unit"),
                rel_cls(15, 1, 2, "storage__unit"),
                rel_cls(16, 0, 4, "storage__connection"),
                rel_cls(16, 1, 3, "storage__connection"),
                rel_cls(17, 0, 4, "storage__commodity"),
                rel_cls(17, 1, 5, "storage__commodity"),
            ],
        )
        engine.execute(
            parameter_definition.insert(),
            [
                obj_par_def(1, "fom_cost", 2, "null"),
                obj_par_def(2, "start_up_cost", 2, "null"),
                obj_par_def(3, "shut_down_cost", 2, "null"),
                obj_par_def(4, "number_of_units", 2, 1),
                obj_par_def(5, "avail_factor", 2, 1),
                obj_par_def(6, "min_down_time", 2, 0),
                obj_par_def(7, "min_up_time", 2, 0),
                obj_par_def(8, "start_datetime", 7, "null"),
                obj_par_def(9, "end_datetime", 7, "null"),
                obj_par_def(10, "time_slice_duration", 7, "null"),
                obj_par_def(11, "demand", 6, 0),
                obj_par_def(12, "online_variable_type", 2,
                            '"integer_online_variable"'),
                obj_par_def(13, "fix_units_on", 2, "null"),
                obj_par_def(14, "stor_state_cap", 4, 0),
                obj_par_def(15, "frac_state_loss", 4, 0),
            ],
        )
        engine.execute(
            parameter_definition.insert(),
            [
                rel_par_def(1001, "unit_conv_cap_to_flow", 9, 1),
                rel_par_def(1002, "unit_capacity", 10, "null"),
                rel_par_def(1003, "operating_cost", 10, "null"),
                rel_par_def(1004, "vom_cost", 10, "null"),
                rel_par_def(1005, "tax_net_flow", 8, "null"),
                rel_par_def(1006, "tax_out_flow", 8, "null"),
                rel_par_def(1007, "tax_in_flow", 8, "null"),
                rel_par_def(1008, "fix_ratio_out_in", 11, "null"),
                rel_par_def(1009, "fix_ratio_out_in", 12, "null"),
                rel_par_def(1010, "max_ratio_out_in", 11, "null"),
                rel_par_def(1011, "max_ratio_out_in", 12, "null"),
                rel_par_def(1012, "min_ratio_out_in", 11, "null"),
                rel_par_def(1013, "min_ratio_out_in", 12, "null"),
                rel_par_def(1014, "minimum_operating_point", 9, "null"),
                rel_par_def(1017, "stor_unit_discharg_eff", 15, 1),
                rel_par_def(1018, "stor_unit_charg_eff", 15, 1),
                rel_par_def(1019, "stor_conn_discharg_eff", 16, 1),
                rel_par_def(1020, "stor_conn_charg_eff", 16, 1),
                rel_par_def(1021, "max_cum_in_flow_bound", 7, "null"),
                rel_par_def(1022, "fix_flow", 10, "null"),
                rel_par_def(1023, "fix_trans", 12, "null"),
            ],
        )
        engine.execute(
            parameter_tag.insert(),
            [
                par_tag(1, "duration", "duration in time"),
                par_tag(2, "date_time", "a specific point in time"),
                par_tag(3, "time_series", "time series data"),
                par_tag(4, "time_pattern", "time patterned data"),
            ],
        )
        engine.execute(
            parameter_definition_tag.insert(),
            [
                par_def_tag(11, 3),
                par_def_tag(10, 1),
                par_def_tag(8, 2),
                par_def_tag(9, 2)
            ],
        )
    except DatabaseError as e:
        raise SpineDBAPIError(
            "Unable to add specific data structure for Spine Model: {}".format(
                e.orig.args))
    return engine
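A minimal way to exercise the function above (in-memory SQLite; upgrade=False skips the alembic check so the call stays self-contained):

from sqlalchemy import inspect

engine = create_new_spine_database("sqlite://", upgrade=False)
print(inspect(engine).get_table_names())  # the freshly created Spine tables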
Ejemplo n.º 59
0
def create_tables(engine=user_engine):
    meta = MetaData()
    meta.create_all(bind=engine, tables=tables)
    print('-   Tables created successfully:')
    for table in tables:
        print('-               %s' % table)

import logging
import uuid

from alembic import op
from sqlalchemy import MetaData
from sqlalchemy.dialects.postgresql import insert

# NOTE: get_distribution is an external helper used below; it is not shown
# in this snippet.


def upgrade():
    connection = op.get_bind()
    meta = MetaData()
    meta.reflect(bind=connection)

    commit_table = meta.tables["commit"]
    distribution_table = meta.tables["distribution"]
    machine_table = meta.tables["machine"]
    run_table = meta.tables["run"]
    summary_table = meta.tables["summary"]

    runs = connection.execute(run_table.select())
    commits = connection.execute(commit_table.select())
    distributions = connection.execute(distribution_table.select())
    machines = connection.execute(machine_table.select())
    runs_by_id = {r["id"]: r for r in runs}
    commits_by_id = {c["id"]: c for c in commits}
    machines_by_id = {m["id"]: m for m in machines}

    logging.info("1fed559406c5: Get benchmarks")
    summaries = connection.execute(
        summary_table.select()
        .join(run_table, run_table.c.id == summary_table.c.run_id)
        .filter(run_table.c.name.like("commit: %"))
    )

    i = 1

    logging.info("1fed559406c5: Truncate distribution table")
    connection.execute(distribution_table.delete())
    assert list(connection.execute(distribution_table.select())) == []

    for summary in summaries:
        run = runs_by_id.get(summary["run_id"])
        if not run:
            continue

        commit = commits_by_id.get(run["commit_id"])
        if not commit:
            continue

        if commit.timestamp is None:
            continue

        m = machines_by_id[run["machine_id"]]
        machine_hash = f"{m.name}-{m.gpu_count}-{m.cpu_core_count}-{m.cpu_thread_count}-{m.memory_bytes}"

        distributions = list(
            connection.execute(
                get_distribution(
                    summary_table,
                    run_table,
                    machine_table,
                    commit_table,
                    commit,
                    summary,
                    machine_hash,
                    100,
                )
            )
        )

        if not distributions:
            continue

        distribution = distributions[0]
        values = dict(distribution)
        machine_hash = values.pop("hash")
        values["id"] = uuid.uuid4().hex
        values["machine_hash"] = machine_hash
        values["limit"] = 100

        connection.execute(
            insert(distribution_table)
            .values(values)
            .on_conflict_do_update(
                index_elements=["case_id", "context_id", "commit_id", "machine_hash"],
                set_=values,
            )
        )
        logging.info(f"1fed559406c5: Processed {i} summaries")
        i += 1

    logging.info("1fed559406c5: Done with migration")
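The upsert inside the loop above uses the PostgreSQL-only INSERT ... ON CONFLICT construct from sqlalchemy.dialects.postgresql; index_elements must name columns covered by a unique constraint or index on the target table. A minimal standalone sketch (the table and column names here are hypothetical; the unique 'name' column plays the role of the ("case_id", "context_id", "commit_id", "machine_hash") index above):

from sqlalchemy import Column, MetaData, String, Table
from sqlalchemy.dialects import postgresql
from sqlalchemy.dialects.postgresql import insert

md = MetaData()
tags = Table('tags', md,
             Column('name', String, primary_key=True),
             Column('count', String))

stmt = insert(tags).values(name='sqlalchemy', count='1')
stmt = stmt.on_conflict_do_update(
    index_elements=['name'],
    # stmt.excluded refers to the row that failed to insert.
    set_={'count': stmt.excluded.count},
)
print(stmt.compile(dialect=postgresql.dialect()))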