def downgrade(migrate_engine):
    print("036 downgrade")
    meta = MetaData()
    meta.bind = migrate_engine

    tables = [define_component_config_table(meta)]
    drop_tables(tables)
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    ip_blocks = Table('ip_blocks', meta, autoload=True)
    network_name = Column('network_name', String(255))
    ip_blocks.create_column(network_name)
def test_insert_table(engine_testaccount):
    metadata = MetaData()
    users = Table('users', metadata,
                  Column('id', Integer, Sequence('user_id_seq'),
                         primary_key=True),
                  Column('name', String),
                  Column('fullname', String),
                  )
    metadata.create_all(engine_testaccount)

    data = [{
        'id': 1,
        'name': 'testname1',
        'fullname': 'fulltestname1',
    }, {
        'id': 2,
        'name': 'testname2',
        'fullname': 'fulltestname2',
    }]
    conn = engine_testaccount.connect()
    try:
        # using multivalue insert
        conn.execute(users.insert(data))
        results = conn.execute(select([users]).order_by('id'))
        row = results.fetchone()
        assert row['name'] == 'testname1'

    finally:
        conn.close()
        users.drop(engine_testaccount)
def downgrade(migrate_engine):
    print("042 downgrade")
    meta = MetaData()
    meta.bind = migrate_engine

    tables = [define_template_config_roles_table(meta)]
    drop_tables(tables)

    configs = Table('configs', meta, autoload=True)
    template_config_id_reserve = getattr(configs.c, 'template_config_id')
    template_config_id_reserve.alter(type=String(36))

    template_config = Table('template_config', meta, autoload=True)
    id_reserve = getattr(template_config.c, 'id')
    id_reserve.alter(type=String(36))
    name_reserve = getattr(template_config.c, 'name')
    name_reserve.alter(type=String(50))

    template_func = Table('template_func', meta, autoload=True)
    id_reserve = getattr(template_func.c, 'id')
    id_reserve.alter(type=String(36))
    name_reserve = getattr(template_func.c, 'name')
    name_reserve.alter(type=String(36))

    template_func_configs = Table('template_func_configs', meta, autoload=True)
    id_reserve = getattr(template_func_configs.c, 'func_id')
    id_reserve.alter(type=String(36))
    name_reserve = getattr(template_func_configs.c, 'config_id')
    name_reserve.alter(type=String(36))

    config_service = Table('config_service', meta, autoload=True)
    config_id_reserve = getattr(config_service.c, 'config_id')
    config_id_reserve.alter(type=String(36))
def downgrade(migrate_engine):
    print("033 downgrade")
    meta = MetaData()
    meta.bind = migrate_engine

    tables = [define_neutron_backend_table(meta)]
    drop_tables(tables)
def upgrade(migrate_engine):
    """Create shares and share_access_map tables."""
    meta = MetaData()
    meta.bind = migrate_engine

    shares = Table('shares', meta, autoload=True)
    share_snapshots = Table(
        'share_snapshots', meta,
        Column('created_at', DateTime),
        Column('updated_at', DateTime),
        Column('deleted_at', DateTime),
        Column('deleted', Boolean),
        Column('id', String(length=36), primary_key=True, nullable=False),
        Column('user_id', String(length=255)),
        Column('project_id', String(length=255)),
        Column('share_id', String(36), ForeignKey('shares.id'),
               nullable=False),
        Column('size', Integer),
        Column('status', String(length=255)),
        Column('progress', String(length=255)),
        Column('display_name', String(length=255)),
        Column('display_description', String(length=255)),
        Column('share_size', Integer),
        Column('share_proto', String(length=255)),
        Column('export_location', String(255)),
        mysql_engine='InnoDB')

    try:
        share_snapshots.create()
    except Exception:
        LOG.error(_("Table %r not created!"), share_snapshots)
        raise
Example #7
def drop_tables(names, session):
    """Takes a list of table names and drops them from the database if they exist."""
    metadata = MetaData()
    metadata.reflect(bind=session.bind)
    for table in metadata.sorted_tables:
        if table.name in names:
            table.drop(bind=session.bind)
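A minimal usage sketch for the helper above; the engine URL and table names are illustrative assumptions:

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

engine = create_engine('sqlite:///example.db')    # hypothetical database
session = sessionmaker(bind=engine)()
drop_tables(['users', 'orders'], session)         # drops only the tables that actually exist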
Example #8
    def _do_tables(self, mapper, engine):
        metadata = MetaData()

        test_table = Table(
            "test",
            metadata,
            Column("id", Integer, primary_key=True, nullable=False),
            Column("name", String, nullable=False),
        )

        pk_test_table = Table(
            "pk_test",
            metadata,
            Column("foobar", String, primary_key=True, nullable=False),
            Column("baz", String, nullable=False),
        )

        Test = type(str("Test"), (Base,), {})
        PKTest = type(str("PKTest"), (Base,), {"__unicode__": lambda x: x.baz, "__str__": lambda x: x.baz})

        mapper(Test, test_table, order_by=[test_table.c.name])
        mapper(PKTest, pk_test_table, order_by=[pk_test_table.c.baz])
        self.Test = Test
        self.PKTest = PKTest

        metadata.create_all(bind=engine)
Example #9
    def _do_tables(self, mapper, engine):
        metadata = MetaData()

        test_table = Table('test', metadata,
            Column('id', Integer, primary_key=True, nullable=False),
            Column('name', String, nullable=False),
        )

        pk_test_table = Table('pk_test', metadata,
            Column('foobar', String, primary_key=True, nullable=False),
            Column('baz', String, nullable=False),
        )

        Test = type('Test', (Base, ), {})
        PKTest = type('PKTest', (Base, ), {
            '__unicode__': lambda x: x.baz,
            '__str__': lambda x: x.baz,
        })

        mapper(Test, test_table, order_by=[test_table.c.name])
        mapper(PKTest, pk_test_table, order_by=[pk_test_table.c.baz])
        self.Test = Test
        self.PKTest = PKTest

        metadata.create_all(bind=engine)
def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    # drop column:
    backups = Table('backups', meta, autoload=True)
    backups.drop_column('parent_id')
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    # add column:
    backups = Table('backups', meta, autoload=True)
    backups.create_column(Column('parent_id', String(36), nullable=True))
def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    service_statuses = Table('service_statuses', meta, autoload=True)
    idx = Index("service_statuses_instance_id", service_statuses.c.instance_id)
    idx.drop()
Example #13
def reflect_table(engine, klass):
    """Inspect and reflect objects"""

    try:
        meta = MetaData()
        meta.reflect(bind=engine)
    except OperationalError as e:
        raise DatabaseError(error=e.orig.args[1], code=e.orig.args[0])

    # Try to reflect from any of the supported tables
    table = None

    for tb in klass.tables():
        if tb in meta.tables:
            table = meta.tables[tb]
            break

    if table is None:
        raise DatabaseError(error="Invalid schema. Table not found",
                            code="-1")

    # Map table schema into klass
    mapper(klass, table,
           column_prefix=klass.column_prefix())

    return table
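A hedged usage sketch for reflect_table above; the User class, its candidate table names, and the engine URL are illustrative assumptions:

from sqlalchemy import create_engine

class User(object):
    @classmethod
    def tables(cls):
        # candidate table names, checked in order against the reflected schema
        return ['users', 'auth_users']

    @classmethod
    def column_prefix(cls):
        return '_'

engine = create_engine('sqlite:///example.db')    # hypothetical database
users_table = reflect_table(engine, User)         # raises DatabaseError if no candidate table exists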
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine
    instances = Table('instances', meta, autoload=True)
    instances.create_column(Column(COLUMN_NAME, String(36),
                                   ForeignKey('instances.id'),
                                   nullable=True))
Example #15
def reflect(engine, models, schema = None):
    metadata = MetaData()
    metadata.bind = engine

    with warnings.catch_warnings():
        warnings.simplefilter("ignore", category = SAWarning)
        metadata.reflect(schema = schema, views = False)

    if schema is not None:
        tables = dict((table_name.replace(str(schema) + ".", ""), table)
                      for table_name, table in metadata.tables.items())
    else:
        tables = metadata.tables

    clear_mappers()

    mappers = {}
    for table_name, table in tables.items():
        modelname = "".join([word.capitalize() for word in table_name.split("_")])

        try:
            model = getattr(models, modelname)
        except AttributeError:
            stderr.write("Missing model for table %s\n" % table_name)
        else:
            mappers[modelname] = mapper(model, table)

    Session = sessionmaker(bind = engine, autocommit = False, autoflush = True)

    return mappers, tables, Session
Example #16
 def copy_star_schema(cls, bind=None):
     m = MetaData()
     for t in cls.data_tables:
         i2b2_star.metadata.tables[t].tometadata(m)
     if bind:
         m.bind = bind
     return m
Example #17
def main():
    parser = argparse.ArgumentParser(description='Generates SQLAlchemy model code from an existing database.')
    parser.add_argument('url', nargs='?', help='SQLAlchemy url to the database')
    parser.add_argument('--version', action='store_true', help="print the version number and exit")
    parser.add_argument('--schema', help='load tables from an alternate schema')
    parser.add_argument('--tables', help='tables to process (comma-separated, default: all)')
    parser.add_argument('--noviews', action='store_true', help="ignore views")
    parser.add_argument('--noindexes', action='store_true', help='ignore indexes')
    parser.add_argument('--noconstraints', action='store_true', help='ignore constraints')
    parser.add_argument('--nojoined', action='store_true', help="don't autodetect joined table inheritance")
    parser.add_argument('--noinflect', action='store_true', help="don't try to convert tables names to singular form")
    parser.add_argument('--noclasses', action='store_true', help="don't generate classes, only tables")
    parser.add_argument('--alwaysclasses', action='store_true', help="always generate classes")
    parser.add_argument('--nosequences', action='store_true', help="don't auto-generate postgresql sequences")
    parser.add_argument('--outfile', help='file to write output to (default: stdout)')
    args = parser.parse_args()

    if args.version:
        print(sqlacodegen.version)
        return
    if not args.url:
        print('You must supply a url\n', file=sys.stderr)
        parser.print_help()
        return

    engine = create_engine(args.url)
    metadata = MetaData(engine)
    tables = args.tables.split(',') if args.tables else None
    metadata.reflect(engine, args.schema, not args.noviews, tables)
    outfile = codecs.open(args.outfile, 'w', encoding='utf-8') if args.outfile else sys.stdout
    generator = CodeGenerator(metadata, args.noindexes, args.noconstraints, args.nojoined, args.noinflect,
                              args.noclasses, args.alwaysclasses, args.nosequences)
    generator.render(outfile)
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine
    instances = Table('instances', meta, autoload=True)
    service_type = Column('service_type', String(36))
    instances.create_column(service_type)
    instances.update().values({'service_type': 'mysql'}).execute()
Example #19
def migrate(engine, connection, revmap):
    """Given engine, connection and revision map, go through the
    ticket descriptions and comments and migrate the svn revisions to
    git hashes.

    """
    metadata = MetaData()
    metadata.bind = engine

    tickets = Table('ticket', metadata, autoload=True)
    changes = Table('ticket_change', metadata, autoload=True)

    trans = connection.begin()
    try:

        count = migrate_table(connection, revmap,
            tickets, [tickets.c.id], 
            [tickets.c.description]
        )
        count += migrate_table(connection, revmap,
            changes, [changes.c.ticket, changes.c.time, changes.c.field], 
            [changes.c.newvalue]
        )

        trans.commit()
        
        print("Migrated %i records" % count)

    except Exception as e:
        trans.rollback()
        die("Migration error: %s" % repr(e), "Changes were rolled back")
Example #20
 def finalizer():
     del backend.connection
     print("Dropping schema...")
     meta = MetaData(engine)
     meta.reflect()
     meta.drop_all()
     print("Done...")
def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    tasks_table = Table('tasks', meta, autoload=True)
    task_info_table = Table('task_info', meta, autoload=True)

    for col_name in TASKS_MIGRATE_COLUMNS:
        column = Column(col_name, Text())
        column.create(tasks_table)

    task_info_records = task_info_table.select().execute().fetchall()

    for task_info in task_info_records:
        values = {
            'input': task_info.input,
            'result': task_info.result,
            'message': task_info.message
        }

        tasks_table\
            .update(values=values)\
            .where(tasks_table.c.id == task_info.task_id)\
            .execute()

    drop_tables([task_info_table])
Example #22
    def _sql_backend(request,engine,**kwargs):

        meta = MetaData(engine)
        meta.reflect()
        meta.drop_all()
        #we enable foreign key checks for SQLITE
        if str(engine.url).startswith('sqlite://'):
            engine.connect().execute('pragma foreign_keys=ON')

        if not 'ondelete' in kwargs:
            kwargs['ondelete'] = 'CASCADE'
        backend = SqlBackend(engine = engine,**kwargs)
        backend.init_schema()
        backend.create_schema()

        def finalizer():
            backend.rollback()
            del backend.connection
            print("Dropping schema...")
            #we disable foreign key checks for SQLITE (as dropping tables with circular foreign keys won't work otherwise...)
            if str(engine.url).startswith('sqlite://'):
                engine.connect().execute('pragma foreign_keys=OFF')
            meta = MetaData(engine)
            meta.reflect()
            meta.drop_all()
            print("Done...")

        request.addfinalizer(finalizer)

        return backend
Example #23
def setupPackage():
    os.environ['MONGO_URI'] = 'mongodb://localhost'
    os.environ['MONGO_DB_NAME'] = 'royal_example'
    os.environ['MONGO_DB_PREFIX'] = ''

    # sqla extentsion setup.
    global engine

    alembic_config = Config()
    alembic_config.set_main_option('script_location',
                                   'example/ext/sqla/db')
    alembic_config.set_main_option('sqlalchemy.url', mysql_uri)

    engine = create_engine(mysql_uri)

    try:
        command.downgrade(alembic_config, 'base')
    except:
        log.exception("Migration downgrade failed, clearing all tables")
        metadata = MetaData(engine)
        metadata.reflect()
        for table in metadata.tables.values():
            for fk in table.foreign_keys:
                engine.execute(DropConstraint(fk.constraint))
        metadata.drop_all()

    command.upgrade(alembic_config, 'head')
Example #24
def read_sql_table(table_name, con, index_col=None, coerce_float=True,
                   parse_dates=None, columns=None):
    """Read SQL database table into a DataFrame.

    Given a table name and an SQLAlchemy engine, returns a DataFrame.
    This function does not support DBAPI connections.

    Parameters
    ----------
    table_name : string
        Name of SQL table in database
    con : SQLAlchemy engine
        Sqlite DBAPI connection mode not supported
    index_col : string, optional
        Column to set as index
    coerce_float : boolean, default True
        Attempt to convert values of non-string, non-numeric objects (like
        decimal.Decimal) to floating point. Can result in loss of precision.
    parse_dates : list or dict
        - List of column names to parse as dates
        - Dict of ``{column_name: format string}`` where format string is
          strftime compatible in case of parsing string times or is one of
          (D, s, ns, ms, us) in case of parsing integer timestamps
        - Dict of ``{column_name: arg dict}``, where the arg dict corresponds
          to the keyword arguments of :func:`pandas.to_datetime`
          Especially useful with databases without native Datetime support,
          such as SQLite
    columns : list
        List of column names to select from sql table

    Returns
    -------
    DataFrame

    See also
    --------
    read_sql_query : Read SQL query into a DataFrame.
    read_sql

    """
    if not _is_sqlalchemy_engine(con):
        raise NotImplementedError("read_sql_table only supported for "
                                  "SQLAlchemy engines.")
    import sqlalchemy
    from sqlalchemy.schema import MetaData
    meta = MetaData(con)
    try:
        meta.reflect(only=[table_name])
    except sqlalchemy.exc.InvalidRequestError:
        raise ValueError("Table %s not found" % table_name)

    pandas_sql = PandasSQLAlchemy(con, meta=meta)
    table = pandas_sql.read_table(
        table_name, index_col=index_col, coerce_float=coerce_float,
        parse_dates=parse_dates, columns=columns)

    if table is not None:
        return table
    else:
        raise ValueError("Table %s not found" % table_name, con)
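A short usage sketch for read_sql_table above; the engine URL, table name, and column names are illustrative assumptions:

from sqlalchemy import create_engine

engine = create_engine('sqlite:///example.db')    # hypothetical database
frame = read_sql_table('users', engine,
                       index_col='id',
                       parse_dates=['created_at'],            # parse this column as datetimes
                       columns=['id', 'name', 'created_at'])
print(frame.head())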
Example #25
    def test_clone_table_adds_or_deletes_columns(self):
        meta = MetaData()
        meta.bind = self.engine

        table = Table('dummy',
                      meta,
                      Column('id', String(36), primary_key=True,
                             nullable=False),
                      Column('A', Boolean, default=False)
                      )
        table.create()

        newcols = [
            Column('B', Boolean, default=False),
            Column('C', String(255), default='foobar')
        ]
        ignorecols = [
            table.c.A.name
        ]
        new_table = migrate_utils.clone_table('new_dummy', table, meta,
                                              newcols=newcols,
                                              ignorecols=ignorecols)

        col_names = [c.name for c in new_table.columns]

        self.assertEqual(3, len(col_names))
        self.assertIsNotNone(new_table.c.B)
        self.assertIsNotNone(new_table.c.C)
        self.assertNotIn('A', col_names)
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    ip_blocks = Table('ip_blocks', meta, autoload=True)
    max_allocation = Column('max_allocation', Integer())
    ip_blocks.create_column(max_allocation)
def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine
    modules = Table('modules', meta, autoload=True)
    modules.drop_column(COLUMN_NAME_1)
    modules.drop_column(COLUMN_NAME_2)
    modules.drop_column(COLUMN_NAME_3)
class Database:
    def __init__(self, url, config=None):
        self.url = url
        self.engine = self.make_engine(url)
        self.metadata = MetaData(bind=self.engine)
        self.metadata.reflect()

        self.config = config

        # parallel table init
        self.tables_lock = threading.Lock()
        self.tables = {}
        threads = []
        for table in self.metadata.sorted_tables:
            t = threading.Thread(target=self.make_dsm_table, args=(table,))
            t.start()
            threads.append(t)
        [t.join() for t in threads]

    def execute(self, qry):
        try:
            res = self.engine.execute(qry)
        except Exception as e:
            if str(e) == "(OperationalError) (1205, 'Lock wait timeout exceeded; try restarting transaction')":
                print(e)
                res = self.execute(qry)
            else:
                print(e)
                raise

        return res
def upgrade(migrate_engine):
    print("001 upgrade")
    meta = MetaData()
    meta.bind = migrate_engine
    tables = [define_hosts_table(meta),
              define_discover_hosts_table(meta),
              define_clusters_table(meta),
              define_cluster_hosts_table(meta),
              define_networks_table(meta),
              define_ip_ranges_table(meta),
              define_host_interfaces_table(meta),
              define_config_sets_table(meta),
              define_components_table(meta),
              define_services_table(meta),
              define_roles_table(meta),
              define_host_roles_table(meta),
              define_service_roles_table(meta),
              define_config_files_table(meta),
              define_configs_table(meta),
              define_config_set_items_table(meta),
              define_config_historys_table(meta),
              define_tasks_table(meta),
              define_task_infos_table(meta),
              define_repositorys_table(meta),
              define_users_table(meta),
              define_versions_table(meta),
              define_assigned_networks_table(meta),
              define_logic_networks_table(meta),
              define_routers_table(meta),
              define_subnets_table(meta),
              define_float_ip_ranges_table(meta),
              define_dns_nameservers_table(meta),
              define_service_disks_table(meta),
              define_cinder_volumes_table(meta)]
    create_tables(tables)
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    # new table with desired columns, indexes, and constraints
    new_agent_heartbeats = Table(
        'agent_heartbeats', meta,
        Column('id', String(36), primary_key=True, nullable=False),
        Column('instance_id', String(36),
               nullable=False, unique=True, index=True),
        Column('guest_agent_version', String(255), index=True),
        Column('deleted', Boolean(), index=True),
        Column('deleted_at', DateTime()),
        Column('updated_at', DateTime(), nullable=False))

    # original table from migration 005_heartbeat.py
    previous_agent_heartbeats = Table('agent_heartbeats', meta, autoload=True)

    try:
        drop_tables([previous_agent_heartbeats])
    except OperationalError as e:
        logger.warn("This table may have been dropped by some other means.")
        logger.warn(e)

    create_tables([new_agent_heartbeats])
def declarative_base(bind=None, metadata=None, mapper=None, cls=object,
                     name='Base', constructor=_declarative_constructor,
                     metaclass=DeclarativeMeta, engine=None):
    """Construct a base class for declarative class definitions.

    The new base class will be given a metaclass that invokes
    :func:`instrument_declarative()` upon each subclass definition, and routes
    later Column- and Mapper-related attribute assignments made on the class
    into Table and Mapper assignments.  

    :param bind: An optional :class:`~sqlalchemy.engine.base.Connectable`, will be assigned 
      the ``bind`` attribute on the :class:`~sqlalchemy.MetaData` instance.
      The `engine` keyword argument is a deprecated synonym for `bind`.

    :param metadata:
      An optional :class:`~sqlalchemy.MetaData` instance.  All :class:`~sqlalchemy.schema.Table` 
      objects implicitly declared by
      subclasses of the base will share this MetaData.  A MetaData instance
      will be created if none is provided.  The MetaData instance will be
      available via the `metadata` attribute of the generated declarative
      base class.

    :param mapper:
      An optional callable, defaults to :func:`~sqlalchemy.orm.mapper`.  Will be
      used to map subclasses to their Tables.

    :param cls:
      Defaults to :class:`object`.  A type to use as the base for the generated
      declarative base class.  May be a type or tuple of types.

    :param name:
      Defaults to ``Base``.  The display name for the generated
      class.  Customizing this is not required, but can improve clarity in
      tracebacks and debugging.

    :param constructor:
      Defaults to declarative._declarative_constructor, an __init__
      implementation that assigns \**kwargs for declared fields and relations
      to an instance.  If ``None`` is supplied, no __init__ will be installed
      and construction will fall back to cls.__init__ with normal Python
      semantics.

    :param metaclass:
      Defaults to :class:`DeclarativeMeta`.  A metaclass or __metaclass__
      compatible callable to use as the meta type of the generated
      declarative base class.

    """
    lcl_metadata = metadata or MetaData()
    if bind or engine:
        lcl_metadata.bind = bind or engine

    bases = not isinstance(cls, tuple) and (cls,) or cls
    class_dict = dict(_decl_class_registry=dict(),
                      metadata=lcl_metadata)

    if constructor:
        class_dict['__init__'] = constructor
    if mapper:
        class_dict['__mapper_cls__'] = mapper

    return metaclass(name, bases, class_dict)
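A brief sketch of how the factory above is typically used; the in-memory engine and the User model are assumptions for illustration:

from sqlalchemy import create_engine, Column, Integer, String

engine = create_engine('sqlite:///:memory:')
Base = declarative_base(bind=engine)            # bind ends up on Base.metadata

class User(Base):
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    name = Column(String(50))

Base.metadata.create_all()                      # uses the bound engine
user = User(name='alice')                       # __init__ supplied by _declarative_constructor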
Example #32
                       echo=True,
                       client_encoding='utf8')

conn = engine.connect()

# the transaction only applies if the DB supports
# transactional DDL, i.e. Postgresql, MS SQL Server
trans = conn.begin()

inspector = reflection.Inspector.from_engine(engine)

# gather all data first before dropping anything.
# some DBs lock after things have been dropped in
# a transaction.

metadata = MetaData()

tbs = []
all_fks = []

for table_name in inspector.get_table_names():
    fks = []
    for fk in inspector.get_foreign_keys(table_name):
        if not fk['name']:
            continue
        fks.append(ForeignKeyConstraint((), (), name=fk['name']))
    t = Table(table_name, metadata, *fks)
    tbs.append(t)
    all_fks.extend(fks)

for fkc in all_fks:
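    # The original snippet is truncated here; the lines below are a plausible
    # completion of the usual "drop everything" recipe (DropConstraint and
    # DropTable are assumed to be imported from sqlalchemy.schema).
    conn.execute(DropConstraint(fkc))

for table in tbs:
    conn.execute(DropTable(table))

trans.commit()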
def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine
    tables = [define_tasks_table(meta)]
    drop_tables(tables)
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine
    tables = [define_tasks_table(meta)]
    create_tables(tables)
Example #35
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, Integer, String, TIMESTAMP, ForeignKey, Text
import datetime
from sqlalchemy.schema import MetaData
from db.utils import engine
from common_utils import read_json_dict
from loguru import logger

config = read_json_dict('config.json')
SCHEMA_NAME = config['postgres']['schema']
Base = declarative_base(metadata=MetaData(schema=SCHEMA_NAME))


class Posts(Base):
    __tablename__ = 'posts'
    id = Column(Integer, primary_key=True, autoincrement=True)
    create_ts = Column(TIMESTAMP, default=datetime.datetime.utcnow)
    tg_message_id = Column(Integer, nullable=False, unique=True)
    chat_id = Column(Integer, nullable=False)
    post_text = Column(Text, nullable=False)
    user_id = Column(Text, nullable=False)


class Users(Base):
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True, autoincrement=True)
    telegram_username = Column(String, nullable=False, unique=True, )
    update_ts = Column(TIMESTAMP, default=datetime.datetime.utcnow, nullable=False)


class Scores(Base):
Example #36
def table_dml(engine):
    return Table('test_pybigquery.sample_dml',
                 MetaData(bind=engine),
                 autoload=True)
Example #37
 def create_metadata(self):
     return MetaData(bind=self.create_engine())
Example #38
# In[8]:

from sqlalchemy.schema import MetaData

# Example

# In[10]:

from sqlalchemy import Table, Column, Integer, String, MetaData, ForeignKey

# Create a MetaData instance

# In[11]:

metadata = MetaData()

# Create a Table
# - Table('table name', metadata, Columns)
# - registered on the metadata

# In[12]:

users = Table(
    'users',
    metadata,
    Column('id', Integer, primary_key=True),
    Column('name', String),
    Column('fullname', String),
)
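A small follow-on sketch showing how the table above could be created and queried against an in-memory engine (SQLAlchemy 1.x style, matching the snippet); the engine URL and sample row are assumptions:

from sqlalchemy import create_engine

engine = create_engine('sqlite:///:memory:')
metadata.create_all(engine)                     # emits CREATE TABLE for 'users'

conn = engine.connect()
conn.execute(users.insert(), {'name': 'jack', 'fullname': 'Jack Jones'})
print(conn.execute(users.select()).fetchall())
conn.close()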
Example #39
def table_schema(name: str, session: Session) -> Table:
    """
    :returns: Table schema using SQLAlchemy reflect as it currently exists in the db
    :rtype: Table
    """
    return Table(name, MetaData(bind=session.bind), autoload=True)
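A hedged usage sketch for the reflection helper above; the session setup and the 'orders' table name are illustrative assumptions:

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

engine = create_engine('sqlite:///example.db')    # hypothetical database
session = sessionmaker(bind=engine)()
orders = table_schema('orders', session)          # reflect the table as it currently exists
print([column.name for column in orders.columns])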
class HGECtx:
    def __init__(self, hge_url, pg_url, hge_key, hge_webhook, hge_jwt_key_file,
                 webhook_insecure):
        server_address = ('0.0.0.0', 5592)

        self.resp_queue = queue.Queue(maxsize=1)
        self.error_queue = queue.Queue()
        self.ws_queue = queue.Queue(maxsize=-1)
        self.httpd = WebhookServer(self.resp_queue, self.error_queue,
                                   server_address)
        self.web_server = threading.Thread(target=self.httpd.serve_forever)
        self.web_server.start()

        self.pg_url = pg_url
        self.engine = create_engine(self.pg_url)
        self.meta = MetaData()

        self.http = requests.Session()
        self.hge_url = hge_url
        self.hge_key = hge_key
        self.hge_webhook = hge_webhook
        if hge_jwt_key_file is None:
            self.hge_jwt_key = None
        else:
            with open(hge_jwt_key_file) as f:
                self.hge_jwt_key = f.read()
        self.webhook_insecure = webhook_insecure
        self.may_skip_test_teardown = False

        self.ws_url = urlparse(hge_url)
        self.ws_url = self.ws_url._replace(scheme='ws')
        self.ws_url = self.ws_url._replace(path='/v1alpha1/graphql')
        self.ws = websocket.WebSocketApp(self.ws_url.geturl(),
                                         on_message=self._on_message)
        self.wst = threading.Thread(target=self.ws.run_forever)
        self.wst.daemon = True
        self.wst.start()

        # start the graphql server
        self.graphql_server = graphql_server.create_server('127.0.0.1', 5000)
        self.gql_srvr_thread = threading.Thread(
            target=self.graphql_server.serve_forever)
        self.gql_srvr_thread.start()

        result = subprocess.run(['../../scripts/get-version.sh'],
                                shell=False,
                                stdout=subprocess.PIPE,
                                check=True)
        self.version = result.stdout.decode('utf-8').strip()
        try:
            st_code, resp = self.v1q_f('queries/clear_db.yaml')
        except requests.exceptions.RequestException as e:
            self.teardown()
            raise HGECtxError(repr(e))
        assert st_code == 200, resp

    def _on_message(self, message):
        my_json = json.loads(message)
        if my_json['type'] != 'ka':
            self.ws_queue.put(message)

    def get_event(self, timeout):
        return self.resp_queue.get(timeout=timeout)

    def get_error_queue_size(self):
        sz = 0
        while not self.error_queue.empty():
            self.error_queue.get()
            sz = sz + 1
        return sz

    def get_ws_event(self, timeout):
        return json.loads(self.ws_queue.get(timeout=timeout))

    def reflect_tables(self):
        self.meta.reflect(bind=self.engine)

    def anyq(self, u, q, h):
        resp = self.http.post(self.hge_url + u, json=q, headers=h)
        return resp.status_code, resp.json()

    def v1q(self, q, headers={}):
        h = headers.copy()
        if self.hge_key is not None:
            h['X-Hasura-Access-Key'] = self.hge_key
        resp = self.http.post(self.hge_url + "/v1/query", json=q, headers=h)
        return resp.status_code, resp.json()

    def v1q_f(self, fn):
        with open(fn) as f:
            return self.v1q(yaml.load(f))

    def teardown(self):
        self.http.close()
        self.engine.dispose()
        self.httpd.shutdown()
        self.httpd.server_close()
        self.ws.close()
        self.web_server.join()
        self.wst.join()
        graphql_server.stop_server(self.graphql_server)
        self.gql_srvr_thread.join()
Example #41
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals

from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.schema import MetaData

# Recommended naming convention used by Alembic, as various different database
# providers will autogenerate vastly different names making migrations more
# difficult. See: http://alembic.readthedocs.org/en/latest/naming.html
NAMING_CONVENTION = {
    "ix": 'ix_%(column_0_label)s',
    "uq": "uq_%(table_name)s_%(column_0_name)s",
    "ck": "ck_%(table_name)s_%(constraint_name)s",
    "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
    "pk": "pk_%(table_name)s"
}

metadata = MetaData(naming_convention=NAMING_CONVENTION)
Base = declarative_base(metadata=metadata)
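As an illustration of the convention above, a model declared against this Base gets deterministic constraint names; the Account class below is a sketch, not part of the original module:

from sqlalchemy import Column, Integer, String, UniqueConstraint

class Account(Base):
    __tablename__ = 'account'
    id = Column(Integer, primary_key=True)             # -> pk_account
    email = Column(String(255), index=True)            # -> ix_account_email
    __table_args__ = (UniqueConstraint('email'),)      # -> uq_account_email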
Example #42
class Database(object):
    def __init__(self, url, schema=None, reflectMetadata=True):
        kw = {}
        if url.startswith('postgres'):
            kw['poolclass'] = NullPool
        self.lock = threading.RLock()
        self.local = threading.local()
        if '?' in url:
            url, query = url.split('?', 1)
            query = parse_qs(query)
            if schema is None:
                # le pop
                schema_qs = query.pop('schema', query.pop('searchpath', []))
                if len(schema_qs):
                    schema = schema_qs.pop()
        self.schema = schema
        engine = create_engine(url, **kw)
        self.url = url
        self.engine = construct_engine(engine)
        self.metadata = MetaData(schema=schema)
        self.metadata.bind = self.engine
        if reflectMetadata:
            self.metadata.reflect(self.engine)
        self._tables = {}

    @property
    def executable(self):
        """ The current connection or engine against which statements
        will be executed. """
        if hasattr(self.local, 'connection'):
            return self.local.connection
        return self.engine

    def _acquire(self):
        self.lock.acquire()

    def _release(self):
        if not hasattr(self.local, 'tx'):
            self.lock.release()
            self.local.must_release = False
        else:
            self.local.must_release = True

    def _release_internal(self):
        if hasattr(self.local, 'must_release') and self.local.must_release:
            self.lock.release()
            self.local.must_release = False

    def begin(self):
        """ Enter a transaction explicitly. No data will be written
        until the transaction has been committed. """
        if not hasattr(self.local, 'connection'):
            self.local.connection = self.engine.connect()
        if not hasattr(self.local, 'tx'):
            self.local.tx = self.local.connection.begin()

    def commit(self):
        """ Commit the current transaction, making all statements executed
        since the transaction was begun permanent. """
        self.local.tx.commit()
        del self.local.tx
        self._release_internal()

    def rollback(self):
        """ Roll back the current transaction, discarding all statements
        executed since the transaction was begun. """
        self.local.tx.rollback()
        del self.local.tx
        self._release_internal()

    @property
    def tables(self):
        """ Get a listing of all tables that exist in the database.

        >>> print db.tables
        set([u'user', u'action'])
        """
        return list(set(self.metadata.tables.keys()) | set(self._tables.keys()))

    def create_table(self,
                     table_name,
                     primary_id='id',
                     primary_type='Integer'):
        """
        Creates a new table. The new table will automatically have an `id` column 
        unless specified via optional parameter primary_id, which will be used 
        as the primary key of the table. Automatic id is set to be an 
        auto-incrementing integer, while the type of custom primary_id can be a 
        Text or an Integer as specified with primary_type flag. 
        The caller will be responsible for the uniqueness of manual primary_id.

        This custom id feature is only available via direct create_table call. 

        Returns a :py:class:`Table <dataset.Table>` instance.
        ::

            table = db.create_table('population')

            # custom id and type
            table2 = db.create_table('population2', 'age')
            table3 = db.create_table('population3', primary_id='race', primary_type='Text')
        """
        self._acquire()
        try:
            log.debug("Creating table: %s on %r" % (table_name, self.engine))
            table = SQLATable(table_name, self.metadata)
            if primary_type == 'Integer':
                auto_flag = False
                if primary_id == 'id':
                    auto_flag = True
                col = Column(primary_id,
                             Integer,
                             primary_key=True,
                             autoincrement=auto_flag)
            elif primary_type == 'Text':
                col = Column(primary_id, Text, primary_key=True)
            else:
                raise DatasetException(
                    "The primary_type has to be either 'Integer' or 'Text'.")

            table.append_column(col)
            table.create(self.engine)
            self._tables[table_name] = table
            return Table(self, table)
        finally:
            self._release()

    def load_table(self, table_name):
        """
        Loads a table. This will fail if the table does not already
        exist in the database. If the table exists, its columns will be
        reflected and are available on the :py:class:`Table <dataset.Table>`
        object.

        Returns a :py:class:`Table <dataset.Table>` instance.
        ::

            table = db.load_table('population')
        """
        self._acquire()
        try:
            log.debug("Loading table: %s on %r" % (table_name, self))
            table = SQLATable(table_name, self.metadata, autoload=True)
            self._tables[table_name] = table
            return Table(self, table)
        finally:
            self._release()

    def get_table(self, table_name, primary_id='id', primary_type='Integer'):
        """
        Smart wrapper around *load_table* and *create_table*. Either loads a table
        or creates it if it doesn't exist yet.
        As a short-hand, a table with a custom id and type can be created using [],
        where table_name, primary_id, and primary_type are specified as a tuple.

        Returns a :py:class:`Table <dataset.Table>` instance.
        ::

            table = db.get_table('population')
            # you can also use the short-hand syntax:
            table = db['population']

            # custom id and type
            table2 = db['population2', 'age'] # default type is 'Integer'
            table3 = db['population3', 'race', 'Text']
        """
        if table_name in self._tables:
            return Table(self, self._tables[table_name])
        self._acquire()
        try:
            if self.engine.has_table(table_name, schema=self.schema):
                return self.load_table(table_name)
            else:
                return self.create_table(table_name, primary_id, primary_type)
        finally:
            self._release()

    def __getitem__(self, table_name):
        if type(table_name) is tuple:
            return self.get_table(*table_name[:3])
        else:
            return self.get_table(table_name)

    def query(self, query, **kw):
        """
        Run a statement on the database directly, allowing for the
        execution of arbitrary read/write queries. A query can either be
        a plain text string, or a `SQLAlchemy expression <http://docs.sqlalchemy.org/ru/latest/core/tutorial.html#selecting>`_. The returned
        iterator will yield each result sequentially.

        Any keyword arguments will be passed into the query to perform
        parameter binding. 
        ::

            res = db.query('SELECT user, COUNT(*) c FROM photos GROUP BY user')
            for row in res:
                print row['user'], row['c']
        """
        return ResultIter(self.executable.execute(query, **kw))

    def __repr__(self):
        return '<Database(%s)>' % self.url
Example #43
def db_export_metadefs(engine, metadata_path=None):
    meta = MetaData()
    meta.bind = engine

    _export_data_to_file(meta, metadata_path)
Example #44
 def inspect_db(self):
     metadata = MetaData()
     metadata.reflect(bind=self.db.engine)
     return metadata
Example #45
import sys
import warnings
import sqlalchemy
from sqlalchemy.orm import relationship, backref
import sqlalchemy.sql
import sqlalchemy.exc
import sqlalchemy.schema
import geoalchemy as ga
from geoalchemy.postgis import PGComparator
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, Integer, String, Unicode, DateTime
from sqlalchemy.schema import Table, MetaData, PrimaryKeyConstraint
from sqlalchemy.sql.expression import Function

import Database

meta = MetaData(bind=Database.engine(), schema='gazetteer')
base = declarative_base(metadata=meta)


def objectClass(objtype):
    '''
    Derive the class of an objet given its name
    '''
    module = sys.modules[__name__]
    if objtype not in dir(module):
        raise ValueError("Invalid object type: " + objtype)
    oclass = getattr(module, objtype)
    if '__table__' not in dir(oclass):
        raise ValueError("Invalid object typeid: " + objtype)
    return oclass
Example #46
def db_unload_metadefs(engine):
    meta = MetaData()
    meta.bind = engine

    _clear_metadata(meta)
Example #47
 def metadata(self):
     """Return a SQLAlchemy schema cache object."""
     return MetaData(schema=self.schema, bind=self.executable)
Example #48
    def process_bind_param(self, value, dialect):
        return json.dumps(value)

    def process_result_value(self, value, dialect):
        if not value:
            return {}
        return json.loads(value)


class BaseModel(object):
    """Primary mixin that provides common behavior for SQLAlchemy models."""

    def __init__(self, **kwargs):
        for key, value in list(kwargs.items()):
            setattr(self, key, value)


Versioned = make_versioned_class()

metadata = MetaData(naming_convention={
  "ix": 'ix_%(column_0_label)s',
  "uq": "uq_%(table_name)s_%(column_0_name)s",
  "ck": "ck_%(table_name)s_%(constraint_name)s",
  "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
  "pk": "pk_%(table_name)s"
})

DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
Base = declarative_base(metadata=metadata, cls=BaseModel)
Example #49
def read_sql_table(engine,
                   table_name,
                   index_col=None,
                   columns=None,
                   select_from=None,
                   limit=None,
                   order_by=None,
                   where=None,
                   coerce_types=None,
                   raise_on_missing=True):
    """ Load a table from a SQL database.
    
    Parameters
    ----------
    engine : SQLAlchemy engine
        The SQL database to load from.
    
    table_name : str
        The name of the table to load.
    
    index_col : str, optional
        Column name to use as index for the returned data frame.
    
    columns : sequence of str, optional
        Columns to select from the table. By default, all columns are selected.

    select_from : str or SQLAlchemy clause, optional
        A FROM clause to use for the select statement. Defaults to the
        table name.
    
    limit : int, optional
        Limit the number of rows selected.
    
    order_by : str or SQLAlchemy clause, optional
        An ORDER BY clause to sort the selected rows.
    
    where : str or SQLAlchemy clause, optional
        A WHERE clause used to filter the selected rows.
    
    coerce_types : dict(str : dtype or Python type), optional
        Override pandas type inference for specific columns.
    
    Returns
    -------
    A pandas DataFrame.
    """
    # Pandas does not expose many of these options, so we pull out some of
    # Pandas' internals.
    #
    # An alternative approach would be to use `pandas.read_sql_query` with an
    # appropriate (dialect-specific) query. However, this approach would not
    # utilize Pandas' logic for column type inference (performed by
    # `_harmonize_columns()` below), and would hence produce inferior results.

    from sqlalchemy.schema import MetaData
    from pandas.io.sql import SQLDatabase, SQLTable

    # From pandas.io.sql.read_sql_table
    # and  pandas.io.sql.SQLDatabase.read_table:
    meta = MetaData(engine)
    try:
        meta.reflect(only=[table_name])
    except sqlalchemy.exc.InvalidRequestError:
        if raise_on_missing:
            raise ValueError("Table %s not found" % table_name)
        else:
            return None

    pd_db = SQLDatabase(engine, meta=meta)
    pd_tbl = SQLTable(table_name, pd_db, index=None)

    # Adapted from pandas.io.SQLTable.read:
    if columns is not None and len(columns) > 0:
        if index_col is not None and index_col not in columns:
            columns = [index_col] + columns

        cols = [pd_tbl.table.c[n] for n in columns]
    else:
        cols = pd_tbl.table.c

    if pd_tbl.index is not None:
        [cols.insert(0, pd_tbl.table.c[idx]) for idx in pd_tbl.index[::-1]]

    # Strip the table name from each of the column names to allow for more
    # general FROM clauses.
    sql_select = sqlalchemy.select([
        sqlalchemy.column(str(c).replace('{}.'.format(table_name), '', 1))
        for c in cols
    ])

    if select_from is not None:
        sql_select = sql_select.select_from(select_from)
    else:
        sql_select = sql_select.select_from(sqlalchemy.table(table_name))

    if where is not None:
        if isinstance(where, str):
            where = sqlalchemy.text(where)
        sql_select = sql_select.where(where)
    if limit is not None:
        sql_select = sql_select.limit(limit)
    if order_by is not None:
        if isinstance(order_by, str):
            order_by = sqlalchemy.sql.column(order_by)
        sql_select = sql_select.order_by(order_by)

    result = pd_db.execute(sql_select)
    data = result.fetchall()
    column_names = result.keys()

    pd_tbl.frame = pandas.DataFrame.from_records(data,
                                                 index=index_col,
                                                 columns=column_names)

    # This line has caused issues with incorrect type inference -- add it
    # back with caution.
    # pd_tbl._harmonize_columns()

    # Added by me: coerce types
    if coerce_types:
        frame = pd_tbl.frame
        for col, dtype in coerce_types.items():
            frame[col] = frame[col].astype(dtype, copy=False)

    return pd_tbl.frame
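A short usage sketch for the loader above; the engine URL, table, and column names are illustrative assumptions:

import sqlalchemy

engine = sqlalchemy.create_engine('postgresql://localhost/analytics')  # hypothetical database
frame = read_sql_table(engine, 'events',
                       columns=['id', 'user_id', 'amount'],
                       where='amount > 0',
                       order_by='id',
                       limit=1000,
                       coerce_types={'amount': float})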
Example #50
def start_generate():
    parser = argparse.ArgumentParser(
        description='Generates SQLAlchemy model code from an existing database.'
    )
    parser.add_argument('url',
                        nargs='?',
                        help='SQLAlchemy url to the database')
    parser.add_argument('--version',
                        action='store_true',
                        help="print the version number and exit")
    parser.add_argument('--schema',
                        help='load tables from an alternate schema')
    parser.add_argument(
        '--tables', help='tables to process (comma-separated, default: all)')
    parser.add_argument('--noviews', action='store_true', help="ignore views")
    parser.add_argument('--noindexes',
                        action='store_true',
                        help='ignore indexes')
    parser.add_argument('--noconstraints',
                        action='store_true',
                        help='ignore constraints')
    parser.add_argument('--nojoined',
                        action='store_true',
                        help="don't autodetect joined table inheritance")
    parser.add_argument(
        '--noinflect',
        action='store_true',
        help="don't try to convert tables names to singular form")
    parser.add_argument('--noclasses',
                        action='store_true',
                        help="don't generate classes, only tables")
    parser.add_argument('--nocomments',
                        action='store_true',
                        help="don't render column comments")
    parser.add_argument('--outfile',
                        help='file to write output to (default: stdout)')
    args = parser.parse_args()

    if args.version:
        version = pkg_resources.get_distribution('sqlacodegen').parsed_version
        print(version.public)
        return
    if not args.url:
        print('You must supply a url\n', file=sys.stderr)
        parser.print_help()
        return

    # Use reflection to fill in the metadata
    engine = create_engine(args.url)
    metadata = MetaData(engine)
    # Tables to generate; if none are specified, all tables are generated by default
    tables = args.tables.split(',') if args.tables else None
    metadata.reflect(engine, args.schema, not args.noviews, tables)

    # Write the generated model code to the specified file or standard output
    outfile = io.open(args.outfile, 'w',
                      encoding='utf-8') if args.outfile else sys.stdout
    generator = CodeGenerator(metadata,
                              args.noindexes,
                              args.noconstraints,
                              args.nojoined,
                              args.noinflect,
                              args.noclasses,
                              nocomments=args.nocomments)
    generator.render(outfile)
Example #51
fh.setFormatter(formatter)

app.logger.addHandler(fh)

app.logger.info('Connecting to database %s.',
                app.config['SQLALCHEMY_DATABASE_URI'])

# Setup DB and bcrypt
db = None  # type: SQLAlchemy
db = SQLAlchemy(
    app,
    metadata=MetaData(
        naming_convention={
            'pk': 'pk_%(table_name)s',
            'fk':
            'fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s',
            'ix': 'ix_%(table_name)s_%(column_0_name)s',
            'uq': 'uq_%(table_name)s_%(column_0_name)s',
            'ck': 'ck_%(table_name)s_%(column_0_name)s',
        }))
migrate = Migrate(app, db)  # type: Migrate
bcrypt = Bcrypt(app)  # type: Bcrypt

# Setup JWT
jwt = JWTManager(app)  # type: JWTManager

# Setup Headers
CORS(app)

webpack.init_app(app)
Example #52
from .config import Configuration


def _time_offset_to_local_time(time, offset):
    timezone = datetime.timezone(datetime.timedelta(seconds=offset))
    return time.replace(tzinfo=datetime.timezone.utc).astimezone(timezone)


_naming_convention = {
    "ix": 'ix_%(column_0_label)s',
    "uq": "uq_%(table_name)s_%(column_0_name)s",
    "ck": "ck_%(table_name)s_%(constraint_name)s",
    "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
    "pk": "pk_%(table_name)s"
}
_metadata = MetaData(naming_convention=_naming_convention)
Base = declarative_base(metadata=_metadata)


class Project(Base):
    __tablename__ = 'projects'

    project_name = Column(String, primary_key=True)


class Repository(Base):
    __tablename__ = 'repositories'

    id = Column(Integer, primary_key=True)
    repository_path = Column(String)
    configuration_file_path = Column(String)
Example #53
def test_reflect_dataset_does_not_exist(engine):
    with pytest.raises(NoSuchTableError):
        Table('dataset_does_not_exist.table_does_not_exist',
              MetaData(bind=engine),
              autoload=True)
Example #54
def test_querying_wildcard_tables(engine):
    table = Table('bigquery-public-data.noaa_gsod.gsod*',
                  MetaData(bind=engine),
                  autoload=True)
    rows = table.select().limit(1).execute().first()
    assert len(rows) > 0
def generate_tsb_metadata(schema_name=None, bind=None):

    metadata = MetaData(schema=schema_name, bind=bind)

    # Two-letter state - some countries have 3 or more, but two will do for US
    tsb_asmt = Table(
        'tsb_asmt', metadata,
        Column('tsb_asmt_rec_id',
               BigInteger,
               Sequence('tsb_asmt_rec_id_seq'),
               primary_key=True),
        Column('StateAbbreviation', String(50), nullable=True),
        Column('ResponsibleDistrictIdentifier', String(50), nullable=True),
        Column('OrganizationName', String(50), nullable=True),
        Column('ResponsibleSchoolIdentifier', String(50), nullable=True),
        Column('NameOfInstitution', String(50), nullable=True),
        Column('StudentIdentifier', String(50), nullable=True),
        Column('ExternalSSID', String(50), nullable=True),
        Column('FirstName', String(50), nullable=True),
        Column('MiddleName', String(50), nullable=True),
        Column('LastOrSurname', String(50), nullable=True),
        Column('Sex', String(50), nullable=True),
        Column('Birthdate', String(50), nullable=True),
        Column('GradeLevelWhenAssessed', String(50), nullable=True),
        Column('HispanicOrLatinoEthnicity', String(50), nullable=True),
        Column('AmericanIndianOrAlaskaNative', String(50), nullable=True),
        Column('Asian', String(50), nullable=True),
        Column('BlackOrAfricanAmerican', String(50), nullable=True),
        Column('NativeHawaiianOrOtherPacificIslander',
               String(50),
               nullable=True), Column('White', String(50), nullable=True),
        Column('DemographicRaceTwoOrMoreRaces', String(50), nullable=True),
        Column('IDEAIndicator', String(50), nullable=True),
        Column('LEPStatus', String(50), nullable=True),
        Column('Section504Status', String(50), nullable=True),
        Column('EconomicDisadvantageStatus', String(50), nullable=True),
        Column('MigrantStatus', String(50), nullable=True),
        Column('AssessmentGuid',
               String(255),
               ForeignKey('tsb_metadata.asmt_guid'),
               nullable=False),
        Column('AssessmentSessionLocationId', String(50), nullable=True),
        Column('AssessmentSessionLocation', String(50), nullable=True),
        Column('AssessmentAdministrationFinishDate', String(50),
               nullable=True),
        Column('AssessmentYear', String(50), nullable=True),
        Column('AssessmentType', String(50), nullable=True),
        Column('AssessmentAcademicSubject', String(50), nullable=True),
        Column('AssessmentLevelForWhichDesigned', String(50), nullable=True),
        Column('AssessmentSubtestResultScoreValue', String(50), nullable=True),
        Column('AssessmentPerformanceLevelIdentifier',
               String(50),
               nullable=True),
        Column('AssessmentSubtestMinimumValue', String(50), nullable=True),
        Column('AssessmentSubtestMaximumValue', String(50), nullable=True),
        Column('AssessmentSubtestResultScoreClaim1Value',
               String(50),
               nullable=True),
        Column('AssessmentSubtestClaim1MinimumValue',
               String(50),
               nullable=True),
        Column('AssessmentSubtestClaim1MaximumValue',
               String(50),
               nullable=True),
        Column('AssessmentClaim1PerformanceLevelIdentifier',
               String(50),
               nullable=True),
        Column('AssessmentSubtestResultScoreClaim2Value',
               String(50),
               nullable=True),
        Column('AssessmentSubtestClaim2MinimumValue',
               String(50),
               nullable=True),
        Column('AssessmentSubtestClaim2MaximumValue',
               String(50),
               nullable=True),
        Column('AssessmentClaim2PerformanceLevelIdentifier',
               String(50),
               nullable=True),
        Column('AssessmentSubtestResultScoreClaim3Value',
               String(50),
               nullable=True),
        Column('AssessmentSubtestClaim3MinimumValue',
               String(50),
               nullable=True),
        Column('AssessmentSubtestClaim3MaximumValue',
               String(50),
               nullable=True),
        Column('AssessmentClaim3PerformanceLevelIdentifier',
               String(50),
               nullable=True),
        Column('AssessmentSubtestResultScoreClaim4Value',
               String(50),
               nullable=True),
        Column('AssessmentSubtestClaim4MinimumValue',
               String(50),
               nullable=True),
        Column('AssessmentSubtestClaim4MaximumValue',
               String(50),
               nullable=True),
        Column('AssessmentClaim4PerformanceLevelIdentifier',
               String(50),
               nullable=True), Column('Group1Id', String(50), nullable=True),
        Column('Group1Text', String(50), nullable=True),
        Column('Group2Id', String(50), nullable=True),
        Column('Group2Text', String(50), nullable=True),
        Column('Group3Id', String(50), nullable=True),
        Column('Group3Text', String(50), nullable=True),
        Column('Group4Id', String(50), nullable=True),
        Column('Group4Text', String(50), nullable=True),
        Column('Group5Id', String(50), nullable=True),
        Column('Group5Text', String(50), nullable=True),
        Column('Group6Id', String(50), nullable=True),
        Column('Group6Text', String(50), nullable=True),
        Column('Group7Id', String(50), nullable=True),
        Column('Group7Text', String(50), nullable=True),
        Column('Group8Id', String(50), nullable=True),
        Column('Group8Text', String(50), nullable=True),
        Column('Group9Id', String(50), nullable=True),
        Column('Group9Text', String(50), nullable=True),
        Column('Group10Id', String(50), nullable=True),
        Column('Group10Text', String(50), nullable=True),
        Column('AccommodationAmericanSignLanguage', String(50), nullable=True),
        Column('AccommodationClosedCaptioning', String(50), nullable=True),
        Column('AccommodationBraille', String(50), nullable=True),
        Column('AccommodationTextToSpeech', String(50), nullable=True),
        Column('AccommodationStreamlineMode', String(50), nullable=True),
        Column('AccommodationPrintOnDemand', String(50), nullable=True),
        Column('AccommodationPrintOnDemandItems', String(50), nullable=True),
        Column('AccommodationAbacus', String(50), nullable=True),
        Column('AccommodationAlternateResponseOptions',
               String(50),
               nullable=True),
        Column('AccommodationReadAloud', String(50), nullable=True),
        Column('AccommodationCalculator', String(50), nullable=True),
        Column('AccommodationMultiplicationTable', String(50), nullable=True),
        Column('AccommodationScribe', String(50), nullable=True),
        Column('AccommodationSpeechToText', String(50), nullable=True),
        Column('AccommodationNoiseBuffer', String(50), nullable=True),
        Column('CompleteStatus', String(50), nullable=True),
        Column('AdministrationCondition', String(50), nullable=True),
        UniqueConstraint('StudentIdentifier', 'AssessmentGuid'))

    tsb_metadata = Table('tsb_metadata', metadata,
                         Column('asmt_guid', String(255), primary_key=True),
                         Column('state_code', String(2), nullable=False),
                         Column('content', Text, nullable=True))

    tsb_error = Table(
        'tsb_error', metadata,
        Column('tsb_error_rec_id',
               BigInteger,
               Sequence('tsb_error_rec_id_seq'),
               primary_key=True),
        Column('asmt_guid', String(255), nullable=False),
        Column('state_code', String(2), nullable=False),
        Column('err_code', String(50), nullable=False),
        Column('err_source', String(50), nullable=True),
        Column('err_code_text', String(50), nullable=True),
        Column('err_source_text', String(50), nullable=True),
        Column('err_input', String(50), nullable=True))

    return metadata
Example #56
def test_reflect_select_shared_table(engine):
    one_row = Table('bigquery-public-data.samples.natality',
                    MetaData(bind=engine),
                    autoload=True)
    row = one_row.select().limit(1).execute().first()
    assert len(row) >= 1
Example #57
            'data': {
                'c1': 1
            }
        }
    }


tests = [
    t1Insert, t1Update, t1Delete, t3Insert, t3Update, t3Delete, t4Insert,
    t4Update, t4Delete
]

httpd, webServer = startWebserver()

engine = create_engine('postgresql://admin@localhost:5432/skor_test')
meta = MetaData()
meta.reflect(bind=engine)

conn = engine.connect()

for t in tests:
    testParams = t(meta)
    print("-" * 20)
    print("Running Test: {}".format(testParams['name']))
    stmt = testParams['statement']
    conn.execute(stmt)
    print(stmt)
    resp = testParams['resp']
    success = assertEvent(respQ, resp)
    res = "Succeeded" if success else "Failed"
    print("Test result: {}".format(res))
Example #58
def get_base(engine):
    metadata = MetaData(naming_convention=NAMING_CONVENTION)
    Base = declarative_base(bind=engine, metadata=metadata)
    return Base
Example #59

from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.schema import Column, Index, MetaData
from sqlalchemy.types import BigInteger, DateTime, String

from crate_anon.anonymise.constants import TABLE_KWARGS
from crate_anon.nlp_manager.constants import (
    HashClass,
    MAX_STRING_PK_LENGTH,
    SqlTypeDbIdentifier,
)

progress_meta = MetaData()
ProgressBase = declarative_base(metadata=progress_meta)

# =============================================================================
# Global constants
# =============================================================================

SqlTypeHash = HashClass("dummysalt").sqla_column_type()

# =============================================================================
# Record of progress
# =============================================================================


class NlpRecord(ProgressBase):
    """
Example #60
def table_using_test_dataset(engine_using_test_dataset):
    return Table('sample',
                 MetaData(bind=engine_using_test_dataset),
                 autoload=True)