def test_create_table_all_together(self, compiler):
        table = Table('t1',
                      MetaData(),
                      Column('id', Integer, primary_key=True),
                      Column('name', String),
                      redshift_diststyle="KEY",
                      redshift_distkey="id",
                      redshift_sortkey=["id", "name"])

        create_table = CreateTable(table)
        actual = compiler.process(create_table)
        expected = (u"\nCREATE TABLE t1 ("
                    u"\n\tid INTEGER NOT NULL, "
                    u"\n\tname VARCHAR, "
                    u"\n\tPRIMARY KEY (id)\n) "
                    u"DISTSTYLE KEY DISTKEY (id) SORTKEY (id, name)\n\n")
        assert expected == actual, self._compare_strings(expected, actual)
Example #2
async def test_implicit_transaction_success(asyncio_engine, mytable):
    if ':memory:' in str(asyncio_engine._engine.url):
        pytest.skip(":memory: connections don't persist across threads")

    async with asyncio_engine.begin() as conn:
        assert isinstance(conn, AsyncConnection)

        await conn.execute(CreateTable(mytable))
        await conn.execute(mytable.insert())
        result = await conn.execute(mytable.select())
        rows = await result.fetchall()
        assert len(rows) == 1

    # Transaction should have been committed automatically
    result = await asyncio_engine.execute(mytable.select())
    rows = await result.fetchall()
    assert len(rows) == 1
Example #3
def save_model(models):
    prefix = '../' if 'tools' in realpath('.') else ''
    try:
        if isfile(prefix + 'src/schema.ts'):
            nameStr = prefix + 'src/schema-' + datetime.now().isoformat()[0:10]
            if isfile(nameStr + '.bkp'):
                i = 1
                while isfile(nameStr + '(' + str(i) + ')' + '.bkp'):
                    i += 1
                nameStr = nameStr + '(' + str(i) + ')'

            rename(prefix + 'src/schema.ts', nameStr + '.bkp')

        clientModel = client_only_model(models)

        with open(prefix + 'src/schema.ts', 'w+') as f:
            f.write('''/**
 * F2B Schema
 *
 * This file is automatically generated. DO NOT MAKE CHANGES HERE. Use the schema builder:
 *   cd ltc-provider-app/tools
 *   python3 schemaBuilder.py
 */
''')
            f.write('export const schema = ')

            ujson.dump(clientModel, f, indent=2, sort_keys=False)
            f.write(';\n\nexport const schemaSQL = `')
            sqliteModel = convert_sql_types(clientModel)

            db_engine = create_engine('sqlite:///:memory:')
            metadata = MetaData()
            for key in sqliteModel:
                model = sqliteModel[key]
                table = generateTable(key, model, metadata)
                f.write(str(CreateTable(table).compile(db_engine)) + ';')
            f.write('`;\n\n')
            f.write('export const version = \'' + str(uuid4()) + '\';\n')

    except:
        import sys
        print('Failed to save model')
        import traceback
        traceback.print_exc()
        print(sys.exc_info()[0])
Example #4
    def handle(self, options, global_options, *args):
        from sqlalchemy.schema import CreateTable, CreateIndex

        engine = get_engine(options, global_options)

        tables = get_sorted_tables(
            get_tables(global_options.apps_dir,
                       tables=args,
                       engine_name=options.engine,
                       settings_file=global_options.settings,
                       local_settings_file=global_options.local_settings))
        for name, t in tables:
            if t.__mapping_only__:
                continue
            print "%s;" % str(
                CreateTable(t).compile(dialect=engine.dialect)).rstrip()
            for x in t.indexes:
                print "%s;" % CreateIndex(x)
Example #5
async def set_up():
    import databases
    from sqlalchemy.schema import CreateTable
    from starlette.config import Config

    config = Config('.env')
    DATABASE_URL = config('DATABASE_URL')

    database = databases.Database(DATABASE_URL)

    await database.connect()
    await database.execute('CREATE EXTENSION IF NOT EXISTS "uuid-ossp";')
    for table in tables:
        try:
            await database.execute(str(CreateTable(table)))
        except Exception as e:
            print(e)
    await database.disconnect()
Example #6
def create_table(engine,
                 table_name,
                 column_configs,
                 schema=None,
                 metadata=None):
    try:
        metadata = metadata or MetaData()

        columns = [Column(*args, **kwargs) for args, kwargs in column_configs]

        table = Table(table_name, metadata, *columns, schema=schema)

        engine.execute(CreateTable(table))
    except ProgrammingError as e:
        if 'already exists' in str(e):
            pass
        else:
            raise e
Example #7
 def test_autoincrement_true(self):
     t = Table(
         "foo_table",
         MetaData(),
         Column(
             "foo",
             Integer(),
             Identity(always=True, start=3),
             primary_key=True,
             autoincrement=True,
         ),
     )
     self.assert_compile(
         CreateTable(t),
         "CREATE TABLE foo_table ("
         "foo INTEGER GENERATED ALWAYS AS IDENTITY (START WITH 3)"
         ", PRIMARY KEY (foo))",
     )
Example #8
def create_tables():
    """Create all the tables needed for the test."""
    DB_URI = f'postgresql://{C.DB_USER}:{C.DB_PASSWORD}@{C.DB_HOST}:{C.DB_PORT}/{C.DB_NAME}'
    if database_exists(DB_URI):
        drop_database(DB_URI)
    create_database(DB_URI)
    engine = create_engine(DB_URI)

    conn = engine.connect()
    models = [
        mBlueprint, mCargo, mApi, mProbe, mUser_probes, mJob, mUser_cargos,
        mLog
    ]
    for model in models:
        conn.execute(str(CreateTable(model).compile(engine)))
    add_blueprint = """INSERT INTO "public"."blueprints"("uuid","repository","name","tag","link","description","public","created_at","user_id")
    VALUES('2a83d4be0f7011e89e4b35694e577c22','cassinyio','notebook','02946e48',NULL,NULL,TRUE,E'2017-12-16 22:46:13.515348+00',NULL);"""
    conn.execute(add_blueprint)
Example #9
 def test_other_options(self):
     t = Table(
         "foo_table",
         MetaData(),
         Column(
             "foo",
             Integer(),
             Identity(always=True, start=3),
             nullable=False,
             unique=True,
         ),
     )
     self.assert_compile(
         CreateTable(t),
         "CREATE TABLE foo_table ("
         "foo INTEGER GENERATED ALWAYS AS IDENTITY (START "
         "WITH 3), UNIQUE (foo))",
     )
Example #10
    def test_create_column_skip(self):
        @compiles(CreateColumn)
        def skip_xmin(element, compiler, **kw):
            if element.element.name == "xmin":
                return None
            else:
                return compiler.visit_create_column(element, **kw)

        t = Table(
            "t",
            MetaData(),
            Column("a", Integer),
            Column("xmin", Integer),
            Column("c", Integer),
        )

        self.assert_compile(CreateTable(t),
                            "CREATE TABLE t (a INTEGER, c INTEGER)")
Example #11
    def test_create_table_with_identity(self, compiler):

        table = Table(
            't1',
            MetaData(),
            Column('id', Integer, primary_key=True, info={'identity': [1, 2]}),
            Column('name', String),
        )

        create_table = CreateTable(table)
        actual = compiler.process(create_table)
        expected = (
            u"\nCREATE TABLE t1 ("
            u"\n\tid INTEGER IDENTITY(1,2) NOT NULL, "
            u"\n\tname VARCHAR, "
            u"\n\tPRIMARY KEY (id)\n)\n\n"
        )
        assert expected == actual, self._compare_strings(expected, actual)
Example #12
async def prepare_tables(pg, verbose=True):
    """
    Get all tables from models.py, delete them and create new tables
    :param pg: connect to DB engine(PostgreSQL)
    :param verbose: set logs
    :return: None
    """
    tables = [getattr(models, table) for table in models.__all__]
    await delete_tables(pg, tables, verbose=verbose)
    async with pg.acquire() as conn:
        for table in tables:
            create_expr = CreateTable(table)
            try:
                await conn.execute(create_expr)
                if verbose:
                    logger.debug('DB_CREATE: %s' % table)
            except psycopg2.ProgrammingError as e:
                logger.error('DB_CREATE(ERROR): %s' % e)
Example #13
 def createTableFromTable(name, tableNam, engine):
     """Copy the structure of an existing table and create a new table from it.
     """
     metadata = MetaData(engine)
     BaseSQL.metadata.reflect(engine)
     # get the original table object
     table = BaseSQL.metadata.tables[tableNam]
     # get the CREATE TABLE statement of the original table
     c = str(CreateTable(table))
     print(c)
     print(tableNam, name)
     # replace the table name
     c = c.replace('"', '').replace('CREATE TABLE {}'.format(tableNam), "CREATE TABLE if not exists {}".format(name))
     print(c)
     db_conn = engine.connect()
     db_conn.execute(c)
     db_conn.close()
     BaseSQL.metadata.clear()
Example #14
    def test_transaction_rollback(self):
        metadata = sqlalchemy.MetaData()
        tbl = sqlalchemy.Table(
            'mytable', metadata,
            sqlalchemy.Column("id", sqlalchemy.Integer(), primary_key=True),
            sqlalchemy.Column("num", sqlalchemy.Integer()),
        )

        conn = self.get_connection()
        self.successResultOf(conn.execute(CreateTable(tbl)))
        trx = self.successResultOf(conn.begin())
        self.successResultOf(conn.execute(tbl.insert().values(num=42)))
        rows = self.execute_fetchall(conn, tbl.select())
        assert len(rows) == 1

        self.successResultOf(trx.rollback())
        rows = self.execute_fetchall(conn, tbl.select())
        assert len(rows) == 0
Example #15
    def test_traced_all_engines(self):
        # Don't register the engine explicitly.
        tracer = DummyTracer()
        sqlalchemy_opentracing.init_tracing(tracer,
                                            trace_all_engines=True,
                                            trace_all_queries=False)

        creat = CreateTable(self.users_table)
        sqlalchemy_opentracing.set_traced(creat)
        self.engine.execute(creat)

        # Unregister the main Engine class before doing our assertions,
        # in case we fail.
        sqlalchemy_opentracing.unregister_engine(Engine)

        self.assertEqual(1, len(tracer.spans))
        self.assertEqual('create_table', tracer.spans[0].operation_name)
        self.assertEqual(True, tracer.spans[0].is_finished)
Example #16
    def test_create_table_with_diststyle(self, compiler):

        table = Table('t1',
                      MetaData(),
                      Column('id', Integer, primary_key=True),
                      Column('name', String),
                      redshift_diststyle="EVEN")

        create_table = CreateTable(table)
        actual = compiler.process(create_table)
        expected = (
            u"\nCREATE TABLE t1 ("
            u"\n\tid INTEGER NOT NULL, "
            u"\n\tname VARCHAR, "
            u"\n\tPRIMARY KEY (id)\n) "
            u"DISTSTYLE EVEN\n\n"
        )
        assert expected == actual, self._compare_strings(expected, actual)
Example #17
async def test_implicit_transaction_failure(engine, mytable):
    await engine.execute(CreateTable(mytable))

    with pytest.raises(RuntimeError):
        async with engine.begin() as conn:
            assert isinstance(conn, AsyncioConnection)

            await conn.execute(mytable.insert())
            result = await conn.execute(mytable.select())
            rows = await result.fetchall()
            assert len(rows) == 1

            raise RuntimeError

    # Transaction should have been rolled back automatically
    result = await engine.execute(mytable.select())
    rows = await result.fetchall()
    assert len(rows) == 0
Example #18
    def test_unregister_engine(self):
        tracer = DummyTracer()
        sqlalchemy_opentracing.init_tracing(tracer,
                                            False,
                                            trace_all_queries=True)
        sqlalchemy_opentracing.register_engine(self.engine)

        creat = CreateTable(self.users_table)
        self.engine.execute(creat)
        self.assertEqual(1, len(tracer.spans))

        tracer.clear()
        sqlalchemy_opentracing.unregister_engine(self.engine)

        # Further events should cause no spans at all.
        sel = select([self.users_table])
        sqlalchemy_opentracing.set_traced(sel)
        self.engine.execute(sel)
        self.assertEqual(0, len(tracer.spans))
Example #19
    async def fetch_content(self):
        """Корутина сбора данных со страницы вакансии.

        Корутина получает на вход очередь c данными о вакансиях.
        Назначение: асинхронно собрать информацию о вакансиях и записать ее в базу данных.
        Сначала метод создает таблицу в базе данных для записи информации на текущую дату. Далее запускает
        бесконечный цикл, в котором ожидает получения элемента(списка с данными) из очереди. Получив элемент,
        загружает по полученной из элемента ссылке страницу с информацией о вакансии. Со страницы асинхронно
        получает полное описание вакансии. Полученные данные(ссылка на вакансию, название вакансии, зарплата,
        требования к кандидату, название компнании, тип занятости, полное описание вакансии)записываем в базу.
        Условием выхода из цикла является получение элемента None из очереди.
        Работа данного метода связана с работой метода get_links. Получив ссылку со страницы результатов поиска, метод
        get_links передает ссылку в метод fetch_content. Метод fetch_content начинает загрузку страницы с вакансией и
        сбор информации. Не дождавшись завершения процесса и получив новый элемент из очереди, метод fetch_content
        открывает новую страницу по ссылке и начинает сбор данных с нее. Так продолжается пока в очереди есть элементы.
        Таким образом одновременно идет сбор информации со всех ссылок из списка, что позволяет в разы сократить время
        выполнения задачи по сбору данных.

        """
        await engine.execute(
            CreateTable(MoiKrug_db)
        )  # create the table that stores the vacancy data collected on the current date
        while True:
            item = await self.queue.get()  # wait for a new item to appear in the queue
            if item is None:  # a None item marks the end of the queue
                break
            async with get_session(self.service, self.browser) as web_session:
                await web_session.get(item[0])  # load the vacancy page
                description_object = await web_session.get_element(
                    'div[class=vacancy_description]')
                description = await description_object.get_text()  # get the vacancy description
                async with engine.connect() as conn:
                    async with conn.begin() as trans:
                        await conn.execute(MoiKrug_db.insert().values(
                            link=item[0],  # write the data to the database
                            title=item[1],
                            salary=item[2],
                            skills=item[3],
                            company=item[4],
                            occupation=item[5],
                            description=description))
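The docstring above describes a sentinel-terminated producer/consumer pattern: get_links puts vacancy links on a queue and fetch_content consumes them until it receives None. Below is a minimal, self-contained sketch of just that pattern; the producer, consumer, and example links are hypothetical and independent of the scraping and database code above.

import asyncio

async def produce(queue):
    # Put work items on the queue, then a None sentinel to signal the end.
    for link in ('https://example.com/vacancy/1', 'https://example.com/vacancy/2'):
        await queue.put(link)
    await queue.put(None)

async def consume(queue):
    # Mirrors the loop in fetch_content: stop when the sentinel arrives.
    while True:
        item = await queue.get()
        if item is None:
            break
        print('processing', item)

async def main():
    queue = asyncio.Queue()
    await asyncio.gather(produce(queue), consume(queue))

asyncio.run(main())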
Example #20
def get_create_table_command(table_name, engine):
    metadata = MetaData()
    # TODO: handle for non-"public" schema
    bind_schema = None
    metadata.reflect(bind=engine, schema=bind_schema)
    # https://stackoverflow.com/a/5605077/4709762
    insp = reflection.Inspector.from_engine(engine)
    indexes = []
    for idx in insp.get_indexes(table_name):
        if 'duplicates_constraint' not in idx:
            create_idx_text = format_create_index_command(idx, table_name)
            indexes.append(create_idx_text)
    # a table name that is not present in the reflected metadata raises KeyError here (e.g. KeyError: 'foo')
    tbl = metadata.tables[table_name]
    create_table_text = str(
        CreateTable(tbl).compile(dialect=postgresql.dialect())) + ";\n"
    create_table_text = remove_sequence(create_table_text)
    create_table_text = create_table_text + "\n".join(indexes) + "\n"
    return create_table_text
Example #21
    def make_comparison_table(self, measure, source_schema):
        key_fields = ['patient_id', 'population_id']
        all_cols = []
        key_table = Table(self.key_table_name(measure), self.meta, schema=self.key_schema, autoload=True)
        source_table = Table(self.key_table_name(measure), self.meta, schema=source_schema, autoload=True)

        for field in key_fields:
            kcol = key_table.c.get(field)
            all_cols.append(kcol)

        for col in key_table.c:
            if col.name.startswith(RESULT_COLUMN_PREFIX):
                basename=col.name[len(RESULT_COLUMN_PREFIX):]
                all_cols.append(source_table.c.get(col.name).label(basename))
                all_cols.append(col.label(basename + EXPECTED_SUFFIX))

        cnames = []
        table = Table(self.comparison_table_name(measure), self.meta, schema=source_schema)
        for col in all_cols:
            table.append_column(Column(col.name, col.type))
            cnames.append(col.name)
        print(self.terminated_statement(str(DropTable(table, bind=self.engine))))
        print(self.terminated_statement(str(CreateTable(table, bind=self.engine))))

        join_conds = []
        for field in key_fields:
            scol=source_table.c.get(field)
            dcol = key_table.c.get(field)
            join_conds.append(or_(
                    and_(scol == None, dcol == None),
                    scol == dcol))

        query = union(
            select(all_cols).\
                select_from(key_table.\
                                join(source_table, and_(*join_conds), isouter=True)),
            select(all_cols).\
                select_from(source_table.\
                                join(key_table, and_(*join_conds), isouter=True)))

        ins = table.insert().from_select(cnames, query)
        print(self.terminated_statement(sql_to_string(ins)))
        self.comparison_tables[measure] = table
Example #22
    def _copy_existing_table(self, conn, table_obj, number_of_rows=-1):
        """Copy an existing table structure with an additional suffix"""

        # store rows of original table
        table_entries = conn.execute(table_obj.select()).all()

        # take all data or just a subset of the original table
        if number_of_rows != -1:
            try:
                test_entries = random.sample(table_entries, number_of_rows)
            except ValueError:
                logging.error(
                    "Number of rows specified in yaml file exceeds rows in table"
                )
                raise
        else:
            # in case of no subset take the whole data set, hence all entries
            # in table
            test_entries = table_entries

        self._change_table_constraints(table_obj)

        # create table object, store it and execute it
        create_res = CreateTable(table_obj, bind=conn)

        # add result of create table statement to statement string
        self.string_to_store += str(create_res) + ";\n"

        # get column order for insert statements
        column_order = [col[0] for col in table_obj.columns._collection]

        # add subset of entries
        for s in test_entries:
            insert_res = insert(table_obj, values=s, bind=conn)

            s_convert = [
                "'" + test + "'" if isinstance(test, str) else test
                for test in s
            ]

            # store insert statements in statement string
            self.string_to_store += (
                str(insert_res) % dict(zip(column_order, s_convert)) + ";\n")
Example #23
 def ddl(self, dialect=None, creates=True, drops=True):
     """
     Returns SQL to define the table.
     """
     dialect = self._dialect(dialect)
     creator = CreateTable(self.table).compile(mock_engines[dialect])
     creator = "\n".join(l for l in str(creator).splitlines() if l.strip()) # remove empty lines
     comments = "\n\n".join(self._comment_wrapper.fill("in %s: %s" %
                                                     (col, self.comments[col]))
                                                     for col in self.comments)
     result = []
     if drops:
         result.append(self._dropper(dialect) + ';')
     if creates:
         result.append("%s;\n%s" % (creator, comments))
     for child in self.children.values():
         result.append(child.ddl(dialect=dialect, creates=creates,
                       drops=drops))
     return '\n\n'.join(result)
Example #24
 def test_on_null(self):
     t = Table(
         "foo_table",
         MetaData(),
         Column(
             "foo",
             Integer(),
             Identity(always=False, on_null=True, start=42, order=True),
         ),
     )
     text = " ON NULL" if testing.against("oracle") else ""
     self.assert_compile(
         CreateTable(t),
         (
             "CREATE TABLE foo_table (foo INTEGER GENERATED BY DEFAULT"
             + text
             + " AS IDENTITY (START WITH 42 ORDER))"
         ),
     )
Example #25
    def _create_new_table(self, conn, table_name, table_config, n_rows):

        columns = []
        for key, val in table_config.get("column_names").items():

            col_type = val.get("type")
            col_obj = Column(key, getattr(sqlalchemy, col_type))
            columns.append(col_obj)

        # create table instance
        table_obj = Table(table_name, self._get_meta_data(conn), *columns)
        # prepare Create table with bind of current connection and execute it
        create_res = CreateTable(table_obj, bind=conn)
        conn.execute(create_res)
        # add result of create table statement to statement string
        self.string_to_store += str(create_res) + ";\n"

        val_dict = {}
        for key, val in table_config.get("column_names").items():
            val_dict[key] = get_random_list(val, n_rows)

        # get column order for insert statements
        column_order = [col[0] for col in table_obj.columns._collection]

        test_entries = self.rearrange_dict_for_insert(column_order, val_dict,
                                                      n_rows)

        # add subset of entries
        for s in test_entries:
            insert_res = insert(table_obj, values=s, bind=conn)

            s_convert = [
                "'" + test + "'" if isinstance(test, str) else test
                for test in s
            ]

            # store insert statements in statement string
            self.string_to_store += (
                str(insert_res) % dict(zip(column_order, s_convert)) + ";\n")

            # conn.execute(insert_res)
            conn.execute(table_obj.insert(s))
Example #26
    def _insert_datapoint(self):
        """Insert first datapoint in the database.

        Args:
            None

        Returns:
            None

        """
        # Insert
        if db_datapoint.idx_datapoint_exists(1) is False:
            record = Datapoint(
                id_datapoint=general.encode(self.reserved),
                agent_label=general.encode(self.reserved),
                agent_source=general.encode(self.reserved)
            )
            print(CreateTable(record.__table__))
            database = db.Database()
            database.add(record, 1047)
Example #27
async def create(conn: Database, data):
    data = jsonable_encoder(data)
    transaction = await conn.transaction()
    id = False
    try:
        id = await conn.execute(query=Asset.__table__.insert(), values=data["base"])
        model = AssetHI.model(point_id=id)  # register to metadata for all pump_unit
        if data["base"]["asset_type"] == 0:
            await conn.execute(str(CreateTable(model.__table__).compile(meta_engine)))
        await transaction.commit()
        return True
    except Exception as e:
        # print(e)
        if id:
            query = Asset.__table__.delete().where(Asset.__table__.c.id == id)
            await conn.execute(
                query=str(query.compile(compile_kwargs={"literal_binds": True}))
            )
            await transaction.commit()
        return False
Example #28
async def async_create_database(loop, conf):
    """ Need fixes:
    * correct way to retain creation order
    * remove try/except

    """
    db = await create_engine(**conf, loop=loop)
    tables = [CreateTable(table).compile(db)
              for table in (metadata.tables['companies'],
                            metadata.tables['accounts'],
                            metadata.tables['software'],
                            metadata.tables['software_orders'],
                            metadata.tables['software_order_items'])]

    async with db.acquire() as conn:
        for table_create_stmt in tables:
            await conn.execute(table_create_stmt.string)

    db.close()
    await db.wait_closed()
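The docstring above flags "correct way to retain creation order" as a needed fix. SQLAlchemy's MetaData.sorted_tables returns tables sorted by foreign-key dependency, so one way to address it is to build the list from it rather than by hand; a sketch, assuming the same metadata and db objects used above:

    # Compile the CREATE TABLE statements in foreign-key dependency order
    # instead of enumerating the tables manually.
    tables = [CreateTable(table).compile(db)
              for table in metadata.sorted_tables]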
Example #29
    def get_pool():
        pool = yield from create_engine(dsn)

        connection = yield from pool.acquire()
        try:
            result = yield from connection.execute(
                'SELECT tablename FROM pg_tables '
                'WHERE schemaname=%s', ('public', ))
            existing_table_names = {name[0] for name in result}
            print('Existing tables:', existing_table_names)

            for name, table in tables.metadata.tables.items():
                if name not in existing_table_names:
                    create_statement = CreateTable(table)
                    print(create_statement.compile(dialect=dialect))
                    yield from connection.execute(create_statement)
        finally:
            connection.close()

        return pool
Example #30
    def test_traced_clear_connection(self):
        tracer = DummyTracer()
        sqlalchemy_opentracing.init_tracing(tracer, False, False)
        sqlalchemy_opentracing.register_engine(self.engine)

        creat = CreateTable(self.users_table)
        ins = self.users_table.insert().values(name='John Doe')

        conn = self.engine.connect()
        with conn.begin() as tx:
            sqlalchemy_opentracing.set_traced(conn)
            conn.execute(creat)

            # Stop tracing from this point.
            sqlalchemy_opentracing.clear_traced(conn)

            conn.execute(ins)

        self.assertEqual(1, len(tracer.spans))
        self.assertEqual('create_table', tracer.spans[0].operation_name)