Example #1
def establish_connection(bind: engine.Engine) -> engine.Engine:
    for _ in range(100):
        try:
            bind.connect()
            break
        except exc.OperationalError:
            time.sleep(0.05)
    return bind
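
A minimal usage sketch for the helper above (the connection URL is hypothetical). The loop retries for roughly five seconds (100 attempts, 0.05 s apart) and returns the engine whether or not a connection ever succeeded:

from sqlalchemy import create_engine

engine = create_engine('postgresql://user:password@localhost/mydb')  # hypothetical URL
engine = establish_connection(engine)  # returns once a connection succeeds or the retries run out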
Example #2
def does_postgres_accept_connection(engine: Engine) -> bool:
    """ Test if the target PostgreSQL database accept connexions
    """
    try:
        engine.connect()
    except OperationalError:
        return False
    else:
        return True
Example #3
def wait_for_postgres(engine: Engine, max_waiting_time: int = 10):
    logging.info('Waiting until PostgreSQL accepts connections')
    for i in range(max_waiting_time):
        if does_postgres_accept_connection(engine):
            logging.info('PostgreSQL is ready to accept connections')
            return
        logging.info(
            'PostgreSQL is not ready to accept connections, waiting {} more seconds'
            .format(max_waiting_time - i))
        sleep(1)

    engine.connect()  # still failing: let the connection error propagate
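
The two helpers above are typically combined at application start-up; a sketch, again with a hypothetical connection URL:

from sqlalchemy import create_engine

engine = create_engine('postgresql://user:password@localhost/mydb')  # hypothetical URL
wait_for_postgres(engine, max_waiting_time=30)  # raises (usually OperationalError) if the database never comes up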
Example #4
def execute_query(engine: Engine, query: str, variables: Union[list, tuple] = None, isolation_level: str = 'AUTOCOMMIT', include_headers: bool = False) -> List[Iterable]:
    """Execute query with chosen isolation level.

    Arguments
    ---------
    engine
        SQL Alchemy engine.
    query
        SQL query or statement to be executed.
    variables
        Variables for query.
    isolation_level
        Transaction isolation level.
        See https://docs.sqlalchemy.org/en/13/core/connections.html#sqlalchemy.engine.Connection.execution_options.params.isolation_level
    include_headers
        If True, the column names are prepended as the first returned row.

    Returns
    -------
    list
        Query output as a list. If the query returns no rows, an empty list is returned.
    """
    with engine.connect() as connection:
        connection.execution_options(isolation_level=isolation_level)
        if variables is not None and isinstance(variables, (tuple, list)):
            result_set = connection.execute(query, *variables)
        else:
            result_set = connection.execute(query)
        query_output = []
        if result_set.returns_rows:
            if include_headers is True:
                query_output.append(list(result_set.keys()))
            query_output.extend([row for row in result_set])
        return query_output
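
A usage sketch for execute_query, given an Engine and a hypothetical query; with include_headers=True the first element of the result is the list of column names:

rows = execute_query(engine, 'SELECT id, symbol FROM securities', include_headers=True)
# rows[0] == ['id', 'symbol']; the remaining entries are the result rows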
Example #5
def execute_try_catch(engine: Engine, query: str, variables: Union[list, tuple] = None, throw: bool = False):
    """Execute query with try catch.
    If throw is set to True, raise error in case query execution fails.

    Arguments
    ---------
    engine
        SQL Alchemy engine.
    query
        SQL query or statement to be executed.
    variables
        Variables for query.
    throw
        Indicator to raise error if query execution fails.
    """
    with engine.connect() as connection:
        trans = connection.begin()
        try:
            if variables is not None and isinstance(variables, (tuple, list)):
                connection.execute(query, *variables)
            else:
                connection.execute(query)
            trans.commit()
        except:
            trans.rollback()
            if throw is True:
                raise
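
A hypothetical call illustrating the throw flag: with throw=False a failing statement is rolled back and silently ignored, with throw=True the original exception is re-raised after the rollback:

execute_try_catch(engine, 'DROP TABLE IF EXISTS staging_tmp', throw=False)  # table name is hypothetical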
Example #6
def db_session(db_engine: Engine):
    """Function-scoped sqlalchemy database session"""
    connection = db_engine.connect()

    # begin a non-ORM transaction
    trans = connection.begin()

    # bind an individual Session to the connection
    db_session = create_db_session(connection)()

    # start the session in a SAVEPOINT...
    db_session.begin_nested()

    # then each time that SAVEPOINT ends, reopen it
    @event.listens_for(db_session, "after_transaction_end")
    def restart_savepoint(session: Session, transaction: SessionTransaction):
        if transaction.nested and not transaction._parent.nested:

            # ensure that state is expired the way
            # session.commit() at the top level normally does
            # (optional step)
            session.expire_all()
            session.begin_nested()

    yield db_session
    db_session.close()

    # rollback - everything that happened with the
    # Session above (including calls to commit())
    # is rolled back.
    trans.rollback()

    # return connection to the Engine
    connection.close()
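
The generator above follows the SQLAlchemy "join a session into an external transaction" recipe for test isolation. A sketch of wrapping it as a pytest fixture, assuming pytest and an existing db_engine fixture:

import pytest

@pytest.fixture
def session(db_engine):
    # everything the test does, including commits, is rolled back afterwards
    yield from db_session(db_engine)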
Example #7
def update_schema(engine: Engine):
    """
    Check and apply any missing schema changes to the database.

    This is run by an administrator.

    See the `schema_is_latest()` function above: this function should apply
    any updates required by that check.
    """
    # This will typically check if something exists (like a newly added column), and
    # run the SQL of the change inside a single transaction.

    # Empty, as no schema changes have been made recently.
    # -> If you need to write one, look at the Git history of this
    #    function for some examples.

    # Post 1.8 DB Incremental Sync triggers
    if not pg_column_exists(engine, schema_qualified('dataset'), 'updated'):
        _LOG.info("Adding 'updated'/'added' fields and triggers to schema.")
        c = engine.connect()
        c.execute('begin')
        install_timestamp_trigger(c)
        install_added_column(c)
        c.execute('commit')
        c.close()
    else:
        _LOG.info("No schema updates required.")
Example #8
def insert_securities_recs(engine: Engine):
    if engine.dialect.has_table(engine, 'securities'):
        with engine.connect() as db_conn:
            # Record - 1
            resp = db_conn.execute('INSERT INTO securities '
                                   '(symbol, price) '
                                   'VALUES (\'BULL.ST\', 25.75)')
            if resp.rowcount == 1:
                logging.info("Inserted record for BULL.ST")
            else:
                logging.info("Failed to insert record for BULL.ST")

            # Record - 2
            resp = db_conn.execute('INSERT INTO securities '
                                   '(symbol, price) '
                                   'VALUES (\'DOG.ST\', 54.15)')
            if resp.rowcount == 1:
                logging.info("Inserted record for DOG.ST")
            else:
                logging.info("Failed to insert record for DOG.ST")

            # Record - 3
            resp = db_conn.execute('INSERT INTO securities '
                                   '(symbol, price) '
                                   'VALUES (\'BARK.ST\', 144.90)')
            if resp.rowcount == 1:
                logging.info("Inserted record for BARK.ST")
            else:
                logging.info("Failed to insert record for BARK.ST")
    else:
        logging.info("The securities table *DOES NOT* exist !!!")
Example #9
def insert_image_in_db(engine: Engine,
                       siteid: int,
                       matchdict: dict,
                       exif=None,
                       iptc=None):
    ''' Inserts an image into the database '''
    query = text('insert into geopaysages.t_photo \
        (id_site, path_file_photo, date_photo, filter_date, display_gal_photo, id_licence_photo)\
        values (:id_site, :path, :strfdate, :f_date, :display, :id_licence)')

    id_licence_photo = get_licence_id(engine, iptc) if iptc else None
    filter_date = date_from_group_dict(matchdict).isoformat()
    cnx = engine.connect()

    tran = cnx.begin()
    try:
        cnx.execute(query,
                    id_site=siteid,
                    path=matchdict.get('ofilename'),
                    strfdate=filter_date,  # same ISO date string computed above
                    f_date=filter_date,
                    display=True,
                    id_licence=id_licence_photo)
        tran.commit()
    except:
        tran.rollback()
        raise
Example #10
def get_licence_id(engine: Engine, iptc: dict):
    '''Get the Licence id that matches the copyright notice or create it if none exists in the database'''
    if not iptc:
        return None

    notice = iptc.get('copyright notice')
    author = iptc.get('by-line') or ''
    if not notice:
        return None

    licence = '{0} | {1}'.format(notice, author)

    cnx = engine.connect()

    id_licence_photo = cnx.execute(
        text('select id_licence_photo from geopaysages.dico_licence_photo where name_licence_photo = :nt'),
        nt=licence,
    ).scalar()

    if not id_licence_photo:
        id_licence_photo = cnx.execute(
            text('insert into geopaysages.dico_licence_photo (name_licence_photo, description_licence_photo) values (:nt, :desc) returning id_licence_photo'),
            nt=licence,
            desc=licence,
        ).scalar()

    return id_licence_photo
Example #11
def drop_primary_keys(engine: Engine, table: Table, pks_to_drop: Iterable[dict]):
    with engine.connect() as conn:
        pks = [col.name for col in table.columns if col.primary_key]
        statement = table.delete()
        for pk in pks:
            statement = statement.where(table.c[pk].in_([pks_for_row[pk] for pks_for_row in pks_to_drop]))
        conn.execute(statement)
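
A hypothetical call, deleting the rows whose key values appear in pks_to_drop. Note that each primary-key column gets its own IN clause, so for composite keys the statement can match more combinations than the ones listed:

drop_primary_keys(engine, my_table, [{'id': 1}, {'id': 2}])  # my_table is a reflected Table; names are hypothetical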
Example #12
def data(database: Engine, destination: Engine) -> Tuple[Engine, Engine]:
    publicMeta = MetaData()
    publicMeta.reflect(bind=database)
    PublicTable = publicMeta.tables['public_table']

    conn = database.connect()
    ret = conn.execute(PublicTable.insert().values(name="PublicTable1"))
    pk1 = ret.inserted_primary_key[0]
    conn.execute(PublicTable.insert().values(name="PublicTable2"))

    for schema in ['tenant1', 'tenant2']:
        meta = MetaData()
        meta.reflect(bind=database, schema=schema)

        masterTable = meta.tables[f'{schema}.tenant_master_table']
        detailTable = meta.tables[f'{schema}.tenant_detail_table']
        ret = conn.execute(masterTable.insert().values(
            name=f"{schema}.MasterTable.1.1", public_id=pk1))
        pk2 = ret.inserted_primary_key[0]
        conn.execute(masterTable.insert().values(
            name=f"{schema}.MasterTable.1.1", public_id=pk1))

        conn.execute(detailTable.insert().values(
            name=f"{schema}.DetailTable1.1.1", master_id=pk2))
        conn.execute(detailTable.insert().values(
            name=f"{schema}.DetailTable1.1.2", master_id=pk2))
    return database, destination
Example #13
def get_page_views(engine: Engine, mode: str = 'current') -> Union[int, None]:
    """Get page views for current date or all

    :param engine: SQLAlchemy engine object
    :param mode: page view aggregation method ('current', 'all')
    """

    # SQL query return placeholder
    result = []

    # get page views from database
    with engine.connect() as conn:
        if mode == 'all':
            result = conn.execute('SELECT total(page_views) FROM stats')
        elif mode == 'current':
            result = conn.execute('SELECT page_views FROM stats'
                                  ' WHERE tick_date = ?', date_now())

        # unpack results into list of JSON records
        result = [dict(row) for row in result]

        # check results and return sanitized value
        if len(result) > 0:
            return int(list(result[0].values())[0])
        else:
            return None
Example #14
def update_page_views(engine: Engine) -> None:
    """Increment page views

    :param engine: SQLAlchemy engine object
    :return:
    """
    # get current page_views value if it exists
    page_views = get_page_views(engine, mode='current')

    # update page views
    with engine.connect() as conn:

        # no page_views for current date? new entry
        if page_views is None:
            sql_query = text('INSERT INTO stats (tick_date, page_views)'
                             ' VALUES (:tick_date, :page_views)')

            data = {'tick_date': date_now(), 'page_views': 1}

            conn.execute(sql_query, **data)

        # increment page_views count
        else:
            sql_query = text('UPDATE stats SET page_views = :page_views'
                             ' WHERE tick_date = :tick_date')

            data = {'tick_date': date_now(), 'page_views': page_views + 1}

            conn.execute(sql_query, **data)
Example #15
def get_posts(engine: Engine, post_id: int = None) -> list:
    """
    Get all blog posts as a list of table records

    :param engine: SQLAlchemy engine object
    :param post_id: blog entry 'id' (optional)
    :return: list of post records
    """

    with engine.connect() as conn:
        if post_id is not None:
            result = conn.execute('SELECT * FROM entries WHERE id = ?',
                                  post_id)
        else:
            result = conn.execute('SELECT * FROM entries')

        # unpack results into list of JSON records
        posts = [dict(row) for row in result]

        # data correctly retrieved
        if len(posts) > 0:

            # unpack post 'tags'
            for post in posts:
                post['tags'] = post['tags'].split(',')

        return posts
Example #16
def check_database(
    engine: Engine,
    user: pwd.struct_passwd,
    tables: Iterable[Table],
):
    """Check a set of tables as a user.

    :param engine: The SQLAlchemy engine
    :param user: The user to switch to
    :param tables: The tables to check
    :raises DBAPIError: if errors occur.
    """
    logger.info("Checking database access as user %s", user.pw_name)
    try:
        conn = engine.connect()
    except DBAPIError as e:
        logger.critical("Could not connect to database as %s: %s",
                        user.pw_name,
                        exc_info=e)
        raise
    with contextlib.closing(conn):
        for table in tables:
            try:
                check_table(conn, table)
            except DBAPIError as e:
                logger.critical(
                    "Query check for table %s as user %s failed",
                    table.name,
                    user.pw_name,
                    exc_info=e)
                raise
Example #17
    def _init_table(self, engine: Engine, table: str):
        query = """
        CREATE TABLE IF NOT EXISTS {table} (
            id BIGINT PRIMARY KEY,
            content BYTEA NOT NULL
        )
        """.format(table=table)
        with engine.connect() as conn:
            conn.execute(query)
Example #18
def connection_scope(engine: Engine = None) -> Connection:
    engine = engine or db_engine
    conn = engine.connect()
    try:
        yield conn
    except Exception:
        raise
    finally:
        conn.close()
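
connection_scope is a generator, so it is presumably decorated with contextlib.contextmanager in its source module; under that assumption it is used as:

with connection_scope(engine) as conn:
    conn.execute('SELECT 1')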
Example #19
    def tryConnect(cls, engine: Engine) -> Connection:
        try:
            return engine.connect()
        except DatabaseError as exc:
            # If we're having trouble connecting, wait a bit
            from time import sleep  # ISSUE: ambient
            sleep(cls.problem_pause)

            raise ConnectionProblem.refine(exc, str(engine)) from None
Example #20
def _test_schema_sync_helper(source_engine: Engine, type_mapping: dict, dolt: Dolt):
    sync_schema_to_dolt(source_engine, dolt, TABLE_MAP, type_mapping)
    _compare_schema_helper(TEST_SOURCE_TABLE, source_engine, TEST_TARGET_TABLE, dolt.engine, type_mapping)

    with source_engine.connect() as conn:
        conn.execute(ALTER_TABLE)

    sync_schema_to_dolt(source_engine, dolt, TABLE_MAP, type_mapping)
    _compare_schema_helper(TEST_SOURCE_TABLE, source_engine, TEST_TARGET_TABLE, dolt.engine, type_mapping)
Example #21
def syncronyze_extensions(source: Engine, destination: Engine):
    connFrom = source.connect()
    cur = connFrom.execute("SELECT * from pg_extension;")
    extensions = [line[0] for line in cur.fetchall()]
    extensions += ['btree_gin']
    connFrom.close()

    connTo = destination.connect()
    connTo.connection.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)

    for ext in extensions:
        try:
            connTo.execute("CREATE EXTENSION %s;" % ext)
            print("Creating extension %s" % ext)
        except ProgrammingError as e:  # pragma: no cover
            if 'already exists' not in str(e):
                raise
    connTo.close()
    return extensions
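
The ISOLATION_LEVEL_AUTOCOMMIT constant used above presumably comes from psycopg2, whose raw DBAPI connection is reached through connTo.connection:

from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT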
Example #22
def delete_dummy_security(engine: Engine):
    if engine.dialect.has_table(engine, 'securities'):
        with engine.connect() as db_conn:
            resp = db_conn.execute('DELETE FROM securities '
                                   'WHERE symbol = \'DUMMY\'')
            if resp.rowcount == 1:
                logging.info("Deleted record for DUMMY")
            else:
                logging.info("Record for DUMMY *DOES NOT* exist !!!")
    else:
        logging.info("The securities table *DOES NOT* exist !!!")
Example #23
def execute_insert_many_addresses(engine: Engine, address_table):
    conn = engine.connect()
    return conn.execute(
        address_table.insert(),
        [
            {"user_id": 1, "email_address": "*****@*****.**"},
            {"user_id": 1, "email_address": "*****@*****.**"},
            {"user_id": 2, "email_address": "*****@*****.**"},
            {"user_id": 2, "email_address": "*****@*****.**"},
        ],
    )
Example #24
def get_vote_result(engine: Engine) -> Mapping[str, int]:
    sql = 'SELECT name, count(id) AS vote FROM pokemon_vote GROUP BY name'
    connection = engine.connect()
    pokemon_vote_result_list = connection.execute(text(sql))
    result = {}
    for pokemon_vote_result in pokemon_vote_result_list:
        pokemon_name = pokemon_vote_result[0]
        vote = pokemon_vote_result[1]
        result[pokemon_name] = vote
    connection.close()
    return result
Example #25
async def get_last_rate(engine: Engine, currency_id: int) -> Optional[float]:
    """ Return last rate on given currency """
    conn = engine.connect()

    query = sa.select(
        (Rate.c.rate, )).where(Rate.c.currency_id == currency_id).order_by(
            Rate.c.date.desc()).limit(1)

    rate = conn.execute(query).scalar()

    return rate
Example #26
def fill_currencies(engine: Engine):
    """ Fill currencies table with CurrencyEnum values """
    conn = engine.connect()
    trans = conn.begin()
    for c in CurrencyEnum:
        query = sa.select((1, )).where(Currency.c.name == c.value)
        already_exists = conn.execute(query).scalar()
        if not already_exists:
            query = Currency.insert().values(name=c.value)
            conn.execute(query)
    trans.commit()
Example #27
def get_vote_result(engine: Engine) -> str:
    sql = 'SELECT name, count(id) AS vote FROM pokemon_vote GROUP BY name'
    connection = engine.connect()
    pokemon_vote_result_list = connection.execute(text(sql))
    results = []
    for pokemon_vote_result in pokemon_vote_result_list:
        pokemon_name = pokemon_vote_result[0]
        vote = pokemon_vote_result[1]
        results.append('{pokemon_name}\t: {vote}'.format(
            pokemon_name=pokemon_name, vote=vote))
    connection.close()
    return '\n'.join(results)
Example #28
def update_dummy_security(engine: Engine):
    if engine.dialect.has_table(engine, 'securities'):
        with engine.connect() as db_conn:
            resp = db_conn.execute('UPDATE securities '
                                   'SET price = 2.00 '
                                   'WHERE symbol = \'DUMMY\'')
            if resp.rowcount == 1:
                logging.info("Updated record for DUMMY")
            else:
                logging.info("Record for DUMMY *DOES NOT* exist !!!")
    else:
        logging.info("The securities table *DOES NOT* exist !!!")
Example #29
async def get_currencies(engine: Engine, **kwargs) -> List[RowProxy]:
    """ Fetch paginated data from currencies table """
    conn = engine.connect()

    limit = param_to_positive_int(kwargs, 'page_size', DEFAULT_PAGE_SIZE)
    page = param_to_positive_int(kwargs, 'page', DEFAULT_PAGE_NUM)
    offset = limit * (page - 1)

    query = sa.select((Currency, )).limit(limit).offset(offset)
    currencies = conn.execute(query).fetchall()

    return currencies
Example #30
def create_dummy_security(engine: Engine):
    if engine.dialect.has_table(engine, 'securities'):
        with engine.connect() as db_conn:
            resp = db_conn.execute('INSERT INTO securities '
                                   '(symbol, price) '
                                   'VALUES (\'DUMMY\', 1.00)')
            if resp.rowcount == 1:
                logging.info("Inserted record for DUMMY")
            else:
                logging.info("Failed to insert record for DUMMY")
    else:
        logging.info("The securities table *DOES NOT* exist !!!")
Example #31
def check_database(engine: Engine, user_name: pwd.struct_passwd,
                   tables: Iterable[Table]):
    logger.info("Checking database access as user %s", user_name)
    try:
        conn = engine.connect()
    except DBAPIError as e:
        logger.critical("Could not connect to database as %s: %s",
                        user_name, e)
        raise
    with contextlib.closing(conn):
        for table in tables:
            try:
                check_table(conn, table)
            except DBAPIError as e:
                logger.critical("Query check for table %s as user %s failed: "
                                "%s", table.name, user_name, e)
                raise
Example #32
def get_revision(
        config: Config,
        engine: Engine,
        script: ScriptDirectory,
        revision_type='current'
) -> str:
    """
    Helper to get revision id
    """
    with engine.connect() as conn:
        with EnvironmentContext(config, script) as env_context:
            env_context.configure(conn, version_table="migrate_version")
            if revision_type == 'head':
                revision = env_context.get_head_revision()
            else:
                migration_context = env_context.get_context()
                revision = migration_context.get_current_revision()
    return revision
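
A sketch of calling get_revision with Alembic objects; the ini path and engine are assumed to exist:

from alembic.config import Config
from alembic.script import ScriptDirectory

config = Config('alembic.ini')
script = ScriptDirectory.from_config(config)
current = get_revision(config, engine, script)           # revision currently stamped in the database
head = get_revision(config, engine, script, 'head')      # newest revision among the migration scripts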