def _set_session_sql_mode(dbapi_con, connection_rec,
                          connection_proxy, sql_mode=None):
    """Set the sql_mode session variable."""
    cursor = dbapi_con.cursor()
    if sql_mode is not None:
        cursor.execute("SET SESSION sql_mode = %s", [sql_mode])

    # Check against the real effective SQL mode. Even when unset by
    # our own config, the server may still be operating in a specific
    # SQL mode as set by the server configuration.
    cursor.execute("SHOW VARIABLES LIKE 'sql_mode'")
    row = cursor.fetchone()
    if row is None:
        LOG.warning(_LW('Unable to detect effective SQL mode'))
        return
    realmode = row[1]
    LOG.info(_LI('MySQL server mode set to %s') % realmode)
    # 'TRADITIONAL' mode enables several other modes, so
    # we need a substring match here
    if not ('TRADITIONAL' in realmode.upper() or
            'STRICT_ALL_TABLES' in realmode.upper()):
        LOG.warning(_LW("MySQL SQL mode is '%s', "
                        "consider enabling TRADITIONAL or STRICT_ALL_TABLES")
                    % realmode)
def _ping_listener(engine, dbapi_conn, connection_rec, connection_proxy):
    """Ensures that MySQL and DB2 connections are alive."""
    cursor = dbapi_conn.cursor()
    try:
        ping_sql = 'select 1'
        if engine.name == 'ibm_db_sa':
            # DB2 requires a table expression
            ping_sql = 'select 1 from (values (1)) AS t1'
        cursor.execute(ping_sql)
    except Exception as ex:
        if engine.dialect.is_disconnect(ex, dbapi_conn, cursor):
            msg = _LW('Database server has gone away: %s') % ex
            LOG.warning(msg)
            raise sqla_exc.DisconnectionError(msg)
        else:
            raise
def create_engine(sql_connection, sqlite_fk=False, mysql_sql_mode=None,
                  mysql_traditional_mode=False, idle_timeout=3600,
                  connection_debug=0, max_pool_size=None, max_overflow=None,
                  pool_timeout=None, sqlite_synchronous=True,
                  connection_trace=False, max_retries=10, retry_interval=10):
    """Return a new SQLAlchemy engine."""
    connection_dict = sqlalchemy.engine.url.make_url(sql_connection)

    engine_args = {
        "pool_recycle": idle_timeout,
        'convert_unicode': True,
    }

    logger = logging.getLogger('sqlalchemy.engine')

    # Map SQL debug level to Python log level
    if connection_debug >= 100:
        logger.setLevel(logging.DEBUG)
    elif connection_debug >= 50:
        logger.setLevel(logging.INFO)
    else:
        logger.setLevel(logging.WARNING)

    if "sqlite" in connection_dict.drivername:
        if sqlite_fk:
            engine_args["listeners"] = [SqliteForeignKeysListener()]
        engine_args["poolclass"] = NullPool

        if sql_connection == "sqlite://":
            engine_args["poolclass"] = StaticPool
            engine_args["connect_args"] = {'check_same_thread': False}
    else:
        if max_pool_size is not None:
            engine_args['pool_size'] = max_pool_size
        if max_overflow is not None:
            engine_args['max_overflow'] = max_overflow
        if pool_timeout is not None:
            engine_args['pool_timeout'] = pool_timeout

    engine = sqlalchemy.create_engine(sql_connection, **engine_args)

    sqlalchemy.event.listen(engine, 'checkin', _thread_yield)

    if engine.name in ['mysql', 'ibm_db_sa']:
        ping_callback = functools.partial(_ping_listener, engine)
        sqlalchemy.event.listen(engine, 'checkout', ping_callback)
        if engine.name == 'mysql':
            if mysql_traditional_mode:
                mysql_sql_mode = 'TRADITIONAL'
            if mysql_sql_mode:
                mode_callback = functools.partial(_set_session_sql_mode,
                                                  sql_mode=mysql_sql_mode)
                sqlalchemy.event.listen(engine, 'checkout', mode_callback)
    elif 'sqlite' in connection_dict.drivername:
        if not sqlite_synchronous:
            sqlalchemy.event.listen(engine, 'connect',
                                    _synchronous_switch_listener)
        sqlalchemy.event.listen(engine, 'connect', _add_regexp_listener)

    if connection_trace and engine.dialect.dbapi.__name__ == 'MySQLdb':
        _patch_mysqldb_with_stacktrace_comments()

    try:
        engine.connect()
    except sqla_exc.OperationalError as e:
        if not _is_db_connection_error(e.args[0]):
            raise

        remaining = max_retries
        if remaining == -1:
            remaining = 'infinite'
        while True:
            msg = _LW('SQL connection failed. %s attempts left.')
            LOG.warning(msg % remaining)
            if remaining != 'infinite':
                remaining -= 1
            time.sleep(retry_interval)
            try:
                engine.connect()
                break
            except sqla_exc.OperationalError as e:
                if (remaining != 'infinite' and remaining == 0) or \
                        not _is_db_connection_error(e.args[0]):
                    raise
    return engine
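# Example (illustrative sketch only, kept as a comment so the module stays
# importable): building an engine with this helper. The URL, credentials and
# tuning values below are placeholders, not defaults of this module. On a
# MySQL engine the sql_mode and ping listeners defined above are attached
# automatically on connection checkout.
#
#     engine = create_engine('mysql+mysqldb://user:secret@localhost/mydb',
#                            mysql_sql_mode='TRADITIONAL',
#                            idle_timeout=300,
#                            max_retries=5,
#                            retry_interval=2)
#     with engine.connect() as conn:
#         conn.execute('SELECT 1')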
def paginate_query(query, model, limit, sort_keys, marker=None,
                   sort_dir=None, sort_dirs=None):
    """Returns a query with sorting / pagination criteria added."""
    if 'id' not in sort_keys:
        # TODO(justinsb): If this ever gives a false-positive, check
        # the actual primary key, rather than assuming its id
        LOG.warning(_LW('Id not in sort_keys; is sort_keys unique?'))

    assert (not (sort_dir and sort_dirs))

    # Default the sort direction to ascending
    if sort_dirs is None and sort_dir is None:
        sort_dir = 'asc'

    # Ensure a per-column sort direction
    if sort_dirs is None:
        sort_dirs = [sort_dir for _sort_key in sort_keys]

    assert (len(sort_dirs) == len(sort_keys))

    # Add sorting
    for current_sort_key, current_sort_dir in zip(sort_keys, sort_dirs):
        try:
            sort_dir_func = {
                'asc': sqlalchemy.asc,
                'desc': sqlalchemy.desc,
            }[current_sort_dir]
        except KeyError:
            raise ValueError(_("Unknown sort direction, "
                               "must be 'desc' or 'asc'"))
        try:
            sort_key_attr = getattr(model, current_sort_key)
        except AttributeError:
            raise InvalidSortKey()
        query = query.order_by(sort_dir_func(sort_key_attr))

    # Add pagination
    if marker is not None:
        marker_values = []
        for sort_key in sort_keys:
            v = getattr(marker, sort_key)
            marker_values.append(v)

        # Build up an array of sort criteria as in the docstring
        criteria_list = []
        for i in range(len(sort_keys)):
            crit_attrs = []
            for j in range(i):
                model_attr = getattr(model, sort_keys[j])
                crit_attrs.append((model_attr == marker_values[j]))

            model_attr = getattr(model, sort_keys[i])
            if sort_dirs[i] == 'desc':
                crit_attrs.append((model_attr < marker_values[i]))
            else:
                crit_attrs.append((model_attr > marker_values[i]))

            criteria = sqlalchemy.sql.and_(*crit_attrs)
            criteria_list.append(criteria)

        f = sqlalchemy.sql.or_(*criteria_list)
        query = query.filter(f)

    if limit is not None:
        query = query.limit(limit)

    return query
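# Example (illustrative sketch only, kept as a comment): keyset pagination
# with paginate_query. The `Instance` model, `session` and
# `last_row_of_previous_page` names are hypothetical. `marker` is the last
# row of the previous page (None fetches the first page), and sort_keys
# should include a unique column such as 'id' so the ordering is total.
#
#     query = session.query(Instance)
#     page = paginate_query(query, Instance, limit=20,
#                           sort_keys=['created_at', 'id'],
#                           sort_dirs=['desc', 'asc'],
#                           marker=last_row_of_previous_page).all()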