def _create_engine(self):
    """Build a sqlalchemy engine from this backend's configuration.

    The engine is connectivity-tested (with optional retries) before
    being returned.
    """
    # Work on a private copy; the pops below are destructive.
    options = copy.deepcopy(self._conf)
    kwargs = {
        "echo": misc.as_bool(options.pop("echo", False)),
        "convert_unicode": misc.as_bool(options.pop("convert_unicode", True)),
        "pool_recycle": 3600,
    }
    try:
        # Missing/invalid 'idle_timeout' raises TypeError and keeps the
        # default recycle period above.
        kwargs["pool_recycle"] = misc.as_int(options.pop("idle_timeout", None))
    except TypeError:
        pass
    sql_connection = options.pop("connection")
    url = sa.engine.url.make_url(sql_connection)
    if "sqlite" in url.drivername:
        kwargs["poolclass"] = sa_pool.NullPool
        # In-memory sqlite needs one shared connection usable from any
        # thread, so switch to a static pool.
        if sql_connection.lower().strip() in SQLITE_IN_MEMORY:
            kwargs["poolclass"] = sa_pool.StaticPool
            kwargs["connect_args"] = {"check_same_thread": False}
    else:
        pool_mappings = (
            ("pool_size", "max_pool_size"),
            ("max_overflow", "max_overflow"),
            ("pool_timeout", "pool_timeout"),
        )
        for engine_key, conf_key in pool_mappings:
            try:
                kwargs[engine_key] = misc.as_int(options.pop(conf_key, None))
            except TypeError:
                pass
    # Explicit 'engine_args' overrides win over everything derived above.
    kwargs.update(options.pop("engine_args", {}))
    engine = sa.create_engine(sql_connection, **kwargs)
    if misc.as_bool(options.pop("checkin_yield", True)):
        sa.event.listen(engine, "checkin", _thread_yield)
    if "mysql" in url.drivername:
        if misc.as_bool(options.pop("checkout_ping", True)):
            sa.event.listen(engine, "checkout", _ping_listener)
    try:
        max_retries = misc.as_int(options.pop("max_retries", None))
    except TypeError:
        max_retries = 0
    return self._test_connected(engine, max_retries=max_retries)
def _create_engine(self):
    """Create a new sqlalchemy engine from this backend's configuration."""
    # NOTE(harlowja): copy the internal one so that we don't modify it via
    # all the popping that will happen below.
    conf = copy.deepcopy(self._conf)
    engine_args = {
        'echo': misc.as_bool(conf.pop('echo', False)),
        'convert_unicode': misc.as_bool(conf.pop('convert_unicode', True)),
        'pool_recycle': 3600,
    }
    try:
        # An absent (or non-integer) 'idle_timeout' raises TypeError and
        # leaves the default pool_recycle above in place.
        idle_timeout = misc.as_int(conf.pop('idle_timeout', None))
        engine_args['pool_recycle'] = idle_timeout
    except TypeError:
        pass
    sql_connection = conf.pop('connection')
    e_url = sa.engine.url.make_url(sql_connection)
    if 'sqlite' in e_url.drivername:
        engine_args["poolclass"] = sa_pool.NullPool
        # Adjustments for in-memory sqlite usage
        if sql_connection.lower().strip() in SQLITE_IN_MEMORY:
            engine_args["poolclass"] = sa_pool.StaticPool
            engine_args["connect_args"] = {'check_same_thread': False}
    else:
        # Translate our config key names into sqlalchemy's pool keyword
        # names; keys that are absent/invalid are simply skipped.
        for (k, lookup_key) in [('pool_size', 'max_pool_size'),
                                ('max_overflow', 'max_overflow'),
                                ('pool_timeout', 'pool_timeout')]:
            try:
                engine_args[k] = misc.as_int(conf.pop(lookup_key, None))
            except TypeError:
                pass
    # If the configuration dict specifies any additional engine args
    # or engine arg overrides make sure we merge them in.
    engine_args.update(conf.pop('engine_args', {}))
    engine = sa.create_engine(sql_connection, **engine_args)
    # Yield on pool check-in by default only when eventlet is importable.
    checkin_yield = conf.pop('checkin_yield',
                             eventlet_utils.EVENTLET_AVAILABLE)
    if misc.as_bool(checkin_yield):
        sa.event.listen(engine, 'checkin', _thread_yield)
    if 'mysql' in e_url.drivername:
        # Ping on checkout so stale mysql connections get noticed early.
        if misc.as_bool(conf.pop('checkout_ping', True)):
            sa.event.listen(engine, 'checkout', _ping_listener)
    try:
        max_retries = misc.as_int(conf.pop('max_retries', None))
    except TypeError:
        max_retries = 0
    return self._test_connected(engine, max_retries=max_retries)
def get_connection(self):
    """Return a backend connection, validating the backend on first use."""
    connection = Connection(self, self._get_session_maker())
    if self._validated:
        return connection
    try:
        retries = misc.as_int(self._conf.get('max_retries', None))
    except TypeError:
        retries = 0
    connection.validate(max_retries=retries)
    self._validated = True
    return connection
def get_connection(self):
    """Return a connection to this backend (validated once, lazily)."""
    connection = Connection(self)
    if not self._validated:
        retries = 0
        try:
            retries = misc.as_int(self._conf.get("max_retries", None))
        except TypeError:
            # Retry count not configured (or unusable); don't retry.
            pass
        connection.validate(max_retries=retries)
        self._validated = True
    return connection
def __init__(self, conf, engine=None):
    """Initialize the backend from *conf*, optionally reusing *engine*.

    When an engine is supplied we borrow it (``_owns_engine`` is False);
    otherwise a new engine is created from the configuration and owned
    by this backend.
    """
    super(SQLAlchemyBackend, self).__init__(conf)
    self._owns_engine = engine is None
    if self._owns_engine:
        # NOTE(review): the conf dict is passed here — confirm this matches
        # the `_create_engine` signature used in this revision of the file.
        self._engine = self._create_engine(self._conf)
    else:
        self._engine = engine
    self._validated = False
    try:
        self._max_retries = misc.as_int(self._conf.get('max_retries'))
    except TypeError:
        # No usable retry count configured; default to no retries.
        self._max_retries = 0
def execute(self, request):
    """Process *request* into a response dict tagged with a fresh uuid."""
    response = {}
    response['user'] = request.user
    response['user_id'] = misc.as_int(request.id)
    response['request_id'] = uuidutils.generate_uuid()
    return response
def _create_engine(self):
    """Create a sqlalchemy engine (with event listeners) from the config."""
    # NOTE(harlowja): copy the internal one so that we don't modify it via
    # all the popping that will happen below.
    conf = copy.deepcopy(self._conf)
    engine_args = {
        'echo': misc.as_bool(conf.pop('echo', False)),
        'convert_unicode': misc.as_bool(conf.pop('convert_unicode', True)),
        'pool_recycle': 3600,
    }
    if 'idle_timeout' in conf:
        idle_timeout = misc.as_int(conf.pop('idle_timeout'))
        engine_args['pool_recycle'] = idle_timeout
    sql_connection = conf.pop('connection')
    e_url = sa.engine.url.make_url(sql_connection)
    if 'sqlite' in e_url.drivername:
        engine_args["poolclass"] = sa_pool.NullPool
        # Adjustments for in-memory sqlite usage.
        if sql_connection.lower().strip() in SQLITE_IN_MEMORY:
            engine_args["poolclass"] = sa_pool.StaticPool
            engine_args["connect_args"] = {'check_same_thread': False}
    else:
        # Translate our config key names into sqlalchemy's pool keyword
        # names (only when the config key is actually present).
        for (k, lookup_key) in [('pool_size', 'max_pool_size'),
                                ('max_overflow', 'max_overflow'),
                                ('pool_timeout', 'pool_timeout')]:
            if lookup_key in conf:
                engine_args[k] = misc.as_int(conf.pop(lookup_key))
    if 'isolation_level' not in conf:
        # Check driver name exact matches first, then try driver name
        # partial matches...
        txn_isolation_levels = conf.pop('isolation_levels',
                                        DEFAULT_TXN_ISOLATION_LEVELS)
        level_applied = False
        for (driver, level) in six.iteritems(txn_isolation_levels):
            if driver == e_url.drivername:
                engine_args['isolation_level'] = level
                level_applied = True
                break
        if not level_applied:
            for (driver, level) in six.iteritems(txn_isolation_levels):
                if e_url.drivername.find(driver) != -1:
                    engine_args['isolation_level'] = level
                    break
    else:
        # An explicitly configured isolation level always wins.
        engine_args['isolation_level'] = conf.pop('isolation_level')
    # If the configuration dict specifies any additional engine args
    # or engine arg overrides make sure we merge them in.
    engine_args.update(conf.pop('engine_args', {}))
    engine = sa.create_engine(sql_connection, **engine_args)
    # Yield on pool check-in by default only when eventlet is importable.
    checkin_yield = conf.pop('checkin_yield',
                             eventlet_utils.EVENTLET_AVAILABLE)
    if misc.as_bool(checkin_yield):
        sa.event.listen(engine, 'checkin', _thread_yield)
    if 'mysql' in e_url.drivername:
        # Ping on checkout so stale mysql connections get noticed early.
        if misc.as_bool(conf.pop('checkout_ping', True)):
            sa.event.listen(engine, 'checkout', _ping_listener)
        # TRADITIONAL sql mode is enabled by default; an explicit
        # 'mysql_sql_mode' overrides it.
        mode = None
        if misc.as_bool(conf.pop('mysql_traditional_mode', True)):
            mode = 'TRADITIONAL'
        if 'mysql_sql_mode' in conf:
            mode = conf.pop('mysql_sql_mode')
        if mode is not None:
            sa.event.listen(engine, 'connect',
                            functools.partial(_set_sql_mode, mode))
    return engine
def _create_engine(conf):
    """Create a sqlalchemy engine (with event listeners) from ``conf``."""
    # NOTE(harlowja): copy the internal one so that we don't modify it via
    # all the popping that will happen below.
    conf = copy.deepcopy(conf)
    engine_args = {
        'echo': _as_bool(conf.pop('echo', False)),
        'convert_unicode': _as_bool(conf.pop('convert_unicode', True)),
        'pool_recycle': 3600,
    }
    if 'idle_timeout' in conf:
        idle_timeout = misc.as_int(conf.pop('idle_timeout'))
        engine_args['pool_recycle'] = idle_timeout
    sql_connection = conf.pop('connection')
    e_url = sa.engine.url.make_url(sql_connection)
    if 'sqlite' in e_url.drivername:
        engine_args["poolclass"] = sa_pool.NullPool
        # Adjustments for in-memory sqlite usage.
        if sql_connection.lower().strip() in SQLITE_IN_MEMORY:
            engine_args["poolclass"] = sa_pool.StaticPool
            engine_args["connect_args"] = {'check_same_thread': False}
    else:
        # Translate our config key names into sqlalchemy's pool keyword
        # names (only when the config key is actually present).
        for (k, lookup_key) in [('pool_size', 'max_pool_size'),
                                ('max_overflow', 'max_overflow'),
                                ('pool_timeout', 'pool_timeout')]:
            if lookup_key in conf:
                engine_args[k] = misc.as_int(conf.pop(lookup_key))
    if 'isolation_level' not in conf:
        # Check driver name exact matches first, then try driver name
        # partial matches...
        txn_isolation_levels = conf.pop('isolation_levels',
                                        DEFAULT_TXN_ISOLATION_LEVELS)
        level_applied = False
        for (driver, level) in six.iteritems(txn_isolation_levels):
            if driver == e_url.drivername:
                engine_args['isolation_level'] = level
                level_applied = True
                break
        if not level_applied:
            for (driver, level) in six.iteritems(txn_isolation_levels):
                if e_url.drivername.find(driver) != -1:
                    engine_args['isolation_level'] = level
                    break
    else:
        # An explicitly configured isolation level always wins.
        engine_args['isolation_level'] = conf.pop('isolation_level')
    # If the configuration dict specifies any additional engine args
    # or engine arg overrides make sure we merge them in.
    engine_args.update(conf.pop('engine_args', {}))
    engine = sa.create_engine(sql_connection, **engine_args)
    # Optionally log every statement (at a configurable level) before it
    # is handed to the database cursor.
    log_statements = conf.pop('log_statements', False)
    if _as_bool(log_statements):
        log_statements_level = conf.pop("log_statements_level",
                                        logging.TRACE)
        sa.event.listen(
            engine, "before_cursor_execute",
            functools.partial(_log_statements, log_statements_level))
    # Yield on pool check-in by default only when eventlet is importable.
    checkin_yield = conf.pop('checkin_yield',
                             eventlet_utils.EVENTLET_AVAILABLE)
    if _as_bool(checkin_yield):
        sa.event.listen(engine, 'checkin', _thread_yield)
    if 'mysql' in e_url.drivername:
        # Ping on checkout so stale mysql connections get noticed early.
        if _as_bool(conf.pop('checkout_ping', True)):
            sa.event.listen(engine, 'checkout', _ping_listener)
        mode = None
        if 'mysql_sql_mode' in conf:
            mode = conf.pop('mysql_sql_mode')
        if mode is not None:
            sa.event.listen(engine, 'connect',
                            functools.partial(_set_sql_mode, mode))
    return engine
def _create_engine(conf):
    """Create a sqlalchemy engine configured from ``conf``.

    Works on a deep copy of ``conf`` so the caller's mapping is left
    untouched by the destructive ``pop`` calls below.
    """
    options = copy.deepcopy(conf)
    engine_kwargs = {
        "echo": _as_bool(options.pop("echo", False)),
        "convert_unicode": _as_bool(options.pop("convert_unicode", True)),
        "pool_recycle": 3600,
    }
    if "idle_timeout" in options:
        engine_kwargs["pool_recycle"] = misc.as_int(options.pop("idle_timeout"))
    sql_connection = options.pop("connection")
    url = sa.engine.url.make_url(sql_connection)
    if "sqlite" in url.drivername:
        engine_kwargs["poolclass"] = sa_pool.NullPool
        # In-memory sqlite needs one shared connection usable from any
        # thread, so switch to a static pool.
        if sql_connection.lower().strip() in SQLITE_IN_MEMORY:
            engine_kwargs["poolclass"] = sa_pool.StaticPool
            engine_kwargs["connect_args"] = {"check_same_thread": False}
    else:
        pool_mappings = (
            ("pool_size", "max_pool_size"),
            ("max_overflow", "max_overflow"),
            ("pool_timeout", "pool_timeout"),
        )
        for engine_key, conf_key in pool_mappings:
            if conf_key in options:
                engine_kwargs[engine_key] = misc.as_int(options.pop(conf_key))
    if "isolation_level" in options:
        # An explicitly configured isolation level always wins.
        engine_kwargs["isolation_level"] = options.pop("isolation_level")
    else:
        # Otherwise pick one from the level mapping, preferring an exact
        # driver-name match over a partial (substring) match.
        levels = options.pop("isolation_levels", DEFAULT_TXN_ISOLATION_LEVELS)
        matched = False
        for driver, level in six.iteritems(levels):
            if driver == url.drivername:
                engine_kwargs["isolation_level"] = level
                matched = True
                break
        if not matched:
            for driver, level in six.iteritems(levels):
                if url.drivername.find(driver) != -1:
                    engine_kwargs["isolation_level"] = level
                    break
    # Explicit 'engine_args' overrides win over everything derived above.
    engine_kwargs.update(options.pop("engine_args", {}))
    engine = sa.create_engine(sql_connection, **engine_kwargs)
    # Optionally log every statement (at a configurable level) before it
    # reaches the database cursor.
    if _as_bool(options.pop("log_statements", False)):
        statements_level = options.pop("log_statements_level",
                                       logging.BLATHER)
        sa.event.listen(engine, "before_cursor_execute",
                        functools.partial(_log_statements, statements_level))
    # Yield on pool check-in by default only when eventlet is importable.
    if _as_bool(options.pop("checkin_yield",
                            eventlet_utils.EVENTLET_AVAILABLE)):
        sa.event.listen(engine, "checkin", _thread_yield)
    if "mysql" in url.drivername:
        # Ping on checkout so stale mysql connections get noticed early.
        if _as_bool(options.pop("checkout_ping", True)):
            sa.event.listen(engine, "checkout", _ping_listener)
        mode = None
        if "mysql_sql_mode" in options:
            mode = options.pop("mysql_sql_mode")
        if mode is not None:
            sa.event.listen(engine, "connect",
                            functools.partial(_set_sql_mode, mode))
    return engine