def __init__(self, pool, dialect, url, logging_name=None, echo=None,
             execution_options=None, loop=None, **kwargs):
    """Wrap a synchronous SQLAlchemy Engine for use from asyncio.

    Builds the underlying ``Engine``, remembers the event loop, and
    creates the thread pool that engine calls are dispatched to.
    """
    self._engine = Engine(
        pool,
        dialect,
        url,
        logging_name=logging_name,
        echo=echo,
        execution_options=execution_options,
        **kwargs,
    )
    self._loop = loop
    # https://www.python.org/dev/peps/pep-0249/#threadsafety
    # This might seem overly-restrictive, but when we instantiate an
    # AsyncioResultProxy from AsyncioEngine.execute, subsequent
    # fetchone calls could be in different threads.  Limiting the pool
    # to a single worker keeps every call on one thread; None lets
    # ThreadPoolExecutor pick its default size.
    max_workers = 1 if dialect.dbapi.threadsafety < 2 else None
    self._engine_executor = ThreadPoolExecutor(max_workers=max_workers)
def __init__(self, pool, dialect, u, single_worker=True, **kwargs):
    """Build the wrapped Engine and, optionally, a dedicated worker thread.

    When ``single_worker`` is true a single ``ExecutorThread`` serializes
    engine calls; otherwise no worker is kept here (presumably one is
    obtained elsewhere — confirm against callers).
    """
    self._worker = ExecutorThread() if single_worker else None
    self._engine = Engine(pool, dialect, u, **kwargs)
def __init__(self, pool, dialect, url, logging_name=None, echo=None,
             execution_options=None, **kwargs):
    """Construct the wrapped synchronous SQLAlchemy Engine.

    No worker is created here; ``_engine_worker`` starts out as ``None``
    (presumably it is set up lazily on first use — confirm in the class).
    """
    self._engine = Engine(
        pool,
        dialect,
        url,
        logging_name=logging_name,
        echo=echo,
        execution_options=execution_options,
        **kwargs,
    )
    self._engine_worker = None
def mock_sqlalchemy_engine(dialect):
    """
    Create a sqlalchemy.engine.Engine without it connecting to a database.

    Parameters
    ----------
    dialect : str
        Name of the SQLAlchemy dialect to load (e.g. ``"postgresql"``).

    Examples
    --------

    ::

        from siuba.sql import LazyTbl
        from siuba import _, mutate, show_query

        engine = mock_sqlalchemy_engine('postgresql')
        tbl = LazyTbl(engine, 'some_table', ['x'])

        query = mutate(tbl, y = _.x + _.x)
        show_query(query)

    """
    from sqlalchemy.engine import Engine
    from sqlalchemy.dialects import registry

    # Bug fix: the dialect argument was previously ignored and
    # 'postgresql' was always loaded regardless of what was passed.
    dialect_cls = registry.load(dialect)

    # Pool is None: the Engine is never connected, only used for
    # SQL compilation/rendering against the chosen dialect.
    return Engine(None, dialect_cls(), '')