def __init__(self, host=None, port=None, user=None, passwd=None, db=None):
  """Creates a datastore implementation.

  Args:
    host: Passed to MySQLdb.Connect when creating a new connection.
    port: Passed to MySQLdb.Connect when creating a new connection.
    user: Passed to MySQLdb.Connect when creating a new connection.
    passwd: Passed to MySQLdb.Connect when creating a new connection.
    db: Passed to MySQLdb.Connect when creating a new connection.
  """
  # Promote every SQL warning into a raised exception.
  warnings.filterwarnings("error", category=MySQLdb.Warning)

  def Connect():
    """Opens a fresh MySQL connection for the pool."""
    return MySQLdb.Connect(
        host=host,
        port=port,
        user=user,
        passwd=passwd,
        db=db,
        autocommit=False,
        use_unicode=True,
        charset="utf8")

  self.pool = mysql_pool.Pool(Connect)

  # Borrow one connection up front to check server compatibility and to
  # create the schema if it is missing.
  with contextlib.closing(self.pool.get()) as connection, \
      contextlib.closing(connection.cursor()) as cursor:
    self._MariaDBCompatibility(cursor)
    self._SetBinlogFormat(cursor)
    self._InitializeSchema(cursor)

  # Message-handler thread bookkeeping; nothing runs until explicitly started.
  self.handler_thread = None
  self.handler_stop = True
def testGoodConnection(self):
  """A healthy connection should be reusable and returned to the idle list."""
  method_names = [
      'callproc', 'execute', 'executemany', 'fetchone', 'fetchmany',
      'fetchall'
  ]
  cursor_mock = mock.MagicMock()
  # Each cursor method simply echoes its own name so we can verify that the
  # pool's proxy forwards calls untouched.
  for name in method_names:
    getattr(cursor_mock, name).return_value = name

  connection_mock = mock.MagicMock()
  connection_mock.cursor.return_value = cursor_mock

  pool = mysql_pool.Pool(lambda: connection_mock, max_size=5)

  operations = [
      ('callproc', lambda c: c.callproc('my_proc')),
      ('execute', lambda c: c.execute('SELECT foo FROM bar')),
      ('executemany',
       lambda c: c.executemany('INSERT INTO foo(bar) VALUES %s', ['A', 'B'])),
      ('fetchone', lambda c: c.fetchone()),
      ('fetchmany', lambda c: c.fetchmany(size=5)),
      ('fetchall', lambda c: c.fetchall()),
  ]
  for expected, operation in operations:
    # 10 > max_size iterations prove that idling a connection does not
    # consume pool capacity.
    for _ in range(10):
      proxy = pool.get()
      cursor = proxy.cursor()
      self.assertEqual(expected, operation(cursor))
      cursor.close()
      proxy.close()

  # Whitebox check: the connection must have landed on the idle list.
  self.assertEqual(1, len(pool.idle_conns))
def testMaxSize(self):
  """The pool should never create more than max_size raw connections."""
  created = []

  def factory():
    conn = mock.MagicMock()
    created.append(conn)
    return conn

  pool = mysql_pool.Pool(factory, max_size=5)

  handles = []
  for _ in range(5):
    handle = pool.get(blocking=False)
    self.assertIsNotNone(handle)
    handles.append(handle)

  # Capacity is exhausted; a non-blocking get must yield None, not block.
  self.assertIsNone(pool.get(blocking=False))

  for handle in handles:
    handle.close()
  for conn in created:
    # Closing a proxy returns the raw connection to the pool instead of
    # actually closing it.
    conn.close.assert_not_called()

  # The five returned connections should be handed out again without any
  # new ones being created.
  for _ in range(5):
    handle = pool.get(blocking=False)
    self.assertIsNotNone(handle)
    handles.append(handle)
  for handle in handles:
    handle.close()

  self.assertEqual(5, len(created), 'Should have created only 5 mocks.')
def __init__(self, host=None, port=None, user=None, passwd=None, db=None):
  """Creates a datastore implementation.

  Args:
    host: Passed to MySQLdb.Connect when creating a new connection.
    port: Passed to MySQLdb.Connect when creating a new connection.
    user: Passed to MySQLdb.Connect when creating a new connection.
    passwd: Passed to MySQLdb.Connect when creating a new connection.
    db: Passed to MySQLdb.Connect when creating a new connection.
  """
  # Turn all SQL warnings not mentioned below into exceptions.
  warnings.filterwarnings("error", category=MySQLdb.Warning)
  for message in [
      # We use INSERT IGNOREs which generate useless duplicate entry warnings.
      ".*Duplicate entry.*",
      # Same for CREATE TABLE IF NOT EXISTS.
      ".*Table '.*' already exists",
      # And CREATE INDEX IF NOT EXISTS.
      ".*Duplicate key name.*",
  ]:
    warnings.filterwarnings("ignore", category=MySQLdb.Warning, message=message)

  def Connect():
    """Returns a MySQLdb connection and creates the db if it doesn't exist."""
    try:
      return MySQLdb.Connect(**self._GetConnectionArgs(
          host=host, port=port, user=user, passwd=passwd, db=db))
    except MySQLdb.Error as e:
      # FIX: exceptions are not indexable on Python 3 (`e[0]` raises
      # TypeError); the MySQL error code is the first element of e.args.
      # e.args[0] works on both Python 2 and 3.
      if e.args[0] == _ER_BAD_DB_ERROR:
        # Database does not exist - create it, then retry the connection once.
        self._CreateDatabase()
        return MySQLdb.Connect(**self._GetConnectionArgs(
            host=host, port=port, user=user, passwd=passwd, db=db))
      else:
        raise

  self.pool = mysql_pool.Pool(Connect)

  # Borrow one connection up front to verify server compatibility and
  # initialize the schema.
  with contextlib.closing(self.pool.get()) as connection:
    with contextlib.closing(connection.cursor()) as cursor:
      self._MariaDBCompatibility(cursor)
      self._SetBinlogFormat(cursor)
      self._InitializeSchema(cursor)
      self._CheckForSSL(cursor)

  # Message-handler thread bookkeeping; nothing runs until explicitly started.
  self.handler_thread = None
  self.handler_stop = True

  # Flow-processing worker pool, started immediately.
  self.flow_processing_request_handler_thread = None
  self.flow_processing_request_handler_stop = None
  self.flow_processing_request_handler_pool = (
      threadpool.ThreadPool.Factory("flow_processing_pool",
                                    min_threads=2,
                                    max_threads=50))
  self.flow_processing_request_handler_pool.Start()
def testConnectFailure(self):
  """A failing connection factory must not eat into pool capacity."""

  class TestException(Exception):
    pass

  def failing_factory():
    raise TestException()

  pool = mysql_pool.Pool(failing_factory, max_size=5)

  # Every attempt should surface the factory's exception. Trying 10 > 5
  # times proves failed attempts do not use up pool slots.
  for _ in range(10):
    with self.assertRaises(TestException):
      pool.get()
def __init__(self, host=None, port=None, user=None, password=None,
             database=None):
  """Creates a datastore implementation.

  Args:
    host: Passed to MySQLdb.Connect when creating a new connection.
    port: Passed to MySQLdb.Connect when creating a new connection.
    user: Passed to MySQLdb.Connect when creating a new connection.
    password: Passed to MySQLdb.Connect when creating a new connection.
    database: Passed to MySQLdb.Connect when creating a new connection.
  """
  # Turn all SQL warnings not mentioned below into exceptions.
  warnings.filterwarnings("error", category=MySQLdb.Warning)
  benign_warning_patterns = [
      # We use INSERT IGNOREs which generate useless duplicate entry warnings.
      ".*Duplicate entry.*",
      # Same for CREATE TABLE IF NOT EXISTS.
      ".*Table '.*' already exists",
      # And CREATE INDEX IF NOT EXISTS.
      ".*Duplicate key name.*",
  ]
  for pattern in benign_warning_patterns:
    warnings.filterwarnings(
        "ignore", category=MySQLdb.Warning, message=pattern)

  # Resolve connection parameters once; _Connect reuses them for every
  # pooled connection.
  self._args = self._GetConnectionArgs(
      host=host, port=port, user=user, password=password, database=database)
  _SetupDatabase(**self._args)

  max_pool_size = config.CONFIG.Get("Mysql.conn_pool_max", 10)
  self.pool = mysql_pool.Pool(self._Connect, max_size=max_pool_size)

  # Message-handler thread bookkeeping; nothing runs until explicitly started.
  self.handler_thread = None
  self.handler_stop = True

  # Flow-processing worker pool, started immediately.
  self.flow_processing_request_handler_thread = None
  self.flow_processing_request_handler_stop = None
  self.flow_processing_request_handler_pool = (
      threadpool.ThreadPool.Factory("flow_processing_pool",
                                    min_threads=2,
                                    max_threads=50))
  self.flow_processing_request_handler_pool.Start()
def testBadConnection(self):
  """A connection whose cursor always fails must not be recycled as idle."""

  def raise_operational_error(*args, **kwargs):
    del args, kwargs  # Unused
    raise MySQLdb.OperationalError('Bad Cursor')

  cursor_mock = mock.MagicMock()
  for name in [
      'callproc', 'execute', 'executemany', 'fetchone', 'fetchmany',
      'fetchall'
  ]:
    getattr(cursor_mock, name).side_effect = raise_operational_error

  connection_mock = mock.MagicMock()
  connection_mock.cursor.return_value = cursor_mock

  pool = mysql_pool.Pool(lambda: connection_mock, max_size=5)

  operations = [
      lambda c: c.callproc('my_proc'),
      lambda c: c.execute('SELECT foo FROM bar'),
      lambda c: c.executemany('INSERT INTO foo(bar) VALUES %s', ['A', 'B']),
      lambda c: c.fetchone(),
      lambda c: c.fetchmany(size=5),
      lambda c: c.fetchall(),
  ]
  for operation in operations:
    # 10 > max_size iterations prove that failed connections are not
    # consuming pool capacity.
    for _ in range(10):
      proxy = pool.get()
      cursor = proxy.cursor()
      with self.assertRaises(MySQLdb.OperationalError):
        operation(cursor)
      cursor.close()
      proxy.close()

  # Whitebox check: a failed connection must not land on the idle list.
  self.assertFalse(pool.idle_conns)
def __init__(self, host=None, port=None, user=None, password=None,
             database=None):
  """Creates a datastore implementation.

  Args:
    host: Passed to MySQLdb.Connect when creating a new connection.
    port: Passed to MySQLdb.Connect when creating a new connection.
    user: Passed to MySQLdb.Connect when creating a new connection.
    password: Passed to MySQLdb.Connect when creating a new connection.
    database: Passed to MySQLdb.Connect when creating a new connection.
  """
  # Turn all SQL warnings not mentioned below into exceptions.
  warnings.filterwarnings("error", category=MySQLdb.Warning)
  benign_warning_patterns = [
      # We use INSERT IGNOREs which generate useless duplicate entry warnings.
      ".*Duplicate entry.*",
      # Same for CREATE TABLE IF NOT EXISTS.
      ".*Table '.*' already exists",
      # And CREATE INDEX IF NOT EXISTS.
      ".*Duplicate key name.*",
      # TODO: caused by an old MySQLdb version that doesn't wrap bytes
      # SQL arguments with the _binary() type hint; should go away once a
      # newer MySQLdb is used with Python 3.
      ".*Invalid.*character string.*",
  ]
  for pattern in benign_warning_patterns:
    warnings.filterwarnings(
        "ignore", category=MySQLdb.Warning, message=pattern)

  # Resolve connection parameters, falling back to the config for anything
  # the caller left unset.
  self._connect_args = dict(
      host=host or config.CONFIG["Mysql.host"],
      port=port or config.CONFIG["Mysql.port"],
      user=user or config.CONFIG["Mysql.username"],
      password=password or config.CONFIG["Mysql.password"],
      database=database or config.CONFIG["Mysql.database"])

  client_key_path = config.CONFIG["Mysql.client_key_path"]
  if client_key_path:
    logging.debug("Client key file configured, trying to use SSL.")
    self._connect_args["client_key_path"] = client_key_path
    self._connect_args["client_cert_path"] = config.CONFIG[
        "Mysql.client_cert_path"]
    self._connect_args["ca_cert_path"] = config.CONFIG["Mysql.ca_cert_path"]

  _SetupDatabase(**self._connect_args)

  self._max_pool_size = config.CONFIG["Mysql.conn_pool_max"]
  self.pool = mysql_pool.Pool(self._Connect, max_size=self._max_pool_size)

  # Message-handler thread bookkeeping; nothing runs until explicitly started.
  self.handler_thread = None
  self.handler_stop = True

  # Flow-processing worker pool, started immediately.
  self.flow_processing_request_handler_thread = None
  self.flow_processing_request_handler_stop = None
  self.flow_processing_request_handler_pool = (
      threadpool.ThreadPool.Factory(
          "flow_processing_pool", min_threads=2, max_threads=50))
  self.flow_processing_request_handler_pool.Start()
def __init__(self, host=None, port=None, user=None, password=None,
             database=None):
  """Creates a datastore implementation.

  Args:
    host: Passed to MySQLdb.Connect when creating a new connection.
    port: Passed to MySQLdb.Connect when creating a new connection.
    user: Passed to MySQLdb.Connect when creating a new connection.
    password: Passed to MySQLdb.Connect when creating a new connection.
    database: Passed to MySQLdb.Connect when creating a new connection.
  """
  # Turn all SQL warnings not mentioned below into exceptions.
  warnings.filterwarnings("error", category=MySQLdb.Warning)
  benign_warning_patterns = [
      # We use INSERT IGNOREs which generate useless duplicate entry warnings.
      ".*Duplicate entry.*",
      # Same for CREATE TABLE IF NOT EXISTS.
      ".*Table '.*' already exists",
      # And CREATE INDEX IF NOT EXISTS.
      ".*Duplicate key name.*",
  ]
  for pattern in benign_warning_patterns:
    warnings.filterwarnings(
        "ignore", category=MySQLdb.Warning, message=pattern)

  # Resolve connection parameters, falling back to the config for anything
  # the caller left unset.
  self._connect_args = dict(
      host=host or config.CONFIG["Mysql.host"],
      port=port or config.CONFIG["Mysql.port"],
      user=user or config.CONFIG["Mysql.username"],
      password=password or config.CONFIG["Mysql.password"],
      database=database or config.CONFIG["Mysql.database"])

  client_key_path = config.CONFIG["Mysql.client_key_path"]
  if client_key_path is not None:
    logging.debug("Client key file configured, trying to use SSL.")
    self._connect_args["client_key_path"] = client_key_path
    self._connect_args["client_cert_path"] = config.CONFIG[
        "Mysql.client_cert_path"]
    self._connect_args["ca_cert_path"] = config.CONFIG["Mysql.ca_cert_path"]

  _SetupDatabase(**self._connect_args)

  max_pool_size = config.CONFIG.Get("Mysql.conn_pool_max", 10)
  self.pool = mysql_pool.Pool(self._Connect, max_size=max_pool_size)

  # Message-handler thread bookkeeping; nothing runs until explicitly started.
  self.handler_thread = None
  self.handler_stop = True

  # Flow-processing worker pool, started immediately.
  self.flow_processing_request_handler_thread = None
  self.flow_processing_request_handler_stop = None
  self.flow_processing_request_handler_pool = (
      threadpool.ThreadPool.Factory(
          "flow_processing_pool", min_threads=2, max_threads=50))
  self.flow_processing_request_handler_pool.Start()