def postgresql_factory(request):
    """
    #. Load required process fixture.
    #. Get postgresql module and config.
    #. Connect to postgresql.
    #. Flush database after tests.

    :param FixtureRequest request: fixture request object
    :rtype: psycopg2.connection
    :returns: postgresql client
    """
    proc_fixture = request.getfuncargvalue(process_fixture_name)

    psycopg2, config = try_import("psycopg2", request)
    pg_host = proc_fixture.host
    pg_port = proc_fixture.port
    pg_db = db or config.postgresql.db

    init_postgresql_database(
        psycopg2, config.postgresql.user, pg_host, pg_port, pg_db
    )
    conn = psycopg2.connect(
        dbname=pg_db,
        user=config.postgresql.user,
        host=pg_host,
        port=pg_port,
    )

    def drop_database():
        conn.close()
        drop_postgresql_database(
            psycopg2, config.postgresql.user, pg_host, pg_port, pg_db
        )

    request.addfinalizer(drop_database)

    return conn
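# Usage sketch (assumption): the enclosing factory is typically exposed as a
# `postgresql` fixture; that fixture name is hypothetical here. The fixture
# returns a plain psycopg2 connection, so a test can drive it directly and rely
# on the finalizer above to drop the database afterwards.
def test_postgresql_fixture(postgresql):
    cur = postgresql.cursor()
    cur.execute('CREATE TABLE demo (id serial PRIMARY KEY, name text)')
    cur.execute("INSERT INTO demo (name) VALUES ('example')")
    postgresql.commit()
    cur.execute('SELECT count(*) FROM demo')
    assert cur.fetchone()[0] == 1
    cur.close()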
def dynamodb_factory(request):
    """
    Connect to the local DynamoDB.

    :param FixtureRequest request: fixture request object
    :rtype: Subclass of :py:class:`~boto3.resources.base.ServiceResource`
        https://boto3.readthedocs.io/en/latest/reference/services/dynamodb.html#DynamoDB.Client
    :returns: connection to DynamoDB database
    """
    proc_fixture = get_process_fixture(request, process_fixture_name)

    boto3, config = try_import('boto3', request)

    dynamo_db = boto3.resource(
        'dynamodb',
        endpoint_url='http://{host}:{port}'.format(
            host=proc_fixture.host,
            port=proc_fixture.port
        ),
        # these args do not matter, but something has to be passed
        region_name='us-east-1',
        aws_access_key_id='',
        aws_secret_access_key='',
    )

    # remove all tables
    request.addfinalizer(
        lambda: [t.delete() for t in dynamo_db.tables.all()]
    )

    return dynamo_db
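# Usage sketch (assumption): assuming the factory is exposed as a `dynamodb`
# fixture (name hypothetical), tests receive a boto3 DynamoDB ServiceResource;
# the finalizer above deletes every table afterwards.
def test_dynamodb_fixture(dynamodb):
    table = dynamodb.create_table(
        TableName='demo',
        KeySchema=[{'AttributeName': 'id', 'KeyType': 'HASH'}],
        AttributeDefinitions=[{'AttributeName': 'id', 'AttributeType': 'S'}],
        ProvisionedThroughput={'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1},
    )
    table.put_item(Item={'id': '1', 'value': 'example'})
    assert table.get_item(Key={'id': '1'})['Item']['value'] == 'example'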
def mongodb_factory(request):
    """
    #. Get pymongo module and config.
    #. Get connection to mongo.
    #. Drop collections before and after tests.

    :param FixtureRequest request: fixture request object
    :rtype: pymongo.connection.Connection
    :returns: connection to mongo database
    """
    get_process_fixture(request, process_fixture_name)

    pymongo, config = try_import('pymongo', request)

    mongo_host = host or config.mongo.host
    mongo_port = port or config.mongo.port

    mongo_conn = pymongo.Connection(
        mongo_host,
        mongo_port,
    )

    def drop():
        for db in mongo_conn.database_names():
            for collection_name in mongo_conn[db].collection_names():
                if collection_name != 'system.indexes':
                    mongo_conn[db][collection_name].drop()

    drop()

    request.addfinalizer(drop)

    return mongo_conn
def mongodb_factory(request):
    """
    #. Get pymongo module and config.
    #. Get connection to mongo.
    #. Drop collections before and after tests.

    :param FixtureRequest request: fixture request object
    :rtype: pymongo.connection.Connection
    :returns: connection to mongo database
    """
    proc_fixture = get_process_fixture(request, process_fixture_name)

    pymongo, _ = try_import('pymongo', request)

    mongo_host = proc_fixture.host
    mongo_port = proc_fixture.port

    try:
        client = pymongo.MongoClient
    except AttributeError:
        client = pymongo.Connection

    mongo_conn = client(mongo_host, mongo_port)

    return mongo_conn
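# Usage sketch (assumption): with the factory registered as a `mongodb` fixture
# (name hypothetical) and a pymongo version that provides MongoClient (3.x API),
# a test can work with databases and collections as usual:
def test_mongodb_fixture(mongodb):
    collection = mongodb['test_db']['things']
    collection.insert_one({'name': 'example'})  # older clients use insert()
    assert collection.find_one({'name': 'example'})['name'] == 'example'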
def redisdb_factory(request):
    """
    #. Load required process fixture.
    #. Get redis module and config.
    #. Connect to redis.
    #. Flush database after tests.

    :param FixtureRequest request: fixture request object
    :rtype: redis.client.Redis
    :returns: Redis client
    """
    proc_fixture = get_process_fixture(request, process_fixture_name)

    redis, config = try_import('redis', request)

    redis_host = proc_fixture.host
    redis_port = proc_fixture.port
    redis_db = db or config.redis.db
    redis_class = redis.StrictRedis if strict else redis.Redis

    redis_client = redis_class(
        redis_host, redis_port, redis_db, decode_responses=True)

    request.addfinalizer(redis_client.flushall)

    return redis_client
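# Usage sketch (assumption): assuming the factory is exposed as a `redisdb`
# fixture (name hypothetical), the client is flushed after each test by the
# finalizer above, and decode_responses=True makes GET return str rather than bytes.
def test_redisdb_fixture(redisdb):
    redisdb.set('greeting', 'hello')
    assert redisdb.get('greeting') == 'hello'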
def redisdb_factory(request):
    """
    #. Load required process fixture.
    #. Get redis module and config.
    #. Connect to redis.
    #. Flush database after tests.

    :param FixtureRequest request: fixture request object
    :rtype: redis.client.Redis
    :returns: Redis client
    """
    get_process_fixture(request, process_fixture_name)

    redis, config = try_import('redis', request)

    redis_host = host or config.redis.host
    redis_port = port or config.redis.port
    redis_db = db or config.redis.db
    redis_class = redis.StrictRedis if strict else redis.Redis

    redis_client = redis_class(
        redis_host, redis_port, redis_db, decode_responses=True)

    request.addfinalizer(redis_client.flushall)

    return redis_client
def rabbitmq_factory(request):
    """
    #. Get module and config.
    #. Connect to RabbitMQ using the parameters from config.

    :param TCPExecutor rabbitmq_proc: tcp executor
    :param FixtureRequest request: fixture request object
    :rtype: rabbitpy.connection.Connection
    :returns: connection to RabbitMQ
    """
    # load required process fixture
    process = request.getfuncargvalue(process_fixture_name)

    rabbitpy, config = try_import('rabbitpy', request)

    connection = rabbitpy.Connection(
        'amqp://{host}:{port}/%2F'.format(
            host=process.host,
            port=process.port
        )
    )

    def finalizer():
        teardown(process, connection)
        connection.close()

    request.addfinalizer(finalizer)

    return connection
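# Usage sketch (assumption): with the rabbitpy-based factory registered as a
# `rabbitmq` fixture, a test can open a channel, declare a queue, and publish.
# Fixture and queue names are hypothetical; the API shown follows rabbitpy's
# documented Queue/Message helpers.
import rabbitpy

def test_rabbitmq_fixture(rabbitmq):
    channel = rabbitmq.channel()
    queue = rabbitpy.Queue(channel, 'example')
    queue.declare()
    rabbitpy.Message(channel, 'hello').publish('', 'example')
    message = queue.get()
    assert message is not None
    message.ack()
    channel.close()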
def rabbitmq_factory(request):
    """
    #. Get module and config.
    #. Connect to RabbitMQ using the parameters from config.

    :param TCPExecutor rabbitmq_proc: tcp executor
    :param FixtureRequest request: fixture request object
    :rtype: pika.adapters.blocking_connection.BlockingConnection
    :returns: instance of :class:`BlockingConnection`
    """
    # load required process fixture
    process = request.getfuncargvalue(process_fixture_name)

    pika, config = try_import('pika', request)

    rabbit_params = pika.connection.ConnectionParameters(
        host=host or config.rabbit.host,
        port=port or config.rabbit.port,
        connection_attempts=3,
        retry_delay=2,
    )

    try:
        rabbit_connection = pika.BlockingConnection(rabbit_params)
    except pika.adapters.blocking_connection.exceptions.ConnectionClosed:
        print("Be sure that you're connecting to rabbitmq-server >= 2.8.4")
        # re-raise so the failure surfaces here instead of as a NameError below
        raise

    def finalizer():
        teardown(process, rabbit_connection)

    request.addfinalizer(finalizer)

    return rabbit_connection
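# Usage sketch (assumption): with the pika-based factory registered as a
# `rabbitmq` fixture (name hypothetical), tests drive the BlockingConnection
# through a channel; the queue name below is also hypothetical.
def test_rabbitmq_pika_fixture(rabbitmq):
    channel = rabbitmq.channel()
    frame = channel.queue_declare(queue='example')
    assert frame.method.queue == 'example'
    channel.basic_publish(exchange='', routing_key='example', body='hello')
    channel.close()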
def postgresql_factory(request):
    """
    #. Load required process fixture.
    #. Get postgresql module and config.
    #. Connect to postgresql.
    #. Flush database after tests.

    :param FixtureRequest request: fixture request object
    :rtype: psycopg2.connection
    :returns: postgresql client
    """
    request.getfuncargvalue(process_fixture_name)

    psycopg2, config = try_import('psycopg2', request)

    pg_host = host or config.postgresql.unixsocketdir
    pg_port = port or config.postgresql.port
    pg_db = db or config.postgresql.db

    init_postgresql_database(
        psycopg2, config.postgresql.user, pg_host, pg_port, pg_db
    )
    conn = psycopg2.connect(
        dbname=pg_db,
        user=config.postgresql.user,
        host=pg_host,
        port=pg_port,
    )

    def drop_database():
        conn.close()
        drop_postgresql_database(
            psycopg2, config.postgresql.user, pg_host, pg_port, pg_db
        )

    request.addfinalizer(drop_database)

    return conn
def rabbitmq_factory(request):
    """
    #. Get module and config.
    #. Connect to RabbitMQ using the parameters from config.

    :param TCPExecutor rabbitmq_proc: tcp executor
    :param FixtureRequest request: fixture request object
    :rtype: rabbitpy.connection.Connection
    :returns: connection to RabbitMQ
    """
    # load required process fixture
    process = request.getfuncargvalue(process_fixture_name)

    rabbitpy, config = try_import('rabbitpy', request)

    connection = rabbitpy.Connection(
        'amqp://*****:*****@{host}:{port}/%2F'.format(
            host=process.host,
            port=process.port
        )
    )

    def finalizer():
        teardown(process, connection)
        connection.close()

    request.addfinalizer(finalizer)

    return connection
def apply_database_plumbing(request, postgresql_proc):
    """Bolt pytest-dbfixtures onto Django to work around its lack of
    no-setup testing facilities.
    """
    psycopg2, config = try_import('psycopg2', request)
    settings.DATABASES['default'].update({
        'NAME': config.postgresql.db,
        'USER': config.postgresql.user,
        'HOST': postgresql_proc.host,
        'PORT': postgresql_proc.port,
    })
    init_postgresql_database(
        psycopg2,
        config.postgresql.user,
        postgresql_proc.host,
        postgresql_proc.port,
        config.postgresql.db,
    )
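# Usage sketch (assumption): if apply_database_plumbing is wired up as a pytest
# fixture (e.g. decorated with @pytest.fixture under a name such as
# `database_plumbing`, which is hypothetical here), a test can reach the freshly
# created database through Django's default connection.
from django.db import connection

def test_database_plumbing(database_plumbing):
    with connection.cursor() as cursor:
        cursor.execute('SELECT 1')
        assert cursor.fetchone() == (1,)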
def pg_connection(request, postgresql_proc):
    psycopg2, config = try_import('psycopg2', request)

    pg_host = postgresql_proc.host
    pg_port = postgresql_proc.port
    pg_db = config.postgresql.db

    init_postgresql_database(
        psycopg2, config.postgresql.user, pg_host, pg_port, pg_db
    )
    apply_migrations(config.postgresql.user, pg_host, pg_port, pg_db)

    conn = psycopg2.connect(
        dbname=pg_db,
        user=config.postgresql.user,
        host=pg_host,
        port=pg_port,
    )

    return conn
def mysql_fixture(request):
    """
    #. Get config.
    #. Try to import MySQLdb package.
    #. Connect to mysql server.
    #. Create database.
    #. Use proper database.
    #. Drop database after tests.

    :param FixtureRequest request: fixture request object
    :rtype: MySQLdb.connections.Connection
    :returns: connection to database
    """
    proc_fixture = get_process_fixture(request, process_fixture_name)
    config = get_config(request)

    mysql_port = proc_fixture.port
    mysql_host = proc_fixture.host
    mysql_user = user or config.mysql.user
    mysql_passwd = passwd or config.mysql.password
    mysql_db = db or config.mysql.db

    unixsocket = '/tmp/mysql.{port}.sock'.format(port=mysql_port)

    MySQLdb, config = try_import(
        'MySQLdb', request, pypi_package='mysqlclient'
    )

    mysql_conn = MySQLdb.connect(
        host=mysql_host,
        unix_socket=unixsocket,
        user=mysql_user,
        passwd=mysql_passwd,
    )

    mysql_conn.query(
        '''CREATE DATABASE {name}
        DEFAULT CHARACTER SET {charset}
        DEFAULT COLLATE {collation}'''.format(
            name=mysql_db, charset=charset, collation=collation
        )
    )
    mysql_conn.query('USE %s' % mysql_db)

    def drop_database():
        mysql_conn.query('DROP DATABASE IF EXISTS %s' % mysql_db)
        mysql_conn.close()

    request.addfinalizer(drop_database)

    return mysql_conn
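# Usage sketch (assumption): with the factory registered as a `mysql` fixture
# (name hypothetical), the MySQLdb connection already has the test database
# selected (the USE statement above), so DDL and queries work immediately.
def test_mysql_fixture(mysql):
    cursor = mysql.cursor()
    cursor.execute('CREATE TABLE demo (id INT PRIMARY KEY, name VARCHAR(32))')
    cursor.execute("INSERT INTO demo VALUES (1, 'example')")
    mysql.commit()
    cursor.execute('SELECT name FROM demo WHERE id = 1')
    assert cursor.fetchone()[0] == 'example'
    cursor.close()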
def mysqldb_fixture(request):
    """
    #. Get config.
    #. Try to import MySQLdb package.
    #. Connect to mysql server.
    #. Create database.
    #. Use proper database.
    #. Drop database after tests.

    :param FixtureRequest request: fixture request object
    :rtype: MySQLdb.connections.Connection
    :returns: connection to database
    """
    get_process_fixture(request, process_fixture_name)
    config = get_config(request)

    mysql_port = port or config.mysql.port
    mysql_host = host or config.mysql.host
    mysql_user = user or config.mysql.user
    mysql_passwd = passwd or config.mysql.password
    mysql_db = db or config.mysql.db

    unixsocket = '/tmp/mysql.{port}.sock'.format(port=mysql_port)

    MySQLdb, config = try_import(
        'MySQLdb', request, pypi_package='MySQL-python'
    )

    mysql_conn = MySQLdb.connect(
        host=mysql_host,
        unix_socket=unixsocket,
        user=mysql_user,
        passwd=mysql_passwd,
    )

    mysql_conn.query(
        '''CREATE DATABASE {name}
        DEFAULT CHARACTER SET {charset}
        DEFAULT COLLATE {collation}'''.format(
            name=mysql_db, charset=charset, collation=collation
        )
    )
    mysql_conn.query('USE %s' % mysql_db)

    def drop_database():
        mysql_conn.query('DROP DATABASE IF EXISTS %s' % mysql_db)
        mysql_conn.close()

    request.addfinalizer(drop_database)

    return mysql_conn
def elasticsearch_fixture(request):
    """Elasticsearch client fixture."""
    get_process_fixture(request, process_fixture_name)
    elasticsearch, _ = try_import('elasticsearch', request)
    client = elasticsearch.Elasticsearch(hosts=hosts)

    def drop_indexes():
        client.indices.delete(index='*')

    request.addfinalizer(drop_indexes)

    return client
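# Usage sketch (assumption): assuming the factory is exposed as an
# `elasticsearch` fixture, the client can index and fetch documents; all indexes
# are deleted by the finalizer above. Fixture, index, and doc_type names are
# hypothetical, and the call shape matches the pre-8.x elasticsearch-py clients
# contemporary with this code.
def test_elasticsearch_fixture(elasticsearch):
    elasticsearch.index(index='demo', doc_type='doc', id=1,
                        body={'name': 'example'}, refresh=True)
    result = elasticsearch.get(index='demo', doc_type='doc', id=1)
    assert result['_source']['name'] == 'example'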
def redisdb_factory(request):
    """
    #. Load required process fixture.
    #. Get redis module and config.
    #. Connect to redis.
    #. Flush database after tests.

    :param FixtureRequest request: fixture request object
    :rtype: redis.client.Redis
    :returns: Redis client
    """
    get_process_fixture(request, process_fixture_name)

    redis, config = try_import('redis', request)

    redis_host = host or config.redis.host
    redis_port = port or config.redis.port
    redis_db = db or config.redis.db

    redis_client = redis.Redis(redis_host, redis_port, redis_db)
    request.addfinalizer(redis_client.flushall)

    return redis_client
def redisdb_factory(request):
    """
    #. Load required process fixture.
    #. Get redis module and config.
    #. Connect to redis.
    #. Flush database after tests.

    :param FixtureRequest request: fixture request object
    :rtype: redis.client.Redis
    :returns: Redis client
    """
    request.getfuncargvalue(process_fixture_name)

    redis, config = try_import('redis', request)

    redis_host = host or config.redis.host
    redis_port = port or config.redis.port
    redis_db = db or config.redis.db

    redis_client = redis.Redis(redis_host, redis_port, redis_db)
    request.addfinalizer(redis_client.flushall)

    return redis_client
def mongodb_factory(request):
    """
    #. Get pymongo module and config.
    #. Get connection to mongo.
    #. Drop collections before and after tests.

    :param FixtureRequest request: fixture request object
    :rtype: pymongo.connection.Connection
    :returns: connection to mongo database
    """
    proc_fixture = get_process_fixture(request, process_fixture_name)

    pymongo, config = try_import("pymongo", request)

    mongo_host = proc_fixture.host
    mongo_port = proc_fixture.port

    try:
        client = pymongo.MongoClient
    except AttributeError:
        client = pymongo.Connection

    mongo_conn = client(mongo_host, mongo_port)

    def drop():
        for db in mongo_conn.database_names():
            for collection_name in mongo_conn[db].collection_names():
                # Do not delete any of Mongo "system" collections
                if not collection_name.startswith("system."):
                    mongo_conn[db][collection_name].drop()

    drop()

    request.addfinalizer(drop)

    return mongo_conn
def mongodb_factory(request):
    """
    #. Get pymongo module and config.
    #. Get connection to mongo.
    #. Drop collections before and after tests.

    :param FixtureRequest request: fixture request object
    :rtype: pymongo.connection.Connection
    :returns: connection to mongo database
    """
    proc_fixture = get_process_fixture(request, process_fixture_name)

    pymongo, config = try_import('pymongo', request)

    mongo_host = proc_fixture.host
    mongo_port = proc_fixture.port

    try:
        client = pymongo.MongoClient
    except AttributeError:
        client = pymongo.Connection

    mongo_conn = client(mongo_host, mongo_port)

    def drop():
        for db in mongo_conn.database_names():
            for collection_name in mongo_conn[db].collection_names():
                # Do not delete any of Mongo "system" collections
                if not collection_name.startswith('system.'):
                    mongo_conn[db][collection_name].drop()

    drop()

    request.addfinalizer(drop)

    return mongo_conn