# conftest snippet: sanauth application fixtures backed by throwaway
# PostgreSQL and Redis processes from pytest-postgresql / pytest-redis.
import pytest
from pytest_postgresql import factories as psql_factories
from pytest_redis import factories as redis_factories
from sanauth.core import sanauth

# Server-process fixtures plus client fixtures bound to them by fixture name.
postgresql_my_proc = psql_factories.postgresql_proc()
postgresql_my = psql_factories.postgresql('postgresql_my_proc')
redis_my_proc = redis_factories.redis_proc()
redis_my = redis_factories.redisdb('redis_my_proc')


@pytest.fixture
def db_settings(request):
    """Return a (postgres_settings, redis_settings) tuple for sanauth().

    NOTE(review): ports 5433/6380 are hard-coded here; presumably they match
    the process fixtures' configuration — confirm against the proc factories.
    """
    return dict(
        user='******',
        password='',
        host='127.0.0.1',
        port=5433,
    ), dict(address=('127.0.0.1', 6380), minsize=1, maxsize=10)


@pytest.fixture
def app(db_settings, postgresql_my_proc, postgresql_my, redis_my_proc, redis_my):
    """Build a sanauth application wired to the test databases.

    The process/client fixtures are requested only so the servers are up
    before the app connects via the settings from ``db_settings``.
    """
    pg_settings, redis_config = db_settings
    sanauth_app = sanauth(pg_cfg=pg_settings, r_cfg=redis_config)
    yield sanauth_app


@pytest.fixture
def app_fixture(loop, app, test_server):
    # NOTE(review): snippet truncated here — the body of app_fixture is not
    # visible in this view.
# conftest/test snippet: under CI an external Redis is assumed to be running,
# so the pytest-redis process fixtures are replaced with no-op stand-ins.
# NOTE(review): os, pytest, Redis, path_to_redis, invoke_for_each, square,
# Worker, DEFAULT_QUEUE_NAME, get_aggregate_status and get_results are
# imported/defined outside this view.
if os.environ.get("CI"):
    @pytest.fixture
    def dummy_fixture():
        # Placeholder for the server-process fixture: CI provides Redis itself.
        pass

    @pytest.fixture
    def conn_fixture():
        # Default Redis() client — presumably localhost:6379; TODO confirm
        # this matches the CI service definition.
        return Redis()

    new_redis_proc = dummy_fixture
    redis_conn = conn_fixture
else:
    from pytest_redis.factories import redis_proc, redisdb

    # Locally, spawn a real redis-server and hand out a client bound to it.
    new_redis_proc = redis_proc(executable=path_to_redis())
    redis_conn = redisdb("new_redis_proc")


def test_invoke_for_each(new_redis_proc, redis_conn):
    """Queue one job per item, run a burst worker, and verify the squares."""
    items = {"item_{}".format(i): i for i in range(10)}
    invoke_for_each(square, items, connection=redis_conn)
    assert get_aggregate_status(connection=redis_conn) == "queued"
    Worker([DEFAULT_QUEUE_NAME], connection=redis_conn).work(burst=True)
    assert get_aggregate_status(connection=redis_conn) == "finished"
    assert get_results(connection=redis_conn) == {
        k: x * x for k, x in items.items()
        # NOTE(review): snippet truncated here — closing braces not visible.
# NOTE(review): fragment — these parser.addoption calls are the tail of a
# pytest_addoption(parser) hook whose header (and the _help_* strings, and
# `factories`) are outside this view; written at top level for readability.
parser.addoption("--redis-save", action="store", dest="redis_save", help=_help_save)
parser.addoption("--redis-compression", action="store_true", dest="redis_compression", help=_help_compress)
parser.addoption("--redis-rdbchecksum", action="store_true", dest="redis_rdbchecksum", help=_help_rdbchecksum)
parser.addoption("--redis-syslog", action="store_true", dest="redis_syslog", help=_help_syslog)
parser.addoption("--redis-client-decode", action="store_true", dest="redis_decode", help=_help_decode)
parser.addoption("--redis-datadir", action="store", dest="redis_datadir", help=_help_datadir)

# Default fixtures exported by the plugin: a spawned server process, a
# "no process" variant that attaches to an already-running server, and a
# client bound to the spawned server by fixture name.
redis_proc = factories.redis_proc()
redis_nooproc = factories.redis_noproc()
redisdb = factories.redisdb("redis_proc")
# pylint:enable=invalid-name
# NOTE(review): fragment starts mid-expression — the head of this list of
# (start, end) Unicode codepoint ranges, and the enclosing function that
# builds `alphabet` and returns a random string, are outside this view.
# random, which, opt, pytest and redis_factories are also defined elsewhere.
        0x00A1, 0x00AC), (0x00AE, 0x00FF), (0x0100, 0x017F), (0x0180, 0x024F),
        (0x2C60, 0x2C7F), (0x16A0, 0x16F0), (0x0370, 0x0377), (0x037A, 0x037E),
        (0x0384, 0x038A), (0x038C, 0x038C)]
        for c in range(r[0], r[1] + 1)
    ]):  # pragma: no cover
    return ''.join(random.choice(alphabet) for i in range(length))


# Spawn whichever redis-server is found on PATH for the test session.
redis_proc = redis_factories.redis_proc(executable=which('redis-server'))


@pytest.fixture(scope='session', autouse=True)
def app(postgresql_proc, redis_proc):
    # HTTP client limits plus a fake YouTube OAuth app config for the tests.
    opt.define('connect_timeout', default=3, group='httpclient')
    opt.define('request_timeout', default=3, group='httpclient')
    opt.define('max_redirects', default=1, group='httpclient')
    opt.define('youtube', group='app', default={
        'key': 'yt-key',
        'secret': 'yt-secret',
        'redirect_uri': 'yt.to/auth',
        'api_key': 'yt-api-key'
    })
    # NOTE(review): snippet truncated here — rest of the fixture not visible.
# NOTE(review): fragment starts mid-literal — the head of this mocked
# MediaWiki-style API response (and the module imports) is outside this view.
            "pages": [
                {
                    "pageid": 39027,
                    "ns": 0,
                    "title": "Mike Tyson",
                    "links": [
                        {"ns": 0, "title": "Albany, New York"},
                        {"ns": 0, "title": "Alcoholism"},
                    ],
                }
            ]
        },
    }


# Containerised Redis reachable at host "redis".
# NOTE(review): the client fixtures below bind to "redis_nooproc", not to this
# redis_proc — verify a redis_nooproc fixture exists elsewhere in the project.
redis_proc = factories.redis_proc(host="redis", port=6379, logsdir="/tmp")
# One client per logical Redis database used by the crawler (db 1-4).
redis_mock_status = factories.redisdb("redis_nooproc", 1)
redis_mock_visited = factories.redisdb("redis_nooproc", 2)
redis_mock_scores = factories.redisdb("redis_nooproc", 3)
redis_mock_traversed = factories.redisdb("redis_nooproc", 4)


@pytest.fixture()
def celery_mock_find(
    monkeypatch,
    celery_app,
    redis_mock_status,
    redis_mock_visited,
    redis_mock_scores,
    redis_mock_traversed,
):
    # NOTE(review): snippet truncated here — fixture body not visible.
# conftest snippet for the onegov test suite.
# NOTE(review): pytest, factories, find_executable and MonkeyPatch are
# imported outside this view.
from splinter import Browser
from sqlalchemy import create_engine
from onegov_testing.postgresql import Postgresql
from uuid import uuid4
from webdriver_manager.chrome import ChromeDriverManager

try:
    from elasticsearch import Elasticsearch
except ImportError:
    # Stub that fails loudly if Elasticsearch is actually used.
    # NOTE(review): `assert` statements are stripped under `python -O`; an
    # explicit raise would be safer here — flagged, not changed.
    def Elasticsearch(*args, **kwargs):
        assert False, "Elasticsearch is not installed"

# Session Redis server, using whichever redis-server is found on PATH.
redis_path = find_executable('redis-server')
redis_server = factories.redis_proc(host='127.0.0.1', executable=redis_path)


@pytest.fixture(scope='session')
def monkeysession(request):
    """Session-scoped MonkeyPatch, undone when the session ends."""
    mp = MonkeyPatch()
    yield mp
    mp.undo()


@pytest.fixture(scope='session', autouse=True)
def scan_onegov():
    """Import-scan all onegov modules once per session (tests excluded)."""
    import importscan
    import onegov
    importscan.scan(onegov, ignore=['.test', '.tests'])
# -*- coding: utf-8 -*- """ Test module for redis.py Created on 28 June 2016 @author: Charlie Lewis, dgrossman, MShel """ import logging from pytest_redis import factories from poseidon.helpers.endpoint import endpoint_factory from poseidon.helpers.redis import PoseidonRedisClient redis_my_proc = factories.redis_proc(port=None) redis_my = factories.redisdb('redis_my_proc') def test_redis_smoke(redis_my, redis_my_proc): logger = logging.getLogger('test') logger.setLevel(logging.DEBUG) prc = PoseidonRedisClient(logger, host='localhost', port=redis_my_proc.port) prc.connect() prc.r.flushall() endpoint = endpoint_factory('foo') endpoint.endpoint_data = { 'tenant': 'foo', 'mac': '00:00:00:00:00:00', 'segment': 'foo',
# NOTE(review): fragment — pg_factories, redis_factories, shutil, port_for,
# pytest, StatsDStub, download_kafka, make_zookeeper_process and
# make_kafka_server come from outside this view; a @pytest.fixture decorator
# may precede kafka_server above the visible region.
def kafka_server(kafka_port):
    """Resolve the (host, port) pair of the test Kafka broker."""
    _, port = kafka_port
    return "localhost", port


@pytest.fixture
def statsd_server():
    """Run a stub StatsD server on a random free port for one test."""
    port = port_for.select_random(None)
    server = StatsDStub(port=port)
    server.start()
    yield server
    server.stop()


postgres_server = pg_factories.postgresql_proc(password="******")
# Generous timeout: Redis startup shares a slow CI host with Kafka/ZooKeeper.
redis_server = redis_factories.redis_proc(
    executable=shutil.which("redis-server"), timeout=3600)

# Kafka needs ZooKeeper; both are started from a downloaded distribution.
KAFKA_BIN = download_kafka()
# NOTE(review): the template's internal line breaks were lost in this view and
# are reconstructed here — confirm against the original file.
zookeeper_server = make_zookeeper_process(
    str(KAFKA_BIN / "zookeeper-server-start.sh"),
    zk_config_template="""
dataDir={zk_data_dir}
clientPort={zk_port}
maxClientCnxns=0
admin.enableServer=false""",
)
kafka_port = make_kafka_server(
    kafka_bin=str(KAFKA_BIN / "kafka-server-start.sh"),
    zookeeper_fixture_name="zookeeper_server",
)
# conftest snippet for glide's RQ extension tests.
# NOTE(review): redis_running, factories, test_config and pytest are presumably
# provided by the wildcard imports below — confirm.
from xprocess import ProcessStarter
from glide import *
from glide.extensions.rq import *
from ..conftest import noop_fixture
from ..test_utils import *

# Hack: allows tests to run in CI environment too
if redis_running():
    print("External Redis server detected")
    # Reuse the externally-running server; the fixture becomes a no-op.
    redis_server = noop_fixture
else:
    print("Starting Redis server")
    redis_server = factories.redis_proc(
        executable=test_config.get("RedisExecutable", "/usr/bin/redis-server"),
        host=test_config.get("RedisHost", "localhost"),
        port=test_config.get("RedisPort", 6379),
    )


@pytest.fixture(scope="session")
def rq_worker(xprocess):
    """Ensure an RQ worker subprocess runs for the whole test session."""
    class Starter(ProcessStarter):
        # xprocess waits for this pattern in the worker's output before
        # considering it started.
        pattern = "Listening on"
        args = ["rq", "worker", "--with-scheduler"]
    name = "rq_worker"
    print("Ensuring RQ worker...")
    logfile = xprocess.ensure(name, Starter)
    yield
    print("Stopping RQ worker.")
# conftest snippet for the onegov test suite.
# NOTE(review): factories, find_executable, HTTPExecutorBase and
# HTTPConnection are imported outside this view.
from splinter import Browser
from sqlalchemy import create_engine
from onegov_testing.postgresql import Postgresql
from uuid import uuid4
from webdriver_manager.chrome import ChromeDriverManager

try:
    from elasticsearch import Elasticsearch
except ImportError:
    # Stub that fails loudly if Elasticsearch is actually used.
    # NOTE(review): `assert` statements are stripped under `python -O`; an
    # explicit raise would be safer here — flagged, not changed.
    def Elasticsearch(*args, **kwargs):
        assert False, "Elasticsearch is not installed"

# Session Redis server, using whichever redis-server is found on PATH.
redis_path = find_executable('redis-server')
redis_server = factories.redis_proc(host='127.0.0.1', executable=redis_path)


class HTTPExecutor(HTTPExecutorBase):
    # Implements https://github.com/ClearcodeHQ/mirakuru/issues/181
    # (configurable HTTP method for the startup readiness probe).
    def __init__(self, *args, **kwargs):
        # Pop the extra keyword before delegating so the base class does not
        # see an unknown argument; default probe method is HEAD.
        self.method = kwargs.pop('method', 'HEAD')
        super().__init__(*args, **kwargs)

    def after_start_check(self):
        """Check if defined URL returns expected status to a HEAD request."""
        try:
            conn = HTTPConnection(self.host, self.port)
            conn.request(self.method, self.url.path)
            # NOTE(review): snippet truncated here — the rest of the probe
            # (response handling and except clause) is not visible.
# conftest snippet for a Falcon API test suite.
import falcon
import pytest
import redis
from pytest_redis import factories
from falcon import testing

from api.app.app import api

# Fixed port 6379 so helpers like setup_redis() below can connect without
# fixture wiring.
redis_my_proc = factories.redis_proc(port=6379)
redis_my = factories.redisdb('redis_my_proc')


@pytest.fixture
def client():
    """Falcon test client wrapping the application under test."""
    return testing.TestClient(api)


def setup_redis():
    """Seed the test Redis (localhost:6379) with known IP/MAC test data."""
    r = redis.StrictRedis(host='localhost', port=6379, db=0,
                          decode_responses=True)
    r.sadd('ip_addresses', '10.0.0.1')
    # Deliberately includes the literal string 'None' — presumably to test
    # handling of missing addresses; TODO confirm against the tests.
    r.sadd('ip_addresses', 'None')
    r.sadd('ip_addresses', '2601:645:8200:a571:18fd:6640:9cd9:10d3')
    r.sadd('mac_addresses', '00:00:00:00:00:01')
    r.sadd('mac_addresses', '00:00:00:00:00:02')
    r.sadd('mac_addresses', '00:00:00:00:00:03')
    r.hset('p0f_10.0.0.1', mapping={
        # NOTE(review): snippet truncated here — mapping contents not visible.
# conftest/test snippet: under CI an external Redis is assumed to be running,
# so the pytest-redis process fixtures are replaced with no-op stand-ins.
# NOTE(review): os, pytest, Redis, path_to_redis, invoke_for_each, square,
# Worker, DEFAULT_QUEUE_NAME, get_aggregate_status and get_results are
# imported/defined outside this view.
if os.environ.get("CI"):
    @pytest.fixture
    def dummy_fixture():
        # Placeholder for the server-process fixture: CI provides Redis itself.
        pass

    @pytest.fixture
    def conn_fixture():
        # Default Redis() client — presumably localhost:6379; TODO confirm
        # this matches the CI service definition.
        return Redis()

    new_redis_proc = dummy_fixture
    redis_conn = conn_fixture
else:
    from pytest_redis.factories import redis_proc, redisdb

    # Locally, spawn a real redis-server and hand out a client bound to it.
    # NOTE(review): `logsdir` was removed in newer pytest-redis releases —
    # confirm the pinned version still accepts it.
    new_redis_proc = redis_proc(executable=path_to_redis(), logsdir="/tmp")
    redis_conn = redisdb("new_redis_proc")


def test_invoke_for_each(new_redis_proc, redis_conn):
    """Queue one job per item, run a burst worker, and verify the squares."""
    items = {"item_{}".format(i): i for i in range(10)}
    invoke_for_each(square, items, connection=redis_conn)
    assert get_aggregate_status(connection=redis_conn) == "queued"
    Worker([DEFAULT_QUEUE_NAME], connection=redis_conn).work(burst=True)
    assert get_aggregate_status(connection=redis_conn) == "finished"
    assert get_results(connection=redis_conn) == {
        k: x * x for k, x in items.items()
        # NOTE(review): snippet truncated here — closing braces not visible.
"""Tests main conftest file."""
import warnings

from pytest_redis import factories

# Escalate DeprecationWarnings from our own dependency stack into errors so
# deprecated usage fails the test run immediately.
warnings.filterwarnings(
    "error",
    category=DeprecationWarning,
    module="(_pytest|pytest|redis|path|mirakuru).*")

# pylint:disable=invalid-name
# A second Redis server on port 6381 plus a "no process" fixture pointed at
# the same port (it attaches to the already-running server instead of
# spawning one), each with its own redisdb client fixture.
redis_proc2 = factories.redis_proc(port=6381)
redis_nooproc2 = factories.redis_noproc(port=6381)

redisdb2 = factories.redisdb("redis_proc2")
redisdb2_noop = factories.redisdb("redis_nooproc2")
# pylint:enable=invalid-name