def test_static_middleware(monkeypatch):
    """Warehouse should wrap the WSGI app in WhiteNoise, serving the
    compiled static directory at /static/ with a one-year max-age."""
    whitenoise = pretend.call_recorder(lambda app, root, prefix, max_age: app)
    monkeypatch.setattr(application, "WhiteNoise", whitenoise)

    config = os.path.join(os.path.dirname(__file__), "test_config.yml")
    Warehouse.from_yaml(os.path.abspath(config))

    static_root = os.path.abspath(
        os.path.join(os.path.dirname(warehouse.__file__), "static", "compiled"),
    )
    assert whitenoise.calls == [
        pretend.call(
            mock.ANY,
            root=static_root,
            prefix="/static/",
            max_age=31557600,
        ),
    ]
def test_header_rewrite_middleware(monkeypatch):
    """The app should install HeaderRewriterFix adding an X-Powered-By
    header that carries the warehouse version and build."""
    rewriter = pretend.call_recorder(lambda app, **kw: app)
    monkeypatch.setattr(application, "HeaderRewriterFix", rewriter)

    config = os.path.join(os.path.dirname(__file__), "test_config.yml")
    Warehouse.from_yaml(os.path.abspath(config))

    expected_header = (
        "X-Powered-By",
        "Warehouse {__version__} ({__build__})".format(
            __version__=warehouse.__version__,
            __build__=warehouse.__build__,
        ),
    )
    assert rewriter.calls == [
        pretend.call(mock.ANY, add_headers=[expected_header]),
    ]
def test_static_middleware(monkeypatch):
    """Static files should be exposed through SharedDataMiddleware mapping
    /static/ onto the package's static directory."""
    shared_data = pretend.call_recorder(lambda app, c: app)
    monkeypatch.setattr(application, "SharedDataMiddleware", shared_data)

    config = os.path.join(os.path.dirname(__file__), "test_config.yml")
    Warehouse.from_yaml(os.path.abspath(config))

    static_dir = os.path.abspath(
        os.path.join(os.path.dirname(warehouse.__file__), "static"),
    )
    assert shared_data.calls == [
        pretend.call(mock.ANY, {"/static/": static_dir}),
    ]
def test_static_middleware(monkeypatch):
    """SharedDataMiddleware must be configured with a single /static/
    mapping pointing at warehouse's static directory."""
    recorder = pretend.call_recorder(lambda app, c: app)
    monkeypatch.setattr(application, "SharedDataMiddleware", recorder)

    Warehouse.from_yaml(
        os.path.abspath(
            os.path.join(os.path.dirname(__file__), "test_config.yml"),
        ),
    )

    expected_mapping = {
        "/static/": os.path.abspath(
            os.path.join(os.path.dirname(warehouse.__file__), "static"),
        ),
    }
    assert recorder.calls == [pretend.call(mock.ANY, expected_mapping)]
def test_yaml_instantiation():
    """Building a Warehouse app from the bundled test config must not raise."""
    config = os.path.join(os.path.dirname(__file__), "test_config.yml")
    Warehouse.from_yaml(os.path.abspath(config))
def test_yaml_instantiation():
    """Smoke test: Warehouse.from_yaml accepts the test configuration file."""
    here = os.path.dirname(__file__)
    Warehouse.from_yaml(os.path.abspath(os.path.join(here, "test_config.yml")))
def test_guard_middleware(monkeypatch):
    """A ContentSecurityPolicy middleware is installed by default."""
    csp = pretend.call_recorder(lambda app, policy: app)
    monkeypatch.setattr(guard, "ContentSecurityPolicy", csp)

    config = os.path.join(os.path.dirname(__file__), "test_config.yml")
    Warehouse.from_yaml(os.path.abspath(config))

    assert csp.calls == [pretend.call(mock.ANY, mock.ANY)]
def test_guard_middleware(monkeypatch):
    """By default Warehouse wraps the app in a Content-Security-Policy guard."""
    recorder = pretend.call_recorder(lambda app, policy: app)
    monkeypatch.setattr(guard, "ContentSecurityPolicy", recorder)

    here = os.path.dirname(__file__)
    Warehouse.from_yaml(os.path.abspath(os.path.join(here, "test_config.yml")))

    assert recorder.calls == [pretend.call(mock.ANY, mock.ANY)]
def test_guard_middleware_theme_debug(monkeypatch):
    """With theme_debug enabled, the CSP middleware is skipped entirely."""
    csp = pretend.call_recorder(lambda app, policy: app)
    monkeypatch.setattr(guard, "ContentSecurityPolicy", csp)

    config = os.path.join(os.path.dirname(__file__), "test_config.yml")
    Warehouse.from_yaml(
        os.path.abspath(config),
        override={"theme_debug": True},
    )

    assert csp.calls == []
def test_guard_middleware_theme_debug(monkeypatch):
    """theme_debug=True must disable the Content-Security-Policy guard."""
    recorder = pretend.call_recorder(lambda app, policy: app)
    monkeypatch.setattr(guard, "ContentSecurityPolicy", recorder)

    here = os.path.dirname(__file__)
    Warehouse.from_yaml(
        os.path.abspath(os.path.join(here, "test_config.yml")),
        override={"theme_debug": True},
    )

    assert recorder.calls == []
def dbapp(database, _database):
    """Build a Warehouse app bound to the test database engine and URL."""
    from warehouse.application import Warehouse

    overrides = {"database": {"url": _database}}
    return Warehouse.from_yaml(override=overrides, engine=database)
def _database_url(request):
    """Prepare the test database and return its connection URL.

    Resolves the database URL (CLI ``--database-url`` option, then the
    WAREHOUSE_DATABASE_URL environment variable, then a local default),
    wipes and recreates the public schema, installs the required
    extensions, and runs all alembic migrations.

    Fix: this variant never generated a random database name, so the
    ``_get_name``/``_check_name`` helpers it carried were dead code copied
    from the create-database variant of this fixture; they are removed.
    """
    from warehouse.application import Warehouse

    database_url_default = 'postgresql://localhost/test_warehouse'
    database_url_environ = os.environ.get("WAREHOUSE_DATABASE_URL")
    database_url_option = request.config.getvalue("database_url")

    # NOTE(review): with a non-empty hardcoded default this skip can never
    # trigger; kept for parity with configurations that drop the default.
    if (not database_url_default and not database_url_environ
            and not database_url_option):
        pytest.skip("No database provided")

    # Highest-priority source wins: CLI option, environment, default.
    database_url = (
        database_url_option or database_url_environ or database_url_default
    )

    # NullPool ensures no connections linger once setup is done.
    engine = sqlalchemy.create_engine(
        database_url,
        poolclass=sqlalchemy.pool.NullPool,
    )

    app = Warehouse.from_yaml(
        override={
            "database": {"url": database_url},
            "search": {"hosts": []},
        },
        engine=engine,
        redis=False,
    )

    # Start from a clean schema and install the extensions the models need.
    with app.engine.connect() as conn:
        conn.execute("DROP SCHEMA public CASCADE")
        conn.execute("CREATE SCHEMA public")
        conn.execute("CREATE EXTENSION IF NOT EXISTS citext")
        conn.execute('CREATE EXTENSION IF NOT EXISTS "uuid-ossp"')

    # Bring the schema up to date with the latest migrations.
    alembic_cfg = alembic.config.Config()
    alembic_cfg.set_main_option(
        "script_location",
        "warehouse:migrations",
    )
    alembic_cfg.set_main_option("url", app.config.database.url)
    alembic.command.upgrade(alembic_cfg, "head")

    engine.dispose()

    return database_url
def app():
    """Return a Warehouse app whose database backend always fails.

    Both connect() and execute() on the stub engine raise immediately, so
    tests using this fixture must never touch the database; redis is
    replaced with ErrorRedis for the same reason.
    """
    from warehouse.application import Warehouse

    def connect():
        raise RuntimeError(
            "Cannot access the database through the app fixture")

    overrides = {
        "site": {
            "access_token": "testing",
            "hosts": "localhost",
        },
        "database": {"url": "postgresql:///nonexistent"},
        "redis": {
            "downloads": "redis://nonexistent/0",
            "sessions": "redis://nonexistent/0",
        },
        "search": {"hosts": []},
    }
    return Warehouse.from_yaml(
        override=overrides,
        engine=pretend.stub(connect=connect, execute=connect),
        redis_class=ErrorRedis,
    )
def app():
    """App fixture wired to stub backends; any engine.connect() raises."""
    from warehouse.application import Warehouse

    def connect():
        raise RuntimeError(
            "Cannot access the database through the app fixture"
        )

    stub_engine = pretend.stub(connect=connect)
    return Warehouse.from_yaml(
        override={
            "site": {
                "access_token": "testing",
                "hosts": "localhost",
            },
            "database": {"url": "postgresql:///nonexistant"},
            "redis": {
                "downloads": "redis://nonexistant/0",
                "sessions": "redis://nonexistant/0",
            },
            "search": {"hosts": []},
        },
        engine=stub_engine,
        redis_class=ErrorRedis,
    )
def test_camo_settings(monkeypatch):
    """Configuring camo should add its host to the CSP img-src whitelist."""
    csp = pretend.call_recorder(lambda app, policy: app)
    monkeypatch.setattr(guard, "ContentSecurityPolicy", csp)

    config = os.path.join(os.path.dirname(__file__), "test_config.yml")
    Warehouse.from_yaml(
        os.path.abspath(config),
        override={"camo": {"url": "https://camo.example.com/", "key": "skey"}},
    )

    assert csp.calls == [pretend.call(mock.ANY, mock.ANY)]
    img_sources = set(csp.calls[0].args[1]["img-src"])
    assert img_sources == {
        "'self'",
        "https://camo.example.com",
        "https://secure.gravatar.com",
    }
def test_shared_static():
    """In debug mode the WSGI app is wrapped in SharedDataMiddleware."""
    config = os.path.join(os.path.dirname(__file__), "test_config.yml")
    app = Warehouse.from_yaml(
        os.path.abspath(config),
        override={"debug": True},
    )
    assert isinstance(app.wsgi_app, SharedDataMiddleware)
def test_sentry_middleware(monkeypatch):
    """A sentry DSN in the config should build a raven Client with that DSN
    and wrap the app in the Sentry middleware."""
    sentry_recorder = pretend.call_recorder(lambda app, client: app)
    client_obj = pretend.stub()
    client_recorder = pretend.call_recorder(lambda **kw: client_obj)
    monkeypatch.setattr(application, "Sentry", sentry_recorder)
    monkeypatch.setattr(application, "Client", client_recorder)

    config = os.path.join(os.path.dirname(__file__), "test_config.yml")
    Warehouse.from_yaml(
        os.path.abspath(config),
        override={"sentry": {"dsn": "http://*****:*****@example.com/1"}},
    )

    assert sentry_recorder.calls == [pretend.call(mock.ANY, client_obj)]
    assert client_recorder.calls == [
        pretend.call(dsn="http://*****:*****@example.com/1"),
    ]
def dbapp(database, _database_url):
    """Warehouse app bound to the test engine, with search and redis off."""
    from warehouse.application import Warehouse

    overrides = {
        "database": {"url": _database_url},
        "search": {"hosts": []},
    }
    return Warehouse.from_yaml(override=overrides, engine=database, redis=False)
def dbapp(engine):
    """App fixture bound to the given engine, with failing redis backends."""
    from warehouse.application import Warehouse

    overrides = {
        "site": {"access_token": "testing", "hosts": "localhost"},
        "redis": {
            "downloads": "redis://nonexistant/0",
            "sessions": "redis://nonexistant/0",
        },
        "search": {"hosts": []},
    }
    return Warehouse.from_yaml(
        override=overrides,
        engine=engine,
        redis_class=ErrorRedis,
    )
def app():
    """App whose engine raises on connect; the database must not be touched."""
    from warehouse.application import Warehouse

    def connect():
        raise RuntimeError(
            "Cannot access the database through the app fixture"
        )

    return Warehouse.from_yaml(
        override={"database": {"url": "postgresql:///nonexistant"}},
        engine=pretend.stub(connect=connect),
    )
def dbapp(database, _database_url):
    """Warehouse app using the migrated test database; search/redis disabled."""
    from warehouse.application import Warehouse

    return Warehouse.from_yaml(
        override={
            "database": {"url": _database_url},
            "search": {"hosts": []},
        },
        engine=database,
        redis=False,
    )
def dbapp(database, engine):
    """App bound to the real test database engine, with stub redis/search."""
    from warehouse.application import Warehouse

    overrides = {
        "site": {"hosts": "localhost"},
        "database": {"url": database},
        "redis": {
            "downloads": "redis://nonexistant/0",
            "sessions": "redis://nonexistant/0",
        },
        "search": {"hosts": []},
    }
    return Warehouse.from_yaml(
        override=overrides,
        engine=engine,
        redis_class=ErrorRedis,
    )
def test_passlib_context():
    """The app's passlib context should default to bcrypt_sha256 and keep
    the expected scheme list."""
    config = os.path.join(os.path.dirname(__file__), "test_config.yml")
    app = Warehouse.from_yaml(os.path.abspath(config))

    expected = {
        "schemes": [
            "bcrypt_sha256",
            "bcrypt",
            "django_bcrypt",
            "unix_disabled",
        ],
        "default": "bcrypt_sha256",
        "deprecated": ["auto"],
    }
    assert app.passlib.to_dict() == expected
def dbapp(engine):
    """App fixture with failing redis backends, bound to the given engine."""
    from warehouse.application import Warehouse

    return Warehouse.from_yaml(
        override={
            "site": {"access_token": "testing", "hosts": "localhost"},
            "redis": {
                "downloads": "redis://nonexistent/0",
                "sessions": "redis://nonexistent/0",
            },
            "search": {"hosts": []},
        },
        engine=engine,
        redis_class=ErrorRedis,
    )
def app():
    """App whose stub engine raises on connect; redis and search disabled."""
    from warehouse.application import Warehouse

    def connect():
        raise RuntimeError(
            "Cannot access the database through the app fixture")

    stub_engine = pretend.stub(connect=connect)
    return Warehouse.from_yaml(
        override={
            "database": {"url": "postgresql:///nonexistant"},
            "search": {"hosts": []},
        },
        engine=stub_engine,
        redis=False,
    )
def _database_url(request):
    """Create a throwaway, randomly-named test database and return its URL.

    The database is created from the configured base URL, migrated to the
    latest alembic revision, and dropped again by a pytest finalizer at
    the end of the session.
    """
    from warehouse.application import Warehouse

    def _get_name():
        # Random 7-char suffix for a per-session database name.
        tag = "".join(
            random.choice(string.ascii_lowercase + string.digits)
            for x in range(7))
        return "warehousetest_{}".format(tag)

    def _check_name(engine, name):
        # True when `name` is not already taken by an existing database.
        with engine.connect() as conn:
            results = conn.execute(
                "SELECT datname FROM pg_database WHERE datistemplate = false")
            return name not in [r[0] for r in results]

    database_url_default = 'postgresql://localhost/test_warehouse'
    database_url_environ = os.environ.get("WAREHOUSE_DATABASE_URL")
    database_url_option = request.config.getvalue("database_url")

    # NOTE(review): the hardcoded non-empty default means this skip can
    # never trigger as written — confirm whether that is intentional.
    if (not database_url_default and not database_url_environ
            and not database_url_option):
        pytest.skip("No database provided")

    # Configure our engine so that we can create a database.
    # Priority: CLI option, then environment, then the default.
    database_url = (database_url_option or database_url_environ
                    or database_url_default)
    # AUTOCOMMIT is needed for CREATE/DROP DATABASE statements.
    engine = sqlalchemy.create_engine(database_url,
                                      isolation_level="AUTOCOMMIT",
                                      poolclass=sqlalchemy.pool.NullPool)

    # Make a random database name that doesn't exist
    name = _get_name()
    while not _check_name(engine, name):
        name = _get_name()

    # Create the database
    with engine.connect() as conn:
        conn.execute("CREATE DATABASE {} ENCODING 'UTF8'".format(name))

    # Create a new database_url with the name replaced
    parsed = urllib_parse.urlparse(database_url)
    test_database_url = urllib_parse.urlunparse(
        parsed[:2] + ("/" + name, ) + parsed[3:])

    # Create the database schema
    test_engine = sqlalchemy.create_engine(
        test_database_url,
        poolclass=sqlalchemy.pool.NullPool,
    )
    app = Warehouse.from_yaml(
        override={
            "database": {"url": test_database_url},
            "search": {"hosts": []},
        },
        engine=test_engine,
        redis=False,
    )
    with app.engine.connect() as conn:
        conn.execute("CREATE EXTENSION IF NOT EXISTS citext")
    # Run all migrations so the schema matches the current models.
    alembic_cfg = alembic.config.Config()
    alembic_cfg.set_main_option(
        "script_location",
        app.config.database.migrations,
    )
    alembic_cfg.set_main_option("url", app.config.database.url)
    alembic.command.upgrade(alembic_cfg, "head")

    test_engine.dispose()

    # Drop the database at the end of the session
    def _drop_database():
        with engine.connect() as conn:
            # Terminate all open connections to the test database
            conn.execute(
                """SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE datname = %s """,
                [name],
            )
            conn.execute("DROP DATABASE {}".format(name))

    request.addfinalizer(_drop_database)

    return test_database_url
def _database_url(request):
    """Create a uniquely-named test database, migrate it, and return its URL.

    A pytest finalizer terminates any remaining connections and drops the
    database at the end of the session.
    """
    from warehouse.application import Warehouse

    def _get_name():
        # Random 7-character suffix keeps parallel sessions from colliding.
        suffix = "".join(
            random.choice(string.ascii_lowercase + string.digits)
            for _ in range(7)
        )
        return "warehousetest_{}".format(suffix)

    def _check_name(engine, name):
        # True when `name` is not already taken by an existing database.
        with engine.connect() as conn:
            rows = conn.execute(
                "SELECT datname FROM pg_database WHERE datistemplate = false"
            )
            existing = [row[0] for row in rows]
        return name not in existing

    database_url_default = 'postgresql://localhost/test_warehouse'
    database_url_environ = os.environ.get("WAREHOUSE_DATABASE_URL")
    database_url_option = request.config.getvalue("database_url")

    if (not database_url_default and not database_url_environ
            and not database_url_option):
        pytest.skip("No database provided")

    # CLI option wins, then the environment, then the hardcoded default.
    database_url = (
        database_url_option or database_url_environ or database_url_default
    )

    # AUTOCOMMIT is required for CREATE/DROP DATABASE statements.
    engine = sqlalchemy.create_engine(
        database_url,
        isolation_level="AUTOCOMMIT",
        poolclass=sqlalchemy.pool.NullPool
    )

    # Keep generating names until one is free.
    db_name = _get_name()
    while not _check_name(engine, db_name):
        db_name = _get_name()

    with engine.connect() as conn:
        conn.execute("CREATE DATABASE {} ENCODING 'UTF8'".format(db_name))

    # Swap the path component of the base URL for the new database name.
    url_parts = urllib_parse.urlparse(database_url)
    test_database_url = urllib_parse.urlunparse(
        url_parts[:2] + ("/" + db_name,) + url_parts[3:]
    )

    test_engine = sqlalchemy.create_engine(
        test_database_url,
        poolclass=sqlalchemy.pool.NullPool,
    )

    app = Warehouse.from_yaml(
        override={
            "database": {"url": test_database_url},
            "search": {"hosts": []},
        },
        engine=test_engine,
        redis=False,
    )

    with app.engine.connect() as conn:
        conn.execute("CREATE EXTENSION IF NOT EXISTS citext")

    # Run every migration so the schema matches the current models.
    alembic_cfg = alembic.config.Config()
    alembic_cfg.set_main_option(
        "script_location",
        app.config.database.migrations,
    )
    alembic_cfg.set_main_option("url", app.config.database.url)
    alembic.command.upgrade(alembic_cfg, "head")

    test_engine.dispose()

    def _drop_database():
        # Kick every open connection off the test database, then drop it.
        with engine.connect() as conn:
            conn.execute(
                """SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE datname = %s """,
                [db_name],
            )
            conn.execute("DROP DATABASE {}".format(db_name))

    request.addfinalizer(_drop_database)

    return test_database_url