def app_client(
    sql_time_limit_ms=None,
    max_returned_rows=None,
    cors=False,
    config=None,
    filename="fixtures.db",
):
    """Yield a TestClient wired to a temporary Datasette fixtures database.

    Creates a fresh SQLite database plus a plugins directory inside a
    TemporaryDirectory, then yields a client whose ``.ds`` attribute is the
    underlying Datasette instance.  Keyword arguments override the default
    config values.
    """
    with tempfile.TemporaryDirectory() as tmpdir:
        filepath = os.path.join(tmpdir, filename)
        conn = sqlite3.connect(filepath)
        conn.executescript(TABLES)
        os.chdir(os.path.dirname(filepath))
        plugins_dir = os.path.join(tmpdir, "plugins")
        os.mkdir(plugins_dir)
        # Use context managers so the plugin files are flushed and closed
        # before Datasette scans the plugins directory (the original
        # open(...).write(...) leaked the handles)
        with open(os.path.join(plugins_dir, "my_plugin.py"), "w") as fp:
            fp.write(PLUGIN1)
        with open(os.path.join(plugins_dir, "my_plugin_2.py"), "w") as fp:
            fp.write(PLUGIN2)
        # Copy so we never mutate a config dict owned by the caller
        config = dict(config or {})
        config.update(
            {
                "default_page_size": 50,
                "max_returned_rows": max_returned_rows or 100,
                "sql_time_limit_ms": sql_time_limit_ms or 200,
            }
        )
        ds = Datasette(
            [filepath],
            cors=cors,
            metadata=METADATA,
            plugins_dir=plugins_dir,
            config=config,
        )
        # Register a sleep() SQL function so tests can trigger time limits
        ds.sqlite_functions.append(("sleep", 1, lambda n: time.sleep(float(n))))
        client = TestClient(ds.app().test_client)
        client.ds = ds
        yield client
def make_app_client(
    sql_time_limit_ms=None,
    max_returned_rows=None,
    cors=False,
    memory=False,
    config=None,
    filename="fixtures.db",
    is_immutable=False,
    extra_databases=None,
    inspect_data=None,
    static_mounts=None,
    template_dir=None,
):
    """Yield a TestClient for a temporary Datasette fixtures instance.

    The fixtures database (and any ``extra_databases``) are created inside a
    TemporaryDirectory along with a plugins directory.  ``is_immutable``
    switches the main database from the mutable ``files`` list to
    ``immutables``.  The yielded client exposes the Datasette instance as
    ``client.ds``.
    """
    with tempfile.TemporaryDirectory() as tmpdir:
        filepath = os.path.join(tmpdir, filename)
        if is_immutable:
            files = []
            immutables = [filepath]
        else:
            files = [filepath]
            immutables = []
        conn = sqlite3.connect(filepath)
        conn.executescript(TABLES)
        for sql, params in TABLE_PARAMETERIZED_SQL:
            with conn:
                conn.execute(sql, params)
        if extra_databases is not None:
            for extra_filename, extra_sql in extra_databases.items():
                extra_filepath = os.path.join(tmpdir, extra_filename)
                sqlite3.connect(extra_filepath).executescript(extra_sql)
                files.append(extra_filepath)
        os.chdir(os.path.dirname(filepath))
        plugins_dir = os.path.join(tmpdir, "plugins")
        os.mkdir(plugins_dir)
        # Close the plugin files promptly rather than leaking the handles
        with open(os.path.join(plugins_dir, "my_plugin.py"), "w") as fp:
            fp.write(PLUGIN1)
        with open(os.path.join(plugins_dir, "my_plugin_2.py"), "w") as fp:
            fp.write(PLUGIN2)
        # Copy to avoid mutating a config dict owned by the caller
        config = dict(config or {})
        config.update(
            {
                "default_page_size": 50,
                "max_returned_rows": max_returned_rows or 100,
                "sql_time_limit_ms": sql_time_limit_ms or 200,
            }
        )
        ds = Datasette(
            files,
            immutables=immutables,
            memory=memory,
            cors=cors,
            metadata=METADATA,
            plugins_dir=plugins_dir,
            config=config,
            inspect_data=inspect_data,
            static_mounts=static_mounts,
            template_dir=template_dir,
        )
        # sleep() SQL function lets tests exercise sql_time_limit_ms
        ds.sqlite_functions.append(("sleep", 1, lambda n: time.sleep(float(n))))
        client = TestClient(ds.app())
        client.ds = ds
        yield client
def make_app_client(
    sql_time_limit_ms=None,
    max_returned_rows=None,
    cors=False,
    memory=False,
    config=None,
    filename="fixtures.db",
    is_immutable=False,
    extra_databases=None,
    inspect_data=None,
    static_mounts=None,
    template_dir=None,
    metadata=None,
):
    """Yield a TestClient for a temporary Datasette fixtures instance.

    ``metadata`` overrides the module-level METADATA; the other keyword
    arguments feed straight through to the Datasette constructor or its
    config dictionary.  The yielded client carries the Datasette instance
    as ``client.ds``.
    """
    with tempfile.TemporaryDirectory() as tmpdir:
        filepath = os.path.join(tmpdir, filename)
        if is_immutable:
            files = []
            immutables = [filepath]
        else:
            files = [filepath]
            immutables = []
        conn = sqlite3.connect(filepath)
        conn.executescript(TABLES)
        for sql, params in TABLE_PARAMETERIZED_SQL:
            with conn:
                conn.execute(sql, params)
        if extra_databases is not None:
            for extra_filename, extra_sql in extra_databases.items():
                extra_filepath = os.path.join(tmpdir, extra_filename)
                sqlite3.connect(extra_filepath).executescript(extra_sql)
                files.append(extra_filepath)
        os.chdir(os.path.dirname(filepath))
        # Copy so a caller-supplied config dict is never mutated in place
        config = dict(config or {})
        config.update({
            "default_page_size": 50,
            "max_returned_rows": max_returned_rows or 100,
            "sql_time_limit_ms": sql_time_limit_ms or 200,
            # Default is 3 but this results in "too many open files"
            # errors when running the full test suite:
            "num_sql_threads": 1,
        })
        ds = Datasette(
            files,
            immutables=immutables,
            memory=memory,
            cors=cors,
            metadata=metadata or METADATA,
            plugins_dir=PLUGINS_DIR,
            config=config,
            inspect_data=inspect_data,
            static_mounts=static_mounts,
            template_dir=template_dir,
        )
        # sleep() SQL function lets tests exercise sql_time_limit_ms
        ds.sqlite_functions.append(
            ("sleep", 1, lambda n: time.sleep(float(n))))
        client = TestClient(ds.app())
        client.ds = ds
        yield client
def test_metadata_yaml(tmp_path_factory, filename):
    """The title from a metadata file found in config_dir should be served."""
    directory = tmp_path_factory.mktemp("yaml-config-dir")
    metadata_file = directory / filename
    metadata_file.write_text("title: Title from metadata", "utf-8")
    ds = Datasette([], config_dir=directory)
    client = _TestClient(ds.app())
    client.ds = ds
    response = client.get("/-/metadata.json")
    assert response.status == 200
    assert response.json == {"title": "Title from metadata"}
def make_app_client(database, metadata=None):
    """Return a test client for a Datasette serving *database* with the
    datasette-cldf templates directory installed."""
    templates = pathlib.Path(datasette_cldf.__file__).parent / 'templates'
    ds = Datasette(
        [database],
        immutables=[],
        metadata=metadata,
        template_dir=str(templates),
    )
    client = Client(ds.app())
    client.ds = ds
    return client
async def test_schema_caching(tmp_path_factory, db_path, template, expected):
    """A pages/about.html template should render to the expected output."""
    templates = tmp_path_factory.mktemp("templates")
    pages = templates / "pages"
    pages.mkdir()
    (pages / "about.html").write_text(template)
    ds = Datasette([db_path], template_dir=templates)
    async with httpx.AsyncClient(app=ds.app()) as client:
        response = await client.get("http://localhost/about")
    assert response.status_code == 200
    assert response.text.strip() == expected
def config_dir_client(tmp_path_factory):
    """Yield a client for a Datasette configured entirely via config_dir.

    Builds a configuration directory containing a plugin, a row template,
    a static CSS file, metadata.json, config.json, two SQLite databases
    and an inspect-data.json that marks immutable.db as immutable.
    """
    config_dir = tmp_path_factory.mktemp("config-dir")
    plugins_dir = config_dir / "plugins"
    plugins_dir.mkdir()
    (plugins_dir / "hooray.py").write_text(PLUGIN, "utf-8")
    templates_dir = config_dir / "templates"
    templates_dir.mkdir()
    (templates_dir / "row.html").write_text(
        "Show row here. Plugin says {{ from_plugin }}", "utf-8")
    static_dir = config_dir / "static"
    static_dir.mkdir()
    (static_dir / "hello.css").write_text(CSS, "utf-8")
    (config_dir / "metadata.json").write_text(json.dumps(METADATA), "utf-8")
    (config_dir / "config.json").write_text(json.dumps(CONFIG), "utf-8")
    for dbname in ("demo.db", "immutable.db"):
        db = sqlite3.connect(str(config_dir / dbname))
        db.executescript("""
        CREATE TABLE cities (
            id integer primary key,
            name text
        );
        INSERT INTO cities (id, name) VALUES
            (1, 'San Francisco')
        ;
        """)
        # Close each connection once the schema exists, rather than
        # leaking one open file handle per database for the session
        db.close()
    # Mark "immutable.db" as immutable
    (config_dir / "inspect-data.json").write_text(
        json.dumps({
            "immutable": {
                "hash": "hash",
                "size": 8192,
                "file": "immutable.db",
                "tables": {
                    "cities": {
                        "count": 1
                    }
                },
            }
        }),
        "utf-8",
    )
    ds = Datasette([], config_dir=config_dir)
    client = _TestClient(ds.app())
    client.ds = ds
    yield client
async def test_datasette_mask_columns(tmpdir):
    """With the plugin configured for users.password, the JSON API returns
    None for that column and the HTML table page shows REDACTED; without
    the plugin the real value appears and REDACTED does not."""
    path = str(tmpdir / "foo.db")
    db = sqlite_utils.Database(path)
    db["users"].insert({"id": 1, "password": "******"})
    datasette = Datasette([path], memory=True)
    # Without the plugin:
    async with httpx.AsyncClient(app=datasette.app()) as client:
        response = await client.get(
            "http://localhost/foo/users.json?_shape=array")
        assert 200 == response.status_code
        assert [{"rowid": 1, "id": 1, "password": "******"}] == response.json()
        # The text 'REDACTED' should not show up on the table page
        html_response = await client.get("http://localhost/foo/users")
        assert b"REDACTED" not in html_response.content
    # With the plugin:
    datasette2 = Datasette(
        [path],
        memory=True,
        # Per-database plugin configuration masking users.password
        metadata={
            "databases": {
                "foo": {
                    "plugins": {
                        "datasette-mask-columns": {
                            "users": ["password"]
                        }
                    }
                }
            }
        },
    )
    async with httpx.AsyncClient(app=datasette2.app()) as client:
        response = await client.get(
            "http://localhost/foo/users.json?_shape=array")
        assert 200 == response.status_code
        assert [{"rowid": 1, "id": 1, "password": None}] == response.json()
        # The text 'REDACTED' SHOULD show up on the table page
        html_response = await client.get("http://localhost/foo/users")
        assert b"REDACTED" in html_response.content
def app_client(max_returned_rows=None):
    """Yield a test client for a Datasette serving an HDF5 fixture file."""
    with tempfile.TemporaryDirectory() as tmpdir:
        path = os.path.join(tmpdir, 'test_tables.h5')
        populate_file(path)
        settings = {
            'default_page_size': 50,
            'max_returned_rows': max_returned_rows or 1000,
        }
        ds = Datasette([path], config=settings)
        client = ds.app().test_client
        client.ds = ds
        yield client
def app_client():
    """Yield a raw test client backed by a throwaway fixtures database."""
    with tempfile.TemporaryDirectory() as tmpdir:
        db_path = os.path.join(tmpdir, 'test_tables.db')
        connection = sqlite3.connect(db_path)
        connection.executescript(TABLES)
        os.chdir(os.path.dirname(db_path))
        ds = Datasette(
            [db_path],
            page_size=50,
            max_returned_rows=100,
            sql_time_limit_ms=20,
        )
        # Register sleep() so tests can deliberately exceed the time limit
        sleep_fn = ('sleep', 1, lambda n: time.sleep(float(n)))
        ds.sqlite_functions.append(sleep_fn)
        yield ds.app().test_client
async def test_import_table_multiple_databases(tmpdir):
    """The import-table form should list every attached database, with the
    one named in ?database= pre-selected."""
    first = str(tmpdir / "test.db")
    second = str(tmpdir / "test2.db")
    datasette = Datasette([first, second])
    cookies = {"ds_actor": datasette.sign({"a": {"id": "root"}}, "actor")}
    async with httpx.AsyncClient(app=datasette.app()) as client:
        page = await client.get(
            "http://localhost/-/import-table", cookies=cookies
        )
        assert page.status_code == 200
        for option in ("<option>test</option>", "<option>test2</option>"):
            assert option in page.text
        selected = await client.get(
            "http://localhost/-/import-table?database=test2", cookies=cookies
        )
        assert selected.status_code == 200
        assert '<option selected="selected">test2</option>' in selected.text
async def test_non_matching_authorization_endpoint(httpx_mock):
    """Verification must fail when the resolved "me" page declares a different
    authorization_endpoint than the one that authenticated the user.

    See https://github.com/simonw/datasette-indieauth/issues/22
    """
    httpx_mock.add_response(
        url="https://simonwillison.net",
        data=b'<link rel="authorization_endpoint" href="https://indieauth.simonwillison.net/auth">',
    )
    httpx_mock.add_response(
        url="https://indieauth.simonwillison.net/auth",
        method="POST",
        data="me=https%3A%2F%2Fsimonwillison.net%2Fme".encode("utf-8"),
    )
    # The profile URL returned by the endpoint declares a DIFFERENT
    # authorization_endpoint - this mismatch is what the test exercises
    httpx_mock.add_response(
        url="https://simonwillison.net/me",
        data=b'<link rel="authorization_endpoint" href="https://example.com">',
    )
    datasette = Datasette([], memory=True)
    app = datasette.app()
    async with httpx.AsyncClient(app=app) as client:
        csrftoken = await _get_csrftoken(client)
        # Submit the form
        post_response = await client.post(
            "http://localhost/-/indieauth",
            data={"csrftoken": csrftoken, "me": "https://simonwillison.net/"},
            cookies={"ds_csrftoken": csrftoken},
            allow_redirects=False,
        )
        ds_indieauth = post_response.cookies["ds_indieauth"]
        state = dict(
            urllib.parse.parse_qsl(
                post_response.headers["location"].split("?", 1)[1]
            )
        )["state"]
        # ... after redirecting back again
        response = await client.get(
            "http://localhost/-/indieauth/done",
            params={"state": state, "code": "123"},
            cookies={"ds_indieauth": ds_indieauth},
            allow_redirects=False,
        )
        # This should be an error because the authorization_endpoint did not
        # match.  The error page HTML-escapes double quotes, hence &quot; in
        # the assertion (the original line had lost its escaping and was a
        # syntax error).
        assert (
            "&quot;me&quot; value resolves to a different authorization_endpoint"
            in response.text
        )
async def test_permissions(tmpdir):
    """Anonymous requests get a 403; a signed root actor does not."""
    db_path = str(tmpdir / "test.db")
    ds = Datasette([db_path])
    app = ds.app()
    async with httpx.AsyncClient(app=app) as client:
        anonymous = await client.get("http://localhost/-/import-table")
        assert anonymous.status_code == 403
        # Now try with a root actor
        root_cookie = {"ds_actor": ds.sign({"a": {"id": "root"}}, "actor")}
        async with httpx.AsyncClient(app=app) as client2:
            as_root = await client2.get(
                "http://localhost/-/import-table",
                cookies=root_cookie,
                allow_redirects=False,
            )
            assert as_root.status_code != 403
async def test_invalid_ds_indieauth_cookie(bad_cookie):
    """A malformed ds_indieauth cookie should render an error message."""
    datasette = Datasette([], memory=True)
    app = datasette.app()
    state = datasette.sign({"a": "auth-url"}, "datasette-indieauth-state")
    # Dict fixtures are signed into valid-looking-but-wrong cookies;
    # anything else is sent through verbatim
    if isinstance(bad_cookie, dict):
        cookie_value = datasette.sign(bad_cookie, "datasette-indieauth-cookie")
    else:
        cookie_value = bad_cookie
    async with httpx.AsyncClient(app=app) as client:
        response = await client.get(
            "http://localhost/-/indieauth/done",
            params={"state": state, "code": "123"},
            cookies={"ds_indieauth": cookie_value},
            allow_redirects=False,
        )
    assert '<p class="message-error">Invalid ds_indieauth cookie' in response.text
async def test_restrict_access():
    """With restrict_access configured, every page returns 403 and shows the
    IndieAuth login form until the signed actor's "me" matches the allowed
    URL, after which all pages return 200."""
    datasette = Datasette(
        [],
        memory=True,
        metadata={
            "plugins": {
                "datasette-indieauth": {
                    "restrict_access": "https://simonwillison.net/"
                }
            }
        },
    )
    app = datasette.app()
    paths = ("/-/actor.json", "/", "/:memory:", "/-/metadata")
    async with httpx.AsyncClient(app=app) as client:
        # All pages should 403 and show login form
        for path in paths:
            response = await client.get("http://localhost{}".format(path))
            assert response.status_code == 403
            assert '<form action="/-/indieauth" method="post">' in response.text
            assert "simonwillison.net" not in response.text
        # Now try with a signed ds_actor cookie - everything should 200
        cookies = {
            "ds_actor": datasette.sign(
                {
                    "a": {
                        "me": "https://simonwillison.net/",
                        "display": "simonwillison.net",
                    }
                },
                "actor",
            )
        }
        for path in paths:
            response2 = await client.get(
                "http://localhost{}".format(path),
                cookies=cookies,
            )
            assert response2.status_code == 200
            assert "simonwillison.net" in response2.text
def app_client():
    """Yield a test client for a Datasette fixture with one plugin installed."""
    with tempfile.TemporaryDirectory() as tmpdir:
        filepath = os.path.join(tmpdir, 'test_tables.db')
        conn = sqlite3.connect(filepath)
        conn.executescript(TABLES)
        os.chdir(os.path.dirname(filepath))
        plugins_dir = os.path.join(tmpdir, 'plugins')
        os.mkdir(plugins_dir)
        # Close the plugin file instead of leaking the open handle
        with open(os.path.join(plugins_dir, 'my_plugin.py'), 'w') as fp:
            fp.write(PLUGIN)
        ds = Datasette(
            [filepath],
            page_size=50,
            max_returned_rows=100,
            sql_time_limit_ms=20,
            metadata=METADATA,
            plugins_dir=plugins_dir,
        )
        # Register sleep() so tests can exceed sql_time_limit_ms on demand
        ds.sqlite_functions.append(
            ('sleep', 1, lambda n: time.sleep(float(n))),
        )
        yield ds.app().test_client
def app(tmpdir):
    """Return an ASGI app serving two small demo databases with the
    test_templates directory installed."""
    dogs_path = str(pathlib.Path(tmpdir / "dogs.db"))
    news_path = str(pathlib.Path(tmpdir / "news.db"))
    sqlite_utils.Database(dogs_path)["dogs"].insert_all([{"name": "Cleo"}])
    articles = [
        {"date": "2018-01-01", "headline": "First post"},
        {"date": "2018-02-01", "headline": "Post the second"},
    ]
    sqlite_utils.Database(news_path)["articles"].insert_all(articles)
    templates = pathlib.Path(__file__).parent / "test_templates"
    ds = Datasette(
        [dogs_path, news_path],
        immutables=[],
        template_dir=str(templates),
    )
    return ds.app()
async def test_invalid_url(httpx_mock):
    """A network timeout while resolving "me" shows an error on the form."""

    def raise_timeout(request, ext):
        raise httpx.ReadTimeout(f"HTTP error occurred", request=request)

    httpx_mock.add_callback(raise_timeout, url="http://invalid")
    ds = Datasette([], memory=True)
    async with httpx.AsyncClient(app=ds.app()) as client:
        csrftoken = await _get_csrftoken(client)
        # Submit the identifier form; the mocked request times out
        response = await client.post(
            "http://localhost/-/indieauth",
            data={"csrftoken": csrftoken, "me": "invalid"},
            cookies={"ds_csrftoken": csrftoken},
            allow_redirects=False,
        )
    assert "Invalid IndieAuth identifier: HTTP error occurred" in response.text
async def test_indieauth_errors(httpx_mock, me, bodies, expected_error):
    """Each set of mocked profile responses should surface its expected
    error message on the sign-in form."""
    for mock_url, body in bodies.items():
        httpx_mock.add_response(url=mock_url, data=body.encode("utf-8"))
    ds = Datasette([], memory=True)
    async with httpx.AsyncClient(app=ds.app()) as client:
        csrftoken = await _get_csrftoken(client)
        # Submit the form
        response = await client.post(
            "http://localhost/-/indieauth",
            data={"csrftoken": csrftoken, "me": me},
            cookies={"ds_csrftoken": csrftoken},
            allow_redirects=False,
        )
    expected = '<p class="message-error">{}'.format(expected_error)
    assert expected in response.text
def app_client(sql_time_limit_ms=None, max_returned_rows=None):
    """Yield a test client for a Datasette fixture with one plugin installed.

    ``sql_time_limit_ms`` and ``max_returned_rows`` override the config
    defaults of 200 and 100 respectively.
    """
    with tempfile.TemporaryDirectory() as tmpdir:
        filepath = os.path.join(tmpdir, 'test_tables.db')
        conn = sqlite3.connect(filepath)
        conn.executescript(TABLES)
        os.chdir(os.path.dirname(filepath))
        plugins_dir = os.path.join(tmpdir, 'plugins')
        os.mkdir(plugins_dir)
        # Close the plugin file instead of leaking the open handle
        with open(os.path.join(plugins_dir, 'my_plugin.py'), 'w') as fp:
            fp.write(PLUGIN)
        ds = Datasette(
            [filepath],
            metadata=METADATA,
            plugins_dir=plugins_dir,
            config={
                'default_page_size': 50,
                'max_returned_rows': max_returned_rows or 100,
                'sql_time_limit_ms': sql_time_limit_ms or 200,
            })
        # Register sleep() so tests can exceed the time limit on demand
        ds.sqlite_functions.append(
            ('sleep', 1, lambda n: time.sleep(float(n))),
        )
        client = ds.app().test_client
        client.ds = ds
        yield client
async def test_import_table(tmpdir, httpx_mock):
    """Importing a remote table via the form should succeed and redirect to
    the new local table with an _import_expected_rows hint."""
    db_path = str(tmpdir / "test.db")
    # Mock the remote Datasette JSON API the importer fetches
    httpx_mock.add_response(
        url="http://example/some/table.json?_shape=objects&_size=max",
        json={
            "table": "mytable",
            "rows": [{
                "foo": "bar"
            }],
            "primary_keys": [],
            "filtered_table_rows_count": 1,
            "next_url": None,
        },
        headers={"content-type": "application/json"},
    )
    datasette = Datasette([db_path])
    # Sign in as root - the import page requires permission
    cookies = {"ds_actor": datasette.sign({"a": {"id": "root"}}, "actor")}
    async with httpx.AsyncClient(app=datasette.app()) as client:
        response = await client.get("http://localhost/-/import-table",
                                    cookies=cookies)
        assert 200 == response.status_code
        # The GET also sets the CSRF token needed for the POST below
        csrftoken = response.cookies["ds_csrftoken"]
        cookies["ds_csrftoken"] = csrftoken
        response = await client.post(
            "http://localhost/-/import-table",
            data={
                "url": "http://example/some/table",
                "csrftoken": csrftoken,
            },
            allow_redirects=False,
            cookies=cookies,
        )
        assert response.status_code == 302
        assert response.headers[
            "location"] == "/test/mytable?_import_expected_rows=1"
def app_client_no_files():
    """Yield a client for a Datasette with no database files attached."""
    datasette = Datasette([])
    test_client = TestClient(datasette.app())
    test_client.ds = datasette
    yield test_client
async def test_indieauth_flow(
    httpx_mock,
    me,
    auth_response_status,
    auth_response_body,
    expected_profile,
    expected_error,
):
    """End-to-end IndieAuth flow: form submission, redirect out to the
    authorization endpoint, then the redirect back to /-/indieauth/done.

    Parametrized over the authorization endpoint's response to cover both
    successful sign-ins (ds_actor cookie set) and error cases.
    """
    httpx_mock.add_response(
        url=me.rstrip("/"),
        data=b'<link rel="authorization_endpoint" href="https://indieauth.simonwillison.net/auth">',
    )
    httpx_mock.add_response(
        url="https://indieauth.simonwillison.net/auth",
        method="POST",
        data=auth_response_body.encode("utf-8"),
        status_code=auth_response_status,
    )
    if not expected_error:
        httpx_mock.add_response(
            url="https://indieauth.simonwillison.net/index.php/author/simonw/",
            method="GET",
            data=b'<link rel="authorization_endpoint" href="https://indieauth.simonwillison.net/auth">',
        )
    if "indieauth.simonwillison.com" in auth_response_body:
        httpx_mock.add_response(
            url="https://indieauth.simonwillison.com",
            method="GET",
            data=b'<link rel="authorization_endpoint" href="https://indieauth.simonwillison.net/auth">',
        )
    datasette = Datasette([], memory=True)
    app = datasette.app()
    async with httpx.AsyncClient(app=app) as client:
        # Get CSRF token
        csrftoken = await _get_csrftoken(client)
        # Submit the form
        post_response = await client.post(
            "http://localhost/-/indieauth",
            data={"csrftoken": csrftoken, "me": me},
            cookies={"ds_csrftoken": csrftoken},
            allow_redirects=False,
        )
        # Should set a cookie and redirect
        assert post_response.status_code == 302
        assert "ds_indieauth" in post_response.cookies
        ds_indieauth = post_response.cookies["ds_indieauth"]
        ds_indieauth_bits = datasette.unsign(ds_indieauth,
                                             "datasette-indieauth-cookie")
        # "v" is the PKCE code verifier, "m" echoes the submitted "me"
        verifier = ds_indieauth_bits["v"]
        assert ds_indieauth_bits["m"] == me
        # Verify the location is in the right shape
        location = post_response.headers["location"]
        assert location.startswith("https://indieauth.simonwillison.net/auth?")
        querystring = location.split("?", 1)[1]
        bits = dict(urllib.parse.parse_qsl(querystring))
        assert bits["redirect_uri"] == "http://localhost/-/indieauth/done"
        assert bits["client_id"] == "http://localhost/-/indieauth"
        assert bits["me"] == me
        # Next step for user is to redirect to that page, login and redirect back
        # Simulate the redirect-back part
        response = await client.get(
            "http://localhost/-/indieauth/done",
            params={
                "state": bits["state"],
                "code": "123",
            },
            cookies={"ds_indieauth": ds_indieauth},
            allow_redirects=False,
        )
        # This should have made a POST to https://indieauth.simonwillison.net/auth
        last_post_request = [
            r for r in httpx_mock.get_requests() if r.method == "POST"
        ][-1]
        post_bits = dict(
            urllib.parse.parse_qsl(last_post_request.read().decode("utf-8")))
        assert post_bits == {
            "grant_type": "authorization_code",
            "code": "123",
            "client_id": "http://localhost/-/indieauth",
            "redirect_uri": "http://localhost/-/indieauth/done",
            "code_verifier": verifier,
        }
        # Should set cookie for "https://indieauth.simonwillison.net/index.php/author/simonw/"
        if expected_error:
            assert response.status_code == 200
            assert expected_error in response.text
        else:
            assert response.status_code == 302
            assert response.headers["location"]
            assert "ds_actor" in response.cookies
            expected_actor = {
                "me": "https://indieauth.simonwillison.net/index.php/author/simonw/",
                "display": "indieauth.simonwillison.net/index.php/author/simonw/",
                "indieauth_scope": "email",
            }
            expected_actor.update(expected_profile)
            assert datasette.unsign(response.cookies["ds_actor"], "actor") == {
                "a": expected_actor
            }