def make_app_client(
    sql_time_limit_ms=None,
    max_returned_rows=None,
    cors=False,
    memory=False,
    settings=None,
    filename="fixtures.db",
    is_immutable=False,
    extra_databases=None,
    inspect_data=None,
    static_mounts=None,
    template_dir=None,
    metadata=None,
    crossdb=False,
):
    """Yield a TestClient wrapping a Datasette instance backed by a
    temporary fixtures database.

    The fixtures database is created inside a fresh temporary directory.
    ``extra_databases`` maps extra filenames to SQL scripts used to create
    additional database files alongside it. Caller-supplied ``settings``
    keys take precedence over the defaults applied here.
    """
    with tempfile.TemporaryDirectory() as tmpdir:
        filepath = os.path.join(tmpdir, filename)
        # An immutable database is passed to Datasette via immutables=,
        # not files=
        if is_immutable:
            files, immutables = [], [filepath]
        else:
            files, immutables = [filepath], []
        conn = sqlite3.connect(filepath)
        conn.executescript(TABLES)
        for sql, params in TABLE_PARAMETERIZED_SQL:
            with conn:
                conn.execute(sql, params)
        if extra_databases is not None:
            for extra_filename, extra_sql in extra_databases.items():
                extra_filepath = os.path.join(tmpdir, extra_filename)
                sqlite3.connect(extra_filepath).executescript(extra_sql)
                # Insert at start to help test /-/databases ordering:
                files.insert(0, extra_filepath)
        os.chdir(os.path.dirname(filepath))
        settings = settings or {}
        defaults = {
            "default_page_size": 50,
            "max_returned_rows": max_returned_rows or 100,
            "sql_time_limit_ms": sql_time_limit_ms or 200,
            # Default is 3 but this results in "too many open files"
            # errors when running the full test suite:
            "num_sql_threads": 1,
        }
        for key, value in defaults.items():
            settings.setdefault(key, value)
        ds = Datasette(
            files,
            immutables=immutables,
            memory=memory,
            cors=cors,
            metadata=metadata or METADATA,
            plugins_dir=PLUGINS_DIR,
            settings=settings,
            inspect_data=inspect_data,
            static_mounts=static_mounts,
            template_dir=template_dir,
            crossdb=crossdb,
        )
        yield TestClient(ds)
def make_app_client(
    sql_time_limit_ms=None,
    max_returned_rows=None,
    cors=False,
    memory=False,
    config=None,
    filename="fixtures.db",
    is_immutable=False,
    extra_databases=None,
    inspect_data=None,
    static_mounts=None,
    template_dir=None,
    metadata=None,
):
    """Yield a TestClient wrapping a Datasette instance backed by a
    temporary fixtures database.

    ``extra_databases`` maps extra filenames to SQL scripts used to create
    additional database files alongside the fixtures database.
    Caller-supplied ``config`` keys take precedence over the defaults
    applied here.
    """
    with tempfile.TemporaryDirectory() as tmpdir:
        filepath = os.path.join(tmpdir, filename)
        # An immutable database is passed to Datasette via immutables=,
        # not files=
        if is_immutable:
            files = []
            immutables = [filepath]
        else:
            files = [filepath]
            immutables = []
        conn = sqlite3.connect(filepath)
        conn.executescript(TABLES)
        if supports_generated_columns():
            conn.executescript(GENERATED_COLUMNS_SQL)
        for sql, params in TABLE_PARAMETERIZED_SQL:
            with conn:
                conn.execute(sql, params)
        if extra_databases is not None:
            for extra_filename, extra_sql in extra_databases.items():
                extra_filepath = os.path.join(tmpdir, extra_filename)
                sqlite3.connect(extra_filepath).executescript(extra_sql)
                files.append(extra_filepath)
        os.chdir(os.path.dirname(filepath))
        config = config or {}
        # Apply defaults only for keys the caller did not provide.
        # Previously config.update(...) silently clobbered any
        # caller-supplied values for these keys.
        for key, value in {
            "default_page_size": 50,
            "max_returned_rows": max_returned_rows or 100,
            "sql_time_limit_ms": sql_time_limit_ms or 200,
            # Default is 3 but this results in "too many open files"
            # errors when running the full test suite:
            "num_sql_threads": 1,
        }.items():
            config.setdefault(key, value)
        ds = Datasette(
            files,
            immutables=immutables,
            memory=memory,
            cors=cors,
            metadata=metadata or METADATA,
            plugins_dir=PLUGINS_DIR,
            config=config,
            inspect_data=inspect_data,
            static_mounts=static_mounts,
            template_dir=template_dir,
        )
        # Register a sleep() SQL function, used by the sql_time_limit tests
        ds.sqlite_functions.append(
            ("sleep", 1, lambda n: time.sleep(float(n)))
        )
        yield TestClient(ds)
def test_serve_deduplicate_same_database_path(ensure_eventloop, tmpdir):
    "'datasette db.db db.db' should only attach one database, /db"
    db_path = str(tmpdir / "db.db")
    sqlite3.connect(db_path).execute("vacuum")
    runner = CliRunner()
    # Pass the same path twice on the command line
    result = runner.invoke(cli, [db_path, db_path, "--get", "/-/databases.json"])
    assert result.exit_code == 0, result.output
    names = {db["name"] for db in json.loads(result.output)}
    assert names == {"db"}
def test_serve_duplicate_database_names(ensure_eventloop, tmpdir):
    # Two files both called db.db: the second should be renamed db_2
    nested = tmpdir / "nested"
    nested.mkdir()
    db_paths = [str(tmpdir / "db.db"), str(tmpdir / "nested" / "db.db")]
    for path in db_paths:
        sqlite3.connect(path).execute("vacuum")
    runner = CliRunner()
    result = runner.invoke(cli, db_paths + ["--get", "/-/databases.json"])
    assert result.exit_code == 0, result.output
    names = {db["name"] for db in json.loads(result.output)}
    assert names == {"db", "db_2"}
async def test_execute_write_fn_connection_exception(tmpdir, app_client):
    """An exception raised inside a write function propagates to the caller."""
    db_path = str(tmpdir / "immutable.db")
    sqlite3.connect(db_path).execute("vacuum")
    database = Database(app_client.ds, path=db_path, is_mutable=False)
    app_client.ds.add_database(database, name="immutable-db")

    def failing_write(conn):
        assert False

    with pytest.raises(AssertionError):
        await database.execute_write_fn(failing_write, block=True)

    app_client.ds.remove_database("immutable-db")
def view_names_client(tmp_path_factory):
    """Client whose templates and plugin expose the view_name variable."""
    root = tmp_path_factory.mktemp("test-view-names")
    templates = root / "templates"
    plugins = root / "plugins"
    templates.mkdir()
    plugins.mkdir()
    template_names = (
        "index.html",
        "database.html",
        "table.html",
        "row.html",
        "show_json.html",
        "query.html",
    )
    for name in template_names:
        (templates / name).write_text("view_name:{{ view_name }}", "utf-8")
    plugin_source = textwrap.dedent(
        """
    from datasette import hookimpl

    @hookimpl
    def extra_template_vars(view_name):
        return {"view_name": view_name}
    """
    )
    (plugins / "extra_vars.py").write_text(plugin_source, "utf-8")
    db_path = str(root / "fixtures.db")
    sqlite3.connect(db_path).executescript(TABLES)
    return _TestClient(
        Datasette([db_path], template_dir=str(templates), plugins_dir=str(plugins))
    )
def test_weird_database_names(ensure_eventloop, tmpdir, filename):
    # https://github.com/simonw/datasette/issues/1181
    runner = CliRunner()
    db_path = str(tmpdir / filename)
    sqlite3.connect(db_path).execute("vacuum")
    stem = filename.rsplit(".", 1)[0]
    quoted_stem = urllib.parse.quote(stem)
    # The index page should link to the database page
    result1 = runner.invoke(cli, [db_path, "--get", "/"])
    assert result1.exit_code == 0, result1.output
    expected_link = '<a href="/{}">{}</a>'.format(quoted_stem, stem)
    assert expected_link in result1.output
    # Now try hitting that database page
    result2 = runner.invoke(cli, [db_path, "--get", "/{}".format(quoted_stem)])
    assert result2.exit_code == 0, result2.output
def cli(db_filename, metadata, plugins_path, recreate):
    """Write out the fixtures database used by Datasette's test suite"""
    if metadata and not metadata.endswith(".json"):
        raise click.ClickException("Metadata should end with .json")
    if not db_filename.endswith(".db"):
        raise click.ClickException("Database file should end with .db")
    if pathlib.Path(db_filename).exists():
        if not recreate:
            raise click.ClickException(
                f"{db_filename} already exists, use --recreate to reset it"
            )
        else:
            pathlib.Path(db_filename).unlink()
    conn = sqlite3.connect(db_filename)
    conn.executescript(TABLES)
    for sql, params in TABLE_PARAMETERIZED_SQL:
        with conn:
            conn.execute(sql, params)
    if supports_generated_columns():
        with conn:
            conn.executescript(GENERATED_COLUMNS_SQL)
    print(f"Test tables written to {db_filename}")
    if metadata:
        # Context manager so the file handle is closed promptly
        # (was open(...).write(...), which leaked the handle)
        with open(metadata, "w") as fp:
            fp.write(json.dumps(METADATA, indent=4))
        print(f"- metadata written to {metadata}")
    if plugins_path:
        path = pathlib.Path(plugins_path)
        if not path.exists():
            path.mkdir()
        test_plugins = pathlib.Path(__file__).parent / "plugins"
        for filepath in test_plugins.glob("*.py"):
            newpath = path / filepath.name
            # read_text() avoids leaking the handle from filepath.open()
            newpath.write_text(filepath.read_text())
            print(f" Wrote plugin: {newpath}")
def test_table_columns():
    # Column names should come back in declaration order
    conn = sqlite3.connect(":memory:")
    conn.execute(
        "create table places (id integer primary key, name text, bob integer)"
    )
    assert utils.table_columns(conn, "places") == ["id", "name", "bob"]
def test_plugins_async_template_function(restore_working_directory):
    template_dir = str(pathlib.Path(__file__).parent / "test_templates")
    with make_app_client(template_dir=template_dir) as client:
        response = client.get("/-/metadata")
        assert response.status == 200
        soup = Soup(response.body, "html.parser")
        actual = soup.select("pre.extra_from_awaitable_function")[0].text
        # The awaitable template function returns the SQLite version
        expected = (
            sqlite3.connect(":memory:")
            .execute("select sqlite_version()")
            .fetchone()[0]
        )
        assert expected == actual
def config_dir_client(tmp_path_factory):
    """Yield a client for a Datasette configured entirely from a config directory."""
    config_dir = tmp_path_factory.mktemp("config-dir")

    plugins_dir = config_dir / "plugins"
    plugins_dir.mkdir()
    (plugins_dir / "hooray.py").write_text(PLUGIN, "utf-8")
    (plugins_dir / "non_py_file.txt").write_text(PLUGIN, "utf-8")
    (plugins_dir / ".mypy_cache").mkdir()

    templates_dir = config_dir / "templates"
    templates_dir.mkdir()
    (templates_dir / "row.html").write_text(
        "Show row here. Plugin says {{ from_plugin }}", "utf-8"
    )

    static_dir = config_dir / "static"
    static_dir.mkdir()
    (static_dir / "hello.css").write_text(CSS, "utf-8")

    (config_dir / "metadata.json").write_text(json.dumps(METADATA), "utf-8")
    (config_dir / "settings.json").write_text(json.dumps(SETTINGS), "utf-8")

    cities_sql = """
    CREATE TABLE cities (
        id integer primary key,
        name text
    );
    INSERT INTO cities (id, name) VALUES
        (1, 'San Francisco')
    ;
    """
    for dbname in ("demo.db", "immutable.db"):
        sqlite3.connect(str(config_dir / dbname)).executescript(cities_sql)

    # Mark "immutable.db" as immutable
    (config_dir / "inspect-data.json").write_text(
        json.dumps(
            {
                "immutable": {
                    "hash": "hash",
                    "size": 8192,
                    "file": "immutable.db",
                    "tables": {"cities": {"count": 1}},
                }
            }
        ),
        "utf-8",
    )
    ds = Datasette([], config_dir=config_dir)
    yield _TestClient(ds)
async def test_initial_path_for_datasette(tmp_path_factory, dbs, expected_path):
    db_dir = tmp_path_factory.mktemp("dbs")
    paths = {
        "one_table": str(db_dir / "one.db"),
        "two_tables": str(db_dir / "two.db"),
    }
    sqlite3.connect(paths["one_table"]).execute(
        "create table one (id integer primary key)"
    )
    sqlite3.connect(paths["two_tables"]).execute(
        "create table two (id integer primary key)"
    )
    sqlite3.connect(paths["two_tables"]).execute(
        "create table three (id integer primary key)"
    )
    datasette = Datasette([paths[db] for db in dbs])
    assert await utils.initial_path_for_datasette(datasette) == expected_path
def test_check_connection_passes():
    # A plain in-memory database should pass the connection check
    utils.check_connection(sqlite3.connect(":memory:"))
def test_check_connection_spatialite_raises():
    spatialite_db = pathlib.Path(__file__).parent / "spatialite.db"
    conn = sqlite3.connect(str(spatialite_db))
    with pytest.raises(utils.SpatialiteConnectionProblem):
        utils.check_connection(conn)