def make_app_client(
    sql_time_limit_ms=None,
    max_returned_rows=None,
    cors=False,
    memory=False,
    config=None,
    filename="fixtures.db",
    is_immutable=False,
    extra_databases=None,
    inspect_data=None,
    static_mounts=None,
    template_dir=None,
    metadata=None,
):
    """Yield a TestClient (with ``.ds`` attached) backed by a temporary fixtures DB.

    Parameters mirror the Datasette constructor; ``extra_databases`` maps
    filename -> SQL script for additional databases, and ``is_immutable``
    registers the main database as immutable instead of mutable.  All files
    live in a TemporaryDirectory removed when the generator is finalized.
    """
    with tempfile.TemporaryDirectory() as tmpdir:
        filepath = os.path.join(tmpdir, filename)
        if is_immutable:
            files = []
            immutables = [filepath]
        else:
            files = [filepath]
            immutables = []
        conn = sqlite3.connect(filepath)
        conn.executescript(TABLES)
        for sql, params in TABLE_PARAMETERIZED_SQL:
            with conn:
                conn.execute(sql, params)
        # Close the setup connection: a lingering handle prevents
        # TemporaryDirectory cleanup on Windows.
        conn.close()
        if extra_databases is not None:
            for extra_filename, extra_sql in extra_databases.items():
                extra_filepath = os.path.join(tmpdir, extra_filename)
                extra_conn = sqlite3.connect(extra_filepath)
                extra_conn.executescript(extra_sql)
                extra_conn.close()
                files.append(extra_filepath)
        os.chdir(os.path.dirname(filepath))
        # Copy the incoming dict: the original called .update() on the
        # caller's config object, leaking settings between test calls.
        config = dict(config or {})
        config.update(
            {
                "default_page_size": 50,
                "max_returned_rows": max_returned_rows or 100,
                "sql_time_limit_ms": sql_time_limit_ms or 200,
                # Default is 3 but this results in "too many open files"
                # errors when running the full test suite:
                "num_sql_threads": 1,
            }
        )
        ds = Datasette(
            files,
            immutables=immutables,
            memory=memory,
            cors=cors,
            metadata=metadata or METADATA,
            plugins_dir=PLUGINS_DIR,
            config=config,
            inspect_data=inspect_data,
            static_mounts=static_mounts,
            template_dir=template_dir,
        )
        # Register a sleep() SQL function so tests can trigger time limits.
        ds.sqlite_functions.append(("sleep", 1, lambda n: time.sleep(float(n))))
        client = TestClient(ds.app())
        client.ds = ds
        yield client
def make_app_client(
    sql_time_limit_ms=None,
    max_returned_rows=None,
    cors=False,
    memory=False,
    config=None,
    filename="fixtures.db",
    is_immutable=False,
    extra_databases=None,
    inspect_data=None,
    static_mounts=None,
    template_dir=None,
):
    """Yield a TestClient (with ``.ds`` attached) backed by a temporary fixtures DB.

    Writes the two test plugin modules into a per-test plugins directory,
    builds the fixtures database (plus any ``extra_databases``), and yields a
    client wired to a configured Datasette instance.
    """
    with tempfile.TemporaryDirectory() as tmpdir:
        filepath = os.path.join(tmpdir, filename)
        if is_immutable:
            files = []
            immutables = [filepath]
        else:
            files = [filepath]
            immutables = []
        conn = sqlite3.connect(filepath)
        conn.executescript(TABLES)
        for sql, params in TABLE_PARAMETERIZED_SQL:
            with conn:
                conn.execute(sql, params)
        # Close the setup connection so the handle doesn't block
        # TemporaryDirectory cleanup on Windows.
        conn.close()
        if extra_databases is not None:
            for extra_filename, extra_sql in extra_databases.items():
                extra_filepath = os.path.join(tmpdir, extra_filename)
                extra_conn = sqlite3.connect(extra_filepath)
                extra_conn.executescript(extra_sql)
                extra_conn.close()
                files.append(extra_filepath)
        os.chdir(os.path.dirname(filepath))
        plugins_dir = os.path.join(tmpdir, "plugins")
        os.mkdir(plugins_dir)
        # Context managers instead of open(...).write(...): the original
        # leaked the file handles and relied on GC to flush the writes.
        with open(os.path.join(plugins_dir, "my_plugin.py"), "w") as fp:
            fp.write(PLUGIN1)
        with open(os.path.join(plugins_dir, "my_plugin_2.py"), "w") as fp:
            fp.write(PLUGIN2)
        # Copy the incoming dict: the original called .update() on the
        # caller's config object, leaking settings between test calls.
        config = dict(config or {})
        config.update(
            {
                "default_page_size": 50,
                "max_returned_rows": max_returned_rows or 100,
                "sql_time_limit_ms": sql_time_limit_ms or 200,
            }
        )
        ds = Datasette(
            files,
            immutables=immutables,
            memory=memory,
            cors=cors,
            metadata=METADATA,
            plugins_dir=plugins_dir,
            config=config,
            inspect_data=inspect_data,
            static_mounts=static_mounts,
            template_dir=template_dir,
        )
        # Register a sleep() SQL function so tests can trigger time limits.
        ds.sqlite_functions.append(("sleep", 1, lambda n: time.sleep(float(n))))
        client = TestClient(ds.app())
        client.ds = ds
        yield client
async def test_execute_write_fn_connection_exception(tmpdir, app_client):
    """An exception raised inside execute_write_fn propagates to the awaiter."""
    db_path = str(tmpdir / "immutable.db")
    sqlite3.connect(db_path).execute("vacuum")
    immutable_db = Database(app_client.ds, path=db_path, is_mutable=False)
    app_client.ds.add_database("immutable-db", immutable_db)

    def raise_assertion(conn):
        assert False

    with pytest.raises(AssertionError):
        await immutable_db.execute_write_fn(raise_assertion, block=True)
    app_client.ds.remove_database("immutable-db")
def app_client(
    sql_time_limit_ms=None,
    max_returned_rows=None,
    cors=False,
    config=None,
    filename="fixtures.db",
):
    """Yield a TestClient (with ``.ds`` attached) for a temporary fixtures DB.

    Simpler sibling of make_app_client: single database, plugin files written
    into a per-test plugins directory, no immutable/extra-database support.
    """
    with tempfile.TemporaryDirectory() as tmpdir:
        filepath = os.path.join(tmpdir, filename)
        conn = sqlite3.connect(filepath)
        conn.executescript(TABLES)
        # Close the setup connection so the handle doesn't block
        # TemporaryDirectory cleanup on Windows.
        conn.close()
        os.chdir(os.path.dirname(filepath))
        plugins_dir = os.path.join(tmpdir, "plugins")
        os.mkdir(plugins_dir)
        # Context managers instead of open(...).write(...): the original
        # leaked the file handles and relied on GC to flush the writes.
        with open(os.path.join(plugins_dir, "my_plugin.py"), "w") as fp:
            fp.write(PLUGIN1)
        with open(os.path.join(plugins_dir, "my_plugin_2.py"), "w") as fp:
            fp.write(PLUGIN2)
        # Copy the incoming dict: the original called .update() on the
        # caller's config object, leaking settings between test calls.
        config = dict(config or {})
        config.update(
            {
                "default_page_size": 50,
                "max_returned_rows": max_returned_rows or 100,
                "sql_time_limit_ms": sql_time_limit_ms or 200,
            }
        )
        ds = Datasette(
            [filepath],
            cors=cors,
            metadata=METADATA,
            plugins_dir=plugins_dir,
            config=config,
        )
        # Register a sleep() SQL function so tests can trigger time limits.
        ds.sqlite_functions.append(("sleep", 1, lambda n: time.sleep(float(n))))
        client = TestClient(ds.app().test_client)
        client.ds = ds
        yield client
def cli(db_filename, metadata, plugins_path, recreate):
    "Write out the fixtures database used by Datasette's test suite"
    # Validate filename conventions up front so we fail before touching disk.
    if metadata and not metadata.endswith(".json"):
        raise click.ClickException("Metadata should end with .json")
    if not db_filename.endswith(".db"):
        raise click.ClickException("Database file should end with .db")
    db_path = pathlib.Path(db_filename)
    if db_path.exists():
        if not recreate:
            raise click.ClickException(
                "{} already exists, use --recreate to reset it".format(db_filename)
            )
        else:
            db_path.unlink()
    conn = sqlite3.connect(db_filename)
    conn.executescript(TABLES)
    for sql, params in TABLE_PARAMETERIZED_SQL:
        with conn:
            conn.execute(sql, params)
    # Close explicitly so all writes are flushed before we report success.
    conn.close()
    print("Test tables written to {}".format(db_filename))
    if metadata:
        # Path.write_text opens and closes the file for us; the original
        # open(...).write(...) leaked the handle.
        pathlib.Path(metadata).write_text(json.dumps(METADATA, indent=4))
        print("- metadata written to {}".format(metadata))
    if plugins_path:
        path = pathlib.Path(plugins_path)
        if not path.exists():
            path.mkdir()
        test_plugins = pathlib.Path(__file__).parent / "plugins"
        for filepath in test_plugins.glob("*.py"):
            newpath = path / filepath.name
            # read_text/write_text close their handles; filepath.open().read()
            # leaked the source file handle.
            newpath.write_text(filepath.read_text())
            print(" Wrote plugin: {}".format(newpath))
def view_names_client(tmp_path_factory):
    """Return a test client whose templates render only the current view_name.

    Writes a one-line template for each known view plus a plugin exposing
    ``view_name`` via the extra_template_vars hook, then builds a fixtures
    database in the same temp directory.
    """
    tmpdir = tmp_path_factory.mktemp("test-view-names")
    templates = tmpdir / "templates"
    templates.mkdir()
    plugins = tmpdir / "plugins"
    plugins.mkdir()
    for template in (
        "index.html",
        "database.html",
        "table.html",
        "row.html",
        "show_json.html",
        "query.html",
    ):
        (templates / template).write_text("view_name:{{ view_name }}", "utf-8")
    (plugins / "extra_vars.py").write_text(
        textwrap.dedent(
            """
            from datasette import hookimpl

            @hookimpl
            def extra_template_vars(view_name):
                return {"view_name": view_name}
            """
        ),
        "utf-8",
    )
    db_path = str(tmpdir / "fixtures.db")
    conn = sqlite3.connect(db_path)
    conn.executescript(TABLES)
    # Close the setup connection: the original left it open and relied on
    # CPython refcounting to release the file handle.
    conn.close()
    return _TestClient(
        Datasette(
            [db_path], template_dir=str(templates), plugins_dir=str(plugins)
        ).app()
    )
def test_plugins_async_template_function(restore_working_directory):
    """The awaitable extra_template_vars value is rendered into the page."""
    template_path = pathlib.Path(__file__).parent / "test_templates"
    for client in make_app_client(template_dir=str(template_path)):
        response = client.get("/-/metadata")
        assert 200 == response.status
        soup = Soup(response.body, "html.parser")
        rendered = soup.select("pre.extra_from_awaitable_function")[0].text
        expected = (
            sqlite3.connect(":memory:")
            .execute("select sqlite_version()")
            .fetchone()[0]
        )
        assert expected == rendered
# NOTE(review): this chunk starts mid-expression — the tail of a SQL-script
# string built from generate_compound_rows(1001) and generate_sortable_rows(201)
# (its opening bracket lies outside this view) — followed by the script's
# __main__ entry point, which writes TABLES to a .db file given on the command
# line and optionally dumps METADATA to a .json file.  Left byte-identical
# because the leading expression is incomplete from here; presumably the
# collapsed single-line layout is an extraction artifact — TODO confirm
# against the original file before reformatting.
'INSERT INTO compound_three_primary_keys VALUES ("{a}", "{b}", "{c}", "{content}");'.format( a=a, b=b, c=c, content=content ) for a, b, c, content in generate_compound_rows(1001) ]) + '\n'.join([ '''INSERT INTO sortable VALUES ( "{pk1}", "{pk2}", "{content}", {sortable}, {sortable_with_nulls}, {sortable_with_nulls_2}, "{text}"); '''.format( **row ).replace('None', 'null') for row in generate_sortable_rows(201) ]) if __name__ == '__main__': # Can be called with data.db OR data.db metadata.json db_filename = sys.argv[-1] metadata_filename = None if db_filename.endswith(".json"): metadata_filename = db_filename db_filename = sys.argv[-2] if db_filename.endswith(".db"): conn = sqlite3.connect(db_filename) conn.executescript(TABLES) print("Test tables written to {}".format(db_filename)) if metadata_filename: open(metadata_filename, 'w').write(json.dumps(METADATA)) print("- metadata written to {}".format(metadata_filename)) else: print("Usage: {} db_to_write.db [metadata_to_write.json]".format( sys.argv[0] ))
def ds_instance():
    """Yield a Datasette instance backed by a throwaway fixtures database.

    The TemporaryDirectory (and the database inside it) is removed when the
    generator is finalized.
    """
    with tempfile.TemporaryDirectory() as tmpdir:
        filepath = os.path.join(tmpdir, "fixtures.db")
        conn = sqlite3.connect(filepath)
        conn.executescript(TABLES)
        # Close the setup connection: a lingering handle prevents
        # TemporaryDirectory cleanup on Windows.
        conn.close()
        yield Datasette([filepath])