def cli(db_filename, metadata, plugins_path, recreate):
    """Write out the fixtures database used by Datasette's test suite"""
    if metadata and not metadata.endswith(".json"):
        raise click.ClickException("Metadata should end with .json")
    if not db_filename.endswith(".db"):
        raise click.ClickException("Database file should end with .db")
    db_path = pathlib.Path(db_filename)
    if db_path.exists():
        if not recreate:
            raise click.ClickException(
                f"{db_filename} already exists, use --recreate to reset it"
            )
        else:
            db_path.unlink()
    conn = sqlite3.connect(db_filename)
    conn.executescript(TABLES)
    for sql, params in TABLE_PARAMETERIZED_SQL:
        # Connection-as-context-manager commits each statement
        with conn:
            conn.execute(sql, params)
    if supports_generated_columns():
        with conn:
            conn.executescript(GENERATED_COLUMNS_SQL)
    print(f"Test tables written to {db_filename}")
    if metadata:
        # Context manager closes the handle; original open(...).write() leaked it
        with open(metadata, "w") as fp:
            fp.write(json.dumps(METADATA, indent=4))
        print(f"- metadata written to {metadata}")
    if plugins_path:
        path = pathlib.Path(plugins_path)
        if not path.exists():
            path.mkdir()
        # Copy every bundled test plugin into the requested directory
        test_plugins = pathlib.Path(__file__).parent / "plugins"
        for filepath in test_plugins.glob("*.py"):
            newpath = path / filepath.name
            # read_text() opens AND closes; original filepath.open().read() leaked a handle
            newpath.write_text(filepath.read_text())
            print(f" Wrote plugin: {newpath}")
async def test_table_names(db):
    """The fixtures database exposes exactly this set of tables, in this order."""
    expected = [
        "simple_primary_key",
        "primary_key_multiple_columns",
        "primary_key_multiple_columns_explicit_label",
        "compound_primary_key",
        "compound_three_primary_keys",
        "foreign_key_references",
        "sortable",
        "no_primary_key",
        "123_starts_with_digits",
        "Table With Space In Name",
        "table/with/slashes.csv",
        "complex_foreign_keys",
        "custom_foreign_key_label",
        "units",
        "tags",
        "searchable",
        "searchable_tags",
        "searchable_fts",
        "searchable_fts_segments",
        "searchable_fts_segdir",
        "searchable_fts_docsize",
        "searchable_fts_stat",
        "select",
        "infinity",
        "facet_cities",
        "facetable",
        "binary_data",
        "roadside_attractions",
        "attraction_characteristic",
        "roadside_attraction_characteristics",
    ]
    # The generated_columns table is only created on SQLite builds that support it
    if supports_generated_columns():
        expected.append("generated_columns")
    actual = await db.table_names()
    assert actual == expected
def make_app_client(
    sql_time_limit_ms=None,
    max_returned_rows=None,
    cors=False,
    memory=False,
    config=None,
    filename="fixtures.db",
    is_immutable=False,
    extra_databases=None,
    inspect_data=None,
    static_mounts=None,
    template_dir=None,
    metadata=None,
):
    """Yield a TestClient wrapping a Datasette built against a fresh fixtures db.

    A temporary directory holds the SQLite file(s); it is removed when the
    generator is finalized. ``extra_databases`` maps filename -> SQL script
    for additional databases. ``is_immutable`` registers the main database
    as immutable instead of mutable.
    """
    with tempfile.TemporaryDirectory() as tmpdir:
        filepath = os.path.join(tmpdir, filename)
        if is_immutable:
            files = []
            immutables = [filepath]
        else:
            files = [filepath]
            immutables = []
        conn = sqlite3.connect(filepath)
        conn.executescript(TABLES)
        if supports_generated_columns():
            conn.executescript(GENERATED_COLUMNS_SQL)
        for sql, params in TABLE_PARAMETERIZED_SQL:
            # Connection context manager commits each parameterized insert
            with conn:
                conn.execute(sql, params)
        if extra_databases is not None:
            for extra_filename, extra_sql in extra_databases.items():
                extra_filepath = os.path.join(tmpdir, extra_filename)
                sqlite3.connect(extra_filepath).executescript(extra_sql)
                files.append(extra_filepath)
        # NOTE(review): chdir is process-global and never restored — tests
        # relying on cwd after this fixture may be affected; confirm intent.
        os.chdir(os.path.dirname(filepath))
        # Copy before updating so a caller-supplied config dict is never
        # mutated in place (the original clobbered the caller's dict).
        config = dict(config or {})
        config.update(
            {
                "default_page_size": 50,
                "max_returned_rows": max_returned_rows or 100,
                "sql_time_limit_ms": sql_time_limit_ms or 200,
                # Default is 3 but this results in "too many open files"
                # errors when running the full test suite:
                "num_sql_threads": 1,
            }
        )
        ds = Datasette(
            files,
            immutables=immutables,
            memory=memory,
            cors=cors,
            metadata=metadata or METADATA,
            plugins_dir=PLUGINS_DIR,
            config=config,
            inspect_data=inspect_data,
            static_mounts=static_mounts,
            template_dir=template_dir,
        )
        # Register a sleep() SQL function so tests can exercise time limits
        ds.sqlite_functions.append(("sleep", 1, lambda n: time.sleep(float(n))))
        yield TestClient(ds)