async def test_database_memory_name(app_client):
    ds = app_client.ds
    foo1 = ds.add_database(Database(ds, memory_name="foo"))
    foo2 = ds.add_database(Database(ds, memory_name="foo"))
    bar1 = ds.add_database(Database(ds, memory_name="bar"))
    bar2 = ds.add_database(Database(ds, memory_name="bar"))
    for db in (foo1, foo2, bar1, bar2):
        table_names = await db.table_names()
        assert table_names == []
    # Now create a table in foo
    await foo1.execute_write("create table foo (t text)", block=True)
    assert await foo1.table_names() == ["foo"]
    assert await foo2.table_names() == ["foo"]
    assert await bar1.table_names() == []
    assert await bar2.table_names() == []

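# A hedged note on why the assertions above hold: Database objects created
# with the same memory_name share one underlying in-memory SQLite database,
# so writes through one handle are visible through the other. Minimal
# standalone sketch (names are illustrative, not from the original test):
from datasette.app import Datasette
from datasette.database import Database


async def demo_shared_memory_name():
    ds = Datasette([], memory=True)
    first = ds.add_database(Database(ds, memory_name="shared"))
    second = ds.add_database(Database(ds, memory_name="shared"))
    await first.execute_write("create table t (id integer)", block=True)
    # The table created through the first handle is visible via the second
    assert await second.table_names() == ["t"]
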
async def ds_tiles(metadata=None):
    datasette = Datasette([], metadata=metadata or {}, memory=True)
    for db_name, tiles in (
        ("world", [[2, 1, 1]]),
        ("country", [[2, 2, 1], [2, 2, 2]]),
        ("city1", [[2, 2, 1]]),
        ("city2", [[2, 3, 3]]),
    ):
        db = datasette.add_database(Database(datasette, memory_name=db_name))
        # During test runs database tables may exist already
        if await db.table_exists("tiles"):
            continue
        await db.execute_write(CREATE_TILES_TABLE, block=True)
        await db.execute_write(CREATE_METADATA_TABLE, block=True)
        for pair in (("name", db_name), ("format", "png")):
            await db.execute_write(
                "insert into metadata (name, value) values (?, ?)",
                pair,
                block=True,
            )
        for tile in tiles:
            await db.execute_write(
                "insert into tiles (zoom_level, tile_column, tile_row, tile_data) values (?, ?, ?, ?)",
                tile + ["tms:{}:{}".format(db_name, "/".join(map(str, tile)))],
                block=True,
            )
    await datasette.invoke_startup()
    return datasette

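# The CREATE_TILES_TABLE and CREATE_METADATA_TABLE constants are not shown
# in the snippet above; following the standard MBTiles schema they
# presumably look something like this:
CREATE_TILES_TABLE = """
create table tiles (
    zoom_level integer,
    tile_column integer,
    tile_row integer,
    tile_data blob
)
"""

CREATE_METADATA_TABLE = """
create table metadata (
    name text,
    value text
)
"""
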
def get_status_database(datasette, plugin_config):
    """
    Get a database to read from, based on the `status_database` plugin
    setting (or the first mutable DB in the list of databases).
    """
    # NOTE: This does not create the DB if it doesn't exist! Use
    # the import endpoint first.
    database = plugin_config.get("status_database")
    if not database:
        # For the moment just use the first database that's not immutable
        database = [
            name for name, db in datasette.databases.items() if db.is_mutable
        ][0]
    try:
        return datasette.databases[database]
    except KeyError:
        pass
    database_path = os.path.join(get_dbpath(plugin_config), f"{database}.db")
    # Connecting creates the database file if it doesn't exist yet
    sqlite3.connect(database_path)
    datasette.add_database(
        Database(datasette, path=database_path, is_mutable=True),
        name=database,
    )
    return datasette.databases[database]

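# The fallback branch above selects the first mutable database. As a
# standalone helper that pattern looks roughly like this (a sketch, not
# part of the original plugin):
def first_mutable_database(datasette):
    for db in datasette.databases.values():
        if db.is_mutable:
            return db
    raise LookupError("no mutable database attached")
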
async def test_in_memory_databases_forbid_writes(app_client):
    ds = app_client.ds
    db = ds.add_database(Database(ds, memory_name="test"))
    with pytest.raises(sqlite3.OperationalError):
        await db.execute("create table foo (t text)")
    assert await db.table_names() == []
    # Using db.execute_write() should work:
    await db.execute_write("create table foo (t text)", block=True)
    assert await db.table_names() == ["foo"]

async def test_detect_mtiles_databases(i, create_table, should_be_mtiles):
    datasette = Datasette([])
    name = "db_{}".format(i)
    db = datasette.add_database(Database(datasette, memory_name=name))
    if create_table:
        await db.execute_write(create_table, block=True)
    result = await detect_mtiles_databases(datasette)
    expected = [name] if should_be_mtiles else []
    assert result == expected

async def test_facet_size():
    ds = Datasette([], memory=True, settings={"max_returned_rows": 50})
    db = ds.add_database(Database(ds, memory_name="test_facet_size"))
    await db.execute_write(
        "create table neighbourhoods(city text, neighbourhood text)", block=True
    )
    for i in range(1, 51):
        for j in range(1, 4):
            await db.execute_write(
                "insert into neighbourhoods (city, neighbourhood) values (?, ?)",
                ["City {}".format(i), "Neighbourhood {}".format(j)],
                block=True,
            )
    response = await ds.client.get("/test_facet_size/neighbourhoods.json")
    data = response.json()
    assert data["suggested_facets"] == [
        {
            "name": "neighbourhood",
            "toggle_url": "http://localhost/test_facet_size/neighbourhoods.json?_facet=neighbourhood",
        }
    ]
    # Bump up _facet_size= to suggest city too
    response2 = await ds.client.get(
        "/test_facet_size/neighbourhoods.json?_facet_size=50"
    )
    data2 = response2.json()
    assert sorted(data2["suggested_facets"], key=lambda f: f["name"]) == [
        {
            "name": "city",
            "toggle_url": "http://localhost/test_facet_size/neighbourhoods.json?_facet_size=50&_facet=city",
        },
        {
            "name": "neighbourhood",
            "toggle_url": "http://localhost/test_facet_size/neighbourhoods.json?_facet_size=50&_facet=neighbourhood",
        },
    ]
    # Facet by city should return expected number of results
    response3 = await ds.client.get(
        "/test_facet_size/neighbourhoods.json?_facet_size=50&_facet=city"
    )
    data3 = response3.json()
    assert len(data3["facet_results"]["city"]["results"]) == 50
    # Reduce max_returned_rows and check that it's respected
    ds._settings["max_returned_rows"] = 20
    response4 = await ds.client.get(
        "/test_facet_size/neighbourhoods.json?_facet_size=50&_facet=city"
    )
    data4 = response4.json()
    assert len(data4["facet_results"]["city"]["results"]) == 20
    # Test _facet_size=max
    response5 = await ds.client.get(
        "/test_facet_size/neighbourhoods.json?_facet_size=max&_facet=city"
    )
    data5 = response5.json()
    assert len(data5["facet_results"]["city"]["results"]) == 20

async def test_execute_write_fn_connection_exception(tmpdir, app_client):
    path = str(tmpdir / "immutable.db")
    sqlite3.connect(path).execute("vacuum")
    db = Database(app_client.ds, path=path, is_mutable=False)
    app_client.ds.add_database(db, name="immutable-db")

    def write_fn(conn):
        assert False

    with pytest.raises(AssertionError):
        await db.execute_write_fn(write_fn, block=True)

    app_client.ds.remove_database("immutable-db")

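# Hedged usage sketch of execute_write_fn(): the function is handed the
# write connection, and with block=True its return value (or any exception
# it raises, as the test above checks) is relayed back to the awaiting
# caller. Table and column names here are illustrative:
def create_and_count(conn):
    conn.execute("create table if not exists items (name text)")
    return conn.execute("select count(*) from items").fetchone()[0]


# count = await db.execute_write_fn(create_and_count, block=True)
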
async def test_array_facet_handle_duplicate_tags():
    ds = Datasette([], memory=True)
    db = ds.add_database(Database(ds, memory_name="test_array_facet"))
    await db.execute_write("create table otters(name text, tags text)")
    for name, tags in (
        ("Charles", ["friendly", "cunning", "friendly"]),
        ("Shaun", ["cunning", "empathetic", "friendly"]),
        ("Tracy", ["empathetic", "eager"]),
    ):
        await db.execute_write(
            "insert into otters (name, tags) values (?, ?)", [name, json.dumps(tags)]
        )

    response = await ds.client.get("/test_array_facet/otters.json?_facet_array=tags")
    assert response.json()["facet_results"]["tags"] == {
        "name": "tags",
        "type": "array",
        "results": [
            {
                "value": "cunning",
                "label": "cunning",
                "count": 2,
                "toggle_url": "http://localhost/test_array_facet/otters.json?_facet_array=tags&tags__arraycontains=cunning",
                "selected": False,
            },
            {
                "value": "empathetic",
                "label": "empathetic",
                "count": 2,
                "toggle_url": "http://localhost/test_array_facet/otters.json?_facet_array=tags&tags__arraycontains=empathetic",
                "selected": False,
            },
            {
                "value": "friendly",
                "label": "friendly",
                "count": 2,
                "toggle_url": "http://localhost/test_array_facet/otters.json?_facet_array=tags&tags__arraycontains=friendly",
                "selected": False,
            },
            {
                "value": "eager",
                "label": "eager",
                "count": 1,
                "toggle_url": "http://localhost/test_array_facet/otters.json?_facet_array=tags&tags__arraycontains=eager",
                "selected": False,
            },
        ],
        "hideable": True,
        "toggle_url": "/test_array_facet/otters.json",
        "truncated": False,
    }

async def test_json_array_with_blanks_and_nulls():
    ds = Datasette([], memory=True)
    db = ds.add_database(Database(ds, memory_name="test_json_array"))
    await db.execute_write("create table foo(json_column text)")
    for value in ('["a", "b", "c"]', '["a", "b"]', "", None):
        await db.execute_write("insert into foo (json_column) values (?)", [value])
    response = await ds.client.get("/test_json_array/foo.json")
    data = response.json()
    assert data["suggested_facets"] == [
        {
            "name": "json_column",
            "type": "array",
            "toggle_url": "http://localhost/test_json_array/foo.json?_facet_array=json_column",
        }
    ]

def startup(datasette):
    datasette.add_database(
        Database(datasette, path=str(BASEMAP), is_mutable=False)
    )

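# For context, a hedged sketch of how a function like startup() above is
# typically wired up as a Datasette startup plugin hook, assuming BASEMAP
# is a path to a SQLite file bundled with the plugin:
from pathlib import Path

from datasette import hookimpl
from datasette.database import Database

BASEMAP = Path(__file__).parent / "basemap.db"  # illustrative location


@hookimpl
def startup(datasette):
    datasette.add_database(
        Database(datasette, path=str(BASEMAP), is_mutable=False)
    )
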
def test_is_mutable(app_client):
    assert Database(app_client.ds, is_memory=True, is_mutable=True).is_mutable is True
    assert Database(app_client.ds, is_memory=True, is_mutable=False).is_mutable is False

def test_mtime_ns_is_none_for_memory(app_client):
    memory_db = Database(app_client.ds, is_memory=True)
    assert memory_db.is_memory is True
    assert memory_db.mtime_ns is None

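# A hedged counterpart to the test above, assuming that for file-backed
# databases mtime_ns mirrors the file's st_mtime_ns (the tmp path and the
# vacuum trick to create the file are illustrative):
import os
import sqlite3


def test_mtime_ns_for_file_database(app_client, tmp_path):
    path = str(tmp_path / "data.db")
    sqlite3.connect(path).execute("vacuum")
    file_db = Database(app_client.ds, path=path)
    assert file_db.mtime_ns == os.stat(path).st_mtime_ns
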
def run_cli_import(conn):
    set_status(conn, "Running CSV import...")
    exitcode = -1
    output = None
    message = None
    try:
        with tempfile.NamedTemporaryFile() as temp:
            temp.write(csv.file.read())
            temp.flush()
            args.append(temp.name)
            args.append(outfile_db)
            # run the import command, capturing stdout
            with Capturing() as output:
                exitcode = command.main(
                    args=args, prog_name="cli", standalone_mode=False
                )
            if exitcode is not None:
                exitcode = int(exitcode)
            # Detect a failure to write the DB where the tool returns a
            # success code; this means we don't have to parse the CLI
            # output to figure out whether the command succeeded or not.
            if not os.path.exists(outfile_db) and not exitcode:
                exitcode = -2
    except Exception as e:
        print("Exception", e)
        exitcode = -2
        message = str(e)

    set_status(conn, "Adding database to internal DB list...")
    # Add this DB to the internal DBs list
    if basename not in datasette.databases:
        print("Adding database", basename)
        datasette.add_database(
            Database(datasette, path=outfile_db, is_mutable=True),
            name=basename,
        )
        # print("Database added successfully!")
        # try:
        #     loop = asyncio.get_running_loop()
        # except RuntimeError:
        #     loop = asyncio.new_event_loop()
        # print("Running schema refresh...")
        # loop.run_until_complete(datasette.refresh_schemas())
        # print("Schema refresh complete!")

    csvspath = get_csvspath(plugin_config)
    if csvspath:
        set_status(conn, "Saving CSV to server directory...")
        csv_db_name = args[-1].replace(".db", "")
        csv_table_name = csv_db_name
        if "-t" in formdata:
            csv_table_name = formdata["-t"]
        if "--table" in formdata:
            csv_table_name = formdata["--table"]
        outfile_csv = os.path.join(
            csvspath, f"{csv_db_name}--{csv_table_name}.csv"
        )
        outfile_args = os.path.join(
            csvspath, f"{csv_db_name}--{csv_table_name}.json"
        )
        # success! save our configs and CSV
        print("Writing CSV", outfile_csv)
        with open(outfile_csv, "wb") as f:
            csv.file.seek(0)
            f.write(csv.file.read())
        print("Writing args to", outfile_args)
        with open(outfile_args, "w") as f:
            f.write(json.dumps(args, indent=2))

        if get_use_live_metadata(plugin_config):
            print("Running live-config integration...")
            set_status(
                conn, "Running live-config plugin integration..."
            )
            # add the permission table, grant access to current user only;
            # this will create the DB if it doesn't exist
            print("Opening DB:", outfile_db)
            out_db = sqlite_utils.Database(sqlite3.connect(outfile_db))
            try:
                out_db["__metadata"].get("allow")
            except sqlite_utils.db.NotFoundError:
                # don't overwrite, only create
                out_db["__metadata"].insert(
                    {
                        "key": "tables",
                        "value": json.dumps({"__metadata": {"hidden": True}}),
                    },
                    pk="key",
                    alter=True,
                    replace=False,
                    ignore=True,
                )

        if get_use_live_permissions(plugin_config):
            print("Setting live-permissions plugin status...")
            set_status(
                conn, "Running live-permissions plugin integration..."
            )
            print(
                "Running set_perms_for_live_permissions with basename:",
                basename,
            )
            set_perms_for_live_permissions(datasette, request.actor, basename)
            print("set_perms_for_live_permissions complete!")

    if not message:
        message = "Import successful!" if not exitcode else "Failure"
    print("Updating status", message)
    status_database = sqlite_utils.Database(conn)
    status_database[status_table].update(
        task_id,
        {
            "completed": str(datetime.datetime.utcnow()),
            "exitcode": exitcode,
            "status": "completed",
            "message": message,
            # output may still be None if the import raised before stdout
            # was captured
            "output": "\n".join(output or []),
        },
    )

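# Capturing, used by run_cli_import() above to collect the CLI's stdout, is
# not defined in this snippet; a common implementation (an assumption, not
# the plugin's verbatim code) looks like this:
import io
import sys


class Capturing(list):
    """Context manager that captures stdout lines into this list."""

    def __enter__(self):
        self._stdout = sys.stdout
        sys.stdout = self._stringio = io.StringIO()
        return self

    def __exit__(self, *args):
        self.extend(self._stringio.getvalue().splitlines())
        del self._stringio
        sys.stdout = self._stdout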