def test_import_bare(pg_repo_local):
    # Check import without checking anything out, just by manipulating metadata and running LQs against
    # source images.

    # Create a new schema and import 'fruits'
    OUTPUT.init()
    # Make sure the existing table is preserved.
    OUTPUT.run_sql("CREATE TABLE sentinel (key INTEGER)")
    OUTPUT.commit()
    pg_repo_local.uncheckout()
    OUTPUT.uncheckout()
    OUTPUT.import_tables(
        tables=["imported_fruits"],
        source_repository=pg_repo_local,
        image_hash=pg_repo_local.images["latest"].image_hash,
        source_tables=["SELECT * FROM fruits WHERE fruit_id = 1"],
        parent_hash=OUTPUT.images["latest"].image_hash,
        do_checkout=False,
        table_queries=[True],
    )

    assert OUTPUT.head is None
    assert pg_repo_local.head is None
    assert sorted(OUTPUT.images["latest"].get_tables()) == ["imported_fruits", "sentinel"]
    assert list(
        OUTPUT.images["latest"].get_table("imported_fruits").query(columns=["name"], quals=[])
    ) == [{"name": "apple"}]
def test_layered_querying_json_arrays(local_engine_empty):
    # Check that layered querying correctly returns JSONB values as well as 1D and 2D arrays.
    OUTPUT.init()
    OUTPUT.run_sql(
        "CREATE TABLE test (key INTEGER PRIMARY KEY, "
        "value JSONB, arr_value INTEGER[], arr_2d_value TEXT[][])"
    )
    OUTPUT.run_sql(
        "INSERT INTO test VALUES (1, %s, %s, %s)",
        (json.dumps({"a": 1, "b": 2.5}), [1, 2, 3], [["one", "two"], ["three", "four"]]),
    )
    OUTPUT.commit()
    OUTPUT.run_sql(
        "INSERT INTO test VALUES (2, %s, %s, %s)",
        (
            json.dumps({"a": "one", "b": "two point five"}),
            [4, 5, 6],
            [["five", "six"], ["seven", "eight"]],
        ),
    )
    head = OUTPUT.commit()
    OUTPUT.uncheckout()

    # Query the image through layered querying (no physical checkout of the tables).
    head.checkout(layered=True)

    assert OUTPUT.run_sql("SELECT * FROM test ORDER BY key") == [
        (1, {"a": 1, "b": 2.5}, [1, 2, 3], [["one", "two"], ["three", "four"]]),
        (2, {"a": "one", "b": "two point five"}, [4, 5, 6], [["five", "six"], ["seven", "eight"]]),
    ]