def test_import_table_meta_overrides(data_archive_readonly, tmp_path, cli_runner, chdir, geopackage):
    """
    Import with --table-info overrides, then check that the overridden title,
    description and XML metadata all end up in the checked-out GeoPackage.
    """
    with data_archive_readonly("gpkg-au-census") as data:
        repo_path = tmp_path / "emptydir"
        r = cli_runner.invoke(["init", repo_path])
        assert r.exit_code == 0
        with chdir(repo_path):
            original_xml_metadata = '<gmd:MD_Metadata xmlns:gco="http://www.isotc211.org/2005/gco" xmlns:gmd="http://www.isotc211.org/2005/gmd" xmlns:gml="http://www.opengis.net/gml" xmlns:gts="http://www.isotc211.org/2005/gts" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns="http://www.isotc211.org/2005/gmd" />'
            table_info_json = json.dumps(
                {
                    "census2016_sdhca_ot_ced_short": {
                        "title": "test title",
                        "description": "test description",
                        "xmlMetadata": original_xml_metadata,
                    }
                }
            )
            r = cli_runner.invoke(
                [
                    "import",
                    data / "census2016_sdhca_ot_short.gpkg",
                    "census2016_sdhca_ot_ced_short",
                    "--table-info",
                    table_info_json,
                ],
            )
            assert r.exit_code == 0, r

            # FIX: the checkout result used to be discarded without checking
            # the exit code; a failed checkout then surfaced as a confusing
            # failure in the DB queries below instead of here.
            r = cli_runner.invoke(["checkout"])
            assert r.exit_code == 0, r

            repo = pygit2.Repository(str(repo_path))
            wc = WorkingCopy.get(repo)
            db = geopackage(wc.path)
            cur = db.cursor()

            # Title and description are stored in gpkg_contents.
            title, description = cur.execute(
                """
                SELECT c.identifier, c.description
                FROM gpkg_contents c
                WHERE c.table_name = 'census2016_sdhca_ot_ced_short'
                """
            ).fetchone()
            assert title == "census2016_sdhca_ot_ced_short: test title"
            assert description == "test description"

            # XML metadata is stored via gpkg_metadata + gpkg_metadata_reference.
            xml_metadata = cur.execute(
                """
                SELECT m.metadata
                FROM gpkg_metadata m
                JOIN gpkg_metadata_reference r ON m.id = r.md_file_id
                WHERE r.table_name = 'census2016_sdhca_ot_ced_short'
                """
            ).fetchone()[0]
            assert xml_metadata == original_xml_metadata
def test_checkout_workingcopy(
    version, archive, table, commit_sha, data_archive, tmp_path, cli_runner, geopackage
):
    """ Checkout a working copy to edit """
    # Version-2 repositories live in the "<archive>2" test-archive variants.
    if version == "2.0":
        archive += "2"

    with data_archive(archive) as repo_path:
        H.clear_working_copy()

        wc = tmp_path / f"{table}.gpkg"
        r = cli_runner.invoke(["checkout", f"--path={wc}"])
        assert r.exit_code == 0, r
        lines = r.stdout.splitlines()
        # TODO: restore these
        # assert re.match(fr"Checkout HEAD to .+ as GPKG \.\.\.$", lines[0])
        # assert re.match(fr"Commit: {commit_sha}$", lines[1])

        assert wc.exists()
        db = geopackage(wc)
        assert H.row_count(db, table) > 0

        repo = pygit2.Repository(str(repo_path))
        assert repo.is_bare
        assert repo.head.name == "refs/heads/master"
        assert repo.head.shorthand == "master"

        # The working copy records which tree it was checked out from;
        # that must be HEAD's tree.
        head_tree = repo.head.peel(pygit2.Tree)
        cur = db.cursor()
        cur.execute(
            """SELECT value FROM ".sno-meta" WHERE table_name='*' AND key='tree';"""
        )
        wc_tree_id = cur.fetchone()[0]
        assert wc_tree_id == head_tree.hex

        wc = WorkingCopy.open(repo)
        assert wc.assert_db_tree_match(head_tree)
def test_checkout_workingcopy(
    version, archive, table, commit_sha, data_archive, tmp_path, cli_runner, geopackage
):
    """ Checkout a working copy to edit """
    # The name of the repo-state table differs between repository versions.
    if version == "2":
        archive += "2"
        state_table = "gpkg_sno_state"
    else:
        state_table = ".sno-meta"

    with data_archive(archive) as repo_path:
        H.clear_working_copy()
        repo = pygit2.Repository(str(repo_path))

        r = cli_runner.invoke(["checkout"])
        wc = Path(repo.config["sno.workingcopy.path"])
        assert r.exit_code == 0, r
        assert r.stdout.splitlines() == [f"Creating working copy at {wc} ..."]
        assert wc.exists()

        db = geopackage(wc)
        assert H.row_count(db, table) > 0

        assert repo.is_bare
        assert repo.head.name == "refs/heads/master"
        assert repo.head.shorthand == "master"

        # The tree id recorded in the working copy must match HEAD's tree.
        head_tree = repo.head.peel(pygit2.Tree)
        row = (
            db.cursor()
            .execute(
                f"""SELECT value FROM "{state_table}" WHERE table_name='*' AND key='tree';"""
            )
            .fetchone()
        )
        assert row[0] == head_tree.hex

        wc = WorkingCopy.get(repo)
        assert wc.assert_db_tree_match(head_tree)

        # The dataset's primary key should be an autoincrementing integer PK
        # (a NOT NULL qualifier is also acceptable).
        rs = RepositoryStructure(repo)
        cols, pk_col = wc._get_columns(rs[table])
        expected_col_spec = f'"{pk_col}" INTEGER PRIMARY KEY AUTOINCREMENT'
        assert cols[pk_col] in (expected_col_spec, f"{expected_col_spec} NOT NULL")
def test_import_existing_wc(
    data_archive,
    data_working_copy,
    geopackage,
    cli_runner,
    insert,
    tmp_path,
    request,
    chdir,
):
    """
    Import a new dataset into a repo with an existing working copy. Dataset should get checked out.
    """
    with data_working_copy("points") as (repo_path, wcdb):
        # First import: a brand-new dataset alongside the existing one.
        with data_archive("gpkg-polygons") as source_path, chdir(repo_path):
            r = cli_runner.invoke(
                [
                    "import",
                    f"GPKG:{source_path / 'nz-waca-adjustments.gpkg'}",
                    H.POLYGONS.LAYER,
                ]
            )
            assert r.exit_code == 0, r

        repo = pygit2.Repository(str(repo_path))
        wc = WorkingCopy.get(repo)
        db = geopackage(wcdb)

        def check_wc_matches_head():
            # The tree id recorded in the working copy must equal HEAD's tree.
            head_tree = repo.head.peel(pygit2.Tree)
            with db:
                dbcur = db.cursor()
                dbcur.execute(
                    """SELECT value FROM ".sno-meta" WHERE table_name='*' AND key='tree';"""
                )
                assert dbcur.fetchone()[0] == head_tree.hex
            assert wc.assert_db_tree_match(head_tree)

        # The new dataset should be checked out into the existing working copy.
        assert H.row_count(db, "nz_waca_adjustments") > 0
        check_wc_matches_head()

        r = cli_runner.invoke(["status"])
        assert r.exit_code == 0, r
        assert r.stdout.splitlines()[-1] == "Nothing to commit, working copy clean"

        # Make some uncommitted edits in the working copy.
        with db:
            dbcur = db.cursor()
            dbcur.execute(
                "DELETE FROM nz_waca_adjustments WHERE rowid IN (SELECT rowid FROM nz_waca_adjustments ORDER BY id LIMIT 10);"
            )
            dbcur.execute("SELECT changes()")
            assert dbcur.fetchone()[0] == 10

        # Second import: must still succeed and must not clobber the edits.
        with data_archive("gpkg-polygons") as source_path, chdir(repo_path):
            r = cli_runner.invoke(
                [
                    "import",
                    f"GPKG:{source_path / 'nz-waca-adjustments.gpkg'}",
                    f"{H.POLYGONS.LAYER}:waca2",
                ]
            )
            assert r.exit_code == 0, r

        assert H.row_count(db, "waca2") > 0
        check_wc_matches_head()

        # The edits made before the second import are still pending.
        r = cli_runner.invoke(["status"])
        assert r.exit_code == 0, r
        assert r.stdout.splitlines()[-3:] == [
            "  nz_waca_adjustments:",
            "    feature:",
            "      10 deletes",
        ]
def test_init_import_table_ogr_types(data_archive_readonly, tmp_path, cli_runner, repo_version):
    """
    The "types" archive exercises a bunch of wacky column types; check that
    they all survive import into the working-copy schema.
    """
    with data_archive_readonly("types") as data:
        repo_path = tmp_path / "repo"
        r = cli_runner.invoke(
            [
                "init",
                f"--repo-version={repo_version}",
                "--import",
                data / "types.gpkg",
                str(repo_path),
            ],
        )
        assert r.exit_code == 0, r.stderr

        # There's a bunch of wacky types in here, let's check them
        repo = pygit2.Repository(str(repo_path))
        wc = WorkingCopy.get(repo)
        with wc.session() as db:
            table_info = [
                dict(row) for row in db.cursor().execute("PRAGMA table_info('types');")
            ]

        # (name, declared type, notnull, pk) for each column, in cid order.
        expected = [
            ("fid", "INTEGER", 1, 1),
            ("int16", "SMALLINT", 0, 0),
            ("int32", "MEDIUMINT", 0, 0),
            ("int64", "INTEGER", 0, 0),
            ("boolean", "BOOLEAN", 0, 0),
            ("double", "REAL", 0, 0),
            ("float32", "FLOAT", 0, 0),
            ("string", "TEXT", 0, 0),
            ("blob", "BLOB", 0, 0),
            ("date", "DATE", 0, 0),
            ("datetime", "DATETIME", 0, 0),
            ("time", "TEXT", 0, 0),
        ]
        assert table_info == [
            {
                "cid": cid,
                "name": name,
                "type": col_type,
                "notnull": notnull,
                "dflt_value": None,
                "pk": pk,
            }
            for cid, (name, col_type, notnull, pk) in enumerate(expected)
        ]
def test_init_import_table_ogr_types(data_archive_readonly, tmp_path, cli_runner):
    """
    The "types" archive exercises a bunch of wacky column types; check that
    they all survive import into the working-copy schema.
    """
    with data_archive_readonly("types") as data:
        repo_path = tmp_path / "repo"
        r = cli_runner.invoke(
            ["init", "--import", data / "types.gpkg", str(repo_path)],
        )
        assert r.exit_code == 0, r

        # There's a bunch of wacky types in here, let's check them
        repo = pygit2.Repository(str(repo_path))
        wc = WorkingCopy.open(repo)
        with wc.session() as db:
            table_info = [
                dict(row) for row in db.cursor().execute("PRAGMA table_info('types');")
            ]

        # (name, declared type, notnull, pk) for each column, in cid order.
        # Note: in this variant float32 maps to REAL (not FLOAT).
        expected = [
            ("fid", "INTEGER", 1, 1),
            ("int16", "SMALLINT", 0, 0),
            ("int32", "MEDIUMINT", 0, 0),
            ("int64", "INTEGER", 0, 0),
            ("boolean", "BOOLEAN", 0, 0),
            ("double", "REAL", 0, 0),
            ("float32", "REAL", 0, 0),
            ("string", "TEXT", 0, 0),
            ("blob", "BLOB", 0, 0),
            ("date", "DATE", 0, 0),
            ("datetime", "DATETIME", 0, 0),
            ("time", "TEXT", 0, 0),
        ]
        assert table_info == [
            {
                "cid": cid,
                "name": name,
                "type": col_type,
                "notnull": notnull,
                "dflt_value": None,
                "pk": pk,
            }
            for cid, (name, col_type, notnull, pk) in enumerate(expected)
        ]
def test_commit(
    repo_version,
    archive,
    layer,
    partial,
    data_working_copy,
    geopackage,
    cli_runner,
    request,
    edit_points,
    edit_polygons,
    edit_table,
):
    """
    commit outstanding changes from the working copy
    """
    versioned_archive = archive + "2" if repo_version == "2" else archive
    with data_working_copy(versioned_archive) as (repo_dir, wc_path):
        # empty: committing with no changes must fail ...
        r = cli_runner.invoke(["commit", "-m", "test-commit-empty"])
        assert r.exit_code == NO_CHANGES, r
        assert r.stderr.splitlines() == ["Error: No changes to commit"]

        # ... unless --allow-empty is given.
        r = cli_runner.invoke(["commit", "-m", "test-commit-empty", "--allow-empty"])
        assert r.exit_code == 0, r

        # make some changes
        # FIX: explicit dispatch table instead of the fragile
        # locals()[f"edit_{archive}"] reflection lookup. The try body is also
        # narrowed to the lookup itself, so a KeyError raised *inside* an edit
        # function is no longer masked as NotImplementedError.
        edit_funcs = {
            "points": edit_points,
            "polygons": edit_polygons,
            "table": edit_table,
        }
        db = geopackage(wc_path)
        with db:
            cur = db.cursor()
            try:
                edit_func = edit_funcs[archive]
            except KeyError:
                raise NotImplementedError(f"No edit_{archive}")
            pk_del = edit_func(cur)

        print(f"deleted fid={pk_del}")

        repo = pygit2.Repository(str(repo_dir))
        rs = RepositoryStructure(repo)
        wc = rs.working_copy
        original_change_count = _count_tracking_table_changes(db, wc, layer)

        if partial:
            # Commit only the single deleted feature.
            r = cli_runner.invoke(
                ["commit", "-m", "test-commit-1", "-o", "json", f"{layer}:{pk_del}"]
            )
        else:
            r = cli_runner.invoke(["commit", "-m", "test-commit-1", "-o", "json"])
        assert r.exit_code == 0, r

        commit_id = json.loads(r.stdout)["sno.commit/v1"]["commit"]
        print("commit:", commit_id)

        assert str(repo.head.target) == commit_id
        commit = repo.head.peel(pygit2.Commit)
        assert commit.message == "test-commit-1"
        # Commit timestamp should be (very) recent.
        assert time.time() - commit.commit_time < 3

        # The deleted feature must be gone from the committed tree.
        dataset = rs[layer]
        tree = repo.head.peel(pygit2.Tree)
        assert dataset.encode_1pk_to_path(pk_del) not in tree

        wc = WorkingCopy.get(repo)
        wc.assert_db_tree_match(tree)
        change_count = _count_tracking_table_changes(db, wc, layer)

        if partial:
            # All but one change should still be in the tracking table
            assert change_count == original_change_count - 1

            # Changes should still be visible in the working copy:
            r = cli_runner.invoke(["diff", "--exit-code"])
            assert r.exit_code == 1, r
            assert r.stdout != ""
        else:
            assert (
                change_count == 0
            ), f"Changes still listed in {wc.TRACKING_TABLE} after full commit"

            r = cli_runner.invoke(["diff", "--exit-code"])
            assert r.exit_code == 0, r
            assert r.stdout == ""