Example #1
def test_switch_pre_import_post_import(data_working_copy,
                                       data_archive_readonly, cli_runner):
    with data_archive_readonly("gpkg-au-census") as data:
        with data_working_copy("polygons") as (repo_path, wc_path):
            wc = KartRepo(repo_path).working_copy

            r = cli_runner.invoke([
                "import",
                data / "census2016_sdhca_ot_short.gpkg",
                "census2016_sdhca_ot_ced_short",
            ])
            assert r.exit_code == 0, r.stderr
            r = cli_runner.invoke(["checkout", "HEAD^"])
            assert r.exit_code == 0, r.stderr

            with wc.session() as sess:
                count = sess.scalar(
                    """SELECT COUNT(name) FROM sqlite_master WHERE type='table' AND name='census2016_sdhca_ot_ced_short';"""
                )
                assert count == 0

            r = cli_runner.invoke(["checkout", "main"])
            assert r.exit_code == 0, r.stderr

            with wc.session() as sess:
                count = sess.scalar(
                    """SELECT COUNT(name) FROM sqlite_master WHERE type='table' AND name='census2016_sdhca_ot_ced_short';"""
                )
                assert count == 1
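
The assertions above boil down to a plain sqlite_master lookup. Below is a standalone sketch of that existence check using SQLAlchemy; the database path is a placeholder, not one of the Kart fixtures.

import sqlalchemy

engine = sqlalchemy.create_engine("sqlite:///example.gpkg")  # placeholder path
with engine.connect() as conn:
    count = conn.scalar(
        sqlalchemy.text(
            "SELECT COUNT(name) FROM sqlite_master "
            "WHERE type='table' AND name=:name"
        ),
        {"name": "census2016_sdhca_ot_ced_short"},
    )
assert count in (0, 1)  # 1 if the table exists, 0 if it doesn't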
Example #2
def test_switch_with_trivial_schema_change(data_working_copy, cli_runner):
    # Column renames are one of the few schema changes we can make without recreating the whole table.
    with data_working_copy("points") as (repo_path, wc_path):
        wc = KartRepo(repo_path).working_copy
        with wc.session() as sess:
            sess.execute(
                f"""ALTER TABLE "{H.POINTS.LAYER}" RENAME "name_ascii" TO "name_latin1";"""
            )

        r = cli_runner.invoke(["commit", "-m", "change schema"])
        assert r.exit_code == 0, r.stderr
        r = cli_runner.invoke(["checkout", "HEAD^"])
        assert r.exit_code == 0, r.stderr
        with wc.session() as sess:
            name = sess.scalar(
                f"""SELECT name FROM pragma_table_info('{H.POINTS.LAYER}') WHERE cid = 3;"""
            )
            assert name == "name_ascii"

        r = cli_runner.invoke(["checkout", "main"])
        assert r.exit_code == 0, r.stderr
        with wc.session() as sess:
            name = sess.scalar(
                f"""SELECT name FROM pragma_table_info('{H.POINTS.LAYER}') WHERE cid = 3;"""
            )
            assert name == "name_latin1"
Example #3
def test_geopackage_locking_edit(data_working_copy, cli_runner, monkeypatch):
    with data_working_copy("points") as (repo_path, wc_path):
        wc = KartRepo(repo_path).working_copy

        is_checked = False
        orig_func = BaseWorkingCopy._write_features

        def _wrap(*args, **kwargs):
            nonlocal is_checked
            if not is_checked:
                with pytest.raises(sqlalchemy.exc.OperationalError,
                                   match=r"database is locked"):
                    with wc.session() as sess:
                        sess.execute(
                            "UPDATE gpkg_contents SET table_name=table_name;")
                is_checked = True

            return orig_func(*args, **kwargs)

        monkeypatch.setattr(BaseWorkingCopy, "_write_features", _wrap)

        r = cli_runner.invoke(["checkout", H.POINTS.HEAD1_SHA])
        assert r.exit_code == 0, r
        assert is_checked

        with wc.session() as sess:
            assert H.last_change_time(sess) == "2019-06-11T11:03:58.000000Z"
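
The wrap-and-delegate monkeypatching above is a reusable pattern: intercept a method once mid-operation, assert on in-progress state, then call through to the original. A self-contained sketch with illustrative names (nothing here is Kart API):

class Writer:
    def write(self, data):
        return len(data)

def test_write_is_intercepted(monkeypatch):
    is_checked = False
    orig_func = Writer.write

    def _wrap(self, data):
        nonlocal is_checked
        if not is_checked:
            # assert on in-progress state here, while the real call is underway
            is_checked = True
        return orig_func(self, data)

    monkeypatch.setattr(Writer, "write", _wrap)
    assert Writer().write("abc") == 3
    assert is_checked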
Example #4
def test_switch_with_meta_items(data_working_copy, cli_runner):
    with data_working_copy("points") as (repo_path, wc_path):
        wc = KartRepo(repo_path).working_copy
        with wc.session() as sess:
            sess.execute(
                """UPDATE gpkg_contents SET identifier = 'new identifier', description='new description'"""
            )

        r = cli_runner.invoke(
            ["commit", "-m", "change identifier and description"])
        assert r.exit_code == 0, r.stderr
        r = cli_runner.invoke(["checkout", "HEAD^"])
        assert r.exit_code == 0, r.stderr

        with wc.session() as sess:
            r = sess.execute(
                """SELECT identifier, description FROM gpkg_contents""")
            identifier, description = r.fetchone()
            assert identifier == "NZ Pa Points (Topo, 1:50k)"
            assert description.startswith("Defensive earthworks")

        r = cli_runner.invoke(["checkout", "main"])
        assert r.exit_code == 0, r.stderr

        with wc.session() as sess:
            r = sess.execute(
                """SELECT identifier, description FROM gpkg_contents""")
            identifier, description = r.fetchone()
            assert identifier == "new identifier"
            assert description == "new description"
Example #5
def test_fast_import(data_archive, tmp_path, cli_runner, chdir):
    table = H.POINTS.LAYER
    with data_archive("gpkg-points") as data:
        repo_path = tmp_path / "repo"
        repo_path.mkdir()

        with chdir(repo_path):
            r = cli_runner.invoke(["init"])
            assert r.exit_code == 0, r

            repo = KartRepo(repo_path)

            source = TableImportSource.open(data /
                                            "nz-pa-points-topo-150k.gpkg",
                                            table=table)

            fast_import.fast_import_tables(repo, [source], from_commit=None)

            assert not repo.is_empty
            assert repo.head.name == "refs/heads/main"
            assert repo.head.shorthand == "main"

            dataset = repo.datasets()[table]
            assert dataset.VERSION == 3

            # has a single commit
            assert len([c for c in repo.walk(repo.head.target)]) == 1
            assert list(dataset.meta_items())

            # has the right number of features
            feature_count = sum(1 for f in dataset.features())
            assert feature_count == source.feature_count
Example #6
def test_git_disabled(tmp_path, cli_runner, chdir):
    """ Create an empty Kart repository. """
    repo_path = tmp_path / "test_repo"
    repo_path.mkdir()

    # empty dir
    r = cli_runner.invoke(["init", str(repo_path)])
    assert r.exit_code == 0, r
    assert (repo_path / ".kart" / "HEAD").exists()

    repo = KartRepo(repo_path)

    with chdir(repo_path):
        # env={} means we don't inherit the environment of this process,
        # so it behaves as it would if the user typed it at the command line.
        r = subprocess.run(["git", "gc"], capture_output=True, encoding="utf-8", env={})
        assert r.returncode != 0
        assert "index uses kart extension, which we do not understand" in r.stderr
        assert "fatal:" in r.stderr

        # Whereas this runs with our custom environment, including GIT_INDEX_FILE
        r = subprocess.run(["git", "gc"], capture_output=True, encoding="utf-8")
        assert r.returncode == 0, r.stderr

        r = subprocess.run(["git", "gc"], capture_output=True, encoding="utf-8", env={})
        assert r.returncode != 0
        assert "index uses kart extension, which we do not understand" in r.stderr
        assert "fatal:" in r.stderr

    # Internally, this runs git-gc with the unlocked git index.
    repo.gc()

    # git-gc shouldn't create an index where there wasn't one already.
    assert not (repo_path / ".kart" / "unlocked_index").exists()
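
As the comment notes, env={} hands the child process a completely empty environment. A small sketch of the same isolation; with PATH unset, CPython's executable search typically falls back to os.defpath on POSIX, which is why "git" is still found here.

import subprocess

r = subprocess.run(["git", "--version"], capture_output=True, encoding="utf-8", env={})
print(r.returncode, r.stdout.strip())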
Example #7
def test_checkout_workingcopy(archive, table, commit_sha, data_archive,
                              tmp_path, cli_runner):
    """ Checkout a working copy to edit """
    with data_archive(archive) as repo_path:
        H.clear_working_copy()

        repo = KartRepo(repo_path)
        dataset = repo.datasets()[table]
        geom_cols = dataset.schema.geometry_columns

        r = cli_runner.invoke(["checkout"])
        wc_path = Path(repo.config["kart.workingcopy.location"])
        assert r.exit_code == 0, r
        assert r.stdout.splitlines() == [
            f"Creating working copy at {wc_path} ..."
        ]
        assert wc_path.exists()
        wc = repo.working_copy

        assert repo.head.name == "refs/heads/main"
        assert repo.head.shorthand == "main"
        assert wc.get_db_tree() == repo.head_tree.hex

        if geom_cols:
            with wc.session() as sess:
                spatial_index_count = sess.execute(
                    f"""SELECT COUNT(*) FROM "rtree_{table}_{geom_cols[0].name}";"""
                ).scalar()
                assert spatial_index_count == dataset.feature_count

        table_spec = KartAdapter_GPKG.v2_schema_to_sql_spec(dataset.schema)
        expected_col_spec = f"{KartAdapter_GPKG.quote(dataset.primary_key)} INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL"
        assert expected_col_spec in table_spec
Example #8
def _test_postgis_import(
        repo_path,
        cli_runner,
        chdir,
        *,
        table_name,
        pk_name="id",
        pk_size=64,
        import_args=(),
):
    r = cli_runner.invoke(["init", repo_path])
    assert r.exit_code == 0, r
    with chdir(repo_path):
        r = cli_runner.invoke([
            "import",
            os.environ["KART_POSTGRES_URL"],
            table_name,
            *import_args,
        ])
        assert r.exit_code == 0, r
    # now check metadata
    repo = KartRepo(repo_path)
    dataset = repo.datasets()[table_name]

    meta_items = dict(dataset.meta_items())
    meta_item_keys = set(meta_items.keys())
    assert "schema.json" in meta_item_keys
    crs_keys = meta_item_keys - {"title", "description", "schema.json"}
    assert len(crs_keys) == 1
    crs_key = next(iter(crs_keys))
    assert crs_key.startswith("crs/EPSG:") and crs_key.endswith(".wkt")
Example #9
def test_postgis_import_with_sampled_geometry_dimension(
    postgis_db,
    data_archive,
    tmp_path,
    cli_runner,
    request,
    chdir,
):
    with postgis_db.connect() as conn:
        conn.execute("""DROP TABLE IF EXISTS points_xyz CASCADE;""")
        conn.execute(
            """CREATE TABLE points_xyz (fid BIGINT PRIMARY KEY, shape GEOMETRY);"""
        )
        conn.execute(
            """INSERT INTO points_xyz (fid, shape) VALUES (1, ST_GeomFromText('POINT(1 2 3)', 4326));"""
        )

        _test_postgis_import(
            tmp_path / "repo",
            cli_runner,
            chdir,
            table_name="points_xyz",
            pk_name="fid",
            pk_size=64,
            import_args=["--primary-key=fid"],
        )

        repo = KartRepo(tmp_path / "repo")
        dataset = repo.datasets()["points_xyz"]
        [geom_col] = dataset.schema.geometry_columns
        assert geom_col.extra_type_info["geometryType"] == "GEOMETRY Z"

        conn.execute("""DROP TABLE IF EXISTS points_xyz CASCADE;""")
Example #10
def test_import_into_empty_branch(data_archive, cli_runner, chdir, tmp_path):
    repo_path = tmp_path / "repo"
    repo_path.mkdir()

    r = cli_runner.invoke(["init", "--bare", repo_path])
    assert r.exit_code == 0

    with data_archive("gpkg-points") as data:
        with chdir(repo_path):
            r = cli_runner.invoke(
                ["import", data / "nz-pa-points-topo-150k.gpkg"])
            assert r.exit_code == 0, r

            # delete the main branch.
            # HEAD still points to it, but that's okay - this just means
            # the branch is empty.
            # We still need to be able to import from this state.
            repo = KartRepo(repo_path)
            repo.references.delete("refs/heads/main")
            assert repo.head_is_unborn

            r = cli_runner.invoke(
                ["import", data / "nz-pa-points-topo-150k.gpkg"])
            assert r.exit_code == 0, r

            repo = KartRepo(repo_path)
            assert repo.head_commit
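
An "unborn" HEAD, as asserted above, is ordinary pygit2 behaviour: HEAD names a branch that has no commit yet. A tiny sketch in a throwaway repository:

import tempfile

import pygit2

repo = pygit2.init_repository(tempfile.mkdtemp(), bare=True)
assert repo.head_is_unborn  # HEAD points at an as-yet-commitless branch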
Example #11
def test_spatially_filtered_merge(data_archive, cli_runner):
    # Make sure spatially filtered merges work (that is, make sure writing merged indexes with missing features works).
    # See https://github.com/koordinates/kart/issues/550
    with data_archive("points-with-feature-envelopes") as repo1_path:
        repo1_url = f"file://{repo1_path.resolve()}"

        with data_archive("points-spatial-filtered") as repo2_path:
            repo2 = KartRepo(repo2_path)
            repo2.config["remote.origin.url"] = repo1_url

            ds = repo2.datasets()[H.POINTS.LAYER]

            local_feature_count = local_features(ds)
            assert local_feature_count != H.POINTS.ROWCOUNT
            assert local_feature_count == 817

            r = cli_runner.invoke(["-C", repo2_path, "create-workingcopy"])
            assert r.exit_code == 0, r.stderr

            r = cli_runner.invoke(["-C", repo2_path, "checkout", "-b", "left"])
            assert r.exit_code == 0, r.stderr

            with repo2.working_copy.session() as sess:
                assert H.row_count(sess, H.POINTS.LAYER) == 302
                sess.execute(
                    f"DELETE FROM {H.POINTS.LAYER} WHERE fid % 3 != 0;")

            r = cli_runner.invoke(
                ["-C", repo2_path, "commit", "-m", "left-commit"])
            assert r.exit_code == 0, r.stderr

            r = cli_runner.invoke(
                ["-C", repo2_path, "checkout", "-b", "right", "HEAD^"])
            assert r.exit_code == 0, r.stderr

            with repo2.working_copy.session() as sess:
                assert H.row_count(sess, H.POINTS.LAYER) == 302
                sess.execute(
                    f"DELETE FROM {H.POINTS.LAYER} WHERE fid % 3 != 1;")

            r = cli_runner.invoke(
                ["-C", repo2_path, "commit", "-m", "right-commit"])
            assert r.exit_code == 0, r.stderr

            r = cli_runner.invoke(
                ["-C", repo2_path, "merge", "left", "-m", "merged"])
            assert r.exit_code == 0, r.stderr

            # Make sure we can do a full-read of the new commit without any problems -
            # See https://github.com/koordinates/kart/issues/552 which explains why running create-workingcopy
            # can fail after a commit in a spatially filtered repo (unless we are careful and use promisor-packfiles),
            # whereas running diff will generally succeed regardless.
            r = cli_runner.invoke(
                ["-C", repo2_path, "create-workingcopy", "--delete-existing"])
            assert r.exit_code == 0, r.stderr
Example #12
    @classmethod
    def clear_working_copy(cls, repo_path="."):
        """ Delete any existing working copy & associated config """
        repo = KartRepo(repo_path)
        wc = repo.get_working_copy(allow_invalid_state=True)
        if wc:
            print(
                f"Deleting existing working copy: {repo.workingcopy_location}")
            wc.delete()

        if repo.WORKINGCOPY_LOCATION_KEY in repo.config:
            del repo.config[repo.WORKINGCOPY_LOCATION_KEY]
Example #13
def test_tag(data_working_copy, cli_runner):
    """ review commit history """
    with data_working_copy("points") as (repo_dir, wc):
        # create a tag
        r = cli_runner.invoke(["tag", "version1"])
        assert r.exit_code == 0, r

        repo = KartRepo(repo_dir)
        assert "refs/tags/version1" in repo.references
        ref = repo.lookup_reference_dwim("version1")
        assert ref.target.hex == H.POINTS.HEAD_SHA
Example #14
def test_import_replace_existing_with_column_renames(
    data_archive,
    tmp_path,
    cli_runner,
    chdir,
):
    with data_archive("gpkg-polygons") as data:
        repo_path = tmp_path / "emptydir"
        r = cli_runner.invoke(["init", repo_path])
        assert r.exit_code == 0
        with chdir(repo_path):
            r = cli_runner.invoke([
                "import",
                data / "nz-waca-adjustments.gpkg",
                "nz_waca_adjustments:mytable",
            ])
            assert r.exit_code == 0, r.stderr

            # Now rename
            # * doesn't include the `survey_reference` column
            # * has the columns in a different order
            # * has a new column
            with Db_GPKG.create_engine(
                    data / "nz-waca-adjustments.gpkg").connect() as conn:
                conn.execute("""
                    ALTER TABLE "nz_waca_adjustments" RENAME COLUMN "survey_reference" TO "renamed_survey_reference";
                    """)

            r = cli_runner.invoke([
                "import",
                "--replace-existing",
                data / "nz-waca-adjustments.gpkg",
                "nz_waca_adjustments:mytable",
            ])
            assert r.exit_code == 0, r.stderr
            r = cli_runner.invoke(["show", "-o", "json"])
            assert r.exit_code == 0, r.stderr
            diff = json.loads(r.stdout)["kart.diff/v1+hexwkb"]["mytable"]

            # The schema changed, but the features didn't.
            assert diff["meta"]["schema.json"]
            assert not diff.get("feature")

            repo = KartRepo(repo_path)
            head_rs = repo.structure("HEAD")
            old_rs = repo.structure("HEAD^")
            assert head_rs.tree != old_rs.tree
            new_feature_tree = head_rs.tree / "mytable/.table-dataset/feature"
            old_feature_tree = old_rs.tree / "mytable/.table-dataset/feature"
            assert new_feature_tree == old_feature_tree
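
The final assertion works because git is content-addressed: unchanged feature content hashes to the identical object id, so both commits share literally the same feature tree. A sketch of the underlying blob hashing (for classic SHA-1 repositories):

import hashlib

def git_blob_id(data: bytes) -> str:
    # A git blob id is the SHA-1 of b"blob <size>\x00" + content.
    return hashlib.sha1(b"blob %d\x00" % len(data) + data).hexdigest()

assert git_blob_id(b"hello") == git_blob_id(b"hello")
assert git_blob_id(b"hello") != git_blob_id(b"hello!")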
Example #15
def test_pk_encoder_legacy_hashed(data_archive_readonly):
    archive_path = Path("upgrade") / "v2.kart" / "points.tgz"
    with data_archive_readonly(archive_path) as repo_path:
        repo = KartRepo(repo_path)
        ds = repo.datasets()["nz_pa_points_topo_150k"]
        e = ds.feature_path_encoder
        assert isinstance(e, MsgpackHashPathEncoder)
        assert e.encoding == "hex"
        assert e.branches == 256
        assert e.levels == 2
        assert (ds.encode_1pk_to_path(1181) ==
                "nz_pa_points_topo_150k/.sno-dataset/feature/7b/36/kc0EnQ==")
        assert (ds.encode_1pk_to_path("Dave") ==
                "nz_pa_points_topo_150k/.sno-dataset/feature/b2/fe/kaREYXZl")
Example #16
def test_fetch(
    data_archive_readonly,
    data_working_copy,
    cli_runner,
    insert,
    tmp_path,
    request,
):
    with data_working_copy("points") as (path1, wc):
        subprocess.run(["git", "init", "--bare", str(tmp_path)], check=True)

        r = cli_runner.invoke(["remote", "add", "myremote", tmp_path])
        assert r.exit_code == 0, r

        with Db_GPKG.create_engine(wc).connect() as conn:
            commit_id = insert(conn)

        r = cli_runner.invoke(["push", "--set-upstream", "myremote", "main"])
        assert r.exit_code == 0, r

    with data_working_copy("points") as (path2, wc):
        repo = KartRepo(path2)
        h = repo.head.target.hex

        r = cli_runner.invoke(["remote", "add", "myremote", tmp_path])
        assert r.exit_code == 0, r

        r = cli_runner.invoke(["fetch", "myremote"])
        assert r.exit_code == 0, r

        H.git_graph(request, "post-fetch")

        assert repo.head.name == "refs/heads/main"
        assert repo.head.target.hex == h

        remote_branch = repo.lookup_reference_dwim("myremote/main")
        assert remote_branch.target.hex == commit_id

        fetch_head = repo.lookup_reference("FETCH_HEAD")
        assert fetch_head.target.hex == commit_id

        # merge
        r = cli_runner.invoke(["merge", "myremote/main"])
        assert r.exit_code == 0, r

        assert repo.head.name == "refs/heads/main"
        assert repo.head.target.hex == commit_id
        commit = repo.head_commit
        assert len(commit.parents) == 1
        assert commit.parents[0].hex == h
Example #17
def test_import_multiple(data_archive, chdir, cli_runner, tmp_path):
    repo_path = tmp_path / "repo"
    repo_path.mkdir()

    with chdir(repo_path):
        r = cli_runner.invoke(["init"])
        assert r.exit_code == 0, r

    repo = KartRepo(repo_path)
    assert repo.is_empty

    LAYERS = (
        ("gpkg-points", "nz-pa-points-topo-150k.gpkg", H.POINTS.LAYER),
        ("gpkg-polygons", "nz-waca-adjustments.gpkg", H.POLYGONS.LAYER),
    )

    datasets = []
    for i, (archive, source_gpkg, table) in enumerate(LAYERS):
        with data_archive(archive) as data:
            with chdir(repo_path):
                r = cli_runner.invoke(
                    ["import", f"GPKG:{data / source_gpkg}", table])
                assert r.exit_code == 0, r

                datasets.append(
                    _import_check(
                        repo_path,
                        table,
                        f"{data / source_gpkg}",
                    ))

                assert len([c for c in repo.walk(repo.head.target)]) == i + 1

                if i + 1 == len(LAYERS):
                    r = cli_runner.invoke(
                        ["import", f"GPKG:{data / source_gpkg}", table])
                    assert r.exit_code == INVALID_OPERATION

    # has two commits
    assert len([c for c in repo.walk(repo.head.target)]) == len(LAYERS)

    tree = repo.head_tree

    for i, ds in enumerate(datasets):
        assert ds.path == LAYERS[i][2]

        feature = next(ds.features())
        f_path = ds.encode_1pk_to_path(feature[ds.primary_key])
        assert tree / f_path
Example #18
def test_change_spatial_filter(data_archive, cli_runner, insert):
    with data_archive("polygons.tgz") as repo_path:
        repo = KartRepo(repo_path)
        H.clear_working_copy()

        r = cli_runner.invoke(["checkout", "main"])
        assert r.exit_code == 0, r.stderr

        with repo.working_copy.session() as sess:
            assert H.row_count(sess, H.POLYGONS.LAYER) == H.POLYGONS.ROWCOUNT

        geom = SPATIAL_FILTER_GEOMETRY["polygons"]
        crs = SPATIAL_FILTER_CRS["polygons"]
        r = cli_runner.invoke(
            ["checkout", "main", f"--spatial-filter={crs};{geom}"])
        assert r.exit_code == 0, r.stderr

        with repo.working_copy.session() as sess:
            assert H.row_count(sess, H.POLYGONS.LAYER) == 44

        r = cli_runner.invoke(["checkout", "main", "--spatial-filter="])
        assert r.exit_code == 0, r.stderr

        with repo.working_copy.session() as sess:
            assert H.row_count(sess, H.POLYGONS.LAYER) == H.POLYGONS.ROWCOUNT
            insert(sess, commit=False)

        r = cli_runner.invoke(
            ["checkout", "main", f"--spatial-filter={crs};{geom}"])
        assert r.exit_code == INVALID_OPERATION
        assert "You have uncommitted changes in your working copy" in r.stderr
Example #19
    def _data_working_copy(archive_path, force_new=False):
        nonlocal incr

        archive_path = get_archive_path(archive_path)
        with data_archive(archive_path) as repo_dir:
            repo = KartRepo(repo_dir)
            if repo.working_copy:
                wc_path = repo.working_copy.full_path
                if force_new:
                    L.info("force_new is set, deleting existing WC: %s",
                           wc_path)
                    del repo.working_copy
                    assert not hasattr(repo, "_working_copy")
                    del wc_path

            if not repo.working_copy:
                wc_path = (
                    tmp_path_factory.mktemp(request.node.name, str(incr)) /
                    archive_path.with_suffix(".gpkg").name)
                incr += 1
                L.info("Creating working copy at %s", wc_path)
                r = cli_runner.invoke(
                    ["create-workingcopy", wc_path, "--delete-existing"])
                assert r.exit_code == 0, r.stderr

            del repo

            L.info("data_working_copy: %s %s", repo_dir, wc_path)
            yield repo_dir, wc_path
Example #20
def test_apply_with_no_working_copy(data_archive, cli_runner):
    patch_filename = "updates-only.kartpatch"
    message = "Change the Coromandel"
    author = {"name": "Someone", "time": 1561040913, "offset": 60}
    with data_archive("points") as repo_dir:
        patch_path = patches / patch_filename  # `patches` is a module-level Path to the test patch directory
        r = cli_runner.invoke(["apply", patch_path])
        assert r.exit_code == 0, r.stderr

        repo = KartRepo(repo_dir)
        commit = repo.head_commit

        # the author details all come from the patch, including timestamp
        assert commit.message == message
        assert commit.author.name == author["name"]
        assert commit.author.time == author["time"]
        assert commit.author.offset == author["offset"]

        # the committer timestamp doesn't come from the patch
        assert commit.committer.time > commit.author.time
        bits = r.stdout.split()
        assert bits[0] == "Commit"

        # Check that the `kart create-patch` output is the same as our original patch file had.
        r = cli_runner.invoke(["create-patch", "HEAD"])
        assert r.exit_code == 0, r.stderr
        patch = json.loads(r.stdout)
        original_patch = json.load(patch_path.open("r", encoding="utf-8"))

        assert patch["kart.patch/v1"] == original_patch["kart.patch/v1"]
        assert patch["kart.diff/v1+hexwkb"] == original_patch["kart.diff/v1+hexwkb"]
Example #21
def test_empty_geometry_roundtrip(data_archive, cli_runner, new_sqlserver_db_schema):
    with data_archive("empty-geometry") as repo_path:
        repo = KartRepo(repo_path)
        H.clear_working_copy()

        with new_sqlserver_db_schema() as (sqlserver_url, sqlserver_schema):
            repo.config["kart.workingcopy.location"] = sqlserver_url
            r = cli_runner.invoke(["checkout"])

            with repo.working_copy.session() as sess:
                # We don't diff values unless the row is marked dirty in the WC - shift each PK away and back to dirty every row.
                sess.execute(
                    f'UPDATE {sqlserver_schema}.point_test SET "PK"="PK" + 1000;'
                )
                sess.execute(
                    f'UPDATE {sqlserver_schema}.point_test SET "PK"="PK" - 1000;'
                )
                sess.execute(
                    f'UPDATE {sqlserver_schema}.polygon_test SET "PK"="PK" + 1000;'
                )
                sess.execute(
                    f'UPDATE {sqlserver_schema}.polygon_test SET "PK"="PK" - 1000;'
                )

            # If values roundtripping code isn't working for certain types,
            # we could get spurious diffs on those values.
            r = cli_runner.invoke(["diff", "--exit-code"])
            assert r.exit_code == 0, r.stdout
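
The paired +1000/-1000 updates are a trick worth noting: they mark every row dirty for the working copy's change tracking while leaving the data bit-for-bit identical. The same no-op shuffle in plain sqlite3 (table and column names are placeholders):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE point_test (PK INTEGER PRIMARY KEY, shape TEXT)")
conn.executemany("INSERT INTO point_test VALUES (?, ?)", [(1, "a"), (2, "b")])
conn.execute('UPDATE point_test SET "PK" = "PK" + 1000')
conn.execute('UPDATE point_test SET "PK" = "PK" - 1000')
assert [r[0] for r in conn.execute("SELECT PK FROM point_test ORDER BY PK")] == [1, 2]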
Example #22
def test_init_import(
    existing_schema,
    new_sqlserver_db_schema,
    data_archive,
    tmp_path,
    cli_runner,
):
    """ Import the GeoPackage (eg. `kx-foo-layer.gpkg`) into a Kart repository. """
    repo_path = tmp_path / "repo"
    repo_path.mkdir()

    with data_archive("gpkg-points") as data:
        with new_sqlserver_db_schema(create=existing_schema) as (
            sqlserver_url,
            sqlserver_schema,
        ):
            r = cli_runner.invoke(
                [
                    "init",
                    "--import",
                    f"gpkg:{data / 'nz-pa-points-topo-150k.gpkg'}",
                    str(repo_path),
                    f"--workingcopy-path={sqlserver_url}",
                ]
            )
            assert r.exit_code == 0, r.stderr
            assert (repo_path / ".kart" / "HEAD").exists()

            repo = KartRepo(repo_path)
            wc = repo.working_copy
            assert wc.status() & WorkingCopyStatus.INITIALISED
            assert wc.status() & WorkingCopyStatus.HAS_DATA

            assert wc.location == sqlserver_url
Example #23
def test_fsck(data_working_copy, cli_runner):
    with data_working_copy("points") as (repo_path, wc):
        repo = KartRepo(repo_path)

        r = cli_runner.invoke(["fsck"])
        assert r.exit_code == 0, r.stdout

        with repo.working_copy.session() as sess:
            assert H.row_count(sess, H.POINTS.LAYER) == H.POINTS.ROWCOUNT
            assert H.row_count(sess, "gpkg_kart_track") == 0

            # introduce a feature mismatch
            sess.execute(
                f"UPDATE {H.POINTS.LAYER} SET name='fred' WHERE fid=1;")
            sess.execute("""DELETE FROM "gpkg_kart_track" WHERE pk='1';""")

        r = cli_runner.invoke(["fsck"])
        assert r.exit_code == 1, r

        r = cli_runner.invoke(
            ["fsck", "--reset-dataset=nz_pa_points_topo_150k"])
        assert r.exit_code == 0, r

        with repo.working_copy.session() as sess:
            assert H.row_count(sess, H.POINTS.LAYER) == H.POINTS.ROWCOUNT
            assert H.row_count(sess, "gpkg_kart_track") == 0

        r = cli_runner.invoke(["fsck"])
        assert r.exit_code == 0, r
Example #24
def test_merge_fastforward(data, data_working_copy, cli_runner, insert,
                           request):
    with data_working_copy(data.ARCHIVE) as (repo_path, wc):
        repo = KartRepo(repo_path)
        # new branch
        r = cli_runner.invoke(["checkout", "-b", "changes"])
        assert r.exit_code == 0, r
        assert repo.head.name == "refs/heads/changes"

        h = repo.head.target.hex

        # make some changes
        with repo.working_copy.session() as sess:
            insert(sess)
            insert(sess)
            commit_id = insert(sess)

        H.git_graph(request, "pre-merge")
        assert repo.head.target.hex == commit_id

        r = cli_runner.invoke(["checkout", "main"])
        assert r.exit_code == 0, r
        assert repo.head.target.hex != commit_id

        r = cli_runner.invoke(["merge", "--ff-only", "changes"])
        assert r.exit_code == 0, r

        H.git_graph(request, "post-merge")

        assert repo.head.name == "refs/heads/main"
        assert repo.head.target.hex == commit_id
        c = repo.head_commit
        assert len(c.parents) == 1
        assert c.parents[0].parents[0].parents[0].hex == h
Example #25
def test_merge_state_lock(data_archive, cli_runner):
    with data_archive("conflicts/points.tgz") as repo_path:
        repo = KartRepo(repo_path)
        # Repo state: normal
        # kart checkout works, but kart conflicts and kart resolve do not.
        assert repo.state == KartRepoState.NORMAL

        r = cli_runner.invoke(["checkout", "ours_branch"])
        assert r.exit_code == SUCCESS
        r = cli_runner.invoke(["conflicts"])
        assert r.exit_code == INVALID_OPERATION
        r = cli_runner.invoke(["resolve", "dummy_conflict", "--with=delete"])
        assert r.exit_code == INVALID_OPERATION

        r = cli_runner.invoke(["merge", "theirs_branch"])
        assert r.exit_code == SUCCESS

        # Repo state: merging
        assert repo.state == KartRepoState.MERGING

        # kart checkout is locked, but kart conflicts and kart resolve work.
        r = cli_runner.invoke(["checkout", "ours_branch"])
        assert r.exit_code == INVALID_OPERATION
        r = cli_runner.invoke(["conflicts"])
        assert r.exit_code == SUCCESS
        r = cli_runner.invoke(["resolve", "dummy_conflict", "--with=delete"])
        assert r.exit_code == NO_CONFLICT  # "dummy_conflict" is not a real conflict
Example #26
def test_walk_tree_3(data_archive):
    with data_archive("points"):
        r = KartRepo(".")
        root_tree = r.head_tree

        for i, (tree, path, dirs,
                blobs) in enumerate(walk_tree(root_tree, "root",
                                              topdown=False)):
            # print(i, tree, path, dirs, blobs)
            if i == 0:
                assert path == "/".join([
                    "root",
                    "nz_pa_points_topo_150k",
                    ".table-dataset",
                    "feature",
                    "A",
                    "A",
                    "A",
                    "A",
                ])
                assert tree == (root_tree / "nz_pa_points_topo_150k" /
                                ".table-dataset" / "feature" / "A" / "A" /
                                "A" / "A")
                assert dirs == []
                assert blobs[0:5] == ["kQ0=", "kQ4=", "kQ8=", "kQE=", "kQI="]
            elif i == 1:
                assert path == "/".join([
                    "root",
                    "nz_pa_points_topo_150k",
                    ".table-dataset",
                    "feature",
                    "A",
                    "A",
                    "A",
                    "B",
                ])
                assert tree == (root_tree / "nz_pa_points_topo_150k" /
                                ".table-dataset" / "feature" / "A" / "A" /
                                "A" / "B")
                assert dirs == []
                assert blobs[0:5] == ["kU0=", "kU4=", "kU8=", "kUA=", "kUE="]
            elif i == 34:
                assert path == "/".join([
                    "root",
                    "nz_pa_points_topo_150k",
                    ".table-dataset",
                    "feature",
                    "A",
                    "A",
                    "A",
                ])
                assert tree == (root_tree / "nz_pa_points_topo_150k" /
                                ".table-dataset" / "feature" / "A" / "A" / "A")
                assert "".join(dirs) == "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefgh"
                assert blobs == []

        o = subprocess.check_output(["git", "ls-tree", "-r", "-d", "HEAD"])
        count = len(o.splitlines())
        assert i == count
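
walk_tree with topdown=False yields children before their parents, like os.walk. A simplified sketch of such a traversal over a pygit2 tree; Kart's real walk_tree also supports topdown=True and path handling, so this is only the shape of it:

import pygit2

def walk_bottom_up(tree, path):
    """Yield (tree, path, dirs, blobs) tuples, children before parents."""
    dirs, blobs = [], []
    for entry in tree:
        if isinstance(entry, pygit2.Tree):
            dirs.append(entry.name)
            yield from walk_bottom_up(entry, f"{path}/{entry.name}")
        else:
            blobs.append(entry.name)
    yield tree, path, dirs, blobs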
Example #27
def test_pk_conflict_due_to_spatial_filter(data_archive, cli_runner, insert,
                                           edit_points):
    with data_archive("points.tgz") as repo_path:
        repo = KartRepo(repo_path)
        H.clear_working_copy()
        repo.config["kart.spatialfilter.geometry"] = SPATIAL_FILTER_GEOMETRY[
            "points"]
        repo.config["kart.spatialfilter.crs"] = SPATIAL_FILTER_CRS["points"]

        r = cli_runner.invoke(["checkout", "main"])
        assert r.exit_code == 0, r.stderr
        head_tree_id = repo.head_tree.id

        with repo.working_copy.session() as sess:
            assert H.row_count(sess, H.POINTS.LAYER) == 302
            # Both of these new features are outside the spatial filter.
            # One of them - PK=1 - is a conflict with an existing feature (that is outside the spatial filter).
            insert(sess, commit=False, with_pk=1)
            insert(sess, commit=False, with_pk=98001)
            assert H.row_count(sess, H.POINTS.LAYER) == 304

        r = cli_runner.invoke(["status", "-o", "json"])
        assert r.exit_code == 0, r.stderr
        change_status = json.loads(
            r.stdout)["kart.status/v1"]["workingCopy"]["changes"]
        feature_changes = change_status["nz_pa_points_topo_150k"]["feature"]
        assert feature_changes == {"inserts": 1, "primaryKeyConflicts": 1}

        r = cli_runner.invoke(["status"])
        assert r.exit_code == 0, r.stderr
        assert "1 inserts" in r.stdout
        assert "1 primary key conflicts" in r.stdout

        r = cli_runner.invoke(["diff"])
        assert r.exit_code == 0
        assert "Warning: " in r.stderr
        assert (
            "In dataset nz_pa_points_topo_150k the conflicting primary key values are: 1"
            in r.stderr)

        r = cli_runner.invoke(["commit", "-m", "test"])
        assert r.exit_code == SPATIAL_FILTER_PK_CONFLICT
        assert (
            "In dataset nz_pa_points_topo_150k the conflicting primary key values are: 1"
            in r.stderr)
        assert "Aborting commit due to conflicting primary key values" in r.stderr
        assert repo.head_tree.id == head_tree_id

        r = cli_runner.invoke(["commit", "-m", "test", "--allow-pk-conflicts"])
        assert r.exit_code == 0
        assert repo.head_tree.id != head_tree_id

        assert (
            "Removing 2 features from the working copy that no longer match the spatial filter..."
            in r.stdout)

        with repo.working_copy.session() as sess:
            assert H.row_count(sess, H.POINTS.LAYER) == 302
Example #28
def test_init_import_detached_head(data_working_copy, data_archive, chdir,
                                   cli_runner):
    with data_working_copy("points") as (repo_path, wcdb):
        with data_archive("gpkg-polygons") as source_path, chdir(repo_path):
            r = cli_runner.invoke(["checkout", "HEAD^"])
            repo = KartRepo(repo_path)
            assert repo.head_is_detached
            initial_head = repo.head.target.hex

            r = cli_runner.invoke([
                "import",
                f"GPKG:{source_path / 'nz-waca-adjustments.gpkg'}",
                H.POLYGONS.LAYER,
            ])
            assert r.exit_code == 0, r
            assert repo.head_is_detached
            assert repo.head.target.hex != initial_head
            assert repo.revparse_single("HEAD^").hex == initial_head
Example #29
def test_merge_index_roundtrip(data_archive, cli_runner):
    # Difficult to create conflict indexes directly - easier to create them by doing a merge:
    with data_archive("conflicts/polygons.tgz") as repo_path:
        repo = KartRepo(repo_path)
        ancestor = CommitWithReference.resolve(repo, "ancestor_branch")
        ours = CommitWithReference.resolve(repo, "ours_branch")
        theirs = CommitWithReference.resolve(repo, "theirs_branch")

        ancestor_id = repo.merge_base(ours.id, theirs.id)
        assert ancestor_id.hex == ancestor.id.hex

        index = repo.merge_trees(ancestor.tree, ours.tree, theirs.tree)
        assert index.conflicts

        # Create a MergeIndex object, and roundtrip it into a tree and back.
        orig = MergeIndex.from_pygit2_index(index)
        assert len(orig.entries) == 237
        assert len(orig.conflicts) == 4
        assert len(orig.resolves) == 0
        assert len(orig.unresolved_conflicts) == 4

        orig.write("test.conflict.index")
        r1 = MergeIndex.read("test.conflict.index")
        assert r1 is not orig
        assert r1 == orig

        # Simulate resolving some conflicts:
        items = list(r1.conflicts.items())
        key, conflict = items[0]
        # Resolve conflict 0 by accepting our version.
        r1.add_resolve(key, [conflict.ours])
        # Resolve conflict 1 by deleting it entirely.
        key, conflict = items[1]
        r1.add_resolve(key, [])
        assert r1 != orig
        assert len(r1.entries) == 237
        assert len(r1.conflicts) == 4
        assert len(r1.resolves) == 2
        assert len(r1.unresolved_conflicts) == 2

        # Roundtrip again
        r1.write("test.conflict.index")
        r2 = MergeIndex.read("test.conflict.index")
        assert r2 == r1
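
For reference, the raw pygit2 ingredients MergeIndex wraps: merge_trees returns an in-memory Index whose .conflicts is None when the merge is clean, or an iterable of (ancestor, ours, theirs) entries otherwise. A hedged sketch; the repository path and branch names are placeholders:

import pygit2

repo = pygit2.Repository("/path/to/repo")  # placeholder
ancestor = repo.revparse_single("ancestor_branch")
ours = repo.revparse_single("ours_branch")
theirs = repo.revparse_single("theirs_branch")
index = repo.merge_trees(ancestor.tree, ours.tree, theirs.tree)
if index.conflicts is not None:
    for a, o, t in index.conflicts:  # entries may be None on either side
        print((a or o or t).path)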
Example #30
def test_status_merging(data_archive, cli_runner):
    with data_archive("conflicts/points.tgz") as repo_path:
        repo = KartRepo(repo_path)
        r = cli_runner.invoke(["merge", "theirs_branch"])
        assert r.exit_code == 0, r

        assert repo.state == KartRepoState.MERGING
        assert text_status(cli_runner) == [
            "On branch ours_branch",
            "",
            'Repository is in "merging" state.',
            'Merging branch "theirs_branch" into ours_branch',
            "Conflicts:",
            "",
            "nz_pa_points_topo_150k:",
            "    nz_pa_points_topo_150k:feature: 4 conflicts",
            "",
            "View conflicts with `kart conflicts` and resolve them with `kart resolve`.",
            "Once no conflicts remain, complete this merge with `kart merge --continue`.",
            "Or use `kart merge --abort` to return to the previous state.",
        ]

        ancestor = CommitWithReference.resolve(repo, "ancestor_branch")
        ours = CommitWithReference.resolve(repo, "ours_branch")
        theirs = CommitWithReference.resolve(repo, "theirs_branch")
        assert json_status(cli_runner) == {
            "kart.status/v1": {
                "abbrevCommit": ours.short_id,
                "commit": ours.id.hex,
                "branch": "ours_branch",
                "upstream": None,
                "state": "merging",
                "merging": {
                    "ancestor": {
                        "abbrevCommit": ancestor.short_id,
                        "commit": ancestor.id.hex,
                    },
                    "ours": {
                        "abbrevCommit": ours.short_id,
                        "commit": ours.id.hex,
                        "branch": "ours_branch",
                    },
                    "theirs": {
                        "abbrevCommit": theirs.short_id,
                        "commit": theirs.id.hex,
                        "branch": "theirs_branch",
                    },
                },
                "conflicts": {
                    "nz_pa_points_topo_150k": {
                        "feature": 4
                    }
                },
                "spatialFilter": None,
            }
        }