Beispiel #1
0
def test_import_table_meta_overrides(data_archive_readonly, tmp_path,
                                     cli_runner, chdir, geopackage):
    """Import with --table-info overrides and verify the overridden title,
    description and XML metadata end up in the GPKG working copy.
    """
    with data_archive_readonly("gpkg-au-census") as data:
        repo_path = tmp_path / 'emptydir'
        r = cli_runner.invoke(["init", repo_path])
        assert r.exit_code == 0
        with chdir(repo_path):
            # An empty (but namespaced) ISO 19115 metadata element to round-trip.
            original_xml_metadata = "<gmd:MD_Metadata xmlns:gco=\"http://www.isotc211.org/2005/gco\" xmlns:gmd=\"http://www.isotc211.org/2005/gmd\" xmlns:gml=\"http://www.opengis.net/gml\" xmlns:gts=\"http://www.isotc211.org/2005/gts\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" xmlns=\"http://www.isotc211.org/2005/gmd\" />"
            table_info_json = json.dumps({
                'census2016_sdhca_ot_ced_short': {
                    'title': 'test title',
                    'description': 'test description',
                    'xmlMetadata': original_xml_metadata,
                }
            })
            r = cli_runner.invoke([
                "import",
                data / "census2016_sdhca_ot_short.gpkg",
                "census2016_sdhca_ot_ced_short",
                "--table-info",
                table_info_json,
            ])
            assert r.exit_code == 0, r

            # Fix: the checkout result used to be discarded; a failed checkout
            # then surfaced as confusing DB assertion errors further down.
            r = cli_runner.invoke(["checkout"])
            assert r.exit_code == 0, r

            repo = pygit2.Repository(str(repo_path))
            wc = WorkingCopy.open(repo)
            db = geopackage(wc.path)
            cur = db.cursor()
            # gpkg_contents carries the title (as 'identifier') and description.
            title, description = cur.execute("""
                SELECT c.identifier, c.description
                FROM gpkg_contents c
                WHERE c.table_name = 'census2016_sdhca_ot_ced_short'
                """).fetchone()
            # The identifier is prefixed with the table name on checkout.
            assert title == 'census2016_sdhca_ot_ced_short: test title'
            assert description == 'test description'

            # The XML metadata is linked to the table via gpkg_metadata_reference.
            xml_metadata = cur.execute("""
                SELECT m.metadata
                FROM gpkg_metadata m JOIN gpkg_metadata_reference r
                ON m.id = r.md_file_id
                WHERE r.table_name = 'census2016_sdhca_ot_ced_short'
                """).fetchone()[0]
            assert xml_metadata == original_xml_metadata
Beispiel #2
0
def test_checkout_workingcopy(
    version, archive, table, commit_sha, data_archive, tmp_path, cli_runner, geopackage
):
    """ Check out a working copy for editing and verify it matches HEAD """
    if version == "2.0":
        archive += "2"

    with data_archive(archive) as repo_path:
        H.clear_working_copy()

        wc_path = tmp_path / f"{table}.gpkg"
        r = cli_runner.invoke(["checkout", f"--path={wc_path}"])
        assert r.exit_code == 0, r
        output_lines = r.stdout.splitlines()
        # TODO: restore these
        # assert re.match(fr"Checkout HEAD to .+ as GPKG \.\.\.$", output_lines[0])
        # assert re.match(fr"Commit: {commit_sha}$", output_lines[1])

        # The GPKG file exists and contains the table's features.
        assert wc_path.exists()
        db = geopackage(wc_path)
        assert H.row_count(db, table) > 0

        repo = pygit2.Repository(str(repo_path))
        assert repo.is_bare
        assert repo.head.name == "refs/heads/master"
        assert repo.head.shorthand == "master"

        head_tree = repo.head.peel(pygit2.Tree)

        # The working copy records the tree it was checked out from.
        cursor = db.cursor()
        cursor.execute(
            """SELECT value FROM ".sno-meta" WHERE table_name='*' AND key='tree';"""
        )
        recorded_tree_id = cursor.fetchone()[0]
        assert recorded_tree_id == head_tree.hex

        working_copy = WorkingCopy.open(repo)
        assert working_copy.assert_db_tree_match(head_tree)
Beispiel #3
0
def test_import_existing_wc(
    data_archive,
    data_working_copy,
    geopackage,
    cli_runner,
    insert,
    tmp_path,
    request,
    chdir,
):
    """ Import a new dataset into a repo with an existing working copy. Dataset should get checked out """
    with data_working_copy("points") as (repo_path, wcdb):
        # First import: a new layer into a repo whose working copy is clean.
        with data_archive("gpkg-polygons") as source_path, chdir(repo_path):
            r = cli_runner.invoke([
                "import",
                f"GPKG:{source_path / 'nz-waca-adjustments.gpkg'}",
                H.POLYGONS.LAYER,
            ])
            assert r.exit_code == 0, r

        repo = pygit2.Repository(str(repo_path))
        wc = WorkingCopy.open(repo)
        db = geopackage(wcdb)

        # The newly imported layer should have been checked out automatically.
        assert H.row_count(db, "nz_waca_adjustments") > 0

        head_tree = repo.head.peel(pygit2.Tree)
        with db:
            dbcur = db.cursor()
            # '.sno-meta' records which tree the working copy was built from;
            # after the import it must match the new HEAD.
            dbcur.execute(
                """SELECT value FROM ".sno-meta" WHERE table_name='*' AND key='tree';"""
            )
            wc_tree_id = dbcur.fetchone()[0]
        assert wc_tree_id == head_tree.hex
        assert wc.assert_db_tree_match(head_tree)

        # Importing into a clean working copy must leave it clean.
        r = cli_runner.invoke(["status"])
        assert r.exit_code == 0, r
        assert r.stdout.splitlines(
        )[-1] == "Nothing to commit, working copy clean"

        # Dirty the working copy: delete 10 features from the imported layer.
        with db:
            dbcur = db.cursor()
            dbcur.execute(
                "DELETE FROM nz_waca_adjustments WHERE rowid IN (SELECT rowid FROM nz_waca_adjustments ORDER BY id LIMIT 10);"
            )
            dbcur.execute("SELECT changes()")
            assert dbcur.fetchone()[0] == 10

        # Second import: same source under a new layer name ('waca2'),
        # this time into the now-dirty working copy.
        with data_archive("gpkg-polygons") as source_path, chdir(repo_path):
            r = cli_runner.invoke([
                "import",
                f"GPKG:{source_path / 'nz-waca-adjustments.gpkg'}",
                f"{H.POLYGONS.LAYER}:waca2",
            ])
            assert r.exit_code == 0, r

        assert H.row_count(db, "waca2") > 0

        # The working copy tree marker must again match the new HEAD.
        head_tree = repo.head.peel(pygit2.Tree)
        with db:
            dbcur = db.cursor()
            dbcur.execute(
                """SELECT value FROM ".sno-meta" WHERE table_name='*' AND key='tree';"""
            )
            wc_tree_id = dbcur.fetchone()[0]
        assert wc_tree_id == head_tree.hex
        assert wc.assert_db_tree_match(head_tree)

        # The user's pre-existing edits must survive the second import.
        r = cli_runner.invoke(["status"])
        assert r.exit_code == 0, r
        assert r.stdout.splitlines()[-2:] == [
            "  nz_waca_adjustments/",
            "    deleted:   10 features",
        ]
Beispiel #4
0
def test_init_import_table_ogr_types(data_archive_readonly, tmp_path,
                                     cli_runner):
    """Importing the 'types' fixture maps each OGR column type onto the
    expected declared SQLite/GPKG column type.
    """
    with data_archive_readonly("types") as data:
        repo_path = tmp_path / "repo"
        r = cli_runner.invoke(
            ["init", "--import", data / "types.gpkg",
             str(repo_path)], )
        assert r.exit_code == 0, r

        # There's a bunch of wacky types in here, let's check them
        repo = pygit2.Repository(str(repo_path))
        wc = WorkingCopy.open(repo)
        with wc.session() as db:
            table_info = [
                dict(row)
                for row in db.cursor().execute("PRAGMA table_info('types');")
            ]

        # (name, declared type, notnull, pk) per column, in cid order.
        expected_columns = [
            ('fid', 'INTEGER', 1, 1),
            ('int16', 'SMALLINT', 0, 0),
            ('int32', 'MEDIUMINT', 0, 0),
            ('int64', 'INTEGER', 0, 0),
            ('boolean', 'BOOLEAN', 0, 0),
            ('double', 'REAL', 0, 0),
            ('float32', 'REAL', 0, 0),
            ('string', 'TEXT', 0, 0),
            ('blob', 'BLOB', 0, 0),
            ('date', 'DATE', 0, 0),
            ('datetime', 'DATETIME', 0, 0),
            ('time', 'TEXT', 0, 0),
        ]
        assert table_info == [
            {
                'cid': cid,
                'name': name,
                'type': col_type,
                'notnull': notnull,
                'dflt_value': None,
                'pk': pk,
            }
            for cid, (name, col_type, notnull, pk) in enumerate(expected_columns)
        ]