Example #1
def debug_index(repo, arg):
    """
    Use kart spatial-filter index --debug=OBJECT to learn more about how a particular object is being indexed.
    Usage:
        --debug=COMMIT:DATASET_PATH:FEATURE_OID
        --debug=COMMIT:DATASET_PATH:FEATURE_PRIMARY_KEY
        --debug=HEX_ENCODED_BINARY_ENVELOPE
        --debug=W,S,E,N  (4 floats)
    """
    from kart.promisor_utils import object_is_promised

    if ":" in arg:
        _debug_feature(repo, arg)
    elif "," in arg:
        _debug_envelope(arg)
    elif all(c in "0123456789abcdefABCDEF" for c in arg):
        try:
            _ = repo[arg]
        except KeyError as e:
            if object_is_promised(e):
                raise InvalidOperation("Can't index promised object")
            _debug_encoded_envelope(arg)
        else:
            _debug_feature(repo, arg)
    elif arg.startswith('b"') or arg.startswith("b'"):
        _debug_encoded_envelope(arg)
    else:
        raise click.UsageError(debug_index.__doc__)
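As a rough sketch of how the branches above classify the argument, the helper below mirrors the same dispatch order without touching a repo. The function and its sample inputs are hypothetical, not part of Kart:

def classify_debug_arg(arg):
    # Mirrors the dispatch order in debug_index, purely on the string form.
    if ":" in arg:
        return "feature reference (COMMIT:DATASET_PATH:FEATURE_OID or ...:PRIMARY_KEY)"
    elif "," in arg:
        return "envelope given as W,S,E,N floats"
    elif all(c in "0123456789abcdefABCDEF" for c in arg):
        return "hex string - a feature OID or a hex-encoded envelope"
    elif arg.startswith('b"') or arg.startswith("b'"):
        return "hex-encoded binary envelope written as a Python bytes literal"
    else:
        return "unrecognised"

assert classify_debug_arg("HEAD:my_dataset:123").startswith("feature reference")
assert classify_debug_arg("174.5,-41.3,174.8,-41.1").startswith("envelope")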
Example #2
def _convert_tile_to_copc_lfs_blob(source, dest):
    """
    Converts the LAS/LAZ file at source to a COPC.LAZ file at dest.
    Returns the SHA256 and length of the COPC.LAZ file.
    """
    # Note that this requires a cutting edge PDAL - e.g. 63eb89ab3f504e8af7259bf60c8158363c99b6e3.
    import pdal

    config = [
        {
            "type": "readers.las",
            "filename": str(source),
        },
        {
            "type": "writers.copc",
            "filename": str(dest),
            "forward": "all",
        },
    ]
    pipeline = pdal.Pipeline(json.dumps(config))
    try:
        pipeline.execute()
    except RuntimeError as e:
        raise InvalidOperation(f"Error converting {source}\n{e}",
                               exit_code=INVALID_FILE_FORMAT)
    assert dest.is_file()

    return get_hash_and_size_of_file(dest)
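For context, the same conversion as a standalone PDAL pipeline, with hypothetical input/output paths; it assumes a PDAL build recent enough to ship the writers.copc stage:

import json
import pdal

source = "input.laz"        # hypothetical input tile
dest = "output.copc.laz"    # hypothetical output path

pipeline = pdal.Pipeline(json.dumps([
    {"type": "readers.las", "filename": source},
    {"type": "writers.copc", "filename": dest, "forward": "all"},
]))
pipeline.execute()  # raises RuntimeError if PDAL cannot read or convert the file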
Example #3
    def check_valid_creation_location(cls, wc_location, repo):
        cls.check_valid_location(wc_location, repo)

        gpkg_path = (repo.workdir_path / wc_location).resolve()
        if gpkg_path.exists():
            desc = "path" if gpkg_path.is_dir() else "GPKG file"
            raise InvalidOperation(
                f"Error creating GPKG working copy at {wc_location} - {desc} already exists"
            )
Example #4
def _non_homogenous_error(attribute_name, detail):
    if not isinstance(detail, str):
        detail = " vs ".join(str(d) for d in detail)

    click.echo()  # Go to next line to get past the progress output.
    click.echo("Only the import of homogenous datasets is supported.",
               err=True)
    click.echo(f"The input files have more than one {attribute_name}:",
               err=True)
    click.echo(detail, err=True)
    raise InvalidOperation("Non-homogenous dataset supplied",
                           exit_code=INVALID_FILE_FORMAT)
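The detail argument accepts either a preformatted string or any iterable of values, which get joined with " vs ". Illustrative calls (each aborts with INVALID_FILE_FORMAT, so only one would ever run):

_non_homogenous_error("filetype", "LAS vs LAZ")    # detail already a string
_non_homogenous_error("version", ["1.2", "1.4"])   # joined to "1.2 vs 1.4"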
Example #5
def ensure_supported_repo_wide_version(version):
    from kart.cli import get_version

    if not MIN_SUPPORTED_VERSION <= version <= MAX_SUPPORTED_VERSION:
        message = (
            f"This Kart repo uses Datasets v{version}, "
            f"but Kart {get_version()} only supports Datasets {SUPPORTED_VERSION_DESC}.\n"
        )
        if MIN_RECOGNIZED_VERSION <= version < MIN_SUPPORTED_VERSION:
            message += "Use `kart upgrade SOURCE DEST` to upgrade this repo to the supported version."
        else:
            message += "Get the latest version of Kart to work with this repo."
        raise InvalidOperation(message, exit_code=UNSUPPORTED_VERSION)
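The version constants aren't shown above; with hypothetical values, the check separates three cases: supported, old but still recognised (upgradeable with `kart upgrade`), and everything else (too old to recognise, or newer than this Kart). A minimal sketch, assuming illustrative constant values:

# Hypothetical constants, for illustration only - not Kart's actual values.
MIN_RECOGNIZED_VERSION = 0
MIN_SUPPORTED_VERSION = 2
MAX_SUPPORTED_VERSION = 3

def classify_repo_version(version):
    if MIN_SUPPORTED_VERSION <= version <= MAX_SUPPORTED_VERSION:
        return "supported"
    if MIN_RECOGNIZED_VERSION <= version < MIN_SUPPORTED_VERSION:
        return "upgradeable via `kart upgrade SOURCE DEST`"
    return "unsupported - needs a newer Kart"

assert classify_repo_version(2) == "supported"
assert classify_repo_version(1).startswith("upgradeable")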
Example #6
    def get_geometry_transform(self, target_crs):
        """
        Find the transform to reproject this dataset into the target CRS.
        Returns None if the CRS for this dataset is unknown.
        """
        crs_definition = self.get_crs_definition()
        if crs_definition is None:
            return None
        try:
            src_crs = crs_util.make_crs(crs_definition)
            return osr.CoordinateTransformation(src_crs, target_crs)
        except RuntimeError as e:
            raise InvalidOperation(
                f"Can't reproject dataset {self.path!r} into target CRS: {e}")
Example #7
    def ensure_only_supported_capabilities(self):
        # TODO - loosen this restriction. A dataset with capabilities that we don't support should (at worst) be treated
        # the same as any other unsupported dataset.
        capabilities = self.get_meta_item("capabilities.json", missing_ok=True)
        if capabilities is not None:
            from .cli import get_version
            from .output_util import dump_json_output

            click.echo(
                f"The dataset at {self.path} requires the following capabilities which Kart {get_version()} does not support:",
                err=True,
            )
            dump_json_output(capabilities, sys.stderr)
            raise InvalidOperation(
                "Download the latest Kart to work with this dataset",
                exit_code=UNSUPPORTED_VERSION,
            )
Example #8
def _debug_feature(repo, arg):
    from kart.promisor_utils import object_is_promised

    parts = arg.split(":", maxsplit=2)
    if len(parts) < 3:
        raise click.UsageError(
            "--debug=FEATURE_OID is not supported - try --debug=COMMIT:DATASET_PATH:FEATURE_OID"
        )

    commit_id, ds_path, pk = parts
    commit_id = repo[commit_id].peel(pygit2.Commit).id.hex
    ds = repo.datasets(commit_id)[ds_path]

    try:
        _ = repo[pk]
    except KeyError as e:
        if object_is_promised(e):
            raise InvalidOperation("Can't index promised object")
        path = ds.encode_pks_to_path(ds.schema.sanitise_pks(pk), relative=True)
        feature_oid = ds.get_blob_at(path).id.hex
    else:
        # Actually this is a feature_oid
        feature_oid = pk

    trunc = _truncate_oid(repo)
    feature_desc = f"{commit_id[:trunc]}:{ds_path}:{feature_oid[:trunc]}"
    click.echo(f"Feature {feature_desc}")

    crs_helper = CrsHelper(repo)
    transforms = crs_helper.transforms_for_dataset_at_commit(
        ds_path, commit_id, verbose=True
    )

    geometry = get_geometry(repo, repo[feature_oid])
    envelope = _get_envelope_for_indexing_verbose(geometry, transforms, feature_oid)

    if envelope is not None:
        click.echo()
        click.echo(f"Final envelope: {envelope}")
        _debug_envelope(envelope)
Example #9
def feature_as_geojson(
    row,
    pk_value,
    ds_path=None,
    change_type=None,
    geometry_transform=None,
):
    """
    Turns a row into a dict representing a GeoJSON feature.
    """
    change_id = str(pk_value)
    if ds_path:
        change_id = f"{ds_path}:feature:{pk_value}:{change_type}"
    f = {
        "type": "Feature",
        "geometry": None,
        "properties": {},
        "id": change_id,
    }

    for k in row.keys():
        v = row[k]
        if isinstance(v, Geometry):
            g = v.to_ogr()
            if geometry_transform is not None:
                # reproject
                try:
                    g.Transform(geometry_transform)
                except RuntimeError as e:
                    raise InvalidOperation(
                        f"Can't reproject geometry at '{change_id}' into target CRS"
                    ) from e
            json_str = g.ExportToJson()
            f["geometry"] = json.loads(json_str) if json_str else None
        elif isinstance(v, bytes):
            f["properties"][k] = bytes.hex(v)
        else:
            f["properties"][k] = v

    return f
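For orientation, a feature with an integer primary key, one geometry column and one bytes column would come out roughly like this. The id string follows the ds_path branch above; all values (and the change_type suffix) are illustrative:

example_feature = {
    "type": "Feature",
    "id": "my_dataset:feature:42:U+",   # or just "42" when ds_path is not given
    "geometry": {"type": "Point", "coordinates": [174.78, -41.29]},
    "properties": {"name": "example", "thumbnail": "89504e47"},  # bytes rendered as hex
}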
Example #10
def feature_as_json(row, pk_value, geometry_transform=None):
    """
    Yields (key, value) pairs from a row, for serialization as JSON.
    The geometry is serialized as hexWKB.
    """
    for k, v in row.items():
        if isinstance(v, Geometry):
            if geometry_transform is None:
                v = v.to_hex_wkb()
            else:
                # reproject
                ogr_geom = v.to_ogr()
                try:
                    ogr_geom.Transform(geometry_transform)
                except RuntimeError as e:
                    raise InvalidOperation(
                        f"Can't reproject geometry with ID '{pk_value}' into target CRS"
                    ) from e
                v = ogr_to_hex_wkb(ogr_geom)
        elif isinstance(v, bytes):
            v = bytes.hex(v)
        yield k, v
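Since this is a generator of (key, value) pairs, a caller presumably wraps it in dict(). A small usage sketch with made-up row values and no geometry column, so no transform is needed:

row = {"fid": 1, "name": "example", "photo": b"\x89PNG"}
feature = dict(feature_as_json(row, pk_value=1))
# {'fid': 1, 'name': 'example', 'photo': '89504e47'}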
Example #11
def build_annotations(ctx, all_reachable):
    """
    Builds annotations against commits; stores the annotations in a sqlite database.

    If --all-reachable is not specified, commit hashes or refs should be supplied on stdin.
    """
    repo = ctx.obj.repo
    if all_reachable:
        click.echo("Enumerating reachable commits...")
        commits = list(gen_reachable_commits(repo))
    else:
        if sys.stdin.isatty():
            # don't just hang silently if a user typed this in an interactive shell without piping stdin
            click.echo("Reading commit hashes from stdin...")
        commits = list(
            repo.revparse_single(line.strip()).peel(pygit2.Commit)
            for line in sys.stdin
            if line.strip()
        )
    if commits:
        with annotations_session(repo) as session:
            if not is_db_writable(session):
                # not much point in this command if it can't write to the db
                raise InvalidOperation(
                    "Annotations database is readonly; can't continue"
                )
            click.echo("Building feature change counts...")
            for i, commit in enumerate(commits):
                click.echo(
                    f"({i+1}/{len(commits)}): {commit.short_id} {commit.message.splitlines()[0]}"
                )
                estimate_diff_feature_counts(
                    repo,
                    commit.parents[0] if commit.parents else repo.empty_tree,
                    commit,
                    accuracy="exact",
                )
    click.echo("done.")
Example #12
    def apply_meta_diff(self,
                        meta_diff,
                        object_builder,
                        *,
                        resolve_missing_values_from_ds=False):
        """Apply a meta diff to this dataset. Checks for conflicts."""
        if not meta_diff:
            return
        self._check_meta_diff_is_commitable(meta_diff)

        has_conflicts = False

        # Apply diff to hidden meta items folder: <dataset>/.table-dataset/meta/<item-name>
        with object_builder.chdir(f"{self.inner_path}/{self.META_PATH}"):
            has_conflicts |= self._apply_meta_deltas_to_tree(
                (d for d in meta_diff.values()
                 if d.key not in self.ATTACHMENT_META_ITEMS),
                object_builder,
                self.meta_tree if self.inner_tree is not None else None,
                resolve_missing_values_from_ds=resolve_missing_values_from_ds,
            )

        # Apply diff to visible attachment meta items: <dataset>/<item-name>
        with object_builder.chdir(self.path):
            has_conflicts |= self._apply_meta_deltas_to_tree(
                (d for d in meta_diff.values()
                 if d.key in self.ATTACHMENT_META_ITEMS),
                object_builder,
                self.attachment_tree,
                resolve_missing_values_from_ds=resolve_missing_values_from_ds,
            )

        if has_conflicts:
            raise InvalidOperation(
                "Patch does not apply",
                exit_code=PATCH_DOES_NOT_APPLY,
            )
Example #13
def upgrade(ctx, source, dest, in_place):
    """
    Upgrade a repository from an earlier version of Kart to be compatible with the latest version.
    The current repository structure of Kart is known as Datasets V2, which is used from Kart 0.5 onwards.

    Usage:
    kart upgrade SOURCE DEST
    """
    source = Path(source)
    dest = Path(dest)

    if in_place:
        dest = source

    if not in_place and dest.exists() and any(dest.iterdir()):
        raise InvalidOperation(f'"{dest}" isn\'t empty', param_hint="DEST")

    try:
        source_repo = KartRepo(source)
    except NotFound:
        raise click.BadParameter(
            f"'{source}': not an existing Kart repository",
            param_hint="SOURCE")

    source_version = source_repo.table_dataset_version
    if source_version == DEFAULT_NEW_REPO_VERSION:
        raise InvalidOperation(
            f"Cannot upgrade: source repository is already at latest known version (Datasets V{source_version})"
        )

    if source_version > DEFAULT_NEW_REPO_VERSION:
        # Repo is too advanced for this version of Kart to understand; we can't upgrade it.
        # This prints a good error message explaining the whole situation.
        source_repo.ensure_supported_version()

    source_dataset_class = dataset_class_for_legacy_version(
        source_version, in_place)

    if not source_dataset_class:
        raise InvalidOperation(
            f"Unrecognised source repository version: {source_version}")

    # action!
    if in_place:
        dest_repo = ForceLatestVersionRepo(dest)
    else:
        click.secho(f"Initialising {dest} ...", bold=True)
        dest.mkdir()
        dest_repo = KartRepo.init_repository(dest,
                                             wc_location=None,
                                             bare=source_repo.is_bare)

    # walk _all_ references
    source_walker = source_repo.walk(
        None, pygit2.GIT_SORT_TOPOLOGICAL | pygit2.GIT_SORT_REVERSE)
    for ref in source_repo.listall_reference_objects():
        source_walker.push(ref.resolve().target)

    commit_map = {}

    click.secho("\nWriting new commits ...", bold=True)
    i = -1
    for i, source_commit in enumerate(source_walker):
        dest_parents = []
        for parent_id in source_commit.parent_ids:
            try:
                dest_parents.append(commit_map[parent_id.hex])
            except KeyError:
                raise ValueError(
                    f"Commit {i} ({source_commit.id}): Haven't seen parent ({parent_id})"
                )

        _upgrade_commit(
            ctx,
            i,
            source_repo,
            source_commit,
            source_dataset_class,
            dest_parents,
            dest_repo,
            commit_map,
        )

    click.echo(f"{i+1} commits processed.")

    click.secho("\nUpdating references ...", bold=True)
    for ref in source_repo.listall_reference_objects():
        if ref.type == pygit2.GIT_REF_OID:
            # real references
            target = commit_map[ref.target.hex]
            dest_repo.references.create(ref.name, target, True)  # overwrite
            click.echo(f"  {ref.name} ({ref.target.hex[:8]} → {target[:8]})")

    for ref in source_repo.listall_reference_objects():
        if ref.type == pygit2.GIT_REF_SYMBOLIC:
            dest_repo.references.create(ref.name, ref.target)
            click.echo(f"  {ref.name} → {ref.target}")

    if i >= 0:
        if source_repo.head_is_detached:
            dest_repo.set_head(
                pygit2.Oid(hex=commit_map[source_repo.head.target.hex]))
        else:
            dest_repo.set_head(source_repo.head.name)

        click.secho("\nCompacting repository ...", bold=True)
        if in_place:
            # old reflogs will refer to old objects, which prevents them from getting gc'd.
            # so we clear out the old reflogs here.
            # this *does* mean you can't go back, hence the 'irreversible' in the --in-place help.
            dest_repo.invoke_git("reflog", "expire",
                                 "--expire-unreachable=now", "--all")

        dest_repo.gc("--prune=now")

    if source_repo.workingcopy_location:
        click.secho("\nCreating working copy ...", bold=True)
        subctx = click.Context(ctx.command, parent=ctx)
        subctx.ensure_object(context.Context)
        subctx.obj.user_repo_path = str(dest)
        subctx.invoke(checkout.create_workingcopy)

    if in_place:
        dest_repo.config[KartConfigKeys.KART_REPOSTRUCTURE_VERSION] = str(
            DEFAULT_NEW_REPO_VERSION)

    click.secho("\nUpgrade complete", fg="green", bold=True)
Example #14
    def apply_feature_diff(
        self,
        feature_diff,
        object_builder,
        *,
        schema=None,
        resolve_missing_values_from_ds=None,
    ):
        """Applies a feature diff."""
        if not feature_diff:
            return

        schema_changed_since_patch = False
        if resolve_missing_values_from_ds is not None:
            schema_changed_since_patch = (resolve_missing_values_from_ds.schema
                                          != self.schema)

        with object_builder.chdir(self.inner_path):
            # Applying diffs works even if there is no tree yet created for the dataset,
            # as is the case when the dataset is first being created right now.
            tree = self.inner_tree or ()

            encode_kwargs = {}
            if schema is not None:
                encode_kwargs = {"schema": schema}

            has_conflicts = False
            for delta in feature_diff.values():
                old_key = delta.old_key
                new_key = delta.new_key
                old_path = (self.encode_1pk_to_path(old_key, relative=True)
                            if old_key is not None else None)
                new_path = (self.encode_1pk_to_path(new_key, relative=True)
                            if new_key is not None else None)

                # Conflict detection
                if delta.type == "delete" and old_path not in tree:
                    has_conflicts = True
                    click.echo(
                        f"{self.path}: Trying to delete nonexistent feature: {old_key}",
                        err=True,
                    )
                    continue

                if delta.type == "insert" and new_path in tree:
                    if self.check_feature_insertion_for_conflicts(
                            delta,
                            new_path=new_path,
                            schema_changed_since_patch=schema_changed_since_patch,
                            resolve_missing_values_from_ds=resolve_missing_values_from_ds,
                    ):
                        has_conflicts = True
                        continue

                if delta.type == "update" and old_path not in tree:
                    has_conflicts = True
                    click.echo(
                        f"{self.path}: Trying to update nonexistent feature: {old_key}",
                        err=True,
                    )
                    continue

                if (delta.type == "update"
                        and self.get_feature(old_key) != delta.old_value):
                    has_conflicts = True
                    click.echo(
                        f"{self.path}: Trying to update already-changed feature: {old_key}",
                        err=True,
                    )
                    continue

                # Actually write the feature diff:
                if old_path and old_path != new_path:
                    object_builder.remove(old_path)
                if delta.new_value:
                    path, data = self.encode_feature(delta.new.value,
                                                     relative=True,
                                                     **encode_kwargs)
                    object_builder.insert(path, data)

            if has_conflicts:
                raise InvalidOperation(
                    "Patch does not apply",
                    exit_code=PATCH_DOES_NOT_APPLY,
                )
Example #15
def point_cloud_import(ctx, convert_to_copc, ds_path, sources):
    """
    Experimental command for importing point cloud datasets. Work-in-progress.
    Will eventually be merged with the main `import` command.

    SOURCES should be one or more LAZ or LAS files (or wildcards that match multiple LAZ or LAS files).
    """
    import pdal

    repo = ctx.obj.repo

    # TODO - improve path validation to make sure datasets of any type don't collide with each other
    # or with attachments.
    validate_dataset_paths([ds_path])

    for source in sources:
        if not (Path() / source).is_file():
            raise NotFound(f"No data found at {source}",
                           exit_code=NO_IMPORT_SOURCE)

    compressed_set = ListBasedSet()
    version_set = ListBasedSet()
    copc_version_set = ListBasedSet()
    pdrf_set = ListBasedSet()
    pdr_length_set = ListBasedSet()
    crs_set = ListBasedSet()
    transform = None
    schema = None
    crs_name = None

    per_source_info = {}

    for source in sources:
        click.echo(f"Checking {source}...          \r", nl=False)
        config = [{
            "type": "readers.las",
            "filename": source,
            "count": 0,  # Don't read any individual points.
        }]
        if schema is None:
            config.append({"type": "filters.info"})

        pipeline = pdal.Pipeline(json.dumps(config))
        try:
            pipeline.execute()
        except RuntimeError:
            raise InvalidOperation(f"Error reading {source}",
                                   exit_code=INVALID_FILE_FORMAT)

        metadata = _unwrap_metadata(pipeline.metadata)

        info = metadata["readers.las"]

        compressed_set.add(info["compressed"])
        if len(compressed_set) > 1:
            raise _non_homogenous_error("filetype", "LAS vs LAZ")

        version = f"{info['major_version']}.{info['minor_version']}"
        version_set.add(version)
        if len(version_set) > 1:
            raise _non_homogenous_error("version", version_set)

        copc_version_set.add(get_copc_version(info))
        if len(copc_version_set) > 1:
            raise _non_homogenous_error("COPC version", copc_version_set)

        pdrf_set.add(info["dataformat_id"])
        if len(pdrf_set) > 1:
            raise _non_homogenous_error("Point Data Record Format", pdrf_set)

        pdr_length_set.add(info["point_length"])
        if len(pdr_length_set) > 1:
            raise _non_homogenous_error("Point Data Record Length",
                                        pdr_length_set)

        crs_set.add(info["srs"]["wkt"])
        if len(crs_set) > 1:
            raise _non_homogenous_error(
                "CRS",
                "\n vs \n".join((format_wkt_for_output(wkt, sys.stderr)
                                 for wkt in crs_set)),
            )

        if transform is None:
            transform = _make_transform_to_crs84(crs_set.only())

        native_envelope = get_native_envelope(info)
        crs84_envelope = _transform_3d_envelope(transform, native_envelope)
        per_source_info[source] = {
            "count": info["count"],
            "native_envelope": native_envelope,
            "crs84_envelope": crs84_envelope,
        }

        if schema is None:
            crs_name = get_identifier_str(crs_set.only())
            schema = metadata["filters.info"]["schema"]
            schema["CRS"] = crs_name

    click.echo()

    version = version_set.only()
    copc_version = copc_version_set.only()
    is_laz = compressed_set.only() is True
    is_copc = is_laz and copc_version != NOT_COPC

    if is_copc:
        # Keep native format.
        import_func = get_hash_and_size_of_file_while_copying
        kart_format = f"pc:v1/copc-{copc_version}.0"
    elif is_laz:
        # Convert to COPC 1.0 if requested; otherwise keep the original LAZ format.
        import_func = (_convert_tile_to_copc_lfs_blob if convert_to_copc else
                       get_hash_and_size_of_file_while_copying)
        kart_format = "pc:v1/copc-1.0" if convert_to_copc else f"pc:v1/laz-{version}"
    else:  # LAS
        if not convert_to_copc:
            raise InvalidOperation(
                "LAS datasets are not supported - dataset must be converted to LAZ / COPC",
                exit_code=INVALID_FILE_FORMAT,
            )
        import_func = _convert_tile_to_copc_lfs_blob
        kart_format = "pc:v1/copc-1.0"

    import_ext = ".copc.laz" if "copc" in kart_format else ".laz"

    # Set up LFS hooks.
    # TODO: This could eventually be moved to `kart init`.
    if not (repo.gitdir_path / "hooks" / "pre-push").is_file():
        subprocess.check_call(
            ["git", "-C",
             str(repo.gitdir_path), "lfs", "install", "hooks"])

    # We still need to write .kart.repostructure.version unfortunately, even though it's only relevant to tabular datasets.
    assert repo.table_dataset_version in SUPPORTED_VERSIONS
    extra_blobs = (extra_blobs_for_version(repo.table_dataset_version)
                   if not repo.head_commit else [])

    header = generate_header(
        repo,
        None,
        f"Importing {len(sources)} LAZ tiles as {ds_path}",
        repo.head_branch,
        repo.head_commit,
    )

    ds_inner_path = f"{ds_path}/.point-cloud-dataset.v1"

    lfs_tmp_path = repo.gitdir_path / "lfs" / "objects" / "tmp"
    lfs_tmp_path.mkdir(parents=True, exist_ok=True)

    with git_fast_import(repo,
                         *FastImportSettings().as_args(), "--quiet") as proc:
        proc.stdin.write(header.encode("utf8"))

        for i, blob_path in write_blobs_to_stream(proc.stdin, extra_blobs):
            pass

        for source in sources:
            click.echo(f"Importing {source}...")

            tmp_object_path = lfs_tmp_path / str(uuid.uuid4())
            oid, size = import_func(source, tmp_object_path)
            actual_object_path = get_local_path_from_lfs_hash(repo, oid)
            actual_object_path.parents[0].mkdir(parents=True, exist_ok=True)
            tmp_object_path.rename(actual_object_path)

            # TODO - is this the right prefix and name?
            tilename = os.path.splitext(
                os.path.basename(source))[0] + import_ext
            tile_prefix = hexhash(tilename)[0:2]
            blob_path = f"{ds_inner_path}/tile/{tile_prefix}/{tilename}"
            info = per_source_info[source]
            pointer_dict = {
                "version": "https://git-lfs.github.com/spec/v1",
                # TODO - available.<URL-IDX> <URL>
                "kart.extent.crs84": _format_array(info["crs84_envelope"]),
                "kart.extent.native": _format_array(info["native_envelope"]),
                "kart.format": kart_format,
                "kart.pc.count": info["count"],
                "oid": f"sha256:{oid}",
                "size": size,
            }
            write_blob_to_stream(proc.stdin, blob_path,
                                 dict_to_pointer_file_bytes(pointer_dict))

        write_blob_to_stream(proc.stdin, f"{ds_inner_path}/meta/schema.json",
                             json_pack(schema))
        write_blob_to_stream(
            proc.stdin,
            f"{ds_inner_path}/meta/crs/{crs_name}.wkt",
            ensure_bytes(normalise_wkt(crs_set.only())),
        )

    click.echo("Updating working copy...")
    reset_wc_if_needed(repo)

    # TODO - fix up reset code - there should be a single function you can call that updates all working copies.
    tabular_wc = repo.get_working_copy(allow_uncreated=True)
    if tabular_wc is not None:
        tabular_wc.reset(repo.head_commit)
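The pointer blobs written above follow the Git LFS pointer-file convention of one "key value" pair per line, with the version line first. A rough sketch of that serialisation is below; dict_to_pointer_file_bytes is Kart's own helper, so its exact output may differ:

def pointer_dict_to_bytes(pointer_dict):
    # Git LFS pointer files put the version line first, then the remaining
    # keys in sorted order, each as "key value" on its own line.
    lines = [f"version {pointer_dict['version']}"]
    for key in sorted(k for k in pointer_dict if k != "version"):
        lines.append(f"{key} {pointer_dict[key]}")
    return ("\n".join(lines) + "\n").encode("utf8")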