def write_config(
    self,
    wc_location=None,
    bare=False,
    spatial_filter_spec=None,
    table_dataset_version=None,
):
    # Whichever of these config keys is written controls whether this repo is Kart-branded or not.
    version_key = KartConfigKeys.BRANDED_REPOSTRUCTURE_VERSION_KEYS[
        self.BRANDING_FOR_NEW_REPOS
    ]
    if table_dataset_version is None:
        table_dataset_version = self.table_dataset_version
    self.config[version_key] = str(table_dataset_version)

    # Bare-style Kart repos are always implemented as bare git repos:
    if self.is_bare_style:
        self.config["core.bare"] = True
    # Force writing to reflogs:
    self.config["core.logAllRefUpdates"] = "always"

    # Write working copy config:
    from kart.working_copy.base import BaseWorkingCopy

    BaseWorkingCopy.write_config(self, wc_location, bare)
    if spatial_filter_spec:
        spatial_filter_spec.write_config(self)
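# Illustrative sketch (not part of the original source): after write_config() runs on a
# non-bare, Kart-branded repo, the repo config ends up with entries along these lines.
# The key names come from KartConfigKeys / BaseWorkingCopy; the values shown (version
# number, GPKG path) are hypothetical examples only:
#
#   kart.repostructure.version = <table dataset version, e.g. "3">
#   core.logAllRefUpdates = always
#   kart.workingcopy.location = example.gpkg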
def get_working_copy(
    self,
    allow_uncreated=False,
    allow_invalid_state=False,
    allow_unconnectable=False,
):
    from kart.working_copy.base import BaseWorkingCopy

    return BaseWorkingCopy.get(
        self,
        allow_uncreated=allow_uncreated,
        allow_invalid_state=allow_invalid_state,
        allow_unconnectable=allow_unconnectable,
    )
def checkout(
    ctx, new_branch, force, discard_changes, do_guess, spatial_filter_spec, refish
):
    """
    Switch branches or restore working tree files
    """
    repo = ctx.obj.repo

    # refish could be:
    # - branch name
    # - tag name
    # - remote branch
    # - HEAD
    # - HEAD~1/etc
    # - 'c0ffee' commit ref
    # - 'refs/tags/1.2.3' some other refspec

    try:
        if refish:
            resolved = CommitWithReference.resolve(repo, refish)
        else:
            resolved = CommitWithReference.resolve(repo, "HEAD")
    except NotFound:
        # Guess that the user wants to create a new local branch to track a remote branch:
        remote_branch = (
            _find_remote_branch_by_name(repo, refish) if do_guess and refish else None
        )
        if remote_branch:
            new_branch = refish
            refish = remote_branch.shorthand
            resolved = CommitWithReference.resolve(repo, refish)
        else:
            raise

    commit = resolved.commit
    head_ref = resolved.reference.name if resolved.reference else commit.id

    do_switch_commit = repo.head_commit != commit
    do_switch_spatial_filter = False
    do_refetch = False
    promisor_remote = None
    if spatial_filter_spec is not None:
        resolved_spatial_filter_spec = spatial_filter_spec.resolve(repo)
        do_switch_spatial_filter = not resolved_spatial_filter_spec.matches_working_copy(
            repo
        )
        fetched_envelope = get_partial_clone_envelope(repo)
        do_refetch = (
            fetched_envelope
            and not resolved_spatial_filter_spec.is_within_envelope(fetched_envelope)
        )

    force = force or discard_changes
    if (do_switch_commit or do_switch_spatial_filter) and not force:
        ctx.obj.check_not_dirty(help_message=_DISCARD_CHANGES_HELP_MESSAGE)

    if new_branch and new_branch in repo.branches:
        raise click.BadParameter(
            f"A branch named '{new_branch}' already exists.", param_hint="branch"
        )

    # Finished pre-flight checks - start action:

    if do_refetch:
        from .promisor_utils import get_promisor_remote

        spec = resolved_spatial_filter_spec.partial_clone_filter_spec()
        spec_desc = (
            f"git spatial filter extension {spec}" if "spatial" in spec else spec
        )
        click.echo(
            f"Fetching missing but required features for new spatial filter using {spec_desc}"
        )
        promisor_remote = get_promisor_remote(repo)
        repo.invoke_git("fetch", promisor_remote, "--refetch", spec)

    if new_branch:
        if _is_in_branches(refish, repo.branches.remote):
            click.echo(f"Creating new branch '{new_branch}' to track '{refish}'...")
            new_branch = repo.create_branch(new_branch, commit, force)
            new_branch.upstream = repo.branches.remote[refish]
        elif refish:
            click.echo(f"Creating new branch '{new_branch}' from '{refish}'...")
            new_branch = repo.create_branch(new_branch, commit, force)
        else:
            click.echo(f"Creating new branch '{new_branch}'...")
            new_branch = repo.create_branch(new_branch, commit, force)

        head_ref = new_branch.name

    from kart.working_copy.base import BaseWorkingCopy

    if spatial_filter_spec is not None:
        spatial_filter_spec.write_config(repo, update_remote=promisor_remote)

    BaseWorkingCopy.ensure_config_exists(repo)
    reset_wc_if_needed(repo, commit, discard_changes=discard_changes)

    repo.set_head(head_ref)
def create_workingcopy(ctx, delete_existing, discard_changes, new_wc_loc):
    """
    Create a new working copy - if one already exists it will be deleted.

    Usage: kart create-workingcopy [LOCATION]
    LOCATION should be one of the following:
    - PATH.gpkg for a GPKG file.
    - postgresql://HOST/DBNAME/DBSCHEMA for a PostGIS database.
    - mssql://HOST/DBNAME/DBSCHEMA for a SQL Server database.

    If no location is supplied, the location from the repo config at "kart.workingcopy.location" will be used.
    If no location is configured, a GPKG working copy will be created with a default name based on the repository name.
    """
    from kart.working_copy.base import BaseWorkingCopy

    repo = ctx.obj.repo
    if repo.head_is_unborn:
        raise InvalidOperation(
            "Can't create a working copy for an empty repository — first import some data with `kart import`"
        )

    old_wc_loc = repo.workingcopy_location
    if not new_wc_loc and old_wc_loc is not None:
        new_wc_loc = old_wc_loc
    elif not new_wc_loc:
        new_wc_loc = BaseWorkingCopy.default_location(repo)

    if new_wc_loc != old_wc_loc:
        BaseWorkingCopy.check_valid_creation_location(new_wc_loc, repo)

    if BaseWorkingCopy.clearly_doesnt_exist(old_wc_loc, repo):
        old_wc_loc = None

    if old_wc_loc:
        old_wc = BaseWorkingCopy.get_at_location(
            repo,
            old_wc_loc,
            allow_uncreated=True,
            allow_invalid_state=True,
            allow_unconnectable=True,
        )

        if delete_existing is None:
            if get_input_mode() is not InputMode.INTERACTIVE:
                if old_wc_loc == new_wc_loc:
                    help_message = (
                        "Specify --delete-existing to delete and recreate it."
                    )
                else:
                    help_message = "Either delete it with --delete-existing, or just abandon it with --no-delete-existing."
                raise click.UsageError(
                    f"A working copy is already configured at {old_wc}\n{help_message}"
                )

            click.echo(f"A working copy is already configured at {old_wc}")
            delete_existing = click.confirm(
                "Delete the existing working copy before creating a new one?",
                default=True,
            )

        check_if_dirty = not discard_changes

        if delete_existing is False:
            allow_unconnectable = old_wc_loc != new_wc_loc
            status = old_wc.status(
                allow_unconnectable=allow_unconnectable, check_if_dirty=check_if_dirty
            )
            if old_wc_loc == new_wc_loc and status & WorkingCopyStatus.WC_EXISTS:
                raise InvalidOperation(
                    f"Cannot recreate working copy at same location {old_wc} if --no-delete-existing is set."
                )
            if not discard_changes and (status & WorkingCopyStatus.DIRTY):
                raise InvalidOperation(
                    f"You have uncommitted changes at {old_wc}.\n"
                    + _DISCARD_CHANGES_HELP_MESSAGE
                )

        if delete_existing is True:
            try:
                status = old_wc.status(check_if_dirty=check_if_dirty)
            except DbConnectionError as e:
                click.echo(
                    f"Encountered an error while trying to delete existing working copy at {old_wc}"
                )
                click.echo(
                    "To simply abandon the existing working copy, use --no-delete-existing."
                )
                raise e

            if not discard_changes and (status & WorkingCopyStatus.DIRTY):
                raise InvalidOperation(
                    f"You have uncommitted changes at {old_wc}.\n"
                    + _DISCARD_CHANGES_HELP_MESSAGE
                )

            if status & WorkingCopyStatus.WC_EXISTS:
                click.echo(f"Deleting existing working copy at {old_wc}")
                keep_db_schema_if_possible = old_wc_loc == new_wc_loc
                old_wc.delete(keep_db_schema_if_possible=keep_db_schema_if_possible)

    BaseWorkingCopy.write_config(repo, new_wc_loc)
    reset_wc_if_needed(repo, repo.head_commit)

    # This command is used in tests and by other commands, so we have to be extra careful to
    # tidy up properly - otherwise, tests can fail (on Windows especially) due to PermissionError.
    repo.free()
    del repo
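# Illustrative usage sketch (not part of the original source): the LOCATION forms
# accepted by create-workingcopy, as described in the docstring above. The file and
# connection names are hypothetical examples:
#
#   kart create-workingcopy                                     # reuse configured or default location
#   kart create-workingcopy my-data.gpkg                        # GPKG file
#   kart create-workingcopy postgresql://HOST/DBNAME/DBSCHEMA   # PostGIS schema
#   kart create-workingcopy mssql://HOST/DBNAME/DBSCHEMA        # SQL Server schema
#   kart create-workingcopy --delete-existing new.gpkg          # replace an existing working copy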
def clone(
    ctx,
    bare,
    do_checkout,
    wc_location,
    do_progress,
    depth,
    filterspec,
    branch,
    spatial_filter_spec,
    spatial_filter_after_clone,
    url,
    directory,
):
    """
    Clone a repository into a new directory
    """
    repo_path = Path(directory or get_directory_from_url(url, is_bare=bare)).resolve()

    if repo_path.exists() and any(repo_path.iterdir()):
        raise InvalidOperation(f'"{repo_path}" isn\'t empty', param_hint="directory")

    from kart.working_copy.base import BaseWorkingCopy

    BaseWorkingCopy.check_valid_creation_location(wc_location, PotentialRepo(repo_path))

    if not repo_path.exists():
        repo_path.mkdir(parents=True)

    args = ["--progress" if do_progress else "--quiet"]
    if depth is not None:
        args.append(f"--depth={depth}")
    if branch is not None:
        args.append(f"--branch={branch}")
    if filterspec is not None:
        # git itself does reasonable validation of this, so we don't bother here
        # e.g. "fatal: invalid filter-spec 'hello'"
        # for the various forms it can take, see
        # https://git-scm.com/docs/git-rev-list#Documentation/git-rev-list.txt---filterltfilter-specgt
        args.append(f"--filter={filterspec}")

    repo = KartRepo.clone_repository(
        url,
        repo_path,
        args,
        wc_location,
        bare,
        spatial_filter_spec=spatial_filter_spec,
        spatial_filter_after_clone=spatial_filter_after_clone,
    )

    # Create working copy, if needed.
    head_commit = repo.head_commit
    if head_commit is not None and do_checkout and not bare:
        checkout.reset_wc_if_needed(repo, head_commit)

    # Experimental point-cloud datasets:
    if os.environ.get("X_KART_POINT_CLOUDS"):
        lfs_override = os.environ.get("X_KART_SET_LFS_FOR_NEW_REPOS")
        if lfs_override:
            repo.config["lfs.url"] = lfs_override
        repo.invoke_git("lfs", "fetch")

        from kart.point_cloud.checkout import reset_wc_if_needed

        reset_wc_if_needed(repo)
def init(
    ctx,
    message,
    directory,
    import_from,
    do_checkout,
    bare,
    initial_branch,
    wc_location,
    max_delta_depth,
    num_processes,
    spatial_filter_spec,
):
    """
    Initialise a new repository and optionally import data.
    DIRECTORY must be empty. Defaults to the current directory.
    """
    if directory is None:
        directory = os.curdir
    repo_path = Path(directory).resolve()

    if repo_path.exists() and any(repo_path.iterdir()):
        raise InvalidOperation(f'"{repo_path}" isn\'t empty', param_hint="directory")

    from kart.working_copy.base import BaseWorkingCopy

    BaseWorkingCopy.check_valid_creation_location(wc_location, PotentialRepo(repo_path))

    if not repo_path.exists():
        repo_path.mkdir(parents=True)

    if import_from:
        check_git_user(repo=None)
        base_source = TableImportSource.open(import_from)

        # Import all tables.
        # If you need finer grained control than this,
        # use `kart init` and *then* `kart import` as a separate command.
        tables = base_source.get_tables().keys()
        sources = [base_source.clone_for_table(t) for t in tables]

    # Create the repository
    repo = KartRepo.init_repository(
        repo_path,
        wc_location,
        bare,
        initial_branch=initial_branch,
        spatial_filter_spec=spatial_filter_spec,
    )

    if import_from:
        validate_dataset_paths([s.dest_path for s in sources])
        fast_import_tables(
            repo,
            sources,
            settings=FastImportSettings(
                num_processes=num_processes, max_delta_depth=max_delta_depth
            ),
            from_commit=None,
            message=message,
        )

        head_commit = repo.head_commit
        if do_checkout and not bare:
            checkout.reset_wc_if_needed(repo, head_commit)

    else:
        click.echo(
            f"Created an empty repository at {repo_path} — import some data with `kart import`"
        )

    # Experimental point-cloud datasets:
    if os.environ.get("X_KART_POINT_CLOUDS"):
        lfs_override = os.environ.get("X_KART_SET_LFS_FOR_NEW_REPOS")
        if lfs_override:
            repo.config["lfs.url"] = lfs_override
def clone_repository(
    cls,
    clone_url,
    repo_root_path,
    clone_args,
    wc_location=None,
    bare=False,
    spatial_filter_spec=None,
    spatial_filter_after_clone=False,
):
    repo_root_path = repo_root_path.resolve()
    cls._ensure_exists_and_empty(repo_root_path)
    if not bare:
        from kart.working_copy.base import BaseWorkingCopy

        BaseWorkingCopy.check_valid_creation_location(
            wc_location, PotentialRepo(repo_root_path)
        )

    extra_args = []
    is_spatial_filter_clone = False
    if spatial_filter_spec is not None:
        # Make sure we fetch any spatial filters that might exist - we need those straight away.
        # TODO - This is a bit magic, look into it further. We might need it always - or there might be another way.
        extra_args = [
            "-c",
            "remote.origin.fetch=+refs/filters/*:refs/filters/*",
        ]
        if not spatial_filter_after_clone:
            is_spatial_filter_clone = True
            partial_clone_spec = spatial_filter_spec.partial_clone_filter_spec()
            extra_args.append(partial_clone_spec)
            click.echo(
                f"Cloning using git spatial filter extension: {partial_clone_spec}",
                err=True,
            )

    if bare:
        kart_repo = cls._clone_with_git_command(
            [
                "git",
                "clone",
                "--bare",
                *extra_args,
                *clone_args,
                clone_url,
                str(repo_root_path),
            ],
            gitdir_path=repo_root_path,
            is_spatial_filter_clone=is_spatial_filter_clone,
        )
    else:
        dot_kart_path = (
            repo_root_path if bare else repo_root_path / cls.DIRNAME_FOR_NEW_REPOS
        )
        dot_clone_path = repo_root_path / ".clone"

        kart_repo = cls._clone_with_git_command(
            [
                "git",
                "clone",
                "--no-checkout",
                f"--separate-git-dir={dot_kart_path}",
                *extra_args,
                *clone_args,
                clone_url,
                str(dot_clone_path),
            ],
            gitdir_path=dot_kart_path,
            temp_workdir_path=dot_clone_path,
            is_spatial_filter_clone=is_spatial_filter_clone,
        )

    kart_repo.lock_git_index()
    kart_repo.write_config(wc_location, bare, spatial_filter_spec)
    kart_repo.write_attributes()
    kart_repo.write_readme()
    kart_repo.activate()

    return kart_repo
def init_repository(
    cls,
    repo_root_path,
    wc_location=None,
    bare=False,
    initial_branch=None,
    spatial_filter_spec=None,
):
    """
    Initialise a new Kart repo. A Kart repo is basically a git repo, except -
    - git internals are stored in .kart instead of .git
      (.git is a file that contains a reference to .kart; this is allowed by git)
    - datasets are stored in /.sno-dataset/ trees according to a particular dataset format version -
      see DATASETS_v2.md. But this only matters once there are commits, and none are present yet at this stage.
    - there is a blob called .sno.repository.version that contains the dataset format version number -
      but this is only written in the first commit, so it is not yet present at this stage.
    - there is a property in the repo config called kart.repostructure.version that contains the dataset
      format version number, which is used until the .sno.repository.version blob is written.
    - there are extra properties in the repo config about where / how the working copy is written.
    - the .kart/index file has been extended to stop git messing things up - see LOCKED_EMPTY_GIT_INDEX.
    """
    repo_root_path = repo_root_path.resolve()
    cls._ensure_exists_and_empty(repo_root_path)
    if not bare:
        from kart.working_copy.base import BaseWorkingCopy

        BaseWorkingCopy.check_valid_creation_location(
            wc_location, PotentialRepo(repo_root_path)
        )

    extra_args = []
    if initial_branch is not None:
        extra_args += [f"--initial-branch={initial_branch}"]

    if bare:
        # Create bare-style repo:
        kart_repo = cls._create_with_git_command(
            [
                "git",
                "init",
                "--bare",
                *extra_args,
                str(repo_root_path),
            ],
            gitdir_path=repo_root_path,
        )
    else:
        # Create tidy-style repo:
        dot_kart_path = repo_root_path / cls.DIRNAME_FOR_NEW_REPOS
        dot_init_path = repo_root_path / ".init"

        kart_repo = cls._create_with_git_command(
            [
                "git",
                "init",
                f"--separate-git-dir={dot_kart_path}",
                *extra_args,
                str(dot_init_path),
            ],
            gitdir_path=dot_kart_path,
            temp_workdir_path=dot_init_path,
        )

    kart_repo.lock_git_index()
    kart_repo.write_config(
        wc_location,
        bare,
        spatial_filter_spec,
        table_dataset_version=DEFAULT_NEW_REPO_VERSION,
    )
    kart_repo.write_attributes()
    kart_repo.write_readme()
    kart_repo.activate()

    return kart_repo
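# Illustrative sketch only (not part of the Kart source): a rough check of the
# tidy-style layout described in the init_repository() docstring above. The helper
# name and assertions are hypothetical and exist purely for illustration, assuming
# the repo was just created at `repo_root_path` in non-bare (tidy) mode.
def _sketch_check_tidy_layout(repo_root_path):
    from pathlib import Path

    root = Path(repo_root_path)
    # Git internals live in .kart rather than .git:
    assert (root / ".kart").is_dir()
    # .git is a plain file that redirects git to .kart (the standard gitdir-file mechanism):
    assert (root / ".git").is_file()
    assert (root / ".git").read_text().startswith("gitdir:")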