def __init__(self, config: Config, path: str = None) -> None:
    self.__config: Config = config
    try:
        if not path:
            self.__repo = Repo(config.root_path)
        else:
            self.__repo = Repo(path)
        assert not self.__repo.bare
        self.origin = self.__repo.remote('origin')
    except InvalidGitRepositoryError:
        log_error("Path is not a git repository. Please go to a valid git repository!")
        sys.exit()
def get_repo_information(repo_root):
    gitrepo = Repo(repo_root)
    branch = gitrepo.active_branch
    commit = gitrepo.head.commit.hexsha
    url = gitrepo.remotes.origin.url
    org, repo = org_repo_from_url(url)
    return dict(branch=branch, commit=commit, org=org, repo=repo)
def checkout_repository(tmpdir, org, repo, branch):
    if branch is None:
        branch = 'master'
    path = os.path.join(tmpdir, org, repo, branch)
    url = '[email protected]:%s/%s.git' % (org, repo)
    url = 'https://github.com/%s/%s.git' % (org, repo)
    try:
        if not os.path.exists(path):
            checkout(path, url, branch)
        else:
            m = os.path.getmtime(path)
            age = time.time() - m
            if age < 10 * 60:
                pass
                # msg = 'Do not checkout repo if young.'
                # logger.debug(msg)
            else:
                # msg = 'Checkout repo of age %s.' % age
                # logger.debug(msg)
                repo = Repo(path)
                try:
                    repo.remotes.origin.pull()
                    os.utime(path, None)
                except:
                    pass
        return path
    except GitCommandError as e:
        msg = 'Could not checkout repository %s/%s' % (org, repo)
        msg += '\n using url %s' % url
        raise_wrapped(CouldNotCheckoutRepo, e, msg, compact=True)
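# The checkout() helper called above is not shown in the example. A minimal
# sketch of one plausible implementation, assuming it should clone `url`
# into `path` and check out `branch` (names taken from the call site above);
# GitPython forwards the branch keyword to `git clone --branch`.
def checkout(path, url, branch):
    os.makedirs(os.path.dirname(path), exist_ok=True)
    return Repo.clone_from(url, path, branch=branch)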
def pytest_generate_tests(self, metafunc):
    """
    Generate test instances for all repositories to be checked.
    """
    if 'github_repo' in metafunc.fixturenames:
        if not metafunc.config.option.org and not metafunc.config.option.repo:
            metafunc.parametrize(
                "git_repo",
                [Repo('.')],
            )
            metafunc.parametrize(
                "github_repo",
                [None],
            )
        else:
            metafunc.parametrize(
                "github_repo",
                self.get_repos(),
            )
    if 'oep' in metafunc.fixturenames:
        metafunc.parametrize(
            "oep",
            metafunc.config.option.oep,
            ids=["OEP-{}".format(oep) for oep in metafunc.config.option.oep],
        )
def do_init():
    for repository_name, repository_remote in settings.toclone.items():
        git_dest = ''
        if repository_name in settings.inventorydirs:
            git_dest = settings.inventorydirs[repository_name]
        elif repository_name in settings.playbookdirs:
            git_dest = settings.playbookdirs[repository_name]
        elif repository_name in settings.localdirs:
            git_dest = settings.localdirs[repository_name]
        else:
            print(
                'there is no corresponding directory defined in your config for {}'
                .format(repository_name))
        if not git_dest:
            print('could not find git_dest')
            continue
        if os.path.isdir('{}/.git'.format(git_dest)):
            print('update repository {}'.format(git_dest))
            git = Repo(git_dest).git
            git.pull()
        else:
            print('clone repository {}'.format(git_dest))
            os.makedirs(git_dest, exist_ok=True)
            Repo.clone_from(repository_remote, git_dest)
def clone():
    try:
        print(f'Using existing repo in {settings.REPOSITORY_DIR}')
        Repo(settings.REPOSITORY_DIR)
    except:
        print('Repo does not exist')
        Repo.clone_from(settings.GIT_REPOSITORY, settings.REPOSITORY_DIR)
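# A narrower variant of clone() above: it catches only the GitPython
# exceptions that signal a missing or invalid repository instead of a bare
# except. A minimal sketch, assuming the same settings module with
# GIT_REPOSITORY and REPOSITORY_DIR used in the original example.
from git import Repo
from git.exc import InvalidGitRepositoryError, NoSuchPathError

def clone_checked():
    try:
        repo = Repo(settings.REPOSITORY_DIR)
        print(f'Using existing repo in {settings.REPOSITORY_DIR}')
    except (InvalidGitRepositoryError, NoSuchPathError):
        print('Repo does not exist, cloning')
        repo = Repo.clone_from(settings.GIT_REPOSITORY, settings.REPOSITORY_DIR)
    return repo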
def get_repo_information(repo_root):
    """ Returns a dictionary with fields branch, commit, org, repo

        Raises RepoInfoException.
    """
    gitrepo = Repo(repo_root)
    try:
        try:
            branch = gitrepo.active_branch
        except TypeError:
            # TypeError: HEAD is a detached symbolic reference as it points
            # to '4bcaf737955277b156a5bacdd80d1805e4b8bb25'
            branch = None
        commit = gitrepo.head.commit.hexsha
        try:
            origin = gitrepo.remotes.origin
        except AttributeError:
            raise ValueError('No remote "origin".')
        url = origin.url
    except ValueError as e:
        msg = 'Could not get branch, commit, url. Maybe the repo is not initialized.'
        raise_wrapped(RepoInfoException, e, msg, compact=True)
    # now github can use urls that do not end in '.git'
    if 'github' in url and not url.endswith('.git'):
        url = url + '.git'
    try:
        org, repo = org_repo_from_url(url)
    except NotImplementedError:
        org, repo = None, None
    return dict(branch=branch, commit=commit, org=org, repo=repo)
def update(self):
    if not self.is_downloaded():
        logging.debug(colors.red('{} is not downloaded'.format(self.name)))
        return
    try:
        Repo(self.path).remote().pull()
        logging.info(colors.green('{} has been updated'.format(self.name)))
    except Exception as e:
        logging.error(colors.red('Update failed: ' + str(e)))
def fetch_pull_repo(repo_path: str, ref_name: str) -> None:
    repo = Repo(repo_path)
    repo_name = os.path.basename(repo.working_dir)
    logger.info(f'Fetching {repo_name} changes')
    repo.remotes.origin.fetch()
    logger.info(f'Checking out {repo_path} to {ref_name}')
    repo.git.checkout(ref_name)
    if check_is_branch(repo, ref_name):
        repo.remotes.origin.pull()
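# check_is_branch() used above is not defined in this example. A minimal
# sketch of one plausible implementation, assuming it should answer whether
# ref_name names a local branch head (so that pulling after the checkout
# makes sense, as opposed to a tag or a detached commit).
def check_is_branch(repo: Repo, ref_name: str) -> bool:
    return ref_name in [head.name for head in repo.heads]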
def push(repo_path: str, message="update"):
    ''' Instructional push to an active branch '''
    repo = Repo(repo_path)
    uncommited = get_uncommitted(repo)
    modified, untracked = uncommited["modified"], uncommited["untracked"]
    deleted, modified = get_deleted_files(modified)
    all_files = modified + deleted + untracked
    n_all = len(all_files)

    # Type cumulative sum
    n_modified = len(modified)
    n_deleted = len(deleted)
    n_untracked = len(untracked)
    t_csum = np.cumsum([n_modified, n_deleted, n_untracked])

    border_msg(f"Active branch: {repo.active_branch}")

    # Print uncommitted files (listed by number)
    for i, file in enumerate(all_files):
        # Print header
        if i == 0 and (n_modified + n_deleted != 0):
            print("\n● Modified:")
        elif (i == n_modified + n_deleted) and n_untracked != 0:
            print("\n● Untracked:")
        print(f"[{i}] {file}", end="")
        if t_csum[0] <= i < t_csum[1]:
            print(" - deleted")
        else:
            print("")

    if n_all != 0:
        committed_ids = [int(i) for i in input("\nCommit ids: ").split()]
        commit_message = input("Commit message: ")
    else:
        print("Nothing to commit.")

    committed_files = []
    committed_del_files = []
    for i, file in enumerate(all_files):
        if i in committed_ids:
            if t_csum[1] <= i < t_csum[2]:
                committed_del_files.append(file)
            else:
                committed_files.append(file)

    repo.git.add(committed_files, update=True)
    repo.index.add(committed_del_files)

    if n_all != 0:
        repo.index.commit(commit_message)
        repo.git.push("origin", repo.active_branch)
        print(
            f"Woohoo!~ All commits are pushed to {repo.remotes.origin.url} | branch: {repo.active_branch}"
        )
def setup_bkrepo(repo_url, repo_path):
    try:
        repo = Repo(repo_path)
    except NoSuchPathError:
        logging.info('git clone: {}'.format(repo_url))
        repo = Repo.clone_from(repo_url, repo_path)
    else:
        logging.info('git pull: {}'.format(repo_url))
        repo.remote().pull()
    return repo
def test_fetch_remote(tmp_path):
    from git import Repo
    from gitswitch.repository import Repository
    from os import path
    (local_path, remote_path) = get_dirs(tmp_path)
    Repository(remote_path, None, False)
    Repository(local_path, remote_path, False)
    assert path.exists(local_path), 'Path should at least exist'
    Repo(local_path)  # will throw if it is no valid git repo
def from_serializable(self, data: Any) -> Any:
    out = self.object_type("name")
    for item in data:
        tmp = PushInfo(None, None, None, None)
        for key in self.__response_keys:
            setattr(tmp, key, item[key])
        for key in self.__response_keys_special:
            if key == "local_ref":
                setattr(tmp, key, HEAD(Repo(item[key][0])))
        out.append(tmp)
    return out
def test_init(tmp_path):
    from git import Repo
    from gitswitch.repository import Repository
    from os import path
    (local_path, _) = get_dirs(tmp_path)
    repo = Repository(local_path, None, True)
    repo.init()
    assert path.exists(local_path), 'Path should at least exist'
    Repo(local_path)  # will throw if it is no valid git repo
def git_update() -> NoReturn:
    uad_path = "/".join(cfg.debloater_list_path.split("/")[0:-1])
    if not os.path.exists(uad_path):
        Repo.clone_from(
            "https://gitlab.com/W1nst0n/universal-android-debloater", uad_path)
        log_info("[GIT] cloned the repo 'universal android debloater'",
                 logger="debuglog")
    else:
        repo = Repo(uad_path)
        repo.git.pull()
        log_info("[GIT] updated local repo of debloat-scripts",
                 logger="debuglog")
def __init__(self, cloning_path: Path) -> None:
    """Create a new ExamplesRepo instance."""
    self.cloning_path = cloning_path
    try:
        self.repo = Repo(self.cloning_path)
    except NoSuchPathError:
        self.repo = None  # type: ignore
        logger.debug(
            f"`cloning_path`: {self.cloning_path} was empty, "
            f"but ExamplesRepo was created. "
            "Ensure a pull is performed before doing any other operations."
        )
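# Hedged sketch of the pull step that the log message above asks for when
# cloning_path was empty: clone on first use, otherwise pull from origin.
# The attribute names (self.cloning_path, self.repo) come from the example;
# the `url` parameter is a hypothetical addition, not part of the original.
def pull(self, url: str) -> None:
    if self.repo is None:
        self.repo = Repo.clone_from(url, self.cloning_path)
    else:
        self.repo.remotes.origin.pull()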
def git_submodule(name, path, url, branch):
    #print sys.path
    from git.repo.base import Repo
    d = str(Path('.').abspath())
    repo = Repo(d)  # current repo
    from git.objects.submodule.base import Submodule
    s = Submodule.add(repo, name, path, url, branch)
    #s = self.repo.lookup_submodule(SUBM_PATH)
    #self.repo.listall_submodules()
    return str(Path(path).abspath())
def main(self):
    if not self.nested_command:
        print('Error: no command given')
        self.help()
        return 1

    cli.ExistingDirectory(self.repoDirectory)
    self.repo = Repo(self.repoDirectory)

    if self.propertyFiles:
        propertyFiles = self.propertyFiles
    else:
        propertyFiles = ['build.properties']

    self.buildProps = BuildProperties(self.repo.working_tree_dir, propertyFiles)
    return 0
def resolve(self, source):
    if not self.package_linker.package_folder:
        raise ValueError(
            '"package_folder" is required but not specified, see -w parameter.'
        )

    repo_uri = source[len(self.scheme):]
    repo_uri, branch_or_tag, sub_path = _normalize_uri(repo_uri)

    # branch = branch_or_tag
    # tag = None
    # if branch_or_tag and ':' in branch_or_tag:
    #     idx = branch_or_tag.index(':')
    #     tag = branch_or_tag[idx + 1:]
    #     branch = branch_or_tag[:idx]

    repo_id = repo_uri
    if branch_or_tag:
        repo_id = '%s.%s' % (repo_uri, branch_or_tag)
    repo_id = repo_id.replace('.', '_').replace(':', '_').replace('/', '_')
    repo_path = os.path.join(self.package_linker.package_folder, repo_id)

    utils.mkdir_p(self.package_linker.package_folder)

    # Check if the repo already exists
    repo = None
    if os.path.isdir(repo_path):
        try:
            repo = Repo(repo_path)
            if not hasattr(repo.remotes, 'origin') or not repo_uri == repo.remotes.origin.url:
                raise RuntimeError('Invalid existing repository %s' % repo_path)
            else:
                repo.remotes.origin.pull()
        except:
            utils.rmdir(repo_path)
            repo = None

    # the repository does not exist.
    if not repo:
        repo = Repo.clone_from(repo_uri, repo_path)

    self._swith_branch_or_tag(repo, branch_or_tag)
    return os.path.join(repo_path, sub_path)
def get_repo_information(repo_root):
    """ Returns a dictionary with fields branch, commit, org, repo

        Raises RepoInfoException.
    """
    # print('Creating a Repo object for root %s' % repo_root)
    gitrepo = Repo(repo_root)
    try:
        try:
            branch = str(gitrepo.active_branch)
        except TypeError:
            # TypeError: HEAD is a detached symbolic reference as it points
            # to '4bcaf737955277b156a5bacdd80d1805e4b8bb25'
            branch = None

        commit = gitrepo.head.commit.hexsha
        try:
            origin = gitrepo.remotes.origin
        except AttributeError:
            raise ValueError('No remote "origin".')
        url = origin.url
    except ValueError as e:
        msg = 'Could not get branch, commit, url. Maybe the repo is not initialized.'
        raise_wrapped(RepoInfoException, e, msg, compact=True)
        raise

    # now github can use urls that do not end in '.git'
    if 'github' in url and not url.endswith('.git'):
        url += '.git'

    try:
        org, repo = org_repo_from_url(url)
    except NotImplementedError:
        org, repo = None, None

    author_name = gitrepo.head.commit.author.name
    author_email = gitrepo.head.commit.author.email
    committed_date = gitrepo.head.commit.committed_date

    # avoid expensive garbage collection
    gitrepo.git = None

    return dict(branch=branch, commit=commit, org=org, repo=repo,
                committed_date=committed_date,
                author_name=author_name,
                author_email=author_email)
def praise_command(filename=None):
    try:
        repo = Repo(path=os.getcwd(), search_parent_directories=True)
    except InvalidGitRepositoryError:
        print('Not a git repository')
        return
    try:
        output = praise(filename, repo)
        terminal_height, terminal_width = os.popen('stty size', 'r').read().split()
        lines = output.split('\n')
        if len(lines) > int(terminal_height):
            click.echo_via_pager(output)
        else:
            click.echo(output)
    except Exception:
        print(filename, 'is a binary file.')
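# The `stty size` call above only works where a POSIX tty is attached. A
# hedged alternative for the same height check using only the standard
# library; the helper name is a placeholder, not part of the original.
import shutil

def needs_pager(output: str) -> bool:
    terminal_size = shutil.get_terminal_size()  # falls back to 80x24
    return len(output.split('\n')) > terminal_size.lines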
def include_git(self, github_repo, file_path, branch, local_path):
    self.clone_git(github_repo, branch)
    tmpRoot = self.root + '/' + github_repo + '/' + branch
    repo = Repo(tmpRoot)
    rev_d = repo.git.log('--pretty=%as', '-1', file_path)
    if self.appendcount:
        self.include_file(tmpRoot + "/" + file_path, local_path, rev_date=rev_d)
    else:
        self.include_file(tmpRoot + "/" + file_path, local_path,
                          edit_url=github_repo + '/blob/' + branch + '/' + file_path,
                          rev_date=rev_d)
def __init__(self, **kwargs):
    url = kwargs.get("url")
    local_path = kwargs.get("local_path")
    self.work_dir = None
    if url is None and local_path is None:
        raise ValueError("Must supply one of url and local_path")
    if url and local_path:
        raise ValueError("Can only supply one of url and local_path")
    if url:
        self.work_dir = tempfile.mkdtemp()
        self.repo = Repo.clone_from(url, self.work_dir)
    else:
        self.work_dir = None
        self.repo = Repo(local_path)
    origin = self.repo.remote(name="origin")
    origin.fetch()
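# Usage sketch for the constructor above: exactly one of `url` or
# `local_path` must be supplied. The class name GitWorkingCopy and the
# paths are placeholders, since the example does not show them.
clone_copy = GitWorkingCopy(url='https://github.com/someorg/somerepo.git')
local_copy = GitWorkingCopy(local_path='/path/to/existing/checkout')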
def __init__(
    self,
    root=None,
    folders=None,
    name=None,
    url=None,
):
    self.DEFAULT_BRANCH = 'master'
    if root is None:
        self.root = tempfile.mkdtemp(prefix="bugle_" + name + "_")
        self.is_root_tmp = True
    else:
        self.root = root
        self.is_root_tmp = False
    if name is None:
        self.name = os.path.basename(root)
    else:
        self.name = name
    self.url = url
    # Make sure folder is a git repo
    if self.is_root_tmp and self.url is None:
        raise Exception("New folder with no repo URL")
    self.git = Git(self.root)
    self.git_clone()
    self.repo = Repo(self.root)
    if folders is None:
        self.folders = ['.']
    else:
        self.folders = folders
    self.files = {}
    self.branches = {}
    self.total_count = {}
    self.branch_count = {}
    print("creating repo %s under %s, folders %s" % (name, self.root, self.folders))
from setuptools import setup
from git.repo.base import Repo
from os.path import dirname, realpath, exists
import os

vcs = Repo(dirname(realpath(__file__)))
urls = [u for u in vcs.remote().urls]
if len(urls) < 1:
    raise NotImplementedError()
versionnum = (
    len([c for c in vcs.iter_commits()])
    - 116  # version 0.0.* had 115 revisions
    - 57   # version 0.1.* had 56 revisions
    - 71   # version 0.2.* had 70 revisions
)
versionstr = "0.3.%d" % versionnum
print("Current version %s" % versionstr)
logfile = os.path.join(os.sep, "var", "log", "simple_shuffle.log")
# HACK: This requires that the permissions be changed manually, needs to be
# fixed. How to determine the user executing a command as sudo?
if not exists(logfile):
    open(logfile, 'w').close()
setup(name="Simple Shuffle",
      version=versionstr,
      author="D. Scott Boggs",
      author_email="*****@*****.**",
      description="Shuffles a folder of music. That is all.",
      license="GPLv3",
def install(self):
    # First check if we need to install from the registry
    if self.registry_app:
        # Check if the app is in our config already
        if self.name in self.config.apps:
            # app has been installed before
            if self.config.verbose > 0:
                typer.secho(
                    f"App {self.name} has been installed before",
                    err=False,
                    fg=typer.colors.BRIGHT_BLACK,
                )
            self.source = self.config.apps[self.name]["source"]
            self.destination = self.config.apps[self.name]["destination"]
            if self.config.verbose > 0:
                typer.secho(
                    f"App source from config: {self.source}",
                    err=False,
                    fg=typer.colors.BRIGHT_BLACK,
                )
        else:
            # We will install it from the registry
            # Set source
            self.source = f"{self.config.appstore_url}/{self.name}"
            # Set destination: ~/.backplane/contexts/defaults/{self.name}
            self.destination = os.path.join(self.config.app_dir, self.name)

    if self.source != self.destination:
        # Loading app from external source
        app_path = self.destination
        cwd = os.getcwd()
        try:
            # Check if app already exists
            if os.path.exists(app_path):
                typer.echo(f"found existing app in {app_path}")
                # Change to app_dir
                os.chdir(app_path)
                # Pull
                typer.echo(f"pulling updates from {self.source}")
                repo = Repo(app_path)
                assert repo.__class__ is Repo
                repo.remotes.origin.pull()
                # Return to previous directory
                os.chdir(cwd)
            else:
                typer.echo(f"cloning from {self.source}")
                try:
                    repo = Repo.clone_from(self.source, app_path)
                except Exception as e:
                    raise CannotInstallApp(
                        f"Failed to clone from {self.source}: {e}")
                assert repo.__class__ is Repo

            # Set app path
            self.path = app_path

            # Set app name from git remote
            remote_url = repo.remotes[0].config_reader.get(
                "url")  # e.g. 'https://github.com/abc123/MyRepo.git'
            self.name = os.path.splitext(
                os.path.basename(remote_url))[0]  # 'MyRepo'
        except Exception as e:
            raise CannotInstallApp(
                f"Failed to install app from {self.source}: {e}")

    # Save app to user config
    try:
        custom_config = {"apps": {}}
        custom_config["apps"][self.name] = {
            "destination": self.destination,
            "source": self.source,
            "params": {},
        }
        self.config.write(custom_config)
        if self.config.verbose > 0:
            typer.secho(
                f"Saving new config to {self.config.config_path}",
                err=False,
                fg=typer.colors.BRIGHT_BLACK,
            )
    except Exception as e:
        raise CannotInstallApp(f"Cannot save config: {e}")

    # Install the app
    try:
        install_command = [
            "docker-compose",
            "-p",
            self.name,
        ]
        if self.config.verbose:
            install_command.append("--verbose")

        env_file = os.path.join(self.destination, ".env")
        if os.path.exists(env_file):
            install_command.append("--env-file")
            install_command.append(str(env_file))

        # Check existence of compose files
        compose_file = os.path.join(self.destination, self.compose_file)
        app_config = {}
        if os.path.exists(compose_file):
            install_command.append("-f")
            install_command.append(str(compose_file))
            # Load config
            app_config = anyconfig.load(compose_file)
        else:
            raise CannotInstallApp(f"{compose_file} not found")

        install_command.append("up")
        install_command.append("-d")

        # Check if build is necessary
        build = False

        # Augment
        os.environ["BUILD_DATE"] = datetime.datetime.utcnow().isoformat()
        # os.environ["BUILD_VERSION"]
        # BUILD_VERSION
        # VCS_REF

        for service in app_config["services"]:
            service_config = app_config["services"][service]
            if "build" in service_config:
                build = True

        if build:
            install_command.append("--build")
            install_command.append("--force-recreate")
            os.environ["DOCKER_BUILDKIT"] = "1"

        if self.config.verbose > 0:
            typer.secho(
                f"Installation Command: {' '.join(install_command)}",
                err=False,
                fg=typer.colors.BRIGHT_BLACK,
            )

        # Start installation
        try:
            result = subprocess.Popen(install_command, stdout=subprocess.PIPE)
            while True:
                try:
                    output = next(result.stdout)
                    typer.echo(output.decode().strip())
                except StopIteration:
                    # Get returncode from process
                    result.communicate()[0]
                    break

            if result.returncode == 0:
                typer.echo("Deployment complete.")
                typer.echo(
                    f"You can access your application at {','.join(self.getAppURLs())}"
                )

                # Get logs
                if self.config.verbose:
                    logs_command = [
                        "docker-compose",
                        "-p",
                        self.name,
                        "-f",
                        compose_file,
                        "logs",
                        "--tail",
                        "50",
                    ]
                    result = subprocess.Popen(logs_command, stdout=subprocess.PIPE)
                    while True:
                        try:
                            output = next(result.stdout)
                            typer.echo(output.decode().strip())
                        except StopIteration:
                            print("Logs complete.")
                            break
            else:
                raise CannotInstallApp(
                    f"Deployment failed with code {result.returncode}.")
        except Exception as e:
            raise CannotInstallApp(f"failed to install {self.name}: {e}")
    except Exception as e:
        raise CannotInstallApp(f"failed to install {self.name}: {e}")
def main():
    physical_devices = tf.config.list_physical_devices('GPU')
    tf.config.experimental.set_memory_growth(physical_devices[0], True)

    args = parse_args()
    stages = 3 + args.with_spn

    #tf.config.experimental_run_functions_eagerly(True)
    # ds = TFRecordsDataset('driving.tfrecords.shard*',training=True)
    # ds = ds.map(lambda imgL, imgR, dispL: ((imgL, imgR), dispL))
    # train_size = int(0.7 * SAMPLES)
    # #ds = ds.shuffle(args.train_bsize, seed=args.seed, reshuffle_each_iteration=False)
    # train_ds = ds.take(train_size).batch(args.train_bsize)
    # test_ds = ds.skip(train_size).batch(args.train_bsize)
    # val_ds = ds.skip(train_size).take(args.train_bsize).batch(args.train_bsize)

    train_cache_file = args.train_ds.split('.')[0]
    train_ds = TFRecordsDataset(args.train_ds, training=True)\
        .map(random_crop, num_parallel_calls=4)\
        .shuffle(args.train_bsize*8, reshuffle_each_iteration=True)\
        .batch(args.train_bsize, drop_remainder=True)\
        .prefetch(3)\
        .take(args.train_bsize*2)

    test_cache_file = args.test_ds.split('.')[0]
    test_ds = TFRecordsDataset(args.test_ds, training=True)\
        .map(center_crop, num_parallel_calls=4)\
        .batch(args.train_bsize, drop_remainder=True)\
        .prefetch(3)\
        .take(args.train_bsize*2)

    val_ds = test_ds.take(1)

    model_builder = AnyNet(
        batch_size=args.train_bsize,
        unet_conv2d_filters=args.unet_conv2d_filters,
        unet_nblocks=args.unet_nblocks,
        cspn_conv3d_filters=args.cspn_conv3d_filters,
        local_max_disps=args.local_max_disps,
        global_max_disp=args.global_max_disp,
        loss_weights=args.loss_weights,
        stages=stages,
    )
    input_shape = train_ds.element_spec[0][0].shape
    model = model_builder.build(input_shape=input_shape)

    initial_learning_rate = args.learning_rate
    end_learning_rate = args.learning_rate_end
    decay_steps = args.learning_rate_end
    learning_rate_fn = tf.keras.optimizers.schedules.PolynomialDecay(
        initial_learning_rate, decay_steps, end_learning_rate, power=0.5
    )
    optimizer = keras.optimizers.Adam(learning_rate=learning_rate_fn,
                                      epsilon=args.epsilon)

    if args.resume:
        # --checkpoint logs/2020-07-17T02:57:23.979082/model.01-57.78.hdf5
        log_name = args.checkpoint.split('/')
        log_dir = '/'.join(log_name[:2])
    else:
        log_name = str(datetime.now()).replace(' ', 'T')
        log_dir = f'./logs/{log_name}'

    rmse0 = keras.metrics.RootMeanSquaredError(name="rmse_0")
    rmse1 = keras.metrics.RootMeanSquaredError(name="rmse_1")
    rmse2 = keras.metrics.RootMeanSquaredError(name="rmse_2")
    rmse_agg = keras.metrics.RootMeanSquaredError(name="rmse_agg")

    def included_pixel_avg(y_true, y_pred):
        return masked_pixel_ratio(y_true, args.global_max_disp)

    metrics = {
        'disparity-0': [rmse0, rmse_agg],
        'disparity-1': [rmse1, rmse_agg],
        'disparity-2': [rmse2, rmse_agg, included_pixel_avg],
    }
    model.compile(
        optimizer=optimizer,
        loss={
            f'disparity-{i}': L1DisparityMaskLoss(
                i,
                args.global_max_disp
            )
            for i in range(0, stages)
        },
        loss_weights={
            f'disparity-{i}': args.loss_weights[i]
            for i in range(0, stages)
        },
        metrics=metrics
    )

    callbacks = [
        DepthMapImageCallback(val_ds, args.train_bsize, args.train_bsize,
                              frequency=10, log_dir=log_dir),
        keras.callbacks.TensorBoard(
            log_dir=log_dir, histogram_freq=5, profile_batch='60,70'
        ),
        tf.keras.callbacks.ModelCheckpoint(
            #filepath=log_dir+'/model.{epoch:02d}-{val_loss:.2f}.hdf5',
            filepath=log_dir+'/{epoch:02d}-{val_loss:.2f}.ckpt',
            save_best_only=True,
            mode='min',
            save_weights_only=True,
            verbose=1
        )
    ]

    if args.checkpoint:
        weights_file = tf.train.latest_checkpoint(args.checkpoint)
        model.load_weights(weights_file)
        #model = tf.keras.models.load_model(args.checkpoint)

    if args.mlflow:
        import mlflow.tensorflow
        from git.repo.base import Repo
        repo = Repo('.')
        diff = repo.git.diff('HEAD~1')
        f_diff = ['\t'] + diff.splitlines()
        f_diff = '\n\t'.join(f_diff)
        with mlflow.start_run(run_name=log_name):
            mlflow.log_params(vars(args))
            mlflow.tensorflow.autolog(every_n_iter=10)
            mlflow.set_tag('mlflow.note.content', f_diff)
            mlflow.set_tag('tensorboard', log_name)
            model.fit(
                train_ds,
                epochs=args.epochs,
                batch_size=args.train_bsize,
                validation_data=test_ds,
                callbacks=callbacks,
                initial_epoch=args.initial_epoch
            )
    else:
        model.fit(
            train_ds,
            epochs=args.epochs,
            batch_size=args.train_bsize,
            validation_data=test_ds,
            callbacks=callbacks,
            initial_epoch=args.initial_epoch
        )
def is_git_repository(self):
    try:
        return Repo(self.path) is not None
    except:
        return False
def git_repo(self, request, github_repo, branch=None, remote='origin', checkout_root=None):
    """
    py.test fixture to clone a GitHub based repo onto the local disk.

    Arguments:
        github_repo (:class:`~github3.GitHub`): The repo to read from
        branch (str): The branch to check out

    Returns:
        A :class:`~git.repo.base.Repo` object, with the master branch
        checked out and up to date with the remote.
    """
    if checkout_root is None:
        checkout_root = request.config.option.checkout_root

    if not os.path.exists(checkout_root):
        os.makedirs(checkout_root)

    repo_dir = os.path.join(
        os.path.join(checkout_root, github_repo.owner.name),
        github_repo.name
    )

    if github_repo.private:
        repo_url = github_repo.ssh_url
    else:
        repo_url = github_repo.clone_url

    if not os.path.exists(repo_dir):
        repo = Repo.clone_from(repo_url, repo_dir)
    else:
        repo = Repo(repo_dir)

    if github_repo not in SYNCED:
        try:
            remote_obj = repo.remote(remote)
        except ValueError:
            repo.create_remote(remote, repo_url)
            remote_obj = repo.remote(remote)

        if remote_obj.url != repo_url:
            remote_obj.set_url(repo_url)

        remote_obj.fetch()
        SYNCED.add(github_repo)

    if branch is None:
        branch = github_repo.default_branch

    head = repo.head
    remote_branch = RemoteReference(repo, 'refs/remotes/{}/{}'.format(remote, branch))
    local_branch = Head(repo, 'refs/heads/{}'.format(branch))

    try:
        if head.commit != remote_branch.commit:
            local_branch.commit = remote_branch.commit
            local_branch.checkout()
    except ValueError:
        pytest.xfail("Branch {} is empty".format(branch))

    return repo
async def do_validate(cli: CLIUX, opts: ValidateOptions):
    try:
        cwd = opts.get_cwd()  # working directory
        repo = Repo(cwd)  # git repo
        validate_remotes(repo=repo)
        (active_branch, default_branch) = await validate_branches_and_merge_bases(
            cli=cli, repo=repo, opts=opts)
        (latest_remote_sha, active_branch_tracked_ref) = analyze_review_branch_tracking_situation(
            cli, repo, active_branch)
        has_latest_commits_from_upstream = git_does_commit_exist_locally(
            repo=repo, sha=latest_remote_sha)
        if not has_latest_commits_from_upstream:
            offer_to_fetch_from_upstream(
                cli=cli,
                repo=repo,
                opts=opts,
                active_branch=active_branch,
                active_branch_tracked_ref=active_branch_tracked_ref)
        cli.info(
            f"Comparing {active_branch.name} against {active_branch_tracked_ref.name}"
        )
        merge_base = validate_merge_bases_with_default_branch(
            cli, repo, repo.active_branch.name, active_branch_tracked_ref.name)
        cli.debug(f"Identified common commit {merge_base.hexsha[0:8]}")
        cli.debug(f"Local sha: {active_branch.commit.hexsha}")
        cli.debug(f"Upstream sha: {active_branch_tracked_ref.commit.hexsha}")
        new_local_commits = get_truncated_log(repo, active_branch.commit,
                                              merge_base.hexsha)
        new_upstream_commits = get_truncated_log(
            repo, active_branch_tracked_ref.commit, merge_base.hexsha)
        cli.debug(f"new local commits: {new_local_commits}")
        cli.debug(f"new upstream commits: {new_upstream_commits}")

        if len(new_local_commits) > opts.get_commit_count_hard_fail_threshold():
            raise LikelyUserErrorException(
                "Very large number of review branch commits",
                f"""A very large number of commits ({len(new_local_commits)}) were detected on review branch {active_branch.name}, which were not found on tracked branch {active_branch_tracked_ref.name}.

{format_highlight("This may be an indication of an improper rebase!")}

This warning is presented whenever more than {format_integer(opts.get_commit_count_hard_fail_threshold())} new commits that have not yet been pushed are found on a review branch.

Please take a close look at your review branch, and ensure you don't see any duplicate commits that are already on {default_branch.name}""")
        elif len(new_local_commits) > opts.get_commit_count_soft_fail_threshold():
            raise UserBypassableWarning(
                "Large number of review branch commits",
                f"""An unusually large number of commits ({format_integer(len(new_local_commits))}) were detected on review branch {active_branch.name}, which were not found on tracked branch {active_branch_tracked_ref.name}.

{format_highlight("This may be an indication of an improper rebase!")}

This warning is presented whenever more than {opts.get_commit_count_soft_fail_threshold()} new commits, that have not yet been pushed, are found on a review branch.

Please take a close look at your review branch, and ensure you don't see any duplicate commits that are already on {default_branch.name}""")
    except UserBypassException as ex:
        cli.handle_user_bypass_exception(ex)
    except UserBypassableWarning as ex:
        cli.handle_user_bypassable_warning(
            ex,
            bypass_response=("continue"
                             if opts.should_auto_bypass_commit_count_soft_fail()
                             else None))
    except NonApplicableSituationException as ex:
        cli.handle_non_applicable_situation_exception(ex)
    except UnhandledSituationException as ex:
        cli.handle_unhandled_situation_exception(ex)
    except InvalidGitRepositoryError:
        cli.error(
            f"""git_guardrails is only intended for use within a git repository directory
{cwd} does not seem to be a git repository""")