def cli_copy(
    image: str,
    local_git_repo: str = './docker-mirror',
    git_bin: str = 'git',
    commit: bool = True,
    push: bool = True,
    debug: bool = False,
):
    """Mirror a Docker image reference into a git repository as a Dockerfile.

    Writes ``FROM <image>`` to ``<local_git_repo>/<name>/<tag>/Dockerfile``,
    stages the file, and optionally commits and pushes.

    :param image: image reference, e.g. ``ubuntu:20.04``
    :param local_git_repo: path of the mirror repository working tree
    :param git_bin: git executable to use
    :param commit: create a commit after staging
    :param push: push after staging (note: runs even when ``commit`` is
        False, which only makes sense if a commit already exists)
    :param debug: print what is written where
    """
    from git import Git

    code = f'FROM {image}'
    # "name:tag" -> "name/tag" so every tag gets its own directory
    git_sub_path = image.replace(':', '/')
    git_full_path = os.path.join(git_sub_path, "Dockerfile")
    dest = os.path.join(local_git_repo, git_full_path)
    if debug:
        # BUG FIX: message typo "writting" -> "writing"
        print(f"writing {code} into {dest}")
    write_file(dest, code)
    git = Git(
        git_bin=git_bin,
        cwd=local_git_repo,
    )
    git.add(git_full_path)
    if commit:
        git.commit(f"[Add] {image}", False)
    if push:
        git.push()
def test_commit_succesfully_with_multiple_objects(self):
    """commit() stages (add) or unstages (remove) the given objects and
    fetches/pulls/pushes via origin on every call."""
    mock_repo = MagicMock()
    mock_index = MagicMock()
    mock_remotes = MagicMock()
    mock_repo.index = mock_index
    mock_repo.remotes.origin = mock_remotes
    with patch.multiple('git', Repo=MagicMock(return_value=mock_repo)):
        git = Git('~/path/to/repo')
        objects = ['simple_object', 'more_complex_one']
        commit_message = 'simple commit message'
        git.commit(objects, commit_message)
        git.commit(objects, commit_message, action='remove')
        mock_index.add.assert_called_once_with(objects)
        mock_index.remove.assert_called_once_with(objects)
        # BUG FIX: `has_calls` is not a mock assertion method -- calling it
        # just records a call on the mock and verifies nothing. The real
        # method is `assert_has_calls`.
        mock_index.commit.assert_has_calls(
            [call(commit_message), call(commit_message)])
        eq_(mock_remotes.fetch.call_count, 2)
        eq_(mock_remotes.pull.call_count, 2)
        eq_(mock_remotes.push.call_count, 2)
def commit_and_push(path: Path):
    """
    Commits all files in the path directory and pushes to origin/master

    :param path: The path to the directory
    """
    git_client = Git(path.absolute())
    today_str = get_today()
    git_client.add(".")
    git_client.commit("-m", f"Reduction files for {today_str}")
    git_client.push("--set-upstream", "origin", "master")
def test_commit_with_no_message(self):
    """commit() with an empty message still stages the objects."""
    mock_repo = MagicMock()
    mock_index = MagicMock()
    mock_remotes = MagicMock()
    mock_repo.index = mock_index
    mock_repo.remotes.origin = mock_remotes
    # BUG FIX / consistency: the sibling test patches Repo with
    # MagicMock(return_value=mock_repo). Patching with mock_repo directly
    # makes Repo(path) return a *child* mock, so the index/remotes wiring
    # above was never exercised.
    with patch.multiple('git', Repo=MagicMock(return_value=mock_repo)):
        git = Git('~/path/to/repo')
        objects = ['simple_object', 'more_complex_one']
        git.commit(objects, '')
        # The original test asserted nothing; at minimum verify staging.
        mock_index.add.assert_called_once_with(objects)
def run(self):
    """Re-initialise ``self.docs_dir`` as a fresh git repo, commit its
    contents with ``self.commit_msg``, and force-push to the gh-pages
    branch of ``self.repo``."""
    git_dir = os.path.join(self.docs_dir, ".git")
    if os.path.exists(git_dir):
        # Drop any previous history so the publish is a single clean commit.
        import shutil
        shutil.rmtree(git_dir)
    from git import Git
    g = Git(self.docs_dir)
    g.init()
    g.add(".")
    # BUG FIX: "-m {msg}" as a single argv token makes git take the flag's
    # sticky value, so the message gained a leading space. Pass the flag
    # and its value as separate arguments (as the other call sites do).
    g.commit("-m", self.commit_msg)
    g.push("--force", self.repo, "master:gh-pages")
class TempClone:
    """Create a clone in a temp dir used to write and push file changes"""

    def __init__(self, url: str):
        import posixpath
        # Reuse an existing clone keyed by the repo's basename, if present.
        self.path = join(gettempdir(), posixpath.basename(url))
        if not isdir(self.path):
            Repo.clone_from(url, self.path)
        self.git = Git(self.path)

    def write(self, path: str, content: str):
        # Overwrite the file, then amend the tip commit and force-push so
        # the remote history stays a single commit.
        target = join(self.path, path)
        with open(target, 'w') as handle:
            handle.write(content)
        self.git.add(path)
        self.git.commit(amend=True, no_edit=True)
        self.git.push(force=True)
def setUp(self):
    """Create a temporary git repo with two commits, then wrap it in the
    VCS abstraction under test."""
    self.test_dir = tempfile.mkdtemp()
    g = Git(self.test_dir)
    g.init()
    # BUG FIX: the original `open(...).write(...)` never closed the file;
    # flushing then depended on GC timing, so an empty file could be
    # committed on non-CPython interpreters. Use context managers.
    with open('%s/testfile.txt' % (self.test_dir, ), 'w') as f:
        f.write('This is a test')
    g.add('testfile.txt')
    g.commit('-m', 'First commit')
    os.mkdir('%s/testdir' % (self.test_dir, ))
    with open('%s/testdir/file2.txt' % (self.test_dir, ), 'w') as f:
        f.write('This is another test')
    g.add('testdir/file2.txt')
    g.commit('-m', 'Second commit')
    self.vcs = VcsWrapper.vcs_for_path(self.test_dir)
def addSuperModuleCommit(self, id, hash, url, who, branch, project):
    """If *branch* is a tracked super-module branch and *project* is a known
    submodule, check out *hash* in the submodule and record it as an
    auto-commit in the super repository, then push.

    :param hash: commit hash to check out in the submodule
    :param who: user to credit in the auto-commit message
    """
    self.log.debug("branch: " + branch + ", project:" + project)
    hasSuperModule = False
    isSuperModuleBr = False
    self.log.debug("Project names: " + str(self.config.projects))
    projectNames = self.config.projects.keys()
    for proj in projectNames:
        self.log.debug("project: " + project + " proj: " + proj)
        if project.lower() == proj:
            hasSuperModule = True
            break
    for br in self.config.branches:
        if branch == br:
            isSuperModuleBr = True
            break
    self.log.debug("isSuperModuleBr: " + str(isSuperModuleBr) +
                   " hasSuperModule: " + str(hasSuperModule))
    if isSuperModuleBr and hasSuperModule:
        self.log.debug("Git Profile Path: " + str(self.config.profile))
        git = Git(self.config.profile)
        self.checkoutTrackingBranch(git, branch)
        git.pull()
        git.submodule("update", "--init")
        gitSubmoduleProfile = {'git': self.config.superRepoPath +
                               self.config.projects[project.lower()]}
        gitSubmodule = Git(gitSubmoduleProfile)
        self.log.debug("checking out hash: " + hash)
        gitSubmodule.fetch()
        if self.isOptOut(gitSubmodule, hash):
            return
        gitSubmodule.checkout(hash, True)
        git.add(".")
        # BUG FIX: the user segment had been scrubbed to a bare "******",
        # which is a SyntaxError. Re-insert the `who` parameter -- the only
        # plausible operand here. NOTE(review): confirm against history.
        commitMsg = ("Auto checkin: " +
                     self.getCommitMessage(gitSubmodule, hash) +
                     "\nuser:" + who + "\nhash:" + hash +
                     "\nproject: " + project)
        self.log.debug("commiting in super module: " + commitMsg)
        git.commit(commitMsg)
        self.log.debug("pushing super module to branch: " + branch)
        git.push(branch)
    else:
        self.log.debug("No super module commit is required.")
class TempClone:
    """Create a clone in a temp dir used to write and push file changes"""

    def __init__(self, url: str, branch: str=None):
        import posixpath
        # Reuse an existing clone keyed by the repo's basename, if present.
        self.path = join(gettempdir(), posixpath.basename(url))
        if not isdir(self.path):
            Repo.clone_from(url, self.path)
        self.git = Git(self.path)
        self.git.fetch()
        if branch:
            self.git.checkout(branch)

    def write(self, path: str, content: str):
        # Overwrite the file, commit with a fixed message, and push.
        destination = join(self.path, path)
        with open(destination, 'w') as out:
            out.write(content)
        self.git.add(path)
        self.git.commit(message="Automatic update of skill-metadata.json")
        self.git.push()

    def delete(self):
        # Remove the whole working copy from the temp dir.
        shutil.rmtree(self.path)
def created_quark(self, sender, new_quark):
    """Hook run after a quark is created: initialise a git repo in the
    quark's path, seed a .gitignore, and make the initial commits and
    version tag. Warns instead if git is not installed.

    :param sender: object carrying an ``options`` dict with 'path'/'version'
    :param new_quark: the quark just created; provides its quarkfile path
    """
    options = sender.options
    if self.git_is_installed():
        g = Git(options['path'])
        g.init()
        # Seed .gitignore from the bundled default, then append the user's
        # personal ignores from ~/.qpmignore, if any.
        default_ignore = os.path.join(os.path.dirname(__file__), '..', 'default_gitignore.txt')
        ignore_path = os.path.join(options['path'], '.gitignore')
        with open(ignore_path, 'w') as ignore_file, open(default_ignore, 'r') as default_file:
            ignore_file.write(default_file.read())
            user_ignores = settings.user_gitignore()
            if user_ignores:
                ignore_file.write('\n#SUPPLIED FROM ~/.qpmignore\n\n')
                ignore_file.write(user_ignores)
        # Commit the quarkfile and .gitignore separately, then any
        # pre-existing files in one commit (only if the tree is dirty).
        g.add(new_quark.get_quarkfile_path(options['path']))
        g.commit(m='Created quarkfile.')
        g.add('.gitignore')
        g.commit(m='Created gitignore.')
        g.add('.')
        if Repo(options['path']).is_dirty():
            g.commit(m='Initial commit of existing files.')
        # NOTE(review): source was whitespace-flattened; tag placement
        # outside the is_dirty() branch is the reconstruction -- confirm.
        g.tag(options['version'])
    else:
        self.msg(
            'WARNING: Git is not installed. Try http://git-scm.com/ or run the following:\n' +
            '\truby -e "$(curl -fsSL https://raw.github.com/Homebrew/homebrew/go/install)"; brew install git'
        )
def test_commit_succesfully_with_multiple_objects(self):
    """commit() stages (add) or unstages (remove) the given objects and
    fetches/pulls/pushes via origin on every call."""
    mock_repo = MagicMock()
    mock_index = MagicMock()
    mock_remotes = MagicMock()
    mock_repo.index = mock_index
    mock_repo.remotes.origin = mock_remotes
    with patch.multiple('git', Repo=MagicMock(return_value=mock_repo)):
        git = Git('~/path/to/repo')
        objects = ['simple_object', 'more_complex_one']
        commit_message = 'simple commit message'
        git.commit(objects, commit_message)
        git.commit(objects, commit_message, action='remove')
        mock_index.add.assert_called_once_with(objects)
        mock_index.remove.assert_called_once_with(objects)
        # BUG FIX: `has_calls` is not a mock assertion method -- calling it
        # just records a call on the mock and verifies nothing. The real
        # method is `assert_has_calls`.
        mock_index.commit.assert_has_calls([call(commit_message), call(commit_message)])
        eq_(mock_remotes.fetch.call_count, 2)
        eq_(mock_remotes.pull.call_count, 2)
        eq_(mock_remotes.push.call_count, 2)
if not os.path.isdir(dirpath): os.mkdir(dirpath) g = Git(dirpath) # If is not Git repository, git init if not git.repo.fun.is_git_dir(dirpath): # bare repository if args.__dict__.get('b'): repo = g.init(dirpath, bare=True) else: # local repository g.init() # Make .gitignore if not os.path.isfile(dirpath + '/.gitignore'): f = open(dirpath + '/.gitignore', 'w') f.write('') f.close() # git add .gitignore and first commit if g.untracked_files or g.is_dirty(): # git add g.add('.gitignore') # git commit g.commit(m='First commit') except RuntimeError as e: sys.stderr.write("ERROR: %s\n" % e)
def initialize(path, branch):
    """Prepare an existing git repo at *path* for git-pm by creating a new
    empty *branch* holding only the git-pm directory skeleton.

    Steps: stash the current branch, check out *branch*, clear and commit
    the tree, create the DIRECTORIES skeleton (with .gitkeep files) and
    commit it, then return to the previous branch and pop the stash.

    Returns a list of human-readable progress lines; on any git error the
    list ends with an "error: ..." entry and the function returns early.
    NOTE: this module is Python 2 (print statements).
    """
    try:
        repo = Repo(path)
        git = Git(path)
    except InvalidGitRepositoryError as err:
        print "Not a valid Git repo: %s" % err
        sys.exit("Run `git init %s` first" % err)
    output = ['Initializing in %s [%s]' % (path, branch)]
    current_branch = repo.active_branch
    # Refuse to clobber an existing branch of the same name.
    if branch in [b.name for b in repo.branches]:
        output.append("error: A branch named '%s' already exists in this repo!" % branch)
        return output
    output.append("--> Stashing current branch contents [%s]" % current_branch)
    try:
        # -u also stashes untracked files
        cmd = git.stash(u=True)
        for line in cmd.splitlines():
            output.append(" %s" % line)
    except GitCommandError as err:
        output.append("error: %s" % err)
        return output
    output.append("--> Switching to branch '%s'" % branch)
    try:
        # checkout -B: create (or reset) the branch and switch to it
        git.checkout(B=branch)
    except GitCommandError as err:
        output.append("error: %s" % err)
        return output
    output.append("--> Clearing current files and committing")
    try:
        files = os.listdir(path)
        files.remove('.git')
        for entry in files:
            git.rm(entry, r=True, f=True)
        cmd = git.commit(m="Clearing files in preparation for git-pm")
        for line in cmd.splitlines():
            output.append(" %s" % line)
    except GitCommandError as err:
        output.append("error: %s" % err)
        return output
    output.append("--> Creating git-pm file structure and committing")
    try:
        for directory in DIRECTORIES:
            dir_path = os.path.join(path, directory)
            gitkeep = os.path.join(dir_path, '.gitkeep')
            os.mkdir(dir_path)
            # touch a .gitkeep so git tracks the otherwise-empty directory
            with open(gitkeep, 'a'):
                os.utime(gitkeep, None)
        cmd = git.commit(m="Created git-pm file structure")
        for line in cmd.splitlines():
            output.append(" %s" % line)
    except GitCommandError as err:
        output.append("error: %s" % err)
        return output
    output.append("--> Returning to previous branch and popping stash")
    try:
        git.checkout(current_branch)
        git.stash("pop")
    except GitCommandError as err:
        output.append("error: %s" % err)
        return output
    return output
def main() -> None:
    """CLI entry point: import account statement PDFs into hledger ledgers.

    Reads the import config, classifies incoming statements to a ledger via
    a user-supplied ``select_ledger.py`` (or the sole configured ledger),
    imports each group on its ledger's import branch, and optionally merges
    that branch back.
    """
    aparser = argparse.ArgumentParser(
        description='import account statement PDFs into hledger')
    aparser.add_argument('--force', dest='force', default=False,
                         action='store_true',
                         help='overwrite existing ledgers')
    aparser.add_argument('--dry-run', dest='dry_run', default=False,
                         action='store_true',
                         help='run parsers without writing any output files')
    aparser.add_argument('--regenerate-includes', dest='regenerate_includes',
                         default=False, action='store_true',
                         help='only regenerate include files; don\'t import '
                              'new bank statements')
    # --no-merge stores False into args.merge (merge defaults to on).
    aparser.add_argument('--no-merge', dest='merge', default=True,
                         action='store_false',
                         help='don\'t merge import branch after import')
    args = aparser.parse_args()
    xdg = getXDGdirectories('bank-statement-parser')
    config_file = xdg['config'] / 'import.cfg'
    config = ImportConfig.read_from_file(config_file)
    if args.regenerate_includes:
        regenerate_includes(Path.cwd(), config)
        exit(0)
    # The ledger-selection hook is user code living in the XDG config dir.
    selection_script = xdg['config'] / 'select_ledger.py'
    select_ledger: Callable[[BankStatementMetadata], str]
    if selection_script.exists():
        with open(selection_script, 'r') as f:
            content = f.read()
        parse_globals: dict[str, Any] = {
            'BankStatementMetadata': BankStatementMetadata,
        }
        # NOTE: executes user-supplied code from the config directory;
        # treated as trusted input.
        exec(compile(content, selection_script, 'exec'), parse_globals)
        if 'select_ledger' not in parse_globals:
            print(f'{selection_script} doesn\'t contain select_ledger'
                  ' function.', file=sys.stderr)
            exit(1)
        select_ledger = parse_globals['select_ledger']
    elif len(config.ledgers) == 1:
        # Only one ledger configured: everything goes there.
        ledger_name = next(iter(config.ledgers))

        def select_ledger(meta: BankStatementMetadata) -> str:
            return ledger_name
    else:
        print(f'Error: {config_file} contains more than one ledger,'
              f' but {selection_script} is missing.', file=sys.stderr)
        exit(1)
    incoming_statements = get_metadata_of_incoming_statements(
        config.incoming_dir)
    classified = sort_incoming_statements_to_ledger_dirs(
        incoming_statements,
        select_ledger,
    )
    # Abort if the selection hook produced any unknown ledger keys.
    if any(key not in config.ledgers for key in classified.keys()):
        for key, statements in classified.items():
            if key in config.ledgers:
                continue
            mismatched_files = ', '.join(str(s.statement_path)
                                         for s in statements)
            print(f'Error: {mismatched_files} were assigned to unknown ledger'
                  f' configuration {key}. Please check {selection_script}.',
                  file=sys.stderr)
            # NOTE(review): source was whitespace-flattened; exit(1) inside
            # the loop (first mismatch aborts) is the reconstruction.
            exit(1)
    for key, statements in classified.items():
        ledger_config = config.ledgers[key]
        print(f'Importing bank statements to {ledger_config.ledger_dir}.')
        # change working directory for git status to work correctly
        os.chdir(ledger_config.ledger_dir)
        git: BaseGit
        if ledger_config.git_dir is not None:
            git = Git(ledger_config.ledger_dir, ledger_config.git_dir)
            import_branch = ledger_config.import_branch
        else:
            # No git configured: FakeGit makes the rest of the flow a no-op
            # with respect to version control.
            git = FakeGit()
            import_branch = git.current_branch()
        try:
            import_incoming_statements(statements, ledger_config.ledger_dir,
                                       git, import_branch, args.force,
                                       args.dry_run)
        except DirtyWorkingDirectoryException:
            print(f'{ledger_config.ledger_dir} contains uncommitted changes,'
                  ' please commit those before continuing.', file=sys.stderr)
            exit(1)
        # The import_transaction in import_incoming_statements automatically
        # resets the branch to the previously checked-out one after importing
        # to the import_branch.
        if (args.merge and isinstance(git, Git)
                and import_branch != git.current_branch()):
            try:
                git.merge(import_branch)
            except GitMergeConflictError as e:
                conflicting_files = [ledger_config.ledger_dir / c.name
                                     for c in e.conflicts]
                # The auto-generated journal.hledger include file is the only
                # conflict we can resolve by regenerating it.
                not_autogenerated = [p for p in conflicting_files
                                     if p.name != 'journal.hledger']
                if not_autogenerated:
                    raise RuntimeError(
                        'Could not automerge the following files:\n'
                        + '\n'.join(str(p) for p in not_autogenerated))
                write_include_files(ledger_config.ledger_dir, git)
                git.commit(f"Merge branch '{import_branch}'")
class Mounts:
    """
    The Mounts class is the class that does all the ETL from git to
    updating the file

    ** This is the magic sauce behind the api
    """

    def __init__(self, app):
        """
        Initialize mounts class by loading yaml into a variable

        :input app - flask app
        """
        self.app = app
        # Pull the latest state of the puppet-style data repo before reading.
        self.git = Git(environ['GIT_DIRECTORY'])
        self.git.pull()
        self.nfs_file = join(environ['GIT_DIRECTORY'], 'data/common.yaml')
        with open(self.nfs_file) as yaml_file:
            self.nfs_info = yaml.safe_load(yaml_file.read())
        if not self.nfs_info:
            raise Exception('error loading mounts file')

    def commit(self, hostname, option):
        """
        Commits the codes and pushes back to git

        :input hostname - name changed
        :input option - option changed
        """
        self.app.logger.info(f"successfully {option} for {hostname}")
        # Write the in-memory state back to the YAML file first.
        self.update_current()
        try:
            self.git.add(self.nfs_file)
            self.git.commit('-m', f"'updated - {option} for {hostname}'")
            self.git.push()
        except Exception as exc:
            # git raises when the tree is clean; surface that as a domain
            # error and re-raise anything else untouched.
            if 'nothing to commit' in str(exc):
                raise Exception('No changes made')
            else:
                raise exc

    def check_exists(self, host_type, name, mount) -> bool:
        """
        Check if the mount exists

        :input host_type - hosts/hostgroups
        :input name - name of the host or hostgroup
        :input mount - the mount option
        :return boolean if exists
        """
        try:
            for temp_mount in self.nfs_info[f"nfs_mounts::{host_type}"][name]:
                # Symmetric difference of the item sets: if the only
                # differing key is 'uuid', the mounts are otherwise equal.
                unmatched_item = set(temp_mount.items()) ^ set(mount.items())
                if unmatched_item and dict(unmatched_item) and 'uuid' in dict(
                        unmatched_item).keys():
                    return True
        except Exception as exc:
            # Missing host/hostgroup key: treat as "does not exist".
            self.app.logger.warning(exc)
        return False

    def update_current(self):
        """
        Writes to file
        """
        with open(self.nfs_file, 'w') as yaml_file:
            to_write = yaml.dump(self.nfs_info, default_flow_style=False)
            yaml_file.write(to_write)

    def add_nas_share(self, local_path, share_path, options, owner, group,
                      host=None, hostgroup=None):
        """
        Add a new NAS Share to a host/hostgroup
        """
        if not host and not hostgroup:
            raise Exception('Missing host and hostgroup')
        else:
            host_type = 'hosts' if host else 'hostgroups'
            name = host if host else hostgroup
        # Create the section / host entry lazily if it doesn't exist yet.
        if not self.nfs_info[f"nfs_mounts::{host_type}"]:
            self.nfs_info[f"nfs_mounts::{host_type}"] = {}
        if name.lower(
        ) not in self.nfs_info[f"nfs_mounts::{host_type}"].keys():
            self.app.logger.info(f"{name}: has no mounts...appending")
            self.nfs_info[f"nfs_mounts::{host_type}"].update({name: []})
        self.app.logger.info(
            self.nfs_info[f"nfs_mounts::{host_type}"].keys())
        mount = {
            'uuid': str(uuid.uuid4()),
            'local_path': local_path,
            'share_path': share_path,
            'options': options,
            'owner': owner,
            'group': group
        }
        if not self.check_exists(host_type, name, mount):
            self.app.logger.info(f"{name}: adding {mount}")
            self.nfs_info[f"nfs_mounts::{host_type}"][name].append(mount)
            self.commit(name, 'add')
            return self.nfs_info[f"nfs_mounts::{host_type}"][name]
        else:
            raise ExistsException('mount point already exists')

    def update_nas_share(self, uuid_num, replacement_dict, host=None,
                         hostgroup=None):
        """
        Modify an existing new NAS Share to a host/hostgroup
        """
        if not host and not hostgroup:
            raise Exception('Missing host and hostgroup')
        else:
            host_type = 'hosts' if host else 'hostgroups'
            name = host if host else hostgroup
        changed = False
        # Find the mount by uuid and merge the replacement fields in place.
        for idx, val in enumerate(
                self.nfs_info[f"nfs_mounts::{host_type}"][name]):
            if uuid_num == val['uuid']:
                self.nfs_info[f"nfs_mounts::{host_type}"][name][
                    idx].update(replacement_dict)
                self.app.logger.info(f"{name}: updating {uuid_num}")
                changed = True
        if not changed:
            raise IndexError('no index matching that uuid found')
        self.commit(name, 'added')
        return self.nfs_info[f"nfs_mounts::{host_type}"][name]

    def delete_host_name(self, name, host_type):
        """
        Remove a host/hostgroup
        """
        del self.nfs_info[f"nfs_mounts::{host_type}"][name]
        if host_type == 'hostgroups':
            self.commit(name, 'deleted hostgroup')
        else:
            self.commit(name, 'deleted host')

    def delete_host_mount(self, name, host_type, uuid_num):
        """
        Remove an existing new NAS mount from a host/hostgroup
        """
        # Keep every mount except the one with the matching uuid.
        self.nfs_info[f"nfs_mounts::{host_type}"][name] = [
            x for x in self.nfs_info[f"nfs_mounts::{host_type}"][name]
            if x['uuid'] != uuid_num
        ]
        self.commit(name, 'deleted mount')
        return self.nfs_info[f"nfs_mounts::{host_type}"][name]