Example #1
import os

import git  # GitPython, used for its command-line style interface
from pygit2 import Repository, clone_repository


class Repo(object):
	def __init__(self, path="", branch="develop"):
		self.path = path if path else os.getcwd()
		# Retrieve the repo if it's already defined
		try:
			self.repo = Repository(self.path)
			# This is a little hacky, but we need a git command interface to do
			# certain things
			self.pygitrepo = git.Repo(self.repo.path)
			self.git = self.pygitrepo.git
			self.currentBranch = self.repo.lookup_branch(self.repo.head.shorthand)
			self._user = self.repo.default_signature
		except KeyError:
			self.repo = None
			self.currentBranch = None
			self._user = None
		# TODO Handle this another way
		self.branch = branch

	@property
	def user(self):
		if not self._user and self.repo:
			self._user = self.repo.default_signature
		return self._user

	def clone(self, repourl):
		self.repo = clone_repository(
			repourl,
			self.path,
			checkout_branch=self.branch
		)

	def checkoutBranch(self, name):
		# TODO check if a branch of this name exists
		developBranch = self.repo.lookup_branch("develop")
		self.repo.checkout(developBranch)
		self.currentBranch = self.repo.create_branch(
			name,
			self.repo.head.get_object()
		)
		self.repo.checkout(self.currentBranch)

	def merge(self, branch, delete=False, push=False):
		pass

	def release(self):
		# Checkout the release branch
		# Need some way to control versioning
		# Internal versioning
		pass
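
A minimal usage sketch of the wrapper above; the path, URL, and branch names are placeholders rather than anything taken from the original example:

repo = Repo(path="/tmp/myproject")
if repo.repo is None:
	# Nothing at that path yet, so clone the project's develop branch there.
	repo.clone("https://example.com/myproject.git")
repo.checkoutBranch("feature/demo")
if repo.user:
	print(repo.user.name, repo.user.email)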
Example #2
import itertools
from pathlib import Path
from typing import List

from pygit2 import Repository

# root_path, contrib_paths and test_path are module-level paths defined
# elsewhere in the original project.


def get_changed_contrib_names() -> List[str]:
    """Get all changed files as compared to remote/main"""
    repo = Repository(root_path)
    main_branch = repo.lookup_branch('main')
    if main_branch is None:
        raise RuntimeError("Can't find `main` branch to compare to.")

    file_paths = set(
        itertools.chain.from_iterable(
            (patch.delta.old_file.path, patch.delta.new_file.path)
            for patch in repo.diff(a=main_branch)))
    changed_contribs = set()

    for filepath in file_paths:
        if '__pycache__' not in filepath:
            path_parents = (root_path / Path(filepath)).parents
            for contrib_path in contrib_paths:
                if contrib_path in path_parents:
                    changed_contribs.add(contrib_path.name)
            if test_path in path_parents:
                for contrib_path in contrib_paths:
                    if filepath.endswith(f'test_{contrib_path.parts[-1]}.py'):
                        changed_contribs.add(contrib_path.name)

    return list(changed_contribs)
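
The same diff-against-main idea as a standalone sketch, with the pygit2 calls spelled out; the repository location is an assumption (the helper above gets it from the module-level root_path):

from pygit2 import Repository, discover_repository

repo = Repository(discover_repository("."))  # placeholder: any path inside the checkout
main_branch = repo.lookup_branch("main")
if main_branch is not None:
    for patch in repo.diff(a=main_branch):
        # Each patch carries a delta with the old and new file paths.
        print(patch.delta.old_file.path, "->", patch.delta.new_file.path)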
Example #3
def git_is_clean(srcdir, project):
    repo = Repository(os.path.join(srcdir, project.workspace_path, ".git"))
    for _, b in iteritems(repo.status()):
        if b != GIT_STATUS_IGNORED and b != GIT_STATUS_CURRENT:
            return False, "has uncommitted changes"
    if repo.head_is_detached:
        return False, "has detached HEAD"
    origin = get_origin(repo, project)
    if not origin:
        return False, "has no upstream remote"
    remote_refs = []
    local_refs = {}
    for refname in repo.listall_references():
        if refname.startswith("refs/remotes/%s/" % origin.name):
            ref = repo.lookup_reference(refname)
            if ref.type == GIT_REF_OID:
                remote_refs.append(ref.target)
        elif not refname.startswith("refs/remotes/"):
            ref = repo.lookup_reference(refname)
            if ref.type == GIT_REF_OID:
                local_refs[ref.peel().id] = refname
    if not remote_refs:
        return False, "has no upstream remote branches"
    if not local_refs:
        return False, "has no local branches"
    if not repo.lookup_branch("%s/%s" % (origin.name, project.master_branch), GIT_BRANCH_REMOTE):
        return False, "has no upstream master branch"
    for remote_ref in remote_refs:
        for commit in repo.walk(remote_ref):
            if commit.id in local_refs:
                del local_refs[commit.id]
    if local_refs:
        return False, "has local commits: %s" % ", ".join(["'%s'" % name for _, name in iteritems(local_refs)])
    return True, ""
Example #4
from typing import Iterator

import pygit2


def _list_remote_branches(
        repo: pygit2.Repository,
        remote: str = 'origin',
        exclude_remote_of_head: bool = True) -> Iterator[pygit2.Branch]:
    remote_prefix = remote + '/'
    remote_head = remote + '/HEAD'
    remote_of_local_head = remote + '/' + repo.head.shorthand
    for branch_name in repo.listall_branches(pygit2.GIT_BRANCH_REMOTE):
        if not branch_name.startswith(remote_prefix):
            continue
        if branch_name == remote_head:
            continue
        if exclude_remote_of_head and branch_name == remote_of_local_head:
            continue
        yield repo.lookup_branch(branch_name, pygit2.GIT_BRANCH_REMOTE)
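
A short driver for the generator above; the repository path and remote name are assumptions:

repo = pygit2.Repository(pygit2.discover_repository("/path/to/checkout"))
for branch in _list_remote_branches(repo, remote="origin"):
    # branch_name looks like "origin/feature-x"; target is the tip commit id.
    print(branch.branch_name, branch.target)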
Example #5
from pygit2 import Repository, GIT_SORT_TOPOLOGICAL, GIT_SORT_REVERSE


class GitRepo:
    def __init__(self, repo_path):
        self.repo = Repository(repo_path)

    def checkout_by(self, commit_id):
        ref = 'refs/tags/t-%s' % commit_id

        if self.repo.references.get(ref) is None:
            self.repo.create_reference(ref, commit_id)

        self.repo.checkout(ref)
        self.repo.references[ref].delete()

    def master(self):
        branch = self.repo.lookup_branch('master')
        self.repo.checkout(branch)

    def get_all_commit_id(self):
        self.master()
        return self.repo.walk(self.repo.head.target,
                              GIT_SORT_TOPOLOGICAL | GIT_SORT_REVERSE)
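
A usage sketch for the wrapper above; the repository path is a placeholder:

repo = GitRepo("/path/to/repo")
for commit in repo.get_all_commit_id():
    # Oldest-first, topologically sorted walk over master's history.
    print(commit.id, commit.message.strip())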
Example #6
config = repo.config

remote_url = repo.remotes[args.remote].url
pass_path = None
for glob in credentials_mapping.keys():
    if fnmatch.fnmatch(remote_url, glob):
        pass_path = credentials_mapping[glob]["target"]

# FIXME: user identity (name + email) is not always set at repo level
# that said, we need a SPOT for git identities as used/implemented
# in git-identity emacs package
source_branch_name = (args.update_source_branch if args.update_source_branch != ""
                      else get_active_branch(repo))
remote = resolve_remote(repo, args.remote)
if not remote:
    log_error(f"cannot find remote '{args.remote}'")
    sys.exit(1)
if args.update_op == "fetch":
    remote.fetch(refspecs=["refs/heads/*:refs/heads/*"])
elif args.update_op == "merge":
    source_branch_head = repo.references[source_branch_name].resolve().target
    repo.merge(source_branch_head)
elif args.update_op == "rebase":
    source_branch = repo.lookup_branch(source_branch_name, GIT_BRANCH_REMOTE)
    dest_branch = repo.lookup_branch(get_active_branch(repo))
    dest_branch.set_target(source_branch.target)
    # Fast-forwarding with set_target() leaves the index and the working tree
    # in their old state. That's why we need to checkout() and reset()
    repo.checkout(f"refs/heads/{dest_branch.name}")
    repo.reset(dest_branch.target, GIT_RESET_HARD)
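
The "rebase" case above is effectively a fast-forward; the same pattern as a self-contained sketch, with hypothetical branch names and repository path:

from pygit2 import Repository, discover_repository, GIT_BRANCH_REMOTE, GIT_RESET_HARD

repo = Repository(discover_repository("/path/to/checkout"))    # placeholder path
source = repo.lookup_branch("origin/develop", GIT_BRANCH_REMOTE)
dest = repo.lookup_branch("develop")
dest.set_target(source.target)           # move the local branch pointer
repo.checkout(dest.name)                 # dest.name is the full "refs/heads/develop"
repo.reset(dest.target, GIT_RESET_HARD)  # bring index and working tree along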
Example #7
from pygit2 import (Repository, GIT_STATUS_CURRENT, GIT_STATUS_WT_MODIFIED,
                    GIT_STATUS_WT_NEW, GIT_STATUS_WT_DELETED,
                    GIT_STATUS_INDEX_NEW, GIT_STATUS_INDEX_MODIFIED,
                    GIT_STATUS_INDEX_DELETED)


class RepositoryInfo(object):
    """ wraps a pygit2.Repository object
    """

    def __init__(self, repo_path):
        self._repo = Repository(repo_path)
        self.count_unmodified = 0
        self.count_wt_modified = 0
        self.count_wt_new = 0
        self.count_wt_deleted = 0
        self.count_index_modified = 0
        self.count_index_new = 0
        self.count_index_deleted = 0
        self._count()

    @property
    def path(self):
        sep = '/'
        splitted = self._repo.path.split(sep)[0:-2]
        return sep.join(splitted)

    @property
    def has_workingtree_changes(self):
        return self.count_wt_deleted > 0 or self.count_wt_modified > 0 or self.count_wt_new > 0

    @property
    def has_index_changes(self):
        return self.count_index_deleted > 0 or self.count_index_modified > 0 or self.count_index_new > 0

    def _count(self):
        _status = self._repo.status()
        for file_path, flags in _status.items():
            if flags == GIT_STATUS_CURRENT:
                self.count_unmodified += 1
            elif flags == GIT_STATUS_WT_MODIFIED:
                self.count_wt_modified += 1
            elif flags == GIT_STATUS_WT_NEW:
                self.count_wt_new += 1
            elif flags == GIT_STATUS_INDEX_NEW:
                self.count_index_new += 1
            elif flags == GIT_STATUS_INDEX_MODIFIED:
                self.count_index_modified += 1
            elif flags == GIT_STATUS_INDEX_DELETED:
                self.count_index_deleted += 1
            elif flags == GIT_STATUS_WT_DELETED:
                self.count_wt_deleted += 1

    @property
    def current_branch_name(self):
        # ToDo: Why does self._repo.head.shorthand not work?
        head = self._repo.head
        head_name = head.name.split('/')[-1:]
        return head_name[0]

    @property
    def is_head_upstream_branch(self):
        """ determines if current head is the same commit as the remote commit
        """
        if self._repo.head_is_detached:
            return False
        current_branch_name = self.current_branch_name
        head = self._repo.head
        remote_branch = self._repo.lookup_branch(current_branch_name).upstream
        if remote_branch:
            return remote_branch.target.hex == head.target.hex
        return False

    @property
    def is_head_detached(self):
        return self._repo.head_is_detached
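
A usage sketch for the wrapper above; the path is a placeholder:

info = RepositoryInfo("/path/to/repo")
print("repo:", info.path, "on branch", info.current_branch_name)
print("working tree changes:", info.has_workingtree_changes)
print("staged changes:", info.has_index_changes)
print("HEAD matches its upstream:", info.is_head_upstream_branch)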
Example #8
class GitStorage(BaseStorage):

    _backend = None

    def __init__(self, context, repo_path=None):
        self.context = context
        rp = IStorageInfo(context).path

        try:
            self.repo = Repository(discover_repository(rp))
        except KeyError:
            # discover_repository may have failed.
            raise PathNotFoundError('repository does not exist at path')

        self.checkout()  # defaults to HEAD.

    @property
    def empty_root(self):
        return {'': '_empty_root'}

    def _get_empty_root(self):
        return self.empty_root

    def _get_obj(self, path, cls=None):
        if path == '' and self._commit is None:
            # special case
            return self._get_empty_root()

        if self._commit is None:
            raise PathNotFoundError('repository is empty')

        root = self._commit.tree
        try:
            breadcrumbs = []
            fragments = list(reversed(path.split('/')))
            node = root
            oid = None
            while fragments:
                fragment = fragments.pop()
                if not fragment == '':
                    # no empty string entries, also skips over '//' and
                    # leaves the final node (if directory) as the tree.
                    oid = node[fragment].oid
                    node = self.repo.get(oid)
                breadcrumbs.append(fragment)
                if node is None:
                    # Odd case: it appears that either submodules only have
                    # entry nodes here or pygit2 does not fully support them,
                    # so try to resolve the .gitmodules file manually.
                    if cls is None:
                        # Only return this if a specific type was not
                        # expected.
                        submods = parse_gitmodules(self.repo.get(
                            root[GIT_MODULE_FILE].oid).data)
                        submod = submods.get('/'.join(breadcrumbs))
                        if submod:
                            fragments.reverse()
                            return {
                                '': '_subrepo',
                                'location': submod,
                                'path': '/'.join(fragments),
                                'rev': oid.hex,
                            }

            if node and (cls is None or isinstance(node, cls)):
                return node
        except KeyError:
            # can't find what is needed in repo, raised by pygit2
            raise PathNotFoundError('path not found')

        # not what we were looking for.
        if cls == Tree:
            raise PathNotDirError('path not dir')
        elif cls == Blob:
            raise PathNotFileError('path not file')
        raise PathNotFoundError('path not found')

    @property
    def _commit(self):
        return self.__commit

    @property
    def rev(self):
        if self.__commit:
            return self.__commit.hex
        return None

    @property
    def shortrev(self):
        # TODO this is an interim solution.
        if self.rev:
            return self.rev[:12]

    def basename(self, name):
        return name.split('/')[-1]

    def checkout(self, rev=None):
        # None maps to the default revision.
        if rev is None:
            rev = 'HEAD'

        try:
            self.__commit = self.repo.revparse_single(rev)
        except KeyError:
            if rev == 'HEAD':
                # probably a new repo.
                self.__commit = None
                return
            # otherwise a RevisionNotFoundError is raised.
            raise RevisionNotFoundError('revision %s not found' % rev)

    def files(self):
        def _files(tree, current_path=None):
            results = []
            for node in tree:
                if current_path:
                    name = '/'.join([current_path, node.name])
                else:
                    name = node.name

                obj = self.repo.get(node.oid)
                if isinstance(obj, Blob):
                    results.append(name)
                elif isinstance(obj, Tree):
                    results.extend(_files(obj, name))
            return results

        if not self._commit:
            return []
        results = _files(self._commit.tree)
        return results

    def file(self, path):
        return self._get_obj(path, Blob).data

    def listdir(self, path):
        if path:
            tree = self._get_obj(path, Tree)
        else:
            if self._commit is None:
                return []
            tree = self._commit.tree

        return [entry.name for entry in tree]

    def format(self, **kw):
        # XXX backwards compatibility??
        return kw

    def log(self, start, count, branch=None, shortlog=False):
        """
        start and branch are literally the same thing.
        """

        def _log(iterator):
            for pos, commit in iterator:
                if pos == count:
                    return  # stop the generator once `count` entries have been yielded
                yield {
                    'author': commit.committer.name,
                    'email': commit.committer.email,
                    'date': self.strftime(committer_dt(commit.committer)),
                    'node': commit.hex,
                    'rev': commit.hex,
                    'desc': commit.message
                }

        if start is None:
            # assumption.
            start = 'HEAD'
            try:
                self.repo.revparse_single(start)
            except KeyError:
                return []

        try:
            rev = self.repo.revparse_single(start).hex
        except KeyError:
            raise RevisionNotFoundError('revision %s not found' % start)

        iterator = enumerate(self.repo.walk(rev, GIT_SORT_TIME))

        return list(_log(iterator))

    def pathinfo(self, path):
        obj = self._get_obj(path)
        if isinstance(obj, Blob):
            return self.format(**{
                'type': 'file',
                'basename': self.basename(path),
                'size': obj.size,
                'date': self.strftime(committer_dt(self._commit.committer)),
            })
        elif isinstance(obj, dict):
            # special cases are represented as dict.
            if obj[''] == '_subrepo':
                return self.format(**{
                    'type': 'subrepo',
                    'date': '',
                    'size': 0,
                    'basename': self.basename(path),
                    # extra field.
                    'obj': obj,
                })

            elif obj[''] == '_empty_root':
                return self.format(**{
                    'type': 'folder',
                    'date': '',
                    'size': 0,
                    'basename': self.basename(path),
                })

        # Assume this is a Tree.
        return self.format(**{
            'basename': self.basename(path),
            'size': 0,
            'type': 'folder',
            'date': '',
        })

    def branches(self):
        return tuple(
            (b, self.repo.lookup_branch(b).target.hex)
            for b in self.repo.listall_branches()
        )

    def tags(self):
        return tuple(
            (b[10:], self.repo.lookup_reference(b).target.hex)
            for b in self.repo.listall_references()
            if b.startswith('refs/tags')
        )
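
The branches() and tags() helpers at the end are plain pygit2 lookups; the same information can be collected standalone like this (repository location assumed):

from pygit2 import Repository, discover_repository

repo = Repository(discover_repository("."))  # placeholder location
branch_tips = {name: repo.lookup_branch(name).target
               for name in repo.listall_branches()}
tag_tips = {ref[len("refs/tags/"):]: repo.lookup_reference(ref).target
            for ref in repo.listall_references()
            if ref.startswith("refs/tags/")}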
Example #9
def show_status(srcdir,
                packages,
                projects,
                other_git,
                ws_state,
                show_up_to_date=True,
                cache=None):
    def create_upstream_status(repo, head_branch, master_branch,
                               master_remote_branch, tracking_branch):
        status = []
        if not repo.head_is_detached and not has_pending_merge(repo):
            if tracking_branch is not None:
                if master_remote_branch is not None:
                    if tracking_branch.remote_name != master_remote_branch.remote_name:
                        status.append("@!@{rf}remote '%s'" %
                                      tracking_branch.remote_name)
                if need_push(repo, head_branch):
                    status.append("@!@{yf}needs push")
                elif need_pull(repo, head_branch):
                    status.append("@!@{cf}needs pull")
                elif not is_up_to_date(repo, head_branch):
                    status.append("@!@{yf}needs pull -M")
            else:
                if head_branch:
                    status.append("@!on branch '%s'" % repo.head.shorthand)
                else:
                    status.append("empty branch")
                if master_remote_branch is None:
                    status.append("@!@{rf}no remote")
                elif master_branch is None:
                    status.append("@!@{rf}untracked remote")
                if is_up_to_date(repo, master_branch) or need_push(
                        repo, master_branch):
                    if need_pull(repo, head_branch, master_branch):
                        status.append("@!@{cf}needs pull -L")
                    else:
                        if not is_ancestor(repo, master_branch, head_branch):
                            status.append("@!@{yf}needs merge --from-master")
                        if not is_up_to_date(repo, head_branch, master_branch):
                            status.append("@!@{yf}needs merge --to-master")
            if master_branch is not None and master_remote_branch is not None and (
                    tracking_branch is None
                    or tracking_branch.name != master_remote_branch.name):
                if need_push(repo, master_branch):
                    status.append("@!@{yf}%s needs push" %
                                  master_branch.shorthand)
                elif need_pull(repo, master_branch):
                    status.append("@!@{cf}%s needs pull" %
                                  master_branch.shorthand)
                elif not is_up_to_date(repo, master_branch):
                    status.append("@!@{yf}%s needs merge" %
                                  master_branch.shorthand)
        return status

    def create_local_status(repo, upstream_status, is_dirty):
        status = []
        if repo.head_is_detached:
            status.append("@!@{rf}detached HEAD")
            return status
        if has_pending_merge(repo):
            if repo.index.conflicts:
                status.append("@!@{rf}merge conflicts")
            else:
                status.append("@!@{yf}merged, needs commit")
            return status
        if is_dirty:
            status.append("@!@{yf}needs commit")
        status += upstream_status
        if not status:
            if not show_up_to_date:
                return None
            status.append("@!@{gf}up-to-date")
        return status

    table = TableView("Package", "Path", "Status")

    found_packages = set()
    for project in projects:
        repo = Repository(os.path.join(srcdir, project.workspace_path, ".git"))
        dirty_files = [
            a for a, b in iteritems(repo.status())
            if b != GIT_STATUS_IGNORED and b != GIT_STATUS_CURRENT
        ]
        head_branch = get_head_branch(repo)
        tracking_branch = head_branch.upstream if head_branch else None
        master_remote = get_origin(repo, project)
        if master_remote is not None:
            master_remote_branch = repo.lookup_branch(
                "%s/%s" % (master_remote.name, project.master_branch),
                GIT_BRANCH_REMOTE)
            master_branch = None
            if master_remote_branch is not None:
                for name in repo.listall_branches(GIT_BRANCH_LOCAL):
                    b = repo.lookup_branch(name, GIT_BRANCH_LOCAL)
                    if b.upstream and b.upstream.branch_name == master_remote_branch.branch_name:
                        master_branch = b
                        break
        else:
            master_remote_branch = None
            master_branch = None
        ws_packages = find_catkin_packages(srcdir,
                                           project.workspace_path,
                                           cache=cache)
        found_packages |= set(ws_packages.keys())
        upstream_status = create_upstream_status(repo, head_branch,
                                                 master_branch,
                                                 master_remote_branch,
                                                 tracking_branch)
        for name, pkg_list in iteritems(ws_packages):
            if name not in packages:
                continue
            for pkg in pkg_list:
                is_dirty = False
                local_path = os.path.relpath(pkg.workspace_path,
                                             project.workspace_path)
                if dirty_files and local_path == ".":
                    is_dirty = True
                else:
                    for fpath in dirty_files:
                        if path_has_prefix(fpath, local_path):
                            is_dirty = True
                            break
                status = create_local_status(repo, upstream_status, is_dirty)
                if status is not None:
                    head, tail = os.path.split(pkg.workspace_path)
                    pkg_path = escape(head + "/" if tail ==
                                      name else pkg.workspace_path)
                    table.add_row(escape(name), pkg_path, status)

    for path in other_git:
        repo = Repository(os.path.join(srcdir, path, ".git"))
        dirty_files = [
            a for a, b in iteritems(repo.status())
            if b != GIT_STATUS_IGNORED and b != GIT_STATUS_CURRENT
        ]
        head_branch = get_head_branch(repo)
        tracking_branch = head_branch.upstream if head_branch else None
        ws_packages = find_catkin_packages(srcdir, path, cache=cache)
        found_packages |= set(ws_packages.keys())
        upstream_status = create_upstream_status(repo, head_branch, None, None,
                                                 tracking_branch)
        for name, pkg_list in iteritems(ws_packages):
            if name not in packages:
                continue
            for pkg in pkg_list:
                is_dirty = False
                local_path = os.path.relpath(pkg.workspace_path, path)
                if dirty_files and local_path == ".":
                    is_dirty = True
                else:
                    for fpath in dirty_files:
                        if path_has_prefix(fpath, local_path):
                            is_dirty = True
                            break
                status = create_local_status(repo, upstream_status, is_dirty)
                if status is not None:
                    head, tail = os.path.split(pkg.workspace_path)
                    pkg_path = escape(head + "/" if tail ==
                                      name else pkg.workspace_path)
                    table.add_row(escape(name), pkg_path, status)

    missing = set(packages) - found_packages
    for name in missing:
        path_list = []
        status = "no git"
        if name in ws_state.ws_packages:
            for pkg in ws_state.ws_packages[name]:
                if not os.path.isdir(os.path.join(srcdir, pkg.workspace_path)):
                    status = "@{rf}deleted"
                head, tail = os.path.split(pkg.workspace_path)
                path_list.append(
                    escape(head + "/" if tail == name else pkg.workspace_path))
        table.add_row(escape(name), path_list, status)
    if table.empty():
        if found_packages:
            msg("Everything is @!@{gf}up-to-date@|.\n")
        else:
            warning("no Git repositories\n")
    else:
        table.sort(0)
        table.write(sys.stdout)
Example #10
class GitRepo(object):

    ''' git repo class '''

    def __init__(self, path):
        try:
            self.__repo = Repository(path)
        except Exception as e:
            self.__repo = None
            print(e)

    def get_info(self):
        if not self.__repo:
            return None
        signature = self.__repo.default_signature
        result = {
            'path': self.__repo.path,
            'workdir': self.__repo.workdir,
            'bare': self.__repo.is_bare,
            'empty': self.__repo.is_empty,
            'name': signature.name,
            'email': signature.email,
            'time': signature.time,
            'offset': signature.offset,
        }
        return result

    def get_all_references(self):
        return self.__repo.listall_references()

    def get_reference(self, name):
        if not self.__repo:
            return None
        ref = None
        try:
            ref = self.__repo.lookup_reference(name)
        except Exception as e:
            print(e)
        return ref

    def get_all_branches(self, branch_type=None):
        if not self.__repo:
            return None
        if branch_type:
            return self.__repo.listall_branches(branch_type)
        r = self.__repo.listall_branches(GIT_BRANCH_LOCAL | GIT_BRANCH_REMOTE)
        return r

    def get_branch(self, name, branch_type=GIT_BRANCH_LOCAL):
        if not self.__repo:
            return None
        return self.__repo.lookup_branch(name, branch_type)

    def check_branch(self, name, branch_type=None):
        if not branch_type:
            if '/' in name:
                branch_type = GIT_BRANCH_REMOTE
            else:
                branch_type = GIT_BRANCH_LOCAL
        try:
            result = self.get_branch(name, branch_type)
            return result
        except Exception as e:
            print(e)
            return False

    def get_current_commit(self):
        if not self.__repo:
            return None
        commit = self.__repo.revparse_single('HEAD')
        return self.get_commit(commit)

    def get_commit_by_branch(self, branch):
        if not self.__repo:
            return None
        query = 'refs/'
        if hasattr(branch, 'remote_name'):
            query += 'remotes/'
        else:
            query += 'heads/'
        query += branch.branch_name
        try:
            ref = self.get_reference(query)
            commit = ref.target
            return self.get_commit(commit)
        except Exception as e:
            print(e)
            return None

    def get_commit_by_tag(self, tag):
        if self.__repo is None:
            return None
        if tag:
            commit = tag.get_object()
            return self.get_commit(commit)
        return None

    def get_commit(self, oid_or_commit):
        ''' return a commit w/ json '''
        if not self.__repo or not oid_or_commit:
            return None
        try:
            commit = oid_or_commit
            if not isinstance(oid_or_commit, Commit):
                commit = self.__repo.get(oid_or_commit)
            if commit and commit.type == GIT_OBJ_COMMIT:
                # t1 = self.__repo.revparse_single('HEAD^')
                # t2 = self.__repo.revparse_single('HEAD^^')
                # patches = self.__repo.diff(t1, t2)
                # for p in patches:
                #     print(p.new_file_path)
                result = {
                    'id': str(commit.id),
                    'author': commit.author.name,
                    'committer': commit.committer.name,
                    'message': commit.message,
                    'message_encoding': commit.message_encoding,
                    'tree': str(commit.tree_id),
                    'parent': [str(pid) for pid in commit.parent_ids],
                    'time': str(commit.commit_time),
                    'time_offset': str(commit.commit_time_offset),
                }
                return result
        except Exception as e:
            print(e)
        return None

    def get_commits(self, depth=10, oid_or_commit=None):
        result = []
        if depth == 0:
            return result
        if oid_or_commit:
            commit = self.get_commit(oid_or_commit)
        else:
            commit = self.get_current_commit()
        if not commit:
            return result
        # TODO: starting from a commit or its parent
        # TODO: author
        result.append(commit)
        depth -= 1
        if commit and commit['parent']:
            for parent in commit['parent']:
                result.extend(self.get_commits(depth, parent))
        return result

    def get_commits_by_branch(self, name, path=None):
        if not self.__repo:
            return None
        if self.check_branch(name):
            ref = self.get_reference('refs/heads/' + name)
            if ref:
                # get_commits() takes the starting id as oid_or_commit and
                # returns a list of commit dicts.
                commits = self.get_commits(oid_or_commit=ref.target)
                result = {}
                for val in commits:
                    if self.check_commit_by_path(val, path):
                        result[val['id']] = val
                return result
        return None

    def check_tag(self, name):
        try:
            ref = self.get_reference('refs/tags/' + name)
            return ref
        except Exception:
            return False

    def get_commits_by_tag(self, tag, path=None):
        if not self.__repo:
            return None
        if tag:
            # tag.target is the id of the tagged commit.
            commits = self.get_commits(oid_or_commit=tag.target)
            result = {}
            for val in commits:
                if self.check_commit_by_path(val, path):
                    result[val['id']] = val
            return result
        return None

    def check_commit_by_path(self, commit, path):
        if not commit:
            return False
        if path is None or len(path) == 0:
            return True
        result = self.get_tree(commit['tree'])

        if not isinstance(path, list):
            path = path.strip().split('/')

        for name in path:
            name = name.strip()
            if name in result:
                oid = result[name]['id']  # each entry from get_tree() is a dict with an 'id' key
                result = self.get_tree(oid)

                if not result:
                    result = self.get_blob(oid)
        return result is not None

    def get_tree(self, oid, ppath=None):
        if not self.__repo:
            return None
        try:
            tree = self.__repo.get(oid)
            if tree and tree.type == GIT_OBJ_TREE:
                result = {}
                for entry in tree:
                    item = {
                        'id': str(entry.id)
                    }
                    obj = self.__repo.get(entry.id)
                    if obj.type == GIT_OBJ_BLOB:
                        item['type'] = 'blob'
                    elif obj.type == GIT_OBJ_TREE:
                        item['type'] = 'tree'
                    item['ppath'] = ppath
                    result[entry.name] = item
                return result
        except Exception as e:
            print(e)
        return None

    def get_tree_by_commit(self, commit, path=None):
        if not commit:
            return None
        result = self.get_tree(commit['tree'])
        if not path:
            return result

        # if not isinstance(path, list):
        #     path = path.strip().split('/')

        try:
            for name in path:
                oid = result[name]['id']
                p = result[name]['ppath']
                p = name if not p else p + '/' + name
                result = self.get_tree(oid, p)
                if not result:
                    break
        except Exception as e:
            print(e)
            result = None
        return result

    def get_current_root(self):
        tree = self.get_current_commit()
        if tree:
            return self.get_tree(tree['tree'])
        return None

    def get_whole_tree(self, oid):
        ''' tree w/ json '''
        if not self.__repo:
            return None
        result = tree_walker(self.__repo, oid)
        return result

    def get_blob(self, oid):
        ''' blob w/ json '''
        if not self.__repo or not oid:
            return None
        try:
            blob = self.__repo.get(oid)
            if blob and blob.type == GIT_OBJ_BLOB:
                content = None if blob.is_binary else blob.data.decode('utf8', 'ignore')
                result = {
                    'id': str(blob.id),
                    'content': content,
                    'size': blob.size,
                }
                return result
        except Exception as e:
            print(e)
        return None

    def get_blob_by_commit(self, commit, path=None):

        try:
            tree = self.get_tree_by_commit(commit, path[:-1])
            oid = tree[path[-1]]['id']
            result = self.get_blob(oid)
            return result
        except Exception as e:
            print(e)
            return None

    def get_tag(self, oid):
        ''' blob w/ json '''
        if not self.__repo or not oid:
            return None
        try:
            tag = self.__repo.get(oid)
            if tag and tag.type == GIT_OBJ_TAG:
                result = {
                    'id': str(oid),
                    'name': tag.name,
                    'target': str(tag.target),  # Tag.target is already an Oid
                    'tagger': tag.tagger,
                    'message': tag.message,
                }
                return result
        except Exception as e:
            print(e)
        return None

    def get_patches(self, a=None, b=None):
        try:
            if not a:
                a = 'HEAD'
            if not b:
                b = a + '^'
            t1 = self.__repo.revparse_single(a)
            t2 = self.__repo.revparse_single(b)
            patches = self.__repo.diff(t1, t2)
            result = []
            for patch in patches:
                p = {
                    'old_file_path': patch.old_file_path,
                    'new_file_path': patch.new_file_path,
                    'old_oid': str(patch.old_oid),
                    'new_oid': str(patch.new_oid),
                    'status': patch.status,
                    'similarity': patch.similarity,
                    'additions': patch.additions,
                    'deletions': patch.deletions,
                    'binary': patch.is_binary,
                    'hunks': [],
                }
                for hunk in patch.hunks:
                    h = {
                        'old_start': hunk.old_start,
                        'old_lines': hunk.old_lines,
                        'new_start': hunk.new_start,
                        'new_lines': hunk.new_lines,
                        'lines': hunk.lines,
                    }
                    p['hunks'].append(h)
                result.append(p)
            return result
        except Exception as e:
            print(e)
        return None
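
A usage sketch for the GitRepo wrapper above; the path is a placeholder and assumes a repository with a few commits:

repo = GitRepo("/path/to/repo")
print(repo.get_info())
for c in repo.get_commits(depth=5):
    # Each entry is the JSON-style dict built by get_commit().
    print(c['id'], c['message'].strip())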
Example #11
def get_folders(projectCode, branch, requested_path):
    """
    :projectCode: project identifier
    :branch: the branch to use
    :folderPath: GET parameter with the folder path; read it via request.args.get('folderPath')

    **Response:**
    ```
    {
        "list": [
            {
                "name": "myfile.md",
                "full_path": "/folder/myfile.md",
                "parent": "/folder/"
            }
        ],
        "_meta": {
            "per-page": 12,
            "page": 12,
            "total-pages": 12
        }
    }
    ```
    """
    # Get path
    requested_path = "/" + requested_path
    #    print(requested_path, file=sys.stderr)

    # Set folder
    folder = config["REPO_FOLDER"] + projectCode
    if not os.path.isdir(folder):
        # TODO: throw exception
        return json.dumps({"error": 404, "description": "Project not found"})

    # Checkout branch
    # TODO: use the requested branch and raise an exception if it does not
    # exist; for now the 'master' branch is hard-coded.
    repo = Repository(folder)
    master_branch = repo.lookup_branch('master')
    ref = repo.lookup_reference(master_branch.name)
    repo.checkout(ref)

    # Get files in path
    file_list = []
    for root, dirs, files in os.walk(folder + requested_path):
        for filename in files:
            if root == folder + "/":
                file_list.append({
                    "name": filename,
                    "full_path": "/" + filename,
                    "parent": "/"
                })
            else:
                file_list.append({
                    "name": filename,
                    "full_path": root[len(folder):] + "/" + filename,
                    "parent": root[len(folder):] + "/"
                })

    response = {
        "list": file_list,
        "_meta": {
            "per-page": 99999,  # TODO: make pagination?
            "page": 1,
            "total-pages": 1
        }
    }
    return json.dumps(response)
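
A hedged sketch of how this handler might be exposed; the Flask route is an assumption, and only the folderPath query parameter comes from the docstring above:

from flask import Flask, request

app = Flask(__name__)  # assumption: the original project is a Flask app

@app.route("/projects/<projectCode>/<branch>/folders")
def folders_view(projectCode, branch):
    requested_path = request.args.get("folderPath", "")
    return get_folders(projectCode, branch, requested_path)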
Example #12
                        help="full commit id to start from",
                        type=str)
    parser.add_argument("-f",
                        "--finish",
                        help="full commit id to end with",
                        type=str)

    args = parser.parse_args()
    if args.path:
        # verify path existence
        if path.exists(args.path) and args.path.endswith('.git'):
            proj_path = args.path[:-4]
            repo = Repository(args.path)
            if args.branch and args.branch in list(repo.branches.local):
                branch = repo.lookup_branch(args.branch)
                ref = repo.lookup_reference(branch.name)
                repo.checkout(ref)
                print('Current Branch:')
                print(repo.head.shorthand)
                if args.list:
                    pprint.pprint(get_commit_id_list(args.finish, args.start))
                if args.start and args.finish:
                    pprint.pprint(
                        git_perform_analysis(args.start, args.finish,
                                             proj_path))
            else:
                # print available local branches
                print('Specify one of the available local branches:')
                print(*list(repo.branches.local), sep="\n")
Example #13
class GitMixin(object):

    tag_or_remote_regex = re.compile('^refs/(tags|remotes)/(.*)')

    def __init__(self):
        where = GitOperations.get_repository_location(self.user, self.name)
        self.ondisk = Repository(where)

    def refresh(self):
        creds = GitOperations.get_credentials(self.git_user, self.user)
        for remote in self.ondisk.remotes:
            remote.credentials = creds
            remote.fetch()
        # update current reference
        master_ref = self.ondisk.lookup_reference('refs/heads/master')
        remote_ref = self.ondisk.lookup_reference('refs/remotes/origin/master')
        master_ref.set_target(remote_ref.target)

    def filter_references(self, regex):
        return [ref for ref in self.ondisk.listall_references()
                if regex.match(ref)]

    def get_commit_time(self, name):
        ref = self.ondisk.revparse_single(name)
        if isinstance(ref, Tag):
            return ref.get_object().commit_time
        if isinstance(ref, Commit):
            return ref.commit_time
        raise GitException('invalid reference: commit time could not be found.') # pragma: no cover

    def get_latest_refs(self, count=None):
        info = self.filter_references(GitMixin.tag_or_remote_regex)
        refs = list(zip(info, map(self.get_commit_time, info)))
        refs.sort(key=itemgetter(1), reverse=True)
        def ref_info(info):
            (ref, commit_time) = info
            what, name = GitMixin.tag_or_remote_regex.findall(ref)[0]
            return (what, name, commit_time)
        refs = map(ref_info, refs)
        if not count:
            return refs
        return islice(refs, count)

    def filter_commits(self, flags=0):
        all_commits = self.ondisk.walk(self.ondisk.head.target, flags)
        emails = [ue.email for ue in self.user.emails.all()]
        return filter(lambda commit: commit.author.email in emails, all_commits)

    def get_commits(self, count=None):
        all_commits = self.filter_commits(GIT_SORT_TOPOLOGICAL)
        if not count:
            return all_commits
        return islice(all_commits, count)

    def get_commit_count(self):
        return len(list(self.filter_commits()))

    def get_shorthand_of_branch(self, branch):
        commit = self.ondisk.lookup_branch(branch)
        if commit:
            return commit.shorthand
        return '(none)'

    def get_sha1_of_branch(self, branch):
        commit = self.ondisk.lookup_branch(branch)
        if commit:
            return str(commit.get_object().id)[:6]
        return '(none)'

    def get_numstat(self, commit):
        diff = None
        try:
            previous_commit = self.ondisk.revparse_single(str(commit.id) + '^')
            diff = self.ondisk.diff(previous_commit, commit)
        except KeyError:
            # likely we hit the very first commit.
            diff = commit.tree.diff_to_tree(swap=True)
        additions, deletions = 0, 0
        for patch in diff:
            additions += patch.additions
            deletions += patch.deletions
        return (len(diff), additions, deletions)

    def get_first_updated(self):
        all_commits = self.ondisk.walk(self.ondisk.head.target,
                                       GIT_SORT_TIME | GIT_SORT_REVERSE)
        first_commit = next(all_commits)
        return first_commit.commit_time

    def get_last_updated(self):
        all_commits = self.ondisk.walk(self.ondisk.head.target,
                                       GIT_SORT_TIME)
        last_commit = next(all_commits)
        return last_commit.commit_time

    def get_file_count(self):
        diff = self.ondisk.head.get_object().tree.diff_to_tree()
        return len([patch.old_file_path for patch in diff])

    def get_line_count(self):
        diff = self.ondisk.head.get_object().tree.diff_to_tree()
        return sum([patch.deletions for patch in diff])

    def get_author_count(self):
        commits = self.filter_commits()
        return len(set([commit.author.email for commit in commits]))

    def commits_between(self, start, end):
        all_commits = self.filter_commits(GIT_SORT_TIME | GIT_SORT_REVERSE)
        starting = dropwhile(lambda obj: obj.commit_time < start, all_commits)
        return takewhile(lambda obj: obj.commit_time <= end, starting)

    @staticmethod
    def by_day(obj):
        # we want to group our commit times by the day. so convert
        # timestamp -> date -> timestamp
        new_date = date.fromtimestamp(obj.commit_time)
        new_date += timedelta(days=1)
        return timegm(new_date.timetuple())

    @staticmethod
    def group_by(series):
        result = groupby(series, GitMixin.by_day)
        return [{'date': commit_date,
                 'value': len(list(commits))}
                for commit_date, commits in result]

    def histogram(self, start, end):
        series = self.commits_between(start, end)
        return GitMixin.group_by(series)