Example 1
	def __div__(self, file):
		"""Find the named object in this tree's contents
		:return: ``git.Blob`` or ``git.Tree`` or ``git.Submodule``
		
		:raise KeyError: if given file or tree does not exist in tree"""
		msg = "Blob or Tree named %r not found"
		if '/' in file:
			tree = self
			item = self
			tokens = file.split('/')
			for i,token in enumerate(tokens):
				item = tree[token]
				if item.type == 'tree':
					tree = item
				else:
					# safety assertion - blobs are at the end of the path
					if i != len(tokens)-1:
						raise KeyError(msg % file)
					return item
				# END handle item type
			# END for each token of split path
			if item == self:
				raise KeyError(msg % file)
			return item
		else:
			for info in self._cache:
				if info[2] == file:		# [2] == name
					return self._map_id_to_type[info[1] >> 12](self.repo, info[0], info[1], join_path(self.path, info[2]))
			# END for each obj
			raise KeyError( msg % file )
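
A minimal usage sketch of the lookup above; the repository path and file name are assumptions:

    from git import Repo

    repo = Repo('/path/to/repo')          # hypothetical clone
    tree = repo.head.commit.tree          # root tree of the current HEAD commit
    readme = tree / 'README.md'           # operator form of the method shown above
    print(readme.type, readme.hexsha)     # 'blob' and its SHA-1, if the entry exists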
Example 2
 def iter_items(cls, repo, common_path=None, remote=None):
     """Iterate remote references, and if given, constrain them to the given remote"""
     common_path = common_path or cls._common_path_default
     if remote is not None:
         common_path = join_path(common_path, str(remote))
     # END handle remote constraint
     return super(RemoteReference, cls).iter_items(repo, common_path)
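
A sketch of how the classmethod above might be driven; the repository path and the remote name 'origin' are assumptions:

    from git import Repo, RemoteReference

    repo = Repo('/path/to/repo')          # hypothetical clone
    # constrain iteration to a single remote
    for ref in RemoteReference.iter_items(repo, remote=repo.remotes.origin):
        print(ref.name)                   # e.g. 'origin/master'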
Example 3
    def _iter_items(cls, repo, common_path=None):
        if common_path is None:
            common_path = cls._common_path_default
        rela_paths = set()

        # walk loose refs
        # Currently we do not follow links
        for root, dirs, files in os.walk(join_path_native(repo.git_dir, common_path)):
            if 'refs/' not in root:  # skip non-refs subfolders
                refs_id = [d for d in dirs if d == 'refs']
                if refs_id:
                    dirs[0:] = ['refs']
            # END prune non-refs folders

            for f in files:
                abs_path = to_native_path_linux(join_path(root, f))
                rela_paths.add(abs_path.replace(to_native_path_linux(repo.git_dir) + '/', ""))
            # END for each file in root directory
        # END for each directory to walk

        # read packed refs
        for sha, rela_path in cls._iter_packed_refs(repo):
            if rela_path.startswith(common_path):
                rela_paths.add(rela_path)
            # END relative path matches common path
        # END packed refs reading

        # return paths in sorted order
        for path in sorted(rela_paths):
            try:
                yield cls.from_path(repo, path)
            except ValueError:
                continue
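
The private walk above is what backs the public iteration API; a sketch using that public entry point (the repository path is an assumption):

    from git import Repo, Reference

    repo = Repo('/path/to/repo')          # hypothetical clone
    # yields loose refs from .git/refs plus packed-refs entries, sorted by path
    for ref in Reference.iter_items(repo):
        print(ref.path)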
Example 4
def localize_submodules(repo, source_repo, recursive=True):
	"""Localize the given submodules to use urls as derived from the given source_repo,
	which is assumed to contain a full checkout of all the modules and submodules.
	:param recursive: If True, submodules will be handled recursively. Modules
		will be checked-out as required to get access to the child modules"""
	assert not source_repo.bare, "Source-Repository must not be bare"
	sms = repo.submodules
	for sm in sms:
		# need an update, as we commit after each iteration
		sm.set_parent_commit(sm.repo.head.commit)
		smp = join_path(source_repo.working_tree_dir, sm.path)
		sm.config_writer().set_value('url', smp)
		
		if recursive:
			if not sm.module_exists():
				sm.update(recursive=False)
			#END get submodule
			
			localize_submodules(sm.module(), Repo(smp), recursive=True)
		#END handle recursion
		
		# commit after each sm - performance will be fine
		# update sm to the latest head sha
		sm.binsha = sm.module().head.commit.binsha
		sm.repo.index.commit("Localized submodule paths of submodule %s" % sm.name)
	# END for each submodule
	
	# our submodules changed, commit the change to 'fix' it
	repo.index.add(sms)
	repo.index.commit("Committing submodule changes")
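
A hedged call sketch for the helper above; both paths are assumptions, and source_repo must be a non-bare clone that already has every submodule checked out:

    from git import Repo

    repo = Repo('/path/to/consumer/repo')
    source_repo = Repo('/path/to/full/checkout')
    localize_submodules(repo, source_repo, recursive=True)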
Example 5
    def _iter_items(cls, repo, common_path=None):
        if common_path is None:
            common_path = cls._common_path_default
        rela_paths = set()

        # walk loose refs
        # Currently we do not follow links
        for root, dirs, files in os.walk(join_path_native(repo.git_dir, common_path)):
            if 'refs' not in root.split(os.sep):  # skip non-refs subfolders
                refs_id = [d for d in dirs if d == 'refs']
                if refs_id:
                    dirs[0:] = ['refs']
            # END prune non-refs folders

            for f in files:
                if f == 'packed-refs':
                    continue
                abs_path = to_native_path_linux(join_path(root, f))
                rela_paths.add(abs_path.replace(to_native_path_linux(repo.git_dir) + '/', ""))
            # END for each file in root directory
        # END for each directory to walk

        # read packed refs
        for sha, rela_path in cls._iter_packed_refs(repo):  # @UnusedVariable
            if rela_path.startswith(common_path):
                rela_paths.add(rela_path)
            # END relative path matches common path
        # END packed refs reading

        # return paths in sorted order
        for path in sorted(rela_paths):
            try:
                yield cls.from_path(repo, path)
            except ValueError:
                continue
Example 6
 def _iter_convert_to_object(self, iterable):
     """Iterable yields tuples of (binsha, mode, name), which will be converted
     to the respective object representation"""
     for binsha, mode, name in iterable:
         path = join_path(self.path, name)
         try:
             yield self._map_id_to_type[mode >> 12](self.repo, binsha, mode, path)
         except KeyError:
             raise TypeError("Unknown mode %o found in tree data for path '%s'" % (mode, path))
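
Why the code shifts the mode by 12 bits: the upper bits of a git tree-entry mode encode the object kind, which is the key used in _map_id_to_type. A small arithmetic illustration:

    # octal tree-entry modes as stored by git, and the type id they shift down to
    for mode, kind in [(0o040000, 'tree'),
                       (0o100644, 'blob (regular file)'),
                       (0o120000, 'blob (symlink)'),
                       (0o160000, 'submodule (gitlink)')]:
        print(oct(mode), '->', oct(mode >> 12), kind)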
Example 7
    def join(self, file, parent_commit=None):
        """Find the named object in this tree's contents
        :return: ``git.Blob`` or ``git.Tree`` or ``git.Submodule``

        :raise KeyError: if given file or tree does not exist in tree"""
        msg = "Blob or Tree named %r not found"
        if '/' in file:
            tree = self
            item = self
            tokens = file.split('/')
            for i, token in enumerate(tokens):
                item = tree.join(token, parent_commit=parent_commit)
                if item.type == 'tree':
                    tree = item
                else:
                    # safety assertion - blobs are at the end of the path
                    if i != len(tokens) - 1:
                        raise KeyError(msg % file)
                    return item
                # END handle item type
            # END for each token of split path
            if item == self:
                raise KeyError(msg % file)
            return item
        else:
            for info in self._cache:
                if info[2] == file:  # [2] == name
                    type_id = info[1] >> 12
                    typ = self._map_id_to_type[type_id]
                    if typ == Submodule:
                        binsha = info[0]
                        mode = info[1]
                        path = join_path(self.path, info[2])
                        return typ(self.repo,
                                   binsha=binsha,
                                   mode=mode,
                                   path=path,
                                   parent_commit=parent_commit)
                    else:
                        return typ(self.repo, info[0], info[1],
                                   join_path(self.path, info[2]))
            # END for each obj
            raise KeyError(msg % file)
Example 8
 def _iter_convert_to_object(self, iterable: Iterable[Tuple[bytes, int, str]]
                             ) -> Iterator[Union[Blob, 'Tree', Submodule]]:
     """Iterable yields tuples of (binsha, mode, name), which will be converted
     to the respective object representation"""
     for binsha, mode, name in iterable:
         path = join_path(self.path, name)
         try:
             yield self._map_id_to_type[mode >> 12](self.repo, binsha, mode, path)
         except KeyError as e:
             raise TypeError("Unknown mode %o found in tree data for path '%s'" % (mode, path)) from e
Example 9
def print_repo_header(path, repo, only_if_counts=False):
    active_branch = repo.active_branch
    tracking_branch = repo.active_branch.tracking_branch() or 'HEAD'
    local_count = len(
        list(repo.iter_commits('{}..{}'.format(tracking_branch,
                                               active_branch))))
    remote_count = len(
        list(repo.iter_commits('{}..{}'.format(active_branch,
                                               tracking_branch))))

    if not only_if_counts or local_count or remote_count:
        header = (
            Fore.YELLOW,
            Style.BRIGHT,
            path,
            Style.RESET_ALL,
        )
        header += (
            ' ',
            Fore.LIGHTBLACK_EX,
            '[ ',
        )
        header += (
            active_branch,
            format_count(local_count, Fore.LIGHTRED_EX, Fore.LIGHTBLACK_EX),
        )

        if tracking_branch != 'HEAD':
            header += (' -> ', )
            header += (
                tracking_branch,
                format_count(remote_count, Fore.LIGHTGREEN_EX,
                             Fore.LIGHTBLACK_EX),
            )

        for remote in repo.remotes:
            if remote.name == 'upstream':
                upstream_branch = join_path(remote.name, active_branch.name)
                upstream_count = len(
                    list(
                        repo.iter_commits('{}..{}'.format(
                            tracking_branch, upstream_branch))))
                header += (' -> ', )
                header += (
                    upstream_branch,
                    format_count(upstream_count, Fore.LIGHTGREEN_EX,
                                 Fore.LIGHTBLACK_EX),
                )
                break

        header += (' ]', )

        print(*header, Style.RESET_ALL, sep='')
        return True
    return False
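
One possible invocation of the function above; the path is an assumption, and colorama (the source of the Fore/Style constants used here) as well as the format_count helper must be available in the surrounding module:

    import os
    import colorama
    from git import Repo

    colorama.init()
    path = '~/projects/foo'               # hypothetical repository location
    print_repo_header(path, Repo(os.path.expanduser(path)), only_if_counts=True)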
Example 10
    def __getitem__(self, item):
        if isinstance(item, int):
            info = self._cache[item]
            return self._map_id_to_type[info[1] >> 12](self.repo, info[0], info[1], join_path(self.path, info[2]))

        if isinstance(item, str):
            # compatibility
            return self.join(item)
        # END index is basestring

        raise TypeError("Invalid index type: %r" % item)
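
Both index types the method above accepts, in one short sketch (the path and file name are assumptions):

    from git import Repo

    tree = Repo('/path/to/repo').head.commit.tree   # hypothetical clone
    first_entry = tree[0]                           # positional lookup into the entry cache
    readme = tree['README.md']                      # string lookup, delegated to join()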
Example 11
 def __getitem__(self, item):
     if isinstance(item, int):
         info = self._cache[item]
         return self._map_id_to_type[info[1] >> 12](self.repo, info[0], info[1], join_path(self.path, info[2]))
     
     if isinstance(item, basestring):
         # compatibility
         return self.__div__(item)
     # END index is basestring 
     
     raise TypeError( "Invalid index type: %r" % item )
Example 12
	def tracking_branch(self):
		"""
		:return: The remote_reference we are tracking, or None if we are 
			not a tracking branch"""
		reader = self.config_reader()
		if reader.has_option(self.k_config_remote) and reader.has_option(self.k_config_remote_ref):
			ref = Head(self.repo, Head.to_full_path(reader.get_value(self.k_config_remote_ref)))
			remote_refpath = self.RemoteReferenceCls.to_full_path(join_path(reader.get_value(self.k_config_remote), ref.name))
			return self.RemoteReferenceCls(self.repo, remote_refpath)
		# END handle have tracking branch
		
		# we are not a tracking branch
		return None
Example 13
def get_inventories(project, rev):
    tree = get_tree(project, rev)

    inventories = []
    try:
        inventories_tree = tree["inventories"]
        for tree in inventories_tree.trees:
            if join_path(tree.path, "hosts") in tree:
                inventories.append(tree.name)
    except KeyError:  # revision has no "inventories" tree
        pass

    return inventories
Example 14
 def iter_items(cls,
                repo: 'Repo',
                common_path: Union[PathLike, None] = None,
                remote: Union['Remote', None] = None,
                *args: Any,
                **kwargs: Any) -> Iterator['RemoteReference']:
     """Iterate remote references, and if given, constrain them to the given remote"""
     common_path = common_path or cls._common_path_default
     if remote is not None:
         common_path = join_path(common_path, str(remote))
     # END handle remote constraint
     # super is Reference
     return super(RemoteReference, cls).iter_items(repo, common_path)
Example 15
    def tracking_branch(self):
        """
        :return: The remote_reference we are tracking, or None if we are
            not a tracking branch"""
        from .remote import RemoteReference
        reader = self.config_reader()
        if reader.has_option(self.k_config_remote) and reader.has_option(self.k_config_remote_ref):
            ref = Head(self.repo, Head.to_full_path(reader.get_value(self.k_config_remote_ref)))
            remote_refpath = RemoteReference.to_full_path(join_path(reader.get_value(self.k_config_remote), ref.name))
            return RemoteReference(self.repo, remote_refpath)
        # END handle have tracking branch

        # we are not a tracking branch
        return None
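
Typical use of the accessor above; the repository path and the 'master' head are assumptions:

    from git import Repo

    repo = Repo('/path/to/repo')
    tracking = repo.heads.master.tracking_branch()
    if tracking is not None:
        print(tracking.name)              # e.g. 'origin/master'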
Example 16
 def __contains__(self, item):
     if isinstance(item, IndexObject):
         for info in self._cache:
             if item.binsha == info[0]:
                 return True
             # END compare sha
         # END for each entry
     # END handle item is index object
     # compatibility
     
     # treat item as repo-relative path
     path = self.path
     for info in self._cache:
         if item == join_path(path, info[2]):
             return True
     # END for each item
     return False
Example 17
    def __contains__(self, item):
        if isinstance(item, IndexObject):
            for info in self._cache:
                if item.binsha == info[0]:
                    return True
                # END compare sha
            # END for each entry
        # END handle item is index object
        # compatibility

        # treat item as repo-relative path
        path = self.path
        for info in self._cache:
            if item == join_path(path, info[2]):
                return True
        # END for each item
        return False
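
The two membership tests the method above supports; the repository path and file name are assumptions:

    from git import Repo

    tree = Repo('/path/to/repo').head.commit.tree   # hypothetical clone
    blob = tree / 'README.md'                       # hypothetical entry
    print(blob in tree)                             # IndexObject branch: compares binsha
    print('README.md' in tree)                      # path branch: compares repo-relative paths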
Example 18
    def __deleteOldBranch(self,
                          aBranchPath,
                          aRemote='local',
                          aShouldDeleteLocal=True):
        """
    Delete a given branch from a remote, the local repository, or both.

    :param aBranchPath: The path of the branch to remove.
    :param aRemote: The name of the remote repository on which to work (default:
                    'local', representing the local repository).
    :param aShouldDeleteLocal: If True and aRemote != 'local', then local branches
           will be removed as well as the remote ones; otherwise, only the remote
           branches will be removed.
    """
        log = self.__getConfig().getLog()
        repo = self.__getConfig().getRepo()

        # Cowardly refuse to remove the special 'master' branch
        if aBranchPath.split('/')[-1] == 'master':
            log.warn("Cowardly refusing to delete master branch")
            return

        log.debug("Saw deletion request for remote named: " + str(aRemote))
        if aRemote == 'local':
            log.debug("Going to delete LOCAL branch: " + aBranchPath)
            if aBranchPath.split('/')[-1] in repo.heads:
                branchRef = Reference(repo, aBranchPath)

                # NOTE: Head.delete has a parameter in its kwargs that allows you to
                #       specify whether to "force" this to be deleted, even if it hasn't
                #       been merged into the main development trunk.
                Head.delete(repo, branchRef)
        else:
            log.debug("Going to delete REMOTE branch: " + aBranchPath)
            branchRef = RemoteReference(repo, aBranchPath)
            log.debug("Ready to delete: " + str(branchRef))
            RemoteReference.delete(repo, branchRef)

            # Now, delete the corresponding local branch, if it exists
            if aShouldDeleteLocal:
                self.__deleteOldBranch(
                    join_path('refs/heads', branchRef.remote_head))
Example 19
  def __deleteOldBranch(self, aBranchPath, aRemote='local', aShouldDeleteLocal=True):
    """
    Delete a given branch from a remote, the local repository, or both.

    :param aBranchPath: The path of the branch to remove.
    :param aRemote: The name of the remote repository on which to work (default:
                    'local', representing the local repository).
    :param aShouldDeleteLocal: If True and aRemote != 'local', then local branches
           will be removed as well as the remote ones; otherwise, only the remote
           branches will be removed.
    """
    log = self.__getConfig().getLog()
    repo = self.__getConfig().getRepo()

    # Cowardly refuse to remove the special 'master' branch
    if aBranchPath.split('/')[-1] == 'master':
      log.warn("Cowardly refusing to delete master branch")
      return

    log.debug("Saw deletion request for remote named: " + str(aRemote))
    if aRemote == 'local':
      log.debug("Going to delete LOCAL branch: " + aBranchPath)
      if aBranchPath.split('/')[-1] in repo.heads:
        branchRef = Reference(repo, aBranchPath)

        # NOTE: Head.delete has a parameter in its kwargs that allows you to
        #       specify whether to "force" this to be deleted, even if it hasn't
        #       been merged into the main development trunk.
        Head.delete(repo, branchRef)
    else:
      log.debug("Going to delete REMOTE branch: " + aBranchPath)
      branchRef = RemoteReference(repo, aBranchPath)
      log.debug("Ready to delete: " + str(branchRef))
      RemoteReference.delete(repo, branchRef)

      # Now, delete the corresponding local branch, if it exists
      if aShouldDeleteLocal:
        self.__deleteOldBranch(join_path('refs/heads', branchRef.remote_head))
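
The GitPython calls the helper above ultimately relies on, shown directly; the repository path and branch names are assumptions, and note that deleting a RemoteReference only removes the local remote-tracking ref:

    from git import Repo, Head, RemoteReference

    repo = Repo('/path/to/repo')
    Head.delete(repo, 'feature/old-work', force=True)        # local branch
    ref = RemoteReference(repo, 'refs/remotes/origin/feature/old-work')
    RemoteReference.delete(repo, ref)                        # remote-tracking ref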
Example 20
    def _from_line(cls, repo, line, fetch_line):
        """Parse information from the given line as returned by git-fetch -v
        and return a new FetchInfo object representing this information.

        We can handle a line as follows
        "%c %-*s %-*s -> %s%s"

        Where c is either ' ', !, +, -, *, or =
        ! means error
        + means success forcing update
        - means a tag was updated
        * means birth of new branch or tag
        = means the head was up to date ( and not moved )
        ' ' means a fast-forward

        fetch line is the corresponding line from FETCH_HEAD, like
        acb0fa8b94ef421ad60c8507b634759a472cd56c    not-for-merge   branch '0.1.7RC' of /tmp/tmpya0vairemote_repo"""
        match = cls.re_fetch_result.match(line)
        if match is None:
            raise ValueError("Failed to parse line: %r" % line)

        # parse lines
        control_character, operation, local_remote_ref, remote_local_ref, note = match.groups()
        try:
            new_hex_sha, fetch_operation, fetch_note = fetch_line.split("\t")
            ref_type_name, fetch_note = fetch_note.split(' ', 1)
        except ValueError:  # unpack error
            raise ValueError("Failed to parse FETCH_HEAD line: %r" % fetch_line)

        # parse flags from control_character
        flags = 0
        try:
            flags |= cls._flag_map[control_character]
        except KeyError:
            raise ValueError("Control character %r unknown as parsed from line %r" % (control_character, line))
        # END control char exception handling

        # parse operation string for more info - makes no sense for symbolic refs, but we parse it anyway
        old_commit = None
        is_tag_operation = False
        if 'rejected' in operation:
            flags |= cls.REJECTED
        if 'new tag' in operation:
            flags |= cls.NEW_TAG
            is_tag_operation = True
        if 'tag update' in operation:
            flags |= cls.TAG_UPDATE
            is_tag_operation = True
        if 'new branch' in operation:
            flags |= cls.NEW_HEAD
        if '...' in operation or '..' in operation:
            split_token = '...'
            if control_character == ' ':
                split_token = split_token[:-1]
            old_commit = repo.rev_parse(operation.split(split_token)[0])
        # END handle refspec

        # handle FETCH_HEAD and figure out ref type
        # If we do not specify a target branch like master:refs/remotes/origin/master,
        # the fetch result is stored in FETCH_HEAD which destroys the rule we usually
        # have. In that case we use a symbolic reference which is detached
        ref_type = None
        if remote_local_ref == "FETCH_HEAD":
            ref_type = SymbolicReference
        elif ref_type_name == "tag" or is_tag_operation:
            # the ref_type_name can be branch, whereas we are still seeing a tag operation. It happens during
            # testing, which is based on actual git operations
            ref_type = TagReference
        elif ref_type_name in ("remote-tracking", "branch"):
            # note: remote-tracking is just the first part of the 'remote-tracking branch' token.
            # We don't parse it correctly, but it's enough to know what to do, and it's new in git 1.7something
            ref_type = RemoteReference
        elif '/' in ref_type_name:
            # If the fetch spec looks something like this '+refs/pull/*:refs/heads/pull/*', and is thus pretty
            # much anything the user wants, we will have trouble determining what's going on
            # For now, we assume the local ref is a Head
            ref_type = Head
        else:
            raise TypeError("Cannot handle reference type: %r" % ref_type_name)
        # END handle ref type

        # create ref instance
        if ref_type is SymbolicReference:
            remote_local_ref = ref_type(repo, "FETCH_HEAD")
        else:
            # determine prefix. Tags are usually pulled into refs/tags, they may have subdirectories.
            # It is not clear sometimes where exactly the item is, unless we have an absolute path as indicated
            # by the 'refs/' prefix. Otherwise even a tag could be in refs/remotes, which is when it will have the
            # 'tags/' subdirectory in its path.
            # We don't want to test for actual existence, but try to figure everything out analytically.
            ref_path = None
            remote_local_ref = remote_local_ref.strip()
            if remote_local_ref.startswith(Reference._common_path_default + "/"):
                # always use actual type if we get absolute paths
                # Will always be the case if something is fetched outside of refs/remotes (if it's not a tag)
                ref_path = remote_local_ref
                if ref_type is not TagReference and not \
                   remote_local_ref.startswith(RemoteReference._common_path_default + "/"):
                    ref_type = Reference
                # END downgrade remote reference
            elif ref_type is TagReference and 'tags/' in remote_local_ref:
                # even though it's a tag, it is located in refs/remotes
                ref_path = join_path(RemoteReference._common_path_default, remote_local_ref)
            else:
                ref_path = join_path(ref_type._common_path_default, remote_local_ref)
            # END obtain refpath

            # even though the path could be within the git conventions, we make
            # sure we respect whatever the user wanted, and disabled path checking
            remote_local_ref = ref_type(repo, ref_path, check_path=False)
        # END create ref instance

        note = (note and note.strip()) or ''

        return cls(remote_local_ref, flags, note, old_commit, local_remote_ref)
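
FetchInfo instances built by the classmethod above are what a fetch hands back; a consumption sketch (the repository path and remote name are assumptions):

    from git import Repo

    repo = Repo('/path/to/repo')
    for info in repo.remotes.origin.fetch():
        print(info.ref, info.flags, info.note)       # parsed ref, bit flags, server note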
Example 21
	def _from_line(cls, repo, line, fetch_line):
		"""Parse information from the given line as returned by git-fetch -v
		and return a new FetchInfo object representing this information.
		
		We can handle a line as follows
		"%c %-*s %-*s -> %s%s"
		
		Where c is either ' ', !, +, -, *, or =
		! means error
		+ means success forcing update
		- means a tag was updated
		* means birth of new branch or tag
		= means the head was up to date ( and not moved )
		' ' means a fast-forward
		
		fetch line is the corresponding line from FETCH_HEAD, like
		acb0fa8b94ef421ad60c8507b634759a472cd56c	not-for-merge	branch '0.1.7RC' of /tmp/tmpya0vairemote_repo"""
		match = cls.re_fetch_result.match(line)
		if match is None:
			raise ValueError("Failed to parse line: %r" % line)
			
		# parse lines
		control_character, operation, local_remote_ref, remote_local_ref, note = match.groups()
		try:
			new_hex_sha, fetch_operation, fetch_note = fetch_line.split("\t")
			ref_type_name, fetch_note = fetch_note.split(' ', 1)
		except ValueError:	# unpack error
			raise ValueError("Failed to parse FETCH_HEAD line: %r" % fetch_line)
		
		# handle FETCH_HEAD and figure out ref type
		# If we do not specify a target branch like master:refs/remotes/origin/master, 
		# the fetch result is stored in FETCH_HEAD which destroys the rule we usually
		# have. In that case we use a symbolic reference which is detached 
		ref_type = None
		if remote_local_ref == "FETCH_HEAD":
			ref_type = SymbolicReference
		elif ref_type_name	== "branch":
			ref_type = RemoteReference
		elif ref_type_name == "tag":
			ref_type = TagReference
		else:
			raise TypeError("Cannot handle reference type: %r" % ref_type_name)
			
		# create ref instance
		if ref_type is SymbolicReference:
			remote_local_ref = ref_type(repo, "FETCH_HEAD") 
		else:
			remote_local_ref = Reference.from_path(repo, join_path(ref_type._common_path_default, remote_local_ref.strip()))
		# END create ref instance 
		
		note = ( note and note.strip() ) or ''
		
		# parse flags from control_character
		flags = 0
		try:
			flags |= cls._flag_map[control_character]
		except KeyError:
			raise ValueError("Control character %r unknown as parsed from line %r" % (control_character, line))
		# END control char exception handling
		
		# parse operation string for more info - makes no sense for symbolic refs
		old_commit = None
		if isinstance(remote_local_ref, Reference):
			if 'rejected' in operation:
				flags |= cls.REJECTED
			if 'new tag' in operation:
				flags |= cls.NEW_TAG
			if 'new branch' in operation:
				flags |= cls.NEW_HEAD
			if '...' in operation or '..' in operation:
				split_token = '...'
				if control_character == ' ':
					split_token = split_token[:-1]
				old_commit = repo.rev_parse(operation.split(split_token)[0])
			# END handle refspec
		# END reference flag handling
		
		return cls(remote_local_ref, flags, note, old_commit)
Example 22
    def _from_line(cls, repo, line, fetch_line):
        """Parse information from the given line as returned by git-fetch -v
		and return a new FetchInfo object representing this information.
		
		We can handle a line as follows
		"%c %-*s %-*s -> %s%s"
		
		Where c is either ' ', !, +, -, *, or =
		! means error
		+ means success forcing update
		- means a tag was updated
		* means birth of new branch or tag
		= means the head was up to date ( and not moved )
		' ' means a fast-forward
		
		fetch line is the corresponding line from FETCH_HEAD, like
		acb0fa8b94ef421ad60c8507b634759a472cd56c	not-for-merge	branch '0.1.7RC' of /tmp/tmpya0vairemote_repo"""
        match = cls.re_fetch_result.match(line)
        if match is None:
            raise ValueError("Failed to parse line: %r" % line)

        # parse lines
        control_character, operation, local_remote_ref, remote_local_ref, note = match.groups(
        )
        try:
            new_hex_sha, fetch_operation, fetch_note = fetch_line.split("\t")
            ref_type_name, fetch_note = fetch_note.split(' ', 1)
        except ValueError:  # unpack error
            raise ValueError("Failed to parse FETCH_HEAD line: %r" %
                             fetch_line)

        # handle FETCH_HEAD and figure out ref type
        # If we do not specify a target branch like master:refs/remotes/origin/master,
        # the fetch result is stored in FETCH_HEAD which destroys the rule we usually
        # have. In that case we use a symbolic reference which is detached
        ref_type = None
        if remote_local_ref == "FETCH_HEAD":
            ref_type = SymbolicReference
        elif ref_type_name == "branch":
            ref_type = RemoteReference
        elif ref_type_name == "tag":
            ref_type = TagReference
        else:
            raise TypeError("Cannot handle reference type: %r" % ref_type_name)

        # create ref instance
        if ref_type is SymbolicReference:
            remote_local_ref = ref_type(repo, "FETCH_HEAD")
        else:
            remote_local_ref = Reference.from_path(
                repo,
                join_path(ref_type._common_path_default,
                          remote_local_ref.strip()))
        # END create ref instance

        note = (note and note.strip()) or ''

        # parse flags from control_character
        flags = 0
        try:
            flags |= cls._flag_map[control_character]
        except KeyError:
            raise ValueError(
                "Control character %r unknown as parsed from line %r" %
                (control_character, line))
        # END control char exception handling

        # parse operation string for more info - makes no sense for symbolic refs
        old_commit = None
        if isinstance(remote_local_ref, Reference):
            if 'rejected' in operation:
                flags |= cls.REJECTED
            if 'new tag' in operation:
                flags |= cls.NEW_TAG
            if 'new branch' in operation:
                flags |= cls.NEW_HEAD
            if '...' in operation or '..' in operation:
                split_token = '...'
                if control_character == ' ':
                    split_token = split_token[:-1]
                old_commit = repo.rev_parse(operation.split(split_token)[0])
            # END handle refspec
        # END reference flag handling

        return cls(remote_local_ref, flags, note, old_commit)
Example 23
    def _from_line(cls, repo, line, fetch_line):
        """Parse information from the given line as returned by git-fetch -v
        and return a new FetchInfo object representing this information.

        We can handle a line as follows
        "%c %-*s %-*s -> %s%s"

        Where c is either ' ', !, +, -, *, or =
        ! means error
        + means success forcing update
        - means a tag was updated
        * means birth of new branch or tag
        = means the head was up to date ( and not moved )
        ' ' means a fast-forward

        fetch line is the corresponding line from FETCH_HEAD, like
        acb0fa8b94ef421ad60c8507b634759a472cd56c    not-for-merge   branch '0.1.7RC' of /tmp/tmpya0vairemote_repo"""
        match = cls.re_fetch_result.match(line)
        if match is None:
            raise ValueError("Failed to parse line: %r" % line)

        # parse lines
        control_character, operation, local_remote_ref, remote_local_ref, note = match.groups(
        )
        try:
            new_hex_sha, fetch_operation, fetch_note = fetch_line.split(
                "\t")  # @UnusedVariable
            ref_type_name, fetch_note = fetch_note.split(' ', 1)
        except ValueError:  # unpack error
            raise ValueError("Failed to parse FETCH_HEAD line: %r" %
                             fetch_line)

        # parse flags from control_character
        flags = 0
        try:
            flags |= cls._flag_map[control_character]
        except KeyError:
            raise ValueError(
                "Control character %r unknown as parsed from line %r" %
                (control_character, line))
        # END control char exception handling

        # parse operation string for more info - makes no sense for symbolic refs, but we parse it anyway
        old_commit = None
        is_tag_operation = False
        if 'rejected' in operation:
            flags |= cls.REJECTED
        if 'new tag' in operation:
            flags |= cls.NEW_TAG
            is_tag_operation = True
        if 'tag update' in operation:
            flags |= cls.TAG_UPDATE
            is_tag_operation = True
        if 'new branch' in operation:
            flags |= cls.NEW_HEAD
        if '...' in operation or '..' in operation:
            split_token = '...'
            if control_character == ' ':
                split_token = split_token[:-1]
            old_commit = repo.rev_parse(operation.split(split_token)[0])
        # END handle refspec

        # handle FETCH_HEAD and figure out ref type
        # If we do not specify a target branch like master:refs/remotes/origin/master,
        # the fetch result is stored in FETCH_HEAD which destroys the rule we usually
        # have. In that case we use a symbolic reference which is detached
        ref_type = None
        if remote_local_ref == "FETCH_HEAD":
            ref_type = SymbolicReference
        elif ref_type_name == "tag" or is_tag_operation:
            # the ref_type_name can be branch, whereas we are still seeing a tag operation. It happens during
            # testing, which is based on actual git operations
            ref_type = TagReference
        elif ref_type_name in ("remote-tracking", "branch"):
            # note: remote-tracking is just the first part of the 'remote-tracking branch' token.
            # We don't parse it correctly, but it's enough to know what to do, and it's new in git 1.7something
            ref_type = RemoteReference
        elif '/' in ref_type_name:
            # If the fetch spec looks something like this '+refs/pull/*:refs/heads/pull/*', and is thus pretty
            # much anything the user wants, we will have trouble determining what's going on
            # For now, we assume the local ref is a Head
            ref_type = Head
        else:
            raise TypeError("Cannot handle reference type: %r" % ref_type_name)
        # END handle ref type

        # create ref instance
        if ref_type is SymbolicReference:
            remote_local_ref = ref_type(repo, "FETCH_HEAD")
        else:
            # determine prefix. Tags are usually pulled into refs/tags, they may have subdirectories.
            # It is not clear sometimes where exactly the item is, unless we have an absolute path as indicated
            # by the 'refs/' prefix. Otherwise even a tag could be in refs/remotes, which is when it will have the
            # 'tags/' subdirectory in its path.
            # We don't want to test for actual existence, but try to figure everything out analytically.
            ref_path = None
            remote_local_ref = remote_local_ref.strip()
            if remote_local_ref.startswith(Reference._common_path_default +
                                           "/"):
                # always use actual type if we get absolute paths
                # Will always be the case if something is fetched outside of refs/remotes (if it's not a tag)
                ref_path = remote_local_ref
                if ref_type is not TagReference and not \
                   remote_local_ref.startswith(RemoteReference._common_path_default + "/"):
                    ref_type = Reference
                # END downgrade remote reference
            elif ref_type is TagReference and 'tags/' in remote_local_ref:
                # even though it's a tag, it is located in refs/remotes
                ref_path = join_path(RemoteReference._common_path_default,
                                     remote_local_ref)
            else:
                ref_path = join_path(ref_type._common_path_default,
                                     remote_local_ref)
            # END obtain refpath

            # even though the path could be within the git conventions, we make
            # sure we respect whatever the user wanted, and disabled path checking
            remote_local_ref = ref_type(repo, ref_path, check_path=False)
        # END create ref instance

        note = (note and note.strip()) or ''

        return cls(remote_local_ref, flags, note, old_commit, local_remote_ref)
Example 24
    def __div__(self, file):
        """Find the named object in this tree's contents
        :return: ``git.Blob`` or ``git.Tree`` or ``git.Submodule``

        :raise KeyError: if given file or tree does not exist in tree"""
        msg = "Blob or Tree named %r not found"
        if '/' in file:
            tree = self
            item = self
            tokens = file.split('/')
            for i, token in enumerate(tokens):
                item = tree[token]
                if item.type == 'tree':
                    tree = item
                else:
                    # safety assertion - blobs are at the end of the path
                    if i != len(tokens) - 1:
                        raise KeyError(msg % file)
                    return item
                # END handle item type
            # END for each token of split path
            if item == self:
                raise KeyError(msg % file)
            return item
        else:
            for info in self._cache:
                if info[2] == file:     # [2] == name
                    return self._map_id_to_type[info[1] >> 12](self.repo, info[0], info[1], join_path(self.path, info[2]))
            # END for each obj
            raise KeyError(msg % file)