def pack_info_create(pack_data, pack_index):
    """Summarize every object in a pack as a compressed JSON blob.

    Each object id maps to a type-specific tuple (commits: parents and
    tree; trees: filtered child list; tags: target sha) or ``None`` for
    blobs. Gitlink (submodule) tree entries are skipped.
    """
    pack = Pack.from_objects(pack_data, pack_index)
    summary = {}
    for obj in pack.iterobjects():
        type_num = obj.type_num
        if type_num == Commit.type_num:
            entry = (type_num, obj.parents, obj.tree)
        elif type_num == Tree.type_num:
            # (sha, name, is_leaf) for every non-gitlink child;
            # directories are the non-leaf entries.
            children = [
                (child_sha, child_name, not stat.S_ISDIR(child_mode))
                for child_name, child_mode, child_sha in obj.iteritems()
                if not S_ISGITLINK(child_mode)
            ]
            entry = (type_num, children)
        elif type_num == Blob.type_num:
            entry = None
        elif type_num == Tag.type_num:
            entry = (type_num, obj.object[1])
        else:
            continue  # unknown object types are omitted, as before
        summary[obj.id] = entry
    return zlib.compress(json_dumps(summary))
def listdir(self, commit, path):
    """Return a list of directories and files in given directory."""
    submodules = []
    dirs = []
    files = []
    tree = self.get_blob_or_tree(commit, path)
    for raw_entry in tree.items():
        name = decode_from_git(raw_entry.path)
        entry = raw_entry.in_path(encode_for_git(path))
        sort_key = name.lower()
        if S_ISGITLINK(entry.mode):
            submodules.append((sort_key, name, name, entry.sha))
        elif stat.S_ISDIR(entry.mode):
            dirs.append((sort_key, name, name))
        else:
            files.append((sort_key, name, name))
    # Case-insensitive ordering via the lowercased first tuple element.
    dirs.sort()
    files.sort()
    if path:
        # Non-root directories get a '..' entry pointing at the parent.
        dirs.insert(0, (None, '..', parent_directory(path)))
    return {'submodules': submodules, 'dirs': dirs, 'files': files}
def cleanup_mode(mode: int) -> int:
    """Cleanup a mode value.

    This will return a mode that can be stored in a tree object.

    Args:
      mode: Mode to clean up.
    Returns:
      mode
    """
    # Symlinks, directories and gitlinks keep only their canonical
    # type bits; everything else is normalized to a regular file.
    for predicate, canonical in (
        (stat.S_ISLNK, stat.S_IFLNK),
        (stat.S_ISDIR, stat.S_IFDIR),
        (S_ISGITLINK, S_IFGITLINK),
    ):
        if predicate(mode):
            return canonical
    # Regular file: 0o644, promoted to 0o755 when any owner-exec bit
    # was present (0o644 | 0o111 == 0o755).
    return stat.S_IFREG | (0o755 if mode & 0o100 else 0o644)
def next(self):
    """Return the next ``(sha, name)`` pair to send, or ``None``.

    Pops queued entries until one is found that has not been handled
    yet, queues that object's children for later traversal, and marks
    it done.
    """
    # Discard queued entries whose sha was already processed.
    while self.objects_to_send:
        sha, name, leaf = self.objects_to_send.pop()
        if sha not in self.sha_done:
            break
    else:
        # Queue exhausted without finding fresh work.
        return None
    if not leaf:
        obj = self.object_store[sha]
        if isinstance(obj, Commit):
            self.add_todo([(obj.tree, "", False)])
        elif isinstance(obj, Tree):
            # Children of non-gitlink entries; directories are non-leaf.
            self.add_todo([
                (item_sha, item_name, not stat.S_ISDIR(item_mode))
                for item_name, item_mode, item_sha in obj.iteritems()
                if not S_ISGITLINK(item_mode)
            ])
        elif isinstance(obj, Tag):
            self.add_todo([(obj.object[1], None, False)])
    if sha in self._tagged:
        self.add_todo([(self._tagged[sha], None, True)])
    self.sha_done.add(sha)
    self.progress("counting objects: %d\r" % len(self.sha_done))
    return (sha, name)
def build_index_from_tree(
    root_path,
    index_path,
    object_store,
    tree_id,
    honor_filemode=True,
    validate_path_element=validate_path_element_default,
):
    """Generate and materialize index from a tree

    Args:
      tree_id: Tree to materialize
      root_path: Target dir for materialized index files
      index_path: Target path for generated index
      object_store: Non-empty object store holding tree contents
      honor_filemode: An optional flag to honor core.filemode setting in
        config file, default is core.filemode=True, change executable bit
      validate_path_element: Function to validate path elements to check
        out; default just refuses .git and .. directories.

    Note: existing index is wiped and contents are not merged
        in a working dir. Suitable only for fresh clones.
    """
    index = Index(index_path)
    if not isinstance(root_path, bytes):
        root_path = os.fsencode(root_path)
    for entry in object_store.iter_tree_contents(tree_id):
        # Skip tree paths that fail validation (e.g. '.git', '..').
        if not validate_path(entry.path, validate_path_element):
            continue
        full_path = _tree_to_fs_path(root_path, entry.path)
        # exist_ok=True avoids the check-then-create race of the old
        # `if not os.path.exists(...): os.makedirs(...)` guard and only
        # computes the dirname once.
        os.makedirs(os.path.dirname(full_path), exist_ok=True)

        # TODO(jelmer): Merge new index into working tree
        if S_ISGITLINK(entry.mode):
            # Submodule: materialize only an empty directory.
            if not os.path.isdir(full_path):
                os.mkdir(full_path)
            st = os.lstat(full_path)
            # TODO(jelmer): record and return submodule paths
        else:
            obj = object_store[entry.sha]
            st = build_file_from_blob(
                obj, entry.mode, full_path, honor_filemode=honor_filemode)

        # Add file to index
        if not honor_filemode or S_ISGITLINK(entry.mode):
            # we can not use tuple slicing to build a new tuple,
            # because on windows that will convert the times to
            # longs, which causes errors further along
            st_tuple = (
                entry.mode,
                st.st_ino,
                st.st_dev,
                st.st_nlink,
                st.st_uid,
                st.st_gid,
                st.st_size,
                st.st_atime,
                st.st_mtime,
                st.st_ctime,
            )
            st = st.__class__(st_tuple)
        index[entry.path] = index_entry_from_stat(st, entry.sha, 0)

    index.write()
def parse_tree(self, tree):
    """Queue every non-gitlink entry of ``tree`` for traversal."""
    todo = []
    for mode, name, sha in tree.entries():
        if S_ISGITLINK(mode):
            continue  # skip gitlink (submodule) entries
        # Directories are the non-leaf entries.
        todo.append((sha, name, not stat.S_ISDIR(mode)))
    self.add_todo(todo)
def import_git_tree(texts, mapping, path, name, hexshas,
                    base_bzr_tree, parent_id, revision_id,
                    parent_bzr_trees, lookup_object, modes, store_updater,
                    lookup_file_id, allow_submodules=False):
    """Import a git tree object into a bzr repository.

    :param texts: VersionedFiles object to add to
    :param path: Path in the tree (str)
    :param name: Name of the tree (str)
    :param tree: A git tree object
    :param hexshas: Tuple of (base sha, new sha) for this tree
    :param modes: Tuple of (base mode, new mode) for this tree
    :param base_bzr_tree: Base inventory against which to return inventory
        delta
    :return: Tuple of (inventory delta for this subtree, dict of child
        paths to unusual modes)
    """
    (base_hexsha, hexsha) = hexshas
    (base_mode, mode) = modes
    if not isinstance(path, bytes):
        raise TypeError(path)
    if not isinstance(name, bytes):
        raise TypeError(name)
    if base_hexsha == hexsha and base_mode == mode:
        # If nothing has changed since the base revision, we're done
        return [], {}
    invdelta = []
    file_id = lookup_file_id(osutils.safe_unicode(path))
    # We just have to hope this is indeed utf-8:
    ie = InventoryDirectory(file_id, name.decode("utf-8"), parent_id)
    tree = lookup_object(hexsha)
    if base_hexsha is None:
        base_tree = None
        old_path = None  # Newly appeared here
    else:
        base_tree = lookup_object(base_hexsha)
        old_path = path.decode("utf-8")
    # Renames aren't supported yet
    new_path = path.decode("utf-8")
    if base_tree is None or type(base_tree) is not Tree:
        # Directory is new (or replaced a non-tree): record it in the
        # delta and register an empty text for its file id.
        ie.revision = revision_id
        invdelta.append((old_path, new_path, ie.file_id, ie))
        texts.insert_record_stream([
            ChunkedContentFactory((ie.file_id, ie.revision), (), None, [])])
    # Remember for next time
    existing_children = set()
    child_modes = {}
    for name, child_mode, child_hexsha in tree.iteritems():
        existing_children.add(name)
        child_path = posixpath.join(path, name)
        # Look up the child's sha/mode in the base tree so unchanged
        # children can short-circuit in the recursive calls.
        if type(base_tree) is Tree:
            try:
                child_base_mode, child_base_hexsha = base_tree[name]
            except KeyError:
                child_base_hexsha = None
                child_base_mode = 0
        else:
            child_base_hexsha = None
            child_base_mode = 0
        if stat.S_ISDIR(child_mode):
            subinvdelta, grandchildmodes = import_git_tree(
                texts, mapping, child_path, name,
                (child_base_hexsha, child_hexsha), base_bzr_tree, file_id,
                revision_id, parent_bzr_trees, lookup_object,
                (child_base_mode, child_mode), store_updater,
                lookup_file_id, allow_submodules=allow_submodules)
        elif S_ISGITLINK(child_mode):  # submodule
            if not allow_submodules:
                raise SubmodulesRequireSubtrees()
            subinvdelta, grandchildmodes = import_git_submodule(
                texts, mapping, child_path, name,
                (child_base_hexsha, child_hexsha), base_bzr_tree, file_id,
                revision_id, parent_bzr_trees, lookup_object,
                (child_base_mode, child_mode), store_updater,
                lookup_file_id)
        else:
            if not mapping.is_special_file(name):
                subinvdelta = import_git_blob(
                    texts, mapping, child_path, name,
                    (child_base_hexsha, child_hexsha), base_bzr_tree,
                    file_id, revision_id, parent_bzr_trees, lookup_object,
                    (child_base_mode, child_mode), store_updater,
                    lookup_file_id)
            else:
                subinvdelta = []
            grandchildmodes = {}
        child_modes.update(grandchildmodes)
        invdelta.extend(subinvdelta)
        # Only modes outside the standard set (dir, regular file,
        # symlink, executable file, gitlink) are reported upward.
        if child_mode not in (stat.S_IFDIR, DEFAULT_FILE_MODE,
                              stat.S_IFLNK, DEFAULT_FILE_MODE | 0o111,
                              S_IFGITLINK):
            child_modes[child_path] = child_mode
    # Remove any children that have disappeared
    if base_tree is not None and type(base_tree) is Tree:
        invdelta.extend(remove_disappeared_children(
            base_bzr_tree, old_path, base_tree, existing_children,
            lookup_object))
    store_updater.add_object(tree, (file_id, revision_id), path)
    return invdelta, child_modes
def parse_tree(self, tree):
    """Queue all non-submodule entries of ``tree`` for traversal."""
    pending = []
    for entry_name, entry_mode, entry_sha in tree.iteritems():
        if S_ISGITLINK(entry_mode):
            # Gitlink (submodule) entries are not queued.
            continue
        pending.append(
            (entry_sha, entry_name, not stat.S_ISDIR(entry_mode)))
    self.add_todo(pending)