def write_index(entries):
    """Write list of IndexEntry objects to the git index file (version 2).

    Each entry is packed as the 62-byte fixed header followed by the
    NUL-terminated path, padded with NUL bytes to a multiple of 8 bytes.
    The file ends with a SHA-1 checksum over all preceding bytes.

    Parameters:
        entries (List[IndexEntry]): entries to serialize.

    Raises:
        NoRepositoryError: If no git repository is found.
    """
    # get_repo_root_path already raises NoRepositoryError; re-wrapping it in
    # a new exception (as the original did) only obscured the traceback, so
    # let it propagate unchanged.
    repo_root_path = get_repo_root_path()
    packed_entries = []
    for entry in entries:
        entry_head = struct.pack('!LLLLLLLLLL20sH', entry.ctime_s,
                                 entry.ctime_n, entry.mtime_s, entry.mtime_n,
                                 entry.dev, entry.ino & 0xFFFFFFFF,
                                 entry.mode, entry.uid, entry.gid, entry.size,
                                 entry.sha1, entry.flags)
        path = entry.path.encode()
        # from ctime to object name it is 62 bytes
        # this // is integer division: pad to a multiple of 8 bytes
        length = ((62 + len(path) + 8) // 8) * 8
        packed_entry = entry_head + path + b'\x00' * (length - 62 - len(path))
        packed_entries.append(packed_entry)
    header = struct.pack('!4sLL', b'DIRC', 2, len(entries))
    all_data = header + b''.join(packed_entries)
    digest = hashlib.sha1(all_data).digest()
    write_file(os.path.join(repo_root_path, '.git', 'index'),
               all_data + digest)
def hash_object(data: bytes, obj_type: str, write: bool = True) -> str:
    """
    Compute hash of object data of given type and write to object store if
    "write" is True. Return SHA-1 object hash as hex string.

    Parameters:
        data (bytes): data to be hashed
        obj_type (string): Type of object to be hashed
        write (bool): Whether to write the result to file or not

    Returns:
        str: SHA-1 object hash as hex string

    Raises:
        NoRepositoryError: If no repository is found
    """
    # get_repo_root_path raises NoRepositoryError itself; the original
    # re-wrapped it in a new exception, which lost the traceback for no
    # benefit, so we simply let it propagate.
    repo_root_path = get_repo_root_path()
    # git object layout: "<type> <size>\x00<content>"
    header = '{} {}'.format(obj_type, len(data)).encode()
    full_data = header + b'\x00' + data
    sha1 = hashlib.sha1(full_data).hexdigest()
    if write:
        # objects are sharded into directories by the first two hex chars
        path = os.path.join(repo_root_path, '.git', 'objects', sha1[:2],
                            sha1[2:])
        if not os.path.exists(path):
            os.makedirs(os.path.dirname(path), exist_ok=True)
            write_file(path, zlib.compress(full_data))
    return sha1
def get_active_branch():
    """
    Return the branch name the HEAD is pointing to.
    """
    try:
        repo_root_path = get_repo_root_path()
    except NoRepositoryError as nre:
        print(nre)
        exit(1)
    # HEAD contains a line like "ref: refs/heads/<branch>"
    head_content = read_file(
        '{}/.git/HEAD'.format(repo_root_path)).decode("utf-8")
    ref = head_content.split('ref:')[1].strip()
    # the branch name is the last path component of the ref
    return ref.split('/')[-1]
def check_if_remote_ahead(remote_sha1):
    """
    Check if the remote repository is ahead.

    Takes the remote sha1 hash and checks if the corresponding object file
    exists in the .git/objects directory. If it does not exist, the remote
    repository has a commit the local one does not, i.e. it is ahead.

    #TODO: We will have to give this function a new name
    """
    # BUGFIX: compare to None with identity, not equality
    if remote_sha1 is None:
        # no remote hash means there is nothing the remote could be ahead with
        return False
    root_path = get_repo_root_path()
    path_to_check = os.path.join(root_path, '.git', 'objects',
                                 remote_sha1[:2], remote_sha1[2:])
    return not os.path.isfile(path_to_check)
def find_object(sha1_prefix):
    """Find object with given SHA-1 prefix and return path to object in
    object store, or raise ValueError if there are no objects or multiple
    objects with this prefix.
    """
    if len(sha1_prefix) < 2:
        raise ValueError('hash prefix must be 2 or more characters')
    repo_root_path = get_repo_root_path()
    obj_dir = os.path.join(repo_root_path, '.git', 'objects', sha1_prefix[:2])
    rest = sha1_prefix[2:]
    try:
        objects = [
            name for name in os.listdir(obj_dir) if name.startswith(rest)
        ]
    except FileNotFoundError:
        # BUGFIX: a missing fan-out directory used to leak FileNotFoundError;
        # it simply means no object with this prefix exists
        objects = []
    if not objects:
        raise ValueError('object {!r} not found'.format(sha1_prefix))
    if len(objects) >= 2:
        raise ValueError('multiple objects ({}) with prefix {!r}'.format(
            len(objects), sha1_prefix))
    return os.path.join(obj_dir, objects[0])
def createBranch(command, newBranchName):
    """
    Create a new branch head named <newBranchName> which points to the
    current HEAD.

    The command argument distinguishes whether the caller was checkout or
    branch, since the two behave a bit differently.
    """
    try:
        repo_root_path = get_repo_root_path()
    except NoRepositoryError as nre:
        print(nre)
        exit(1)
    pathToRef = '{}/.git/refs/heads/{}'.format(repo_root_path, newBranchName)
    # refuse to clobber an existing branch
    if os.path.isfile(pathToRef):
        print('fatal: A branch named {} already exists.'.format(newBranchName))
        exit(1)
    # resolve the ref that HEAD currently points at
    head_content = read_file(
        '{}/.git/HEAD'.format(repo_root_path)).decode("utf-8")
    currentHeadRef = head_content.split('ref:')[1].strip()
    currentRefPath = '{}/.git/{}'.format(repo_root_path, currentHeadRef)
    # check if the file under the refs/heads/ directory exists
    if os.path.isfile(currentRefPath):
        # it does: copy its commit hash into the new branch's ref file
        commitHash = read_file(currentRefPath).decode("utf-8")
        write_file(
            '{}/.git/refs/heads/{}'.format(repo_root_path, newBranchName),
            commitHash,
            binary='')
    elif command == 'branch':
        # git branch on an unborn HEAD is an error
        print('fatal: Not a valid object name: \'{}\'.'.format(
            currentHeadRef.split('/')[-1]))
        exit(1)
    if command == 'checkout':
        # git switch/checkout additionally updates the HEAD file
        update_HEAD(repo_root_path, newBranchName)
        print('Switched to a new branch \'{}\''.format(newBranchName))
def listBranches(remotes):
    """Print all local branches, or remote branches if remotes is set.

    BUGFIX: the original docstring ("Add all file paths to git index.") was
    copied from add() and described the wrong function.

    Args:
        remotes (bool): If True, read remote branches from the packed-refs
            file instead of the local refs/heads directory.
    """
    try:
        repo_root_path = get_repo_root_path()
    except NoRepositoryError as nre:
        print(nre)
        exit(1)
    # if remotes flag is set, read remote branches from packed-refs file
    if remotes:
        packed_refs_content = read_file(
            '{}/.git/packed-refs'.format(repo_root_path)).decode('utf-8')
        # raw string so the backslashes are real regex escapes
        branches = re.findall(r'refs\/remotes\/origin\/(\w*)',
                              packed_refs_content)
    else:
        branches = list_files_in_dir(
            '{}/.git/refs/heads'.format(repo_root_path))
    branches.sort()
    result = ''
    for branch in branches:
        result += '* {}\n'.format(branch)
    print(result)
def read_index() -> List[IndexEntry]:
    """
    Read git index file and return list of IndexEntry objects.

    Returns:
        List of IndexEntry objects. If no index file is found, an empty
        list is returned.

    Raises:
        NoRepositoryError: If no git repository is found.
    """
    try:
        repo_root_path = get_repo_root_path()
        data = read_file(os.path.join(repo_root_path, '.git', 'index'))
    except FileNotFoundError:
        # no index file means nothing has been staged yet
        return []
    except NoRepositoryError as nre:
        raise NoRepositoryError(nre)
    # the final 20 bytes are a SHA-1 checksum over the rest of the file
    digest = hashlib.sha1(data[:-20]).digest()
    assert digest == data[-20:], 'invalid index checksum'
    signature, version, num_entries = struct.unpack('!4sLL', data[:12])
    assert signature == b'DIRC', \
        'invalid index signature {}'.format(signature)
    assert version == 2, 'unknown index version {}'.format(version)
    entry_data = data[12:-20]
    entries = []
    offset = 0
    while offset + 62 < len(entry_data):
        # each entry: 62-byte fixed header, then the NUL-terminated path
        fields_end = offset + 62
        fields = struct.unpack('!LLLLLLLLLL20sH',
                               entry_data[offset:fields_end])
        path_end = entry_data.index(b'\x00', fields_end)
        path = entry_data[fields_end:path_end]
        entries.append(IndexEntry(*(fields + (path.decode(), ))))
        # entries are NUL-padded to a multiple of 8 bytes
        offset += ((62 + len(path) + 8) // 8) * 8
    assert len(entries) == num_entries
    return entries
def merge(source_branch):
    """
    Merge two branches.

    If the source_branch parameter is set, the source branch is merged into
    the current branch. If the parameter is not set, a merge from FETCH_HEAD
    is performed.
    """
    had_conflict = False
    repo_root_path = get_repo_root_path()
    # if no source branch for merge is given, we go through the FETCH_HEAD file
    if source_branch is None:
        fetch_head_path = os.path.join(repo_root_path, '.git/FETCH_HEAD')
        if not os.path.exists(fetch_head_path):
            print('Nothing to merge. Have you called fetch before?')
            return
        fetch_head_content = read_file(fetch_head_path).decode('utf-8')
        findings = re.findall(r'^([ABCDEFabcdef0-9]+)\s+branch (\w|\')+',
                              fetch_head_content)
        if len(findings) == 0:
            remote_sha1 = None
        else:
            remote_sha1 = findings[0][0]
    else:
        # otherwise we are looking for the refs file first.
        source_branch_head_path = os.path.join(repo_root_path,
                                               '.git/refs/heads/',
                                               source_branch)
        if not os.path.exists(source_branch_head_path):
            # if the refs file does not exist, we have a look if the
            # packed-refs file exists.
            # git doesn't use the FETCH_HEAD file when a branch name is given!
            packed_refs_path = os.path.join(repo_root_path, '.git/packed-refs')
            if not os.path.exists(packed_refs_path):
                # no packed-refs either: nothing to merge from
                remote_sha1 = None
            else:
                # otherwise we read the packed-refs file and get the hash
                packed_refs_content = read_file(packed_refs_path).decode(
                    'utf-8')
                findings = re.findall(
                    r'([ABCDEFabcdef0-9]*) refs\/remotes\/origin\/{}'.format(
                        source_branch), packed_refs_content)
                if len(findings) == 0:
                    remote_sha1 = None
                else:
                    remote_sha1 = findings[0]
        else:
            # if the file exists, we read the sha1 from it
            remote_sha1 = read_file(source_branch_head_path).decode('utf-8')
    if remote_sha1 is None:
        print('merge: {} - not something we can merge'.format(source_branch))
        exit(1)
    activeBranch = get_current_branch_name()
    local_sha1 = get_active_branch_hash()
    remote_sha1 = remote_sha1.strip()
    local_sha1 = local_sha1.strip()
    if remote_sha1 == local_sha1:
        return
    remote_commits = get_all_local_commits(remote_sha1)
    local_commits = get_all_local_commits(local_sha1)
    difference = set(local_commits) - set(remote_commits)
    if len(difference) == 0:
        # fast forward strategy
        path = os.path.join(repo_root_path,
                            '.git/refs/heads/{}'.format(activeBranch))
        write_file(path, "{}\n".format(remote_sha1).encode())
        obj_type, commit_data = read_object(remote_sha1.strip())
        tree_sha1 = commit_data.decode().splitlines()[0][5:45]
        unpack_object(tree_sha1, repo_root_path, repo_root_path)
        return
    # non fast forward strategy
    intersection = set(local_commits).intersection(remote_commits)
    for commit_hash in remote_commits:
        if commit_hash in intersection:
            ancestor = commit_hash
            break
    # We need to find an ancestor and run 3-way merge on these files!
    # then we need to create a new tree and a commit object with 2 parents
    obj_type, ancestor_commit = read_object(ancestor)
    obj_type, a_commit = read_object(local_commits[0])
    obj_type, b_commit = read_object(remote_commits[0])
    # list for the 3 branches
    ancestor_entries = []
    a_entries = []
    b_entries = []
    # here we get a list in the following format
    # [(filename, sha1), (filename, sha2), ...]
    get_subtree_entries(ancestor_commit.splitlines()[0][5:45].decode(), '',
                        ancestor_entries)
    get_subtree_entries(a_commit.splitlines()[0][5:45].decode(), '', a_entries)
    get_subtree_entries(b_commit.splitlines()[0][5:45].decode(), '', b_entries)
    # BUGFIX: renamed from 'merge' — the original dict shadowed this function
    merge_map = {}
    # we go through each list and use the filename as key and create a list
    # of hashes: [ancestor_hash, local_hash, remote_hash]
    for e in ancestor_entries:
        if e[0] not in merge_map:
            merge_map[e[0]] = [e[1]]
    for e in a_entries:
        if e[0] not in merge_map:
            merge_map[e[0]] = [None, e[1]]
        else:
            merge_map[e[0]].append(e[1])
    for e in b_entries:
        if e[0] not in merge_map:
            merge_map[e[0]] = [None, None, e[1]]
        else:
            merge_map[e[0]].append(e[1])
    # if all hashes are the same, there is nothing we have to do
    # In case the second and third entry are not None, but the first one is:
    # I am not sure if this case actually is possible
    conflict_files = []
    for f in merge_map:
        if len(merge_map[f]) == 2 and merge_map[f][0] != merge_map[f][1]:
            # if there are only two entries, the remote branch does not have
            # the file and we will add it to the repository
            obj_type, data = read_object(merge_map[f][1])
            path = os.path.join(repo_root_path, f)
            if not os.path.exists(path):
                os.makedirs(os.path.dirname(path), exist_ok=True)
            write_file(path, data)
        elif merge_map[f][0] is None and merge_map[f][1] is None:
            # if there are three entries and the first two entries are None,
            # the local repository does not have the file, so we add it.
            obj_type, data = read_object(merge_map[f][2])
            path = os.path.join(repo_root_path, f)
            if not os.path.exists(path):
                os.makedirs(os.path.dirname(path), exist_ok=True)
            write_file(path, data)
        elif len(set(merge_map[f])) == 3:
            # all entries are different, so 3-way merge
            # read the content of each file
            obj_type, base_data = read_object(merge_map[f][0])
            obj_type, local_data = read_object(merge_map[f][1])
            obj_type, remote_data = read_object(merge_map[f][2])
            # do the 3-way merge
            had_conflict, merged_lines = three_way_merge(
                base_data.decode().splitlines(),
                local_data.decode().splitlines(),
                remote_data.decode().splitlines(), "HEAD", merge_map[f][2])
            # writing the merged lines into the file
            with open(os.path.join(repo_root_path, f), 'w') as file:
                for line in merged_lines:
                    file.write('{}\n'.format(line))
            if had_conflict:
                # adding file to list, so that we don't add it to the index
                conflict_files.append(f)
                path = os.path.join(repo_root_path, '.git/ORIG_HEAD')
                write_file(path, '{}\n'.format(local_sha1).encode())
                path = os.path.join(repo_root_path, '.git/MERGE_HEAD')
                # BUGFIX: the original referenced an undefined 'fetch_head'
                # name here; MERGE_HEAD must hold the commit being merged in
                write_file(path, '{}\n'.format(remote_sha1).encode())
                path = os.path.join(repo_root_path, '.git/MERGE_MODE')
                write_file(path, b'')
                path = os.path.join(repo_root_path, '.git/MERGE_MSG')
                if os.path.exists(path):
                    # append file name to conflict list in the merge message
                    # BUGFIX: the original shadowed the filename 'f' with the
                    # file handle and wrote the handle's repr into the file
                    with open(path, 'a') as msg_file:
                        msg_file.write('# \t{}'.format(f))
                else:
                    repo_name = read_repo_name()
                    if not repo_name.startswith('location:'):
                        # Need to check if the return is handled by the
                        # calling function
                        print('.git/name file has an error. Exiting...')
                        return False
                    tmp = repo_name.split('location:')[1].split(':')
                    network = tmp[0].strip()
                    user_key = tmp[1].strip()
                    git_factory = get_factory_contract(network)
                    repository = git_factory.functions.getRepository(
                        user_key).call()
                    write_file(
                        path,
                        'Merge branch \'{}\' of {} into {}\n\n# Conflicts\n# \t{}\n'
                        .format(source_branch, repository[2], activeBranch,
                                f).encode())
    # adding all the files to the index.
    # TODO: can be more efficient if we add it to the previous loop
    files_to_add = []
    pwd = os.getcwd()
    os.chdir(repo_root_path)
    for path, subdirs, files in os.walk('.'):
        for name in files:
            # we don't want to add the files under .git to the index
            if not path.startswith('./.git') and name not in conflict_files:
                files_to_add.append(os.path.join(path, name)[2:])
    os.chdir(pwd)
    add(files_to_add)
    # creating a commit object with two parents
    if not had_conflict:
        commit('Merging {} into {}'.format(source_branch, activeBranch),
               parent1=local_commits[0],
               parent2=remote_commits[0])
def pull():
    """Pull the active branch from the remote repository.

    Refuses to run when the working copy or stage has pending changes,
    compares local and remote commit histories, and — when the local branch
    is strictly behind — downloads and unpacks the missing remote commits,
    fast-forwards the branch ref and rebuilds the working tree and index.
    """
    print('Pulling')
    changed, _, _ = get_status_workspace()
    # we are checking if there are changed files in the working copy or files
    # staged which have not been committed.
    # if one case is true, pull won't be executed
    if len(changed) > 0 or not is_stage_empty():
        print("You have local changes. Add and commit those first")
        return
    repo_name = read_repo_name()
    if not repo_name.startswith('location:'):
        print('.git/name file has an error. Exiting...')
        return False
    # .git/name stores "location: <network>:<user_key>"
    tmp = repo_name.split('location:')[1].split(':')
    network = tmp[0].strip()
    user_key = tmp[1].strip()
    git_factory = get_factory_contract(network)
    repository = git_factory.functions.getRepository(user_key).call()
    if not repository[0]:
        print('No such repository')
        return
    git_repo_address = repository[2]
    activeBranch = get_current_branch_name()
    branch_contract = get_facet_contract("GitBranch", git_repo_address,
                                         network)
    branch = branch_contract.functions.getBranch(activeBranch).call()
    headCid = branch[1]
    remote_commits = get_all_remote_commits(headCid)
    # extract only the sha1 hash
    remote_commits_sha1 = [e['sha1'] for e in remote_commits]
    root_path = get_repo_root_path()
    local_commit = get_active_branch_hash()
    local_commits = get_all_local_commits(local_commit)
    if local_commits[0] == remote_commits_sha1[0]:
        print('Already up to date')
        return
    remote_to_local_difference = set(remote_commits_sha1) - set(local_commits)
    local_to_remote_difference = set(local_commits) - set(remote_commits_sha1)
    if len(remote_to_local_difference
           ) == 0 and len(local_to_remote_difference) > 0:
        print('You are ahead of remote branch')
        return
    elif len(remote_to_local_difference) == 0 and len(
            local_to_remote_difference) == 0:
        print('Nothing to pull')
        return
    elif len(local_to_remote_difference) == 0:
        # local branch is strictly behind: fast-forward.
        # alright, we filtered what needs to be downloaded and unpacked
        # check clone on how to do that!
        remote_commits = list(
            filter(lambda x: x['sha1'] in remote_to_local_difference,
                   remote_commits))
        repo_name = root_path.split('/')[-1]
        # unpack files from the newest commit first
        first = True
        for commit in remote_commits:
            unpack_files_of_commit(root_path, commit, first)
            first = False
        refs_path = os.path.join(root_path, '.git', 'refs', 'heads',
                                 activeBranch)
        write_file(refs_path, (remote_commits[0]['sha1'] + '\n').encode())
        # we are deleting all the files in the repo
        # there might be a better way, where we iterate over all of the files,
        # hash and compare the hashes. If there is no difference, leave as is,
        # otherwise overwrite. We would also need to check for files which are
        # not in the index!
        # Maybe something at a later point in time :)
        # Same at checkout
        commit_entries = read_commit_entries(remote_commits[0]['sha1'])
        remove_files_from_repo()
        files_to_add = []
        for filename in commit_entries:
            object_type, data = read_object(commit_entries[filename])
            assert object_type == 'blob'
            # NOTE(review): decoding as UTF-8 assumes text-only blobs;
            # a binary file would raise here — TODO confirm
            write_file('{}/{}'.format(root_path, filename),
                       data.decode('utf-8'),
                       binary='')
            files_to_add.append(filename)
        # remove index file and rebuild it from the checked-out files
        os.remove('{}/.git/index'.format(root_path))
        add(files_to_add)
def commit(message: str,
           author: str = None,
           parent1: str = None,
           parent2: str = None) -> str:
    """
    Commit the current state of the index to active branch with given message.

    Returns the hash of the commit object.

    Parameters:
        message (str): The message for the commit.
        author (str): The author of the commit.
        parent1 (str): The first parent of the commit.
        parent2 (str): The second parent of the commit.

    Returns:
        Return hash of commit object.
    """
    try:
        index = read_index()
        # we are working on write tree
        tree = hash_object(b''.join(write_tree(index)), 'tree')
    except NoRepositoryError as nre:
        print(nre)
        exit(1)
    if parent1 is None:
        # even the get_active_branch_hash throws a NoRepositoryError
        # we don't have to catch it, since we are doing it already further up
        # in the code. If there is no repository, we won't reach this code
        # here anyways.
        parent = get_active_branch_hash()
    else:
        parent = parent1
    # check if there is a MERGE_HEAD file. If there is, parent2 is set to the
    # sha1 hash
    merge_head_path = os.path.join(get_repo_root_path(), '.git', 'MERGE_HEAD')
    if os.path.exists(merge_head_path):
        parent2 = read_file(merge_head_path).decode().strip()
    if author is None:
        # get_value_from_config_file throws a NoRepositoryError
        # but the same as above, we don't have to catch it
        user_name = get_value_from_config_file('name')
        user_email = get_value_from_config_file('email')
        author = '{} <{}>'.format(user_name, user_email)
    timestamp = int(time.mktime(time.localtime()))
    # NOTE(review): -time.timezone ignores DST (time.altzone) — TODO confirm
    # whether DST-aware offsets are wanted here
    utc_offset = -time.timezone
    # BUGFIX: use >= so a zero offset renders as git's canonical '+0000'
    # instead of '-0000'
    author_time = '{} {}{:02}{:02}'.format(timestamp,
                                           '+' if utc_offset >= 0 else '-',
                                           abs(utc_offset) // 3600,
                                           (abs(utc_offset) // 60) % 60)
    lines = ['tree ' + tree]
    if parent:
        lines.append('parent ' + parent)
    if parent2 is not None:
        lines.append('parent ' + parent2)
    lines.append('author {} {}'.format(author, author_time))
    lines.append('committer {} {}'.format(author, author_time))
    lines.append('')
    lines.append(message)
    lines.append('')
    data = '\n'.join(lines).encode()
    sha1 = hash_object(data, 'commit')
    repo_root_path = get_repo_root_path()
    activeBranch = get_current_branch_name()
    branch_path = os.path.join(repo_root_path, '.git', 'refs', 'heads',
                               activeBranch)
    write_file(branch_path, (sha1 + '\n').encode())
    # remove the merge files from the .git directory if committed
    if parent2 is not None and os.path.exists(merge_head_path):
        os.remove(merge_head_path)
        merge_mode_path = merge_head_path.replace('MERGE_HEAD', 'MERGE_MODE')
        os.remove(merge_mode_path)
        merge_msg_path = merge_head_path.replace('MERGE_HEAD', 'MERGE_MSG')
        os.remove(merge_msg_path)
    #TODO: git returns the number of files added and changed. Would be good too
    print('[{} {}] {}'.format(activeBranch, sha1[:7], message))
    print('Author: {}'.format(author))
    return sha1
def checkout(branch):
    """Switch the working copy to the given branch.

    Resolves the branch from refs/heads, FETCH_HEAD, or packed-refs (in that
    order), refuses to switch while the workspace or index has pending
    changes, then rewrites the working tree, index and HEAD.
    """
    if branch is None:
        print('fatal: Branch name not given.')
        exit(1)
    try:
        repo_root_path = get_repo_root_path()
    except NoRepositoryError as nre:
        print(nre)
        exit(1)
    active_branch = get_active_branch()
    if active_branch == branch:
        print('Already on \'{}\''.format(branch))
        exit(0)
    # boolean to see if the commit hash is taken from the packed-refs file
    from_packed_refs = False
    target_commit_hash = None
    # check if branch exists
    # first we check if git/refs/heads exists. If it does exist
    if os.path.isfile('{}/.git/refs/heads/{}'.format(repo_root_path, branch)):
        # we load the commit hash
        target_commit_hash = read_file('{}/.git/refs/heads/{}'.format(
            repo_root_path, branch)).decode("utf-8").strip()
    else:
        # if it doesn't exist, we check if the FETCH_HEAD file exists
        if os.path.isfile('{}/.git/FETCH_HEAD'.format(repo_root_path)):
            fetch_head_content = read_file(
                '{}/.git/FETCH_HEAD'.format(repo_root_path)).decode('utf-8')
            target_commit_hash = fetch_head_content.split(
                'branch \'{}\''.format(branch))[0].split('\n')[-1].split(
                    '\t')[0].strip()
        # if it does not exist, we check if packed-refs exists
        elif os.path.isfile('{}/.git/packed-refs'.format(repo_root_path)):
            # in case it exists, we check if the branch exists in packed-refs
            packed_refs_content = read_file(
                '{}/.git/packed-refs'.format(repo_root_path)).decode("utf-8")
            if branch in packed_refs_content:
                # get the commit hash
                from_packed_refs = True
                target_commit_hash = packed_refs_content.split(
                    'refs/remotes/origin/{}\n'.format(branch))[0].split(
                        '\n')[-1].strip()
        # BUGFIX: also covers the case where neither FETCH_HEAD nor
        # packed-refs exists — the original fell through with None and
        # crashed later
        if target_commit_hash is None:
            print(
                'error: pathspec \'{}\' did not match any file(s) known to git3'
                .format(branch))
            exit(1)
    current_commit_hash = read_file('{}/.git/refs/heads/{}'.format(
        repo_root_path, active_branch)).decode("utf-8").strip()
    # if the commit hash has been taken from the packed-refs, we need to write
    # the .git/refs/heads/<branch> file
    if from_packed_refs:
        print('Branch \'{}\' set up to track remote branch \'{}\' from '
              '\'origin\'.'.format(branch, branch))
        write_file('{}/.git/refs/heads/{}'.format(repo_root_path, branch),
                   target_commit_hash,
                   binary='')
    if current_commit_hash == target_commit_hash:
        # switch branch when the hashes are the same.
        # we don't have to do anything else
        # BUGFIX: dropped a stray extra format() argument
        write_file('{}/.git/HEAD'.format(repo_root_path),
                   'ref: refs/heads/{}'.format(branch),
                   binary='')
        exit(0)
    # BUGFIX: the original used 'len(x) is not 0', which compares ints by
    # identity and only works due to CPython small-int caching
    changed, new, deleted = get_status_workspace()
    if changed or new or deleted:
        print_checkout_error(changed, new, deleted)
    changed, new, deleted = get_status_commit()
    if changed or new or deleted:
        print_checkout_error(changed, new, deleted)
    commit_entries = read_commit_entries(target_commit_hash)
    # we are deleting all the files in the repo
    # there might be a better way, where we iterate over all of the files,
    # hash and compare the hashes. If there is no difference, leave as is,
    # otherwise overwrite. We would also need to check for files which are
    # not in the index!
    # Maybe something at a later point in time :)
    remove_files_from_repo()
    files_to_add = []
    for filename in commit_entries:
        object_type, data = read_object(commit_entries[filename])
        assert object_type == 'blob'
        write_file('{}/{}'.format(repo_root_path, filename),
                   data.decode('utf-8'),
                   binary='')
        files_to_add.append(filename)
    # remove index file
    os.remove('{}/.git/index'.format(repo_root_path))
    add(files_to_add)
    update_HEAD(repo_root_path, branch)
    print('Switched to branch \'{}\''.format(branch))
def add(paths: List[str]) -> None:
    """
    Add all file paths to git index.

    Args:
        paths (List): List of files to be added to the git index.

    Raises:
        NoRepositoryError: If not git repository is found.
        FileNotFoundError: If a file to be added to the index is not found.
    """
    try:
        repo_root_path = get_repo_root_path()
    except NoRepositoryError as nre:
        print(nre)
        exit(1)
    # normalize Windows separators first
    paths = [p.replace('\\', '/') for p in paths]
    # transfer paths to relative paths. Relative to the repository root
    # in case we are in a subdirectory and add a file
    paths = [
        os.path.relpath(os.path.abspath(p), repo_root_path) for p in paths
    ]
    all_entries = read_index()
    # keep every existing entry that is not being (re-)added now
    entries = [e for e in all_entries if e.path not in paths]
    for path in paths:
        file_path = repo_root_path + '/' + path
        try:
            data = read_file(file_path)
        except FileNotFoundError:
            print(
                'fatal: pathspec \'{}\' did not match any files'.format(path))
            exit(1)
        sha1 = hash_object(data, 'blob')
        st = os.stat(file_path)
        # TODO: We will need to check for the file mode properly!
        # the spec says the mode is 32 bits: 4 bits object type, 3 unused
        # bits, and 9 bits of unix permission. The 4 type bits are
        # 1000 (regular file), 1010 (symlink), 1110 (gitlink); permissions
        # are 0755/0644 for regular files, 0 for symlinks and gitlinks.
        # For now we just take the stat mode as-is and assume a regular file.
        mode = st.st_mode
        # length of the file name (bytes) goes into the flags field
        flags = len(file_path.split('/')[-1].encode())
        # TODO: semantics of the assume-valid flag still to be checked;
        # this guards the 12-bit name-length portion of the flags field
        assert flags < (1 << 12)
        # relative path (to the repository root) stored in the index
        relative_path = os.path.relpath(os.path.abspath(file_path),
                                        repo_root_path)
        # st.st_ctime_ns % 1000000000 keeps only the nanosecond fraction
        entry = IndexEntry(int(st.st_ctime), st.st_ctime_ns % 1000000000,
                           int(st.st_mtime), st.st_mtime_ns % 1000000000,
                           st.st_dev, st.st_ino, mode, st.st_uid, st.st_gid,
                           st.st_size, bytes.fromhex(sha1), flags,
                           relative_path)
        entries.append(entry)
    entries.sort(key=operator.attrgetter('path'))
    write_index(entries)
def fetch(branchName):
    """
    Downloads commits and objects from the remote repository.

    Parameters:
        branchName (str): Branch to fetch; when None, all remote branches
            are fetched.
    """
    repo_name = read_repo_name()
    if not repo_name.startswith('location:'):
        # Need to check if the return is handled by the calling function
        print('.git/name file has an error. Exiting...')
        return False
    tmp = repo_name.split('location:')[1].split(':')
    network = tmp[0].strip()
    user_key = tmp[1].strip()
    git_factory = get_factory_contract(network)
    active_branch = get_current_branch_name()
    repository = git_factory.functions.getRepository(user_key).call()
    if not repository[0]:
        print('No such repository')
        return
    git_repo_address = repository[2]
    branch_contract = get_facet_contract("GitBranch", git_repo_address,
                                         network)
    # fetch_data will contain tuples in the following format
    # (branch_name, head_cid, head_commit_sha1 of branch)
    fetch_data = []
    # if branchName is None, the user called git3 fetch
    # so we collect data from all branches
    if branchName is None:
        branches = branch_contract.functions.getBranchNames().call()
        for branch_name in branches:
            # returns tuple (bool, headcid)
            branch = branch_contract.functions.getBranch(branch_name).call()
            branch_commit_hash = get_branch_hash(branch_name)
            fetch_data.append((branch_name, branch[1], branch_commit_hash))
    else:
        # returns tuple (bool, headcid)
        branch = branch_contract.functions.getBranch(branchName).call()
        if not branch[1]:
            print('fatal: couldn\'t find remote ref {}'.format(branchName))
            return False
        # BUGFIX: the original referenced the undefined name 'branch_name'
        # here, raising a NameError whenever a branch was given explicitly
        branch_commit_hash = get_branch_hash(branchName)
        fetch_data.append((branchName, branch[1], branch_commit_hash))
    repo_root_path = get_repo_root_path()
    fetch_head_data = ''
    # get all remote commits
    for data in fetch_data:
        remote_commits = get_all_remote_commits(data[1])
        # extract only the sha1 hash
        remote_commits_sha1 = [e['sha1'] for e in remote_commits]
        local_commits = get_all_local_commits(data[2])
        if data[0] != active_branch:
            not_for_merge = 'not-for-merge'
        else:
            not_for_merge = ''
        # preparing FETCH_HEAD file content
        fetch_head_data = '{}{}\t{}\t{}\'{}\' of {}\n'.format(
            fetch_head_data, remote_commits_sha1[0], not_for_merge, 'branch ',
            data[0], git_repo_address)
        # write the remote commit to the refs/remotes/origin/[branchName] file
        write_file(
            os.path.join(repo_root_path, '.git/refs/remotes/origin/',
                         data[0]), '{}\n'.format(remote_commits_sha1[0]), '')
        # check if we have any local commits
        # if local_commits length is zero, there are no local commits for that
        # particular branch, so we need to download those!
        # if the first sha1 are equal, we don't need to download anything
        if len(local_commits
               ) > 0 and local_commits[0] == remote_commits_sha1[0]:
            continue
        remote_to_local_difference = set(remote_commits_sha1) - set(
            local_commits)
        # transfer the data from ipfs into git objects on the local machine
        for commit_hash in remote_to_local_difference:
            for commit in remote_commits:
                if commit['sha1'] == commit_hash:
                    unpack_files_of_commit(repo_root_path, commit, False)
    path = os.path.join(repo_root_path, '.git', 'FETCH_HEAD')
    write_file(path, fetch_head_data, '')